diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 3e74e3caa89..ecca3b33caf 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -41,7 +41,7 @@ ddtrace/internal/_file_queue.py @DataDog/python-guild ddtrace/internal/_unpatched.py @DataDog/python-guild ddtrace/internal/compat.py @DataDog/python-guild @DataDog/apm-core-python ddtrace/internal/endpoints.py @DataDog/python-guild @DataDog/asm-python -ddtrace/settings/config.py @DataDog/python-guild @DataDog/apm-sdk-capabilities-python +ddtrace/internal/settings/_config.py @DataDog/python-guild @DataDog/apm-sdk-capabilities-python docs/ @DataDog/python-guild tests/utils.py @DataDog/python-guild tests/suitespec.yml @DataDog/python-guild @DataDog/apm-core-python @@ -95,7 +95,7 @@ tests/snapshots/test_selenium_* @DataDog/ci-app-libraries # Debugger ddtrace/debugging/ @DataDog/debugger-python -ddtrace/settings/dynamic_instrumentation.py @DataDog/debugger-python +ddtrace/internal/settings/dynamic_instrumentation.py @DataDog/debugger-python ddtrace/internal/injection.py @DataDog/debugger-python @DataDog/apm-core-python ddtrace/internal/wrapping.py @DataDog/debugger-python @DataDog/apm-core-python ddtrace/internal/module.py @DataDog/debugger-python @DataDog/apm-core-python @@ -114,7 +114,7 @@ benchmarks/bm/iast_utils* @DataDog/asm-python benchmarks/bm/iast_fixtures* @DataDog/asm-python benchmarks/base/aspects_benchmarks_generate.py @DataDog/asm-python ddtrace/appsec/ @DataDog/asm-python -ddtrace/settings/asm.py @DataDog/asm-python +ddtrace/internal/settings/asm.py @DataDog/asm-python ddtrace/contrib/internal/subprocess/ @DataDog/asm-python ddtrace/contrib/internal/flask_login/ @DataDog/asm-python ddtrace/contrib/internal/webbrowser @DataDog/asm-python @@ -131,10 +131,9 @@ scripts/iast/* @DataDog/asm-python # Profiling ddtrace/profiling @DataDog/profiling-python -ddtrace/settings/profiling.py @DataDog/profiling-python +ddtrace/internal/settings/profiling.py @DataDog/profiling-python 
ddtrace/internal/datadog/profiling @DataDog/profiling-python -tests/profiling @DataDog/profiling-python -tests/profiling_v2 @DataDog/profiling-python +tests/profiling @DataDog/profiling-python .gitlab/tests/profiling.yml @DataDog/profiling-python # MLObs @@ -207,7 +206,7 @@ ddtrace/opentracer/ @DataDog/apm-sdk-capabilities ddtrace/propagation/ @DataDog/apm-sdk-capabilities-python ddtrace/openfeature/ @DataDog/asm-python @DataDog/apm-core-python tests/openfeature/ @DataDog/asm-python @DataDog/apm-core-python -ddtrace/settings/_opentelemetry.py @DataDog/apm-sdk-capabilities-python +ddtrace/internal/settings/_opentelemetry.py @DataDog/apm-sdk-capabilities-python ddtrace/internal/sampling.py @DataDog/apm-sdk-capabilities-python ddtrace/internal/tracemethods.py @DataDog/apm-sdk-capabilities-python @@ -215,7 +214,7 @@ ddtrace/internal/metrics.py @DataDog/apm-sdk-capabilities ddtrace/internal/rate_limiter.py @DataDog/apm-sdk-capabilities-python ddtrace/runtime/ @DataDog/apm-sdk-capabilities-python ddtrace/internal/runtime/ @DataDog/apm-sdk-capabilities-python -ddtrace/settings/_otel_remapper.py @DataDog/apm-sdk-capabilities-python +ddtrace/internal/settings/_otel_remapper.py @DataDog/apm-sdk-capabilities-python tests/integration/test_priority_sampling.py @DataDog/apm-sdk-capabilities-python tests/integration/test_propagation.py @DataDog/apm-sdk-capabilities-python tests/runtime/ @DataDog/apm-sdk-capabilities-python diff --git a/.github/workflows/build_deploy.yml b/.github/workflows/build_deploy.yml index 5202ebd4c41..cc97a3c8220 100644 --- a/.github/workflows/build_deploy.yml +++ b/.github/workflows/build_deploy.yml @@ -69,8 +69,8 @@ jobs: needs: [ "compute_version" ] uses: ./.github/workflows/build_python_3.yml with: - cibw_build: 'cp38* cp39* cp310* cp311* cp312* cp313* cp314*' - cibw_skip: 'cp38-win_arm64 cp39-win_arm64 cp310-win_arm64 cp314t*' + cibw_build: 'cp39* cp310* cp311* cp312* cp313* cp314*' + cibw_skip: 'cp39-win_arm64 cp310-win_arm64 cp314t*' 
library_version: ${{ needs.compute_version.outputs.library_version }} build_sdist: diff --git a/.github/workflows/django-overhead-profile.yml b/.github/workflows/django-overhead-profile.yml index b04199cb57f..01d9326b2b6 100644 --- a/.github/workflows/django-overhead-profile.yml +++ b/.github/workflows/django-overhead-profile.yml @@ -13,18 +13,10 @@ on: jobs: django-overhead-profile: runs-on: ubuntu-latest - strategy: - matrix: - include: - - suffix: "-v1" - stack_v2: "0" - - suffix: "-v2" - stack_v2: "1" env: PREFIX: ${{ github.workspace }}/prefix DD_CODE_ORIGIN_FOR_SPANS_ENABLED: "1" DD_PROFILING_ENABLED: "1" - DD_PROFILING_STACK_V2_ENABLED: ${{ matrix.stack_v2 }} DD_PROFILING_OUTPUT_PPROF: ${{ github.workspace }}/prefix/artifacts/ddtrace_profile DD_EXCEPTION_REPLAY_ENABLED: "1" defaults: @@ -50,5 +42,5 @@ jobs: - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 with: - name: django-overhead-profile${{ matrix.suffix }} + name: django-overhead-profile path: ${{ github.workspace }}/prefix/artifacts diff --git a/.github/workflows/unit_tests.yml b/.github/workflows/unit_tests.yml index 7451f923229..93c6cb727e2 100644 --- a/.github/workflows/unit_tests.yml +++ b/.github/workflows/unit_tests.yml @@ -16,7 +16,7 @@ jobs: matrix: os: [ubuntu-latest, windows-latest, macos-latest] # Keep this in sync with hatch.toml - python-version: ["3.8", "3.10", "3.12", "3.14"] + python-version: ["3.10", "3.12", "3.14"] steps: - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 diff --git a/.gitlab/benchmarks/bp-runner.microbenchmarks.fail-on-breach.yml b/.gitlab/benchmarks/bp-runner.microbenchmarks.fail-on-breach.yml index b593d9b043a..cfb84263620 100644 --- a/.gitlab/benchmarks/bp-runner.microbenchmarks.fail-on-breach.yml +++ b/.gitlab/benchmarks/bp-runner.microbenchmarks.fail-on-breach.yml @@ -802,7 +802,7 @@ experiments: - max_rss_usage < 675.00 MB - name: otelspan-add-tags thresholds: - - execution_time < 314.00 ms + - 
execution_time < 344.80 ms - max_rss_usage < 675.00 MB - name: otelspan-get-context thresholds: diff --git a/.gitlab/package.yml b/.gitlab/package.yml index 2534e8c9e7e..151664c18c1 100644 --- a/.gitlab/package.yml +++ b/.gitlab/package.yml @@ -59,8 +59,6 @@ download_dependency_wheels: PIP_CACHE_DIR: "${CI_PROJECT_DIR}/.cache/pip" parallel: matrix: # The image tags that are mirrored are in: https://github.com/DataDog/images/blob/master/mirror.yaml - - PYTHON_IMAGE_TAG: "3.8" - PYTHON_VERSION: "3.8" - PYTHON_IMAGE_TAG: "3.9.13" PYTHON_VERSION: "3.9" - PYTHON_IMAGE_TAG: "3.10.13" diff --git a/.gitlab/templates/build-base-venvs.yml b/.gitlab/templates/build-base-venvs.yml index de8d29218ea..36557c6d510 100644 --- a/.gitlab/templates/build-base-venvs.yml +++ b/.gitlab/templates/build-base-venvs.yml @@ -4,7 +4,7 @@ build_base_venvs: needs: [] parallel: matrix: - - PYTHON_VERSION: ["3.8", "3.9", "3.10", "3.11", "3.12", "3.13", "3.14"] + - PYTHON_VERSION: ["3.9", "3.10", "3.11", "3.12", "3.13", "3.14"] variables: CMAKE_BUILD_PARALLEL_LEVEL: '12' PIP_VERBOSE: '0' diff --git a/.gitlab/templates/cached-testrunner.yml b/.gitlab/templates/cached-testrunner.yml index 1faef291770..5c4f91dee7b 100644 --- a/.gitlab/templates/cached-testrunner.yml +++ b/.gitlab/templates/cached-testrunner.yml @@ -5,7 +5,7 @@ EXT_CACHE_VENV: '${{CI_PROJECT_DIR}}/.cache/ext_cache_venv${{PYTHON_VERSION}}' before_script: | ulimit -c unlimited - pyenv global 3.12 3.8 3.9 3.10 3.11 3.13 3.14 + pyenv global 3.12 3.9 3.10 3.11 3.13 3.14 export _CI_DD_AGENT_URL=http://${{HOST_IP}}:8126/ set -e -o pipefail if [ ! 
-d $EXT_CACHE_VENV ]; then diff --git a/.gitlab/templates/detect-global-locks.yml b/.gitlab/templates/detect-global-locks.yml index 18e5a7f5281..5b16e8d1722 100644 --- a/.gitlab/templates/detect-global-locks.yml +++ b/.gitlab/templates/detect-global-locks.yml @@ -4,7 +4,7 @@ detect-global-locks: needs: [] parallel: matrix: - - PYTHON_VERSION: ["3.8", "3.9", "3.10", "3.11", "3.12", "3.13", "3.14"] + - PYTHON_VERSION: ["3.9", "3.10", "3.11", "3.12", "3.13", "3.14"] variables: DD_DYNAMIC_INSTRUMENTATION_ENABLED: '1' DD_CODE_ORIGIN_FOR_SPANS_ENABLED: '1' diff --git a/.gitlab/testrunner.yml b/.gitlab/testrunner.yml index fe60339dd09..e605b74e229 100644 --- a/.gitlab/testrunner.yml +++ b/.gitlab/testrunner.yml @@ -12,7 +12,7 @@ variables: before_script: - ulimit -c unlimited - git config --global --add safe.directory ${CI_PROJECT_DIR} - - pyenv global 3.12 3.8 3.9 3.10 3.11 3.13 3.14 + - pyenv global 3.12 3.9 3.10 3.11 3.13 3.14 - export _CI_DD_AGENT_URL=http://${HOST_IP}:8126/ retry: 2 artifacts: diff --git a/.riot/requirements/1002685.txt b/.riot/requirements/1002685.txt deleted file mode 100644 index 8bea0b26ba6..00000000000 --- a/.riot/requirements/1002685.txt +++ /dev/null @@ -1,25 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/1002685.in -# -attrs==23.1.0 -coverage[toml]==7.3.4 -exceptiongroup==1.2.0 -hypothesis==6.45.0 -importlib-metadata==7.0.0 -iniconfig==2.0.0 -mock==5.1.0 -mysql-connector-python==8.2.0 -opentracing==2.4.0 -packaging==23.2 -pluggy==1.3.0 -protobuf==4.21.12 -pytest==7.4.3 -pytest-cov==4.1.0 -pytest-mock==3.12.0 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.0.1 -zipp==3.17.0 diff --git a/.riot/requirements/1067a9b.txt b/.riot/requirements/1067a9b.txt deleted file mode 100644 index d9b1caa7c54..00000000000 --- a/.riot/requirements/1067a9b.txt +++ /dev/null @@ -1,45 +0,0 @@ -# -# This file is autogenerated by pip-compile with 
Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/1067a9b.in -# -aiofiles==23.2.1 -anyio==4.2.0 -attrs==23.1.0 -certifi==2023.11.17 -charset-normalizer==3.3.2 -coverage[toml]==7.3.4 -exceptiongroup==1.2.0 -h11==0.14.0 -httpcore==0.16.3 -httptools==0.6.1 -httpx==0.23.3 -hypothesis==6.45.0 -idna==3.6 -importlib-metadata==7.0.0 -iniconfig==2.0.0 -mock==5.1.0 -multidict==6.0.4 -opentracing==2.4.0 -packaging==23.2 -pluggy==1.3.0 -pytest==7.4.3 -pytest-asyncio==0.21.1 -pytest-cov==4.1.0 -pytest-mock==3.12.0 -pytest-randomly==3.15.0 -requests==2.31.0 -rfc3986[idna2008]==1.5.0 -sanic==22.12.0 -sanic-routing==23.6.0 -sanic-testing==22.3.1 -sniffio==1.3.0 -sortedcontainers==2.4.0 -tomli==2.0.1 -typing-extensions==4.9.0 -ujson==5.9.0 -urllib3==2.1.0 -uvloop==0.19.0 -websockets==10.4 -zipp==3.17.0 diff --git a/.riot/requirements/106f38d.txt b/.riot/requirements/106f38d.txt deleted file mode 100644 index 35ad753ef8f..00000000000 --- a/.riot/requirements/106f38d.txt +++ /dev/null @@ -1,23 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.14 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/106f38d.in -# -attrs==25.3.0 -coverage[toml]==7.10.5 -dnspython==2.7.0 -hypothesis==6.45.0 -iniconfig==2.1.0 -mock==5.2.0 -mongoengine==0.29.1 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.6.0 -pygments==2.19.2 -pymongo==4.8.0 -pytest==8.4.1 -pytest-cov==6.2.1 -pytest-mock==3.14.1 -pytest-randomly==3.16.0 -sortedcontainers==2.4.0 diff --git a/.riot/requirements/118fd10.txt b/.riot/requirements/1072660.txt similarity index 82% rename from .riot/requirements/118fd10.txt rename to .riot/requirements/1072660.txt index 702baae7aab..583f1bbe640 100644 --- a/.riot/requirements/118fd10.txt +++ b/.riot/requirements/1072660.txt @@ -2,13 +2,13 @@ # This file is autogenerated by pip-compile with Python 3.9 # by the following command: # -# pip-compile --allow-unsafe --no-annotate 
.riot/requirements/118fd10.in +# pip-compile --allow-unsafe --no-annotate .riot/requirements/1072660.in # -asgiref==3.9.1 -attrs==25.3.0 +asgiref==3.10.0 +attrs==25.4.0 bcrypt==4.2.1 -certifi==2025.8.3 -charset-normalizer==3.4.3 +certifi==2025.10.5 +charset-normalizer==3.4.4 coverage[toml]==7.10.7 dill==0.4.0 django==4.0.10 @@ -18,8 +18,9 @@ gevent==25.9.1 greenlet==3.2.4 gunicorn==23.0.0 hypothesis==6.45.0 -idna==3.10 +idna==3.11 iniconfig==2.1.0 +legacy-cgi==2.6.4 mock==5.2.0 opentracing==2.4.0 packaging==25.0 @@ -30,16 +31,16 @@ pytest==8.4.2 pytest-cov==7.0.0 pytest-django[testing]==3.10.0 pytest-mock==3.15.1 -pyyaml==6.0.2 +pyyaml==6.0.3 requests==2.32.5 six==1.17.0 sortedcontainers==2.4.0 sqlparse==0.5.3 -tomli==2.2.1 +tomli==2.3.0 typing-extensions==4.15.0 urllib3==2.5.0 zope-event==6.0 -zope-interface==8.0 +zope-interface==8.0.1 # The following packages are considered to be unsafe in a requirements file: setuptools==80.9.0 diff --git a/.riot/requirements/1078c3b.txt b/.riot/requirements/1078c3b.txt deleted file mode 100644 index 3dfee8f68b4..00000000000 --- a/.riot/requirements/1078c3b.txt +++ /dev/null @@ -1,27 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/1078c3b.in -# -attrs==25.3.0 -coverage[toml]==7.6.1 -exceptiongroup==1.2.2 -greenlet==3.0.3 -hypothesis==6.45.0 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -mock==5.2.0 -mysql-connector-python==9.0.0 -opentracing==2.4.0 -packaging==24.2 -pluggy==1.5.0 -psycopg2-binary==2.9.10 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-mock==3.14.0 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -sqlalchemy==1.3.24 -tomli==2.2.1 -zipp==3.20.2 diff --git a/.riot/requirements/1087ca6.txt b/.riot/requirements/1087ca6.txt deleted file mode 100644 index 875cc5be3a4..00000000000 --- a/.riot/requirements/1087ca6.txt +++ /dev/null @@ -1,26 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 
3.10 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/1087ca6.in -# -attrs==25.3.0 -coverage[toml]==7.8.2 -dnspython==2.7.0 -exceptiongroup==1.3.0 -hypothesis==6.45.0 -iniconfig==2.1.0 -mock==5.2.0 -mongoengine==0.24.2 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.6.0 -pygments==2.19.1 -pymongo==4.8.0 -pytest==8.4.0 -pytest-cov==6.1.1 -pytest-mock==3.14.1 -pytest-randomly==3.16.0 -sortedcontainers==2.4.0 -tomli==2.2.1 -typing-extensions==4.14.0 diff --git a/.riot/requirements/108bb1d.txt b/.riot/requirements/108bb1d.txt deleted file mode 100644 index 12b7109ac29..00000000000 --- a/.riot/requirements/108bb1d.txt +++ /dev/null @@ -1,31 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/108bb1d.in -# -attrs==25.3.0 -coverage[toml]==7.6.1 -exceptiongroup==1.3.0 -gunicorn==23.0.0 -hypothesis==6.45.0 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -protobuf==5.29.5 -py-cpuinfo==8.0.0 -pytest==8.3.5 -pytest-asyncio==0.21.1 -pytest-benchmark==4.0.0 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.3.0 -typing-extensions==4.13.2 -uwsgi==2.0.31 -zipp==3.20.2 -zstandard==0.23.0 diff --git a/.riot/requirements/108d1af.txt b/.riot/requirements/108d1af.txt deleted file mode 100644 index 95aa2e94b5b..00000000000 --- a/.riot/requirements/108d1af.txt +++ /dev/null @@ -1,42 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/108d1af.in -# -aiofiles==24.1.0 -annotated-types==0.7.0 -anyio==4.5.2 -attrs==25.3.0 -certifi==2025.8.3 -charset-normalizer==3.4.3 -coverage[toml]==7.6.1 -exceptiongroup==1.3.0 -fastapi==0.116.1 -h11==0.16.0 -httpcore==1.0.9 -httpx==0.27.2 -hypothesis==6.45.0 -idna==3.10 
-importlib-metadata==8.5.0 -iniconfig==2.1.0 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pydantic==2.10.6 -pydantic-core==2.27.2 -pytest==8.3.5 -pytest-asyncio==0.21.1 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -python-multipart==0.0.20 -requests==2.32.4 -sniffio==1.3.1 -sortedcontainers==2.4.0 -starlette==0.44.0 -tomli==2.2.1 -typing-extensions==4.13.2 -urllib3==2.2.3 -zipp==3.20.2 diff --git a/.riot/requirements/1097f9f.txt b/.riot/requirements/1097f9f.txt deleted file mode 100644 index 3154cac7e78..00000000000 --- a/.riot/requirements/1097f9f.txt +++ /dev/null @@ -1,26 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/1097f9f.in -# -attrs==23.1.0 -certifi==2023.11.17 -coverage[toml]==7.3.4 -elasticsearch7==7.13.4 -exceptiongroup==1.2.0 -hypothesis==6.45.0 -importlib-metadata==7.0.0 -iniconfig==2.0.0 -mock==5.1.0 -opentracing==2.4.0 -packaging==23.2 -pluggy==1.3.0 -pytest==7.4.3 -pytest-cov==4.1.0 -pytest-mock==3.12.0 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.0.1 -urllib3==1.26.18 -zipp==3.17.0 diff --git a/.riot/requirements/10a00e7.txt b/.riot/requirements/10a00e7.txt deleted file mode 100644 index ed2fd846015..00000000000 --- a/.riot/requirements/10a00e7.txt +++ /dev/null @@ -1,24 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.10 -# by the following command: -# -# pip-compile --no-annotate --resolver=backtracking .riot/requirements/10a00e7.in -# -attrs==24.2.0 -coverage[toml]==7.6.1 -dnspython==2.6.1 -exceptiongroup==1.2.2 -hypothesis==6.45.0 -iniconfig==2.0.0 -mock==5.1.0 -mongoengine==0.29.1 -opentracing==2.4.0 -packaging==24.1 -pluggy==1.5.0 -pymongo==4.8.0 -pytest==8.3.3 -pytest-cov==5.0.0 -pytest-mock==3.14.0 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.0.1 diff --git a/.riot/requirements/10b490c.txt b/.riot/requirements/10b490c.txt deleted file mode 100644 
index 4126321ff11..00000000000 --- a/.riot/requirements/10b490c.txt +++ /dev/null @@ -1,23 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.11 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/10b490c.in -# -attrs==25.3.0 -coverage[toml]==7.8.2 -dnspython==2.7.0 -hypothesis==6.45.0 -iniconfig==2.1.0 -mock==5.2.0 -mongoengine==0.24.2 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.6.0 -pygments==2.19.1 -pymongo==4.8.0 -pytest==8.4.0 -pytest-cov==6.1.1 -pytest-mock==3.14.1 -pytest-randomly==3.16.0 -sortedcontainers==2.4.0 diff --git a/.riot/requirements/10b89f6.txt b/.riot/requirements/10b89f6.txt deleted file mode 100644 index 59297b1e0b1..00000000000 --- a/.riot/requirements/10b89f6.txt +++ /dev/null @@ -1,33 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/10b89f6.in -# -attrs==23.2.0 -blinker==1.7.0 -click==8.1.7 -coverage[toml]==7.4.2 -exceptiongroup==1.2.0 -flask==3.0.2 -flask-caching==1.10.1 -hypothesis==6.45.0 -importlib-metadata==7.0.1 -iniconfig==2.0.0 -itsdangerous==2.1.2 -jinja2==3.1.3 -markupsafe==2.1.5 -mock==5.1.0 -opentracing==2.4.0 -packaging==23.2 -pluggy==1.4.0 -pytest==8.0.1 -pytest-cov==4.1.0 -pytest-mock==3.12.0 -pytest-randomly==3.15.0 -python-memcached==1.62 -redis==2.10.6 -sortedcontainers==2.4.0 -tomli==2.0.1 -werkzeug==3.0.1 -zipp==3.17.0 diff --git a/.riot/requirements/10bae0a.txt b/.riot/requirements/10bae0a.txt deleted file mode 100644 index b6ac23fbc1a..00000000000 --- a/.riot/requirements/10bae0a.txt +++ /dev/null @@ -1,25 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate --resolver=backtracking .riot/requirements/10bae0a.in -# -attrs==25.3.0 -coverage[toml]==7.6.1 -exceptiongroup==1.3.0 -hypothesis==6.45.0 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -mock==5.2.0 
-opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pytest==8.0.0 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.2.1 -tornado==6.0.4 -typing-extensions==4.13.2 -zipp==3.20.2 diff --git a/.riot/requirements/11047da.txt b/.riot/requirements/11047da.txt deleted file mode 100644 index 205ab7860ff..00000000000 --- a/.riot/requirements/11047da.txt +++ /dev/null @@ -1,26 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/11047da.in -# -aiomysql==0.1.1 -attrs==23.1.0 -coverage[toml]==7.3.4 -exceptiongroup==1.2.0 -hypothesis==6.45.0 -importlib-metadata==7.0.0 -iniconfig==2.0.0 -mock==5.1.0 -opentracing==2.4.0 -packaging==23.2 -pluggy==1.3.0 -pymysql==1.1.0 -pytest==7.4.3 -pytest-asyncio==0.21.1 -pytest-cov==4.1.0 -pytest-mock==3.12.0 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.0.1 -zipp==3.17.0 diff --git a/.riot/requirements/11091fd.txt b/.riot/requirements/11091fd.txt deleted file mode 100644 index 90586cdcc5f..00000000000 --- a/.riot/requirements/11091fd.txt +++ /dev/null @@ -1,24 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/11091fd.in -# -attrs==23.1.0 -coverage[toml]==7.3.4 -exceptiongroup==1.2.0 -hypothesis==6.45.0 -importlib-metadata==7.0.0 -iniconfig==2.0.0 -mock==5.1.0 -opentracing==2.4.0 -packaging==23.2 -pluggy==1.3.0 -pymemcache==4.0.0 -pytest==7.4.3 -pytest-cov==4.1.0 -pytest-mock==3.12.0 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.0.1 -zipp==3.17.0 diff --git a/.riot/requirements/110b5c2.txt b/.riot/requirements/110b5c2.txt deleted file mode 100644 index d2a20cc6715..00000000000 --- a/.riot/requirements/110b5c2.txt +++ /dev/null @@ -1,26 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe 
--no-annotate --resolver=backtracking .riot/requirements/110b5c2.in -# -attrs==25.3.0 -coverage[toml]==7.6.1 -exceptiongroup==1.3.0 -hypothesis==6.45.0 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -mako==1.0.14 -markupsafe==2.1.5 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.2.1 -typing-extensions==4.13.2 -zipp==3.20.2 diff --git a/.riot/requirements/111559c.txt b/.riot/requirements/111559c.txt deleted file mode 100644 index 1440229c1ce..00000000000 --- a/.riot/requirements/111559c.txt +++ /dev/null @@ -1,74 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/111559c.in -# -annotated-types==0.7.0 -attrs==25.3.0 -aws-sam-translator==1.97.0 -aws-xray-sdk==2.14.0 -boto==2.49.0 -boto3==1.37.38 -botocore==1.37.38 -certifi==2025.4.26 -cffi==1.17.1 -cfn-lint==0.53.1 -charset-normalizer==3.4.2 -coverage[toml]==7.6.1 -cryptography==45.0.3 -docker==7.1.0 -ecdsa==0.14.1 -exceptiongroup==1.3.0 -execnet==2.1.1 -hypothesis==6.45.0 -idna==2.10 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -jinja2==2.10.3 -jmespath==1.0.1 -jsondiff==2.2.1 -jsonpatch==1.33 -jsonpointer==3.0.0 -jsonschema==3.2.0 -junit-xml==1.9 -markupsafe==1.1.1 -mock==5.2.0 -more-itertools==10.5.0 -moto==1.3.16 -networkx==2.8.8 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pyasn1==0.4.8 -pycparser==2.22 -pydantic==2.10.6 -pydantic-core==2.27.2 -pynamodb==5.5.1 -pyrsistent==0.20.0 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -pytest-xdist==3.6.1 -python-dateutil==2.9.0.post0 -python-jose[cryptography]==3.4.0 -pytz==2025.2 -pyyaml==6.0.2 -requests==2.32.3 -responses==0.25.7 -rsa==4.9.1 -s3transfer==0.11.5 -six==1.17.0 -sortedcontainers==2.4.0 -sshpubkeys==3.3.1 -tomli==2.2.1 -typing-extensions==4.13.2 -urllib3==1.26.20 
-werkzeug==2.1.2 -wrapt==1.17.2 -xmltodict==0.14.2 -zipp==3.20.2 - -# The following packages are considered to be unsafe in a requirements file: -setuptools==75.3.2 diff --git a/.riot/requirements/112e093.txt b/.riot/requirements/112e093.txt deleted file mode 100644 index 5fff90d1609..00000000000 --- a/.riot/requirements/112e093.txt +++ /dev/null @@ -1,42 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/112e093.in -# -aiofiles==24.1.0 -aiosqlite==0.20.0 -anyio==3.7.1 -attrs==25.3.0 -certifi==2025.8.3 -charset-normalizer==3.4.3 -coverage[toml]==7.6.1 -databases==0.8.0 -exceptiongroup==1.3.0 -greenlet==3.1.1 -h11==0.12.0 -httpcore==0.14.7 -httpx==0.22.0 -hypothesis==6.45.0 -idna==3.10 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pytest==8.3.5 -pytest-asyncio==0.21.1 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -requests==2.32.4 -rfc3986[idna2008]==1.5.0 -sniffio==1.3.1 -sortedcontainers==2.4.0 -sqlalchemy==1.4.54 -starlette==0.44.0 -tomli==2.2.1 -typing-extensions==4.13.2 -urllib3==2.2.3 -zipp==3.20.2 diff --git a/.riot/requirements/114922a.txt b/.riot/requirements/114922a.txt deleted file mode 100644 index 9e2467bba9a..00000000000 --- a/.riot/requirements/114922a.txt +++ /dev/null @@ -1,26 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate --resolver=backtracking .riot/requirements/114922a.in -# -async-timeout==5.0.1 -attrs==25.3.0 -coverage[toml]==7.6.1 -dramatiq==1.10.0 -exceptiongroup==1.3.0 -hypothesis==6.45.0 -iniconfig==2.1.0 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pika==1.3.2 -pluggy==1.5.0 -prometheus-client==0.21.1 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -redis==6.1.1 -sortedcontainers==2.4.0 -tomli==2.2.1 -typing-extensions==4.13.2 
diff --git a/.riot/requirements/116b01f.txt b/.riot/requirements/116b01f.txt deleted file mode 100644 index d3d083bf336..00000000000 --- a/.riot/requirements/116b01f.txt +++ /dev/null @@ -1,60 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/116b01f.in -# -attrs==25.3.0 -certifi==2025.6.15 -charset-normalizer==2.1.1 -click==8.1.8 -coverage[toml]==7.6.1 -deprecated==1.2.18 -exceptiongroup==1.3.0 -flask==2.1.3 -gevent==24.2.1 -googleapis-common-protos==1.70.0 -greenlet==3.1.1 -grpcio==1.70.0 -hypothesis==6.45.0 -idna==3.10 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -itsdangerous==2.2.0 -jinja2==3.1.6 -markupsafe==2.0.1 -mock==5.2.0 -opentelemetry-api==1.33.1 -opentelemetry-exporter-otlp==1.33.1 -opentelemetry-exporter-otlp-proto-common==1.33.1 -opentelemetry-exporter-otlp-proto-grpc==1.33.1 -opentelemetry-exporter-otlp-proto-http==1.33.1 -opentelemetry-instrumentation==0.54b1 -opentelemetry-instrumentation-flask==0.54b1 -opentelemetry-instrumentation-wsgi==0.54b1 -opentelemetry-proto==1.33.1 -opentelemetry-sdk==1.33.1 -opentelemetry-semantic-conventions==0.54b1 -opentelemetry-util-http==0.54b1 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -protobuf==5.29.5 -pytest==8.3.5 -pytest-asyncio==0.21.1 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -requests==2.28.1 -sortedcontainers==2.4.0 -tomli==2.2.1 -typing-extensions==4.13.2 -urllib3==1.26.20 -werkzeug==2.1.2 -wrapt==1.17.2 -zipp==3.20.2 -zope-event==5.0 -zope-interface==7.2 - -# The following packages are considered to be unsafe in a requirements file: -setuptools==75.3.2 diff --git a/.riot/requirements/116bda6.txt b/.riot/requirements/116bda6.txt deleted file mode 100644 index af72c55f3d2..00000000000 --- a/.riot/requirements/116bda6.txt +++ /dev/null @@ -1,39 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.13 -# by the following command: -# -# 
pip-compile --allow-unsafe --no-annotate .riot/requirements/116bda6.in -# -attrs==25.4.0 -coverage[toml]==7.11.0 -gevent==25.9.1 -greenlet==3.2.4 -gunicorn[gevent]==23.0.0 -hypothesis==6.45.0 -iniconfig==2.3.0 -jsonschema==4.25.1 -jsonschema-specifications==2025.9.1 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.6.0 -protobuf==6.33.0 -py-cpuinfo==8.0.0 -pygments==2.19.2 -pytest==8.4.2 -pytest-asyncio==0.21.1 -pytest-benchmark==5.2.1 -pytest-cov==7.0.0 -pytest-cpp==2.6.0 -pytest-mock==3.15.1 -pytest-randomly==4.0.1 -referencing==0.37.0 -rpds-py==0.28.0 -sortedcontainers==2.4.0 -uwsgi==2.0.31 -zope-event==6.0 -zope-interface==8.0.1 -zstandard==0.25.0 - -# The following packages are considered to be unsafe in a requirements file: -setuptools==80.9.0 diff --git a/.riot/requirements/119044a.txt b/.riot/requirements/119044a.txt deleted file mode 100644 index cae7551e20a..00000000000 --- a/.riot/requirements/119044a.txt +++ /dev/null @@ -1,33 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/119044a.in -# -attrs==25.3.0 -azure-core==1.33.0 -azure-functions==1.23.0 -azure-servicebus==7.14.2 -certifi==2025.8.3 -charset-normalizer==3.4.3 -coverage[toml]==7.6.1 -exceptiongroup==1.3.0 -hypothesis==6.45.0 -idna==3.10 -iniconfig==2.1.0 -isodate==0.7.2 -markupsafe==2.1.5 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -requests==2.32.4 -six==1.17.0 -sortedcontainers==2.4.0 -tomli==2.2.1 -typing-extensions==4.13.2 -urllib3==2.2.3 -werkzeug==3.0.6 diff --git a/.riot/requirements/11ac941.txt b/.riot/requirements/11ac941.txt deleted file mode 100644 index 92df617ba6e..00000000000 --- a/.riot/requirements/11ac941.txt +++ /dev/null @@ -1,26 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate 
.riot/requirements/11ac941.in -# -async-timeout==5.0.1 -attrs==24.3.0 -coverage[toml]==7.6.1 -exceptiongroup==1.2.2 -hypothesis==6.45.0 -importlib-metadata==8.5.0 -iniconfig==2.0.0 -mock==5.1.0 -opentracing==2.4.0 -packaging==24.2 -pluggy==1.5.0 -pytest==8.3.4 -pytest-asyncio==0.23.7 -pytest-cov==5.0.0 -pytest-mock==3.14.0 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.2.1 -valkey==6.0.2 -zipp==3.20.2 diff --git a/.riot/requirements/11d9fc2.txt b/.riot/requirements/11d9fc2.txt deleted file mode 100644 index b89da5d9931..00000000000 --- a/.riot/requirements/11d9fc2.txt +++ /dev/null @@ -1,45 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/11d9fc2.in -# -aiofiles==23.2.1 -anyio==4.2.0 -attrs==23.1.0 -certifi==2023.11.17 -charset-normalizer==3.3.2 -coverage[toml]==7.3.4 -exceptiongroup==1.2.0 -h11==0.14.0 -httpcore==0.16.3 -httptools==0.6.1 -httpx==0.23.3 -hypothesis==6.45.0 -idna==3.6 -importlib-metadata==7.0.0 -iniconfig==2.0.0 -mock==5.1.0 -multidict==6.0.4 -opentracing==2.4.0 -packaging==23.2 -pluggy==1.3.0 -pytest==7.4.3 -pytest-asyncio==0.21.1 -pytest-cov==4.1.0 -pytest-mock==3.12.0 -pytest-randomly==3.15.0 -requests==2.31.0 -rfc3986[idna2008]==1.5.0 -sanic==22.12.0 -sanic-routing==23.6.0 -sanic-testing==22.3.1 -sniffio==1.3.0 -sortedcontainers==2.4.0 -tomli==2.0.1 -typing-extensions==4.9.0 -ujson==5.9.0 -urllib3==2.1.0 -uvloop==0.19.0 -websockets==10.4 -zipp==3.17.0 diff --git a/.riot/requirements/1213604.txt b/.riot/requirements/1213604.txt deleted file mode 100644 index df2535c1773..00000000000 --- a/.riot/requirements/1213604.txt +++ /dev/null @@ -1,24 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/1213604.in -# -attrs==23.1.0 -coverage[toml]==7.3.4 -exceptiongroup==1.2.0 -hypothesis==6.45.0 -importlib-metadata==7.0.0 -iniconfig==2.0.0 
-mock==5.1.0 -opentracing==2.4.0 -packaging==23.2 -pluggy==1.3.0 -pytest==7.4.3 -pytest-asyncio==0.21.1 -pytest-cov==4.1.0 -pytest-mock==3.12.0 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.0.1 -zipp==3.17.0 diff --git a/.riot/requirements/1214426.txt b/.riot/requirements/1214426.txt deleted file mode 100644 index 27ac717aad0..00000000000 --- a/.riot/requirements/1214426.txt +++ /dev/null @@ -1,24 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/1214426.in -# -attrs==23.1.0 -coverage[toml]==7.3.4 -exceptiongroup==1.2.0 -hypothesis==6.45.0 -importlib-metadata==7.0.0 -iniconfig==2.0.0 -mock==5.1.0 -msgpack==1.0.7 -opentracing==2.4.0 -packaging==23.2 -pluggy==1.3.0 -pytest==7.4.3 -pytest-cov==4.1.0 -pytest-mock==3.12.0 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.0.1 -zipp==3.17.0 diff --git a/.riot/requirements/122e427.txt b/.riot/requirements/122e427.txt deleted file mode 100644 index 58d51498b2c..00000000000 --- a/.riot/requirements/122e427.txt +++ /dev/null @@ -1,31 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate --resolver=backtracking .riot/requirements/122e427.in -# -attrs==25.3.0 -certifi==2025.1.31 -coverage[toml]==7.6.1 -elastic-transport==8.17.1 -elasticsearch==9.0.0 -elasticsearch7==7.17.12 -exceptiongroup==1.2.2 -hypothesis==6.45.0 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -mock==5.2.0 -opentracing==2.4.0 -packaging==24.2 -pluggy==1.5.0 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-mock==3.14.0 -pytest-randomly==3.15.0 -python-dateutil==2.9.0.post0 -six==1.17.0 -sortedcontainers==2.4.0 -tomli==2.2.1 -typing-extensions==4.13.2 -urllib3==1.26.20 -zipp==3.20.2 diff --git a/.riot/requirements/12304dc.txt b/.riot/requirements/12304dc.txt deleted file mode 100644 index a7efa420de5..00000000000 --- a/.riot/requirements/12304dc.txt +++ 
/dev/null @@ -1,27 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/12304dc.in -# -attrs==25.3.0 -backports-zoneinfo==0.2.1 -coverage[toml]==7.6.1 -exceptiongroup==1.3.0 -hypothesis==6.45.0 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -psycopg==3.0.18 -pytest==8.3.5 -pytest-asyncio==0.21.1 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.2.1 -typing-extensions==4.13.2 -zipp==3.20.2 diff --git a/.riot/requirements/1258e80.txt b/.riot/requirements/1258e80.txt deleted file mode 100644 index 449021d50d8..00000000000 --- a/.riot/requirements/1258e80.txt +++ /dev/null @@ -1,30 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/1258e80.in -# -attrs==23.1.0 -certifi==2023.11.17 -charset-normalizer==3.3.2 -coverage[toml]==7.3.4 -exceptiongroup==1.2.0 -hypothesis==6.45.0 -idna==3.6 -importlib-metadata==7.0.0 -iniconfig==2.0.0 -mock==5.1.0 -opentracing==2.4.0 -packaging==23.2 -pluggy==1.3.0 -pytest==7.4.3 -pytest-cov==4.1.0 -pytest-mock==3.12.0 -pytest-randomly==3.15.0 -python-consul==1.1.0 -requests==2.31.0 -six==1.16.0 -sortedcontainers==2.4.0 -tomli==2.0.1 -urllib3==2.1.0 -zipp==3.17.0 diff --git a/.riot/requirements/127e019.txt b/.riot/requirements/127e019.txt deleted file mode 100644 index 337c2b160ed..00000000000 --- a/.riot/requirements/127e019.txt +++ /dev/null @@ -1,42 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.10 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/127e019.in -# -attrs==25.4.0 -coverage[toml]==7.11.0 -exceptiongroup==1.3.0 -gevent==25.9.1 -greenlet==3.2.4 -gunicorn[gevent]==23.0.0 -hypothesis==6.45.0 -iniconfig==2.3.0 -jsonschema==4.25.1 
-jsonschema-specifications==2025.9.1 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.6.0 -protobuf==6.33.0 -py-cpuinfo==8.0.0 -pygments==2.19.2 -pytest==8.4.2 -pytest-asyncio==0.21.1 -pytest-benchmark==5.2.1 -pytest-cov==7.0.0 -pytest-cpp==2.6.0 -pytest-mock==3.15.1 -pytest-randomly==4.0.1 -referencing==0.37.0 -rpds-py==0.28.0 -sortedcontainers==2.4.0 -tomli==2.3.0 -typing-extensions==4.15.0 -uwsgi==2.0.31 -zope-event==6.0 -zope-interface==8.0.1 -zstandard==0.25.0 - -# The following packages are considered to be unsafe in a requirements file: -setuptools==80.9.0 diff --git a/.riot/requirements/1280196.txt b/.riot/requirements/1280196.txt deleted file mode 100644 index 9ddea946400..00000000000 --- a/.riot/requirements/1280196.txt +++ /dev/null @@ -1,30 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/1280196.in -# -attrs==25.3.0 -beautifulsoup4==4.14.2 -bottle==0.13.4 -coverage[toml]==7.6.1 -exceptiongroup==1.3.0 -hypothesis==6.45.0 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -soupsieve==2.7 -tomli==2.3.0 -typing-extensions==4.13.2 -waitress==3.0.0 -webob==1.8.9 -webtest==3.0.1 -zipp==3.20.2 diff --git a/.riot/requirements/128a8db.txt b/.riot/requirements/128a8db.txt deleted file mode 100644 index 8fbc2c95ecf..00000000000 --- a/.riot/requirements/128a8db.txt +++ /dev/null @@ -1,24 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/128a8db.in -# -attrs==25.3.0 -clang==20.1.5 -cmake==4.0.3 -coverage[toml]==7.6.1 -exceptiongroup==1.3.0 -hypothesis==6.45.0 -iniconfig==2.1.0 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pybind11==3.0.0 
-pytest==8.3.5 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -sortedcontainers==2.4.0 -tomli==2.2.1 -typing-extensions==4.13.2 diff --git a/.riot/requirements/1291b76.txt b/.riot/requirements/1291b76.txt deleted file mode 100644 index 383d3c58109..00000000000 --- a/.riot/requirements/1291b76.txt +++ /dev/null @@ -1,30 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/1291b76.in -# -asgiref==3.8.1 -attrs==25.3.0 -certifi==2025.6.15 -charset-normalizer==3.4.2 -coverage[toml]==7.6.1 -django==3.2.25 -exceptiongroup==1.3.0 -hypothesis==6.45.0 -idna==3.10 -iniconfig==2.1.0 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pytz==2025.2 -requests==2.32.4 -sortedcontainers==2.4.0 -sqlparse==0.5.3 -tomli==2.2.1 -typing-extensions==4.13.2 -urllib3==2.2.3 diff --git a/.riot/requirements/12aa44c.txt b/.riot/requirements/12aa44c.txt deleted file mode 100644 index 2c11e62efab..00000000000 --- a/.riot/requirements/12aa44c.txt +++ /dev/null @@ -1,27 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/12aa44c.in -# -attrs==23.1.0 -certifi==2023.11.17 -coverage[toml]==7.3.4 -elastic-transport==8.11.0 -elasticsearch==8.0.1 -exceptiongroup==1.2.0 -hypothesis==6.45.0 -importlib-metadata==7.0.0 -iniconfig==2.0.0 -mock==5.1.0 -opentracing==2.4.0 -packaging==23.2 -pluggy==1.3.0 -pytest==7.4.3 -pytest-cov==4.1.0 -pytest-mock==3.12.0 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.0.1 -urllib3==2.1.0 -zipp==3.17.0 diff --git a/.riot/requirements/12b4a54.txt b/.riot/requirements/12b4a54.txt deleted file mode 100644 index 11a84b3a69a..00000000000 --- a/.riot/requirements/12b4a54.txt +++ /dev/null @@ -1,25 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# 
pip-compile --allow-unsafe --no-annotate .riot/requirements/12b4a54.in -# -attrs==25.3.0 -coverage[toml]==7.6.1 -exceptiongroup==1.3.0 -hypothesis==6.45.0 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -logbook==1.0.0 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.2.1 -typing-extensions==4.13.2 -zipp==3.20.2 diff --git a/.riot/requirements/13015fd.txt b/.riot/requirements/13015fd.txt deleted file mode 100644 index 29ed26daa1c..00000000000 --- a/.riot/requirements/13015fd.txt +++ /dev/null @@ -1,27 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/13015fd.in -# -attrs==25.3.0 -coverage[toml]==7.6.1 -dnspython==2.6.1 -exceptiongroup==1.3.0 -hypothesis==6.45.0 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -mock==5.2.0 -mongoengine==0.29.1 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pymongo==4.10.1 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.2.1 -typing-extensions==4.13.2 -zipp==3.20.2 diff --git a/.riot/requirements/130dd21.txt b/.riot/requirements/130dd21.txt deleted file mode 100644 index a1eb686cbfd..00000000000 --- a/.riot/requirements/130dd21.txt +++ /dev/null @@ -1,32 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate --resolver=backtracking .riot/requirements/130dd21.in -# -attrs==25.3.0 -cheroot==10.0.1 -cherrypy==17.0.0 -coverage[toml]==7.6.1 -exceptiongroup==1.3.0 -hypothesis==6.45.0 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -jaraco-functools==4.1.0 -mock==5.2.0 -more-itertools==8.10.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -portend==3.2.0 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 
-python-dateutil==2.9.0.post0 -six==1.17.0 -sortedcontainers==2.4.0 -tempora==5.7.1 -tomli==2.2.1 -typing-extensions==4.13.2 -zipp==3.20.2 diff --git a/.riot/requirements/132305d.txt b/.riot/requirements/132305d.txt deleted file mode 100644 index a2483e423ad..00000000000 --- a/.riot/requirements/132305d.txt +++ /dev/null @@ -1,46 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/132305d.in -# -asgiref==3.8.1 -attrs==25.3.0 -bcrypt==4.2.1 -certifi==2025.10.5 -charset-normalizer==3.4.4 -coverage[toml]==7.6.1 -dill==0.4.0 -django==3.2.25 -django-configurations==2.5.1 -exceptiongroup==1.3.0 -gevent==22.10.2 -greenlet==3.1.1 -gunicorn==23.0.0 -hypothesis==6.45.0 -idna==3.11 -iniconfig==2.1.0 -legacy-cgi==2.6.4 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pylibmc==1.6.3 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-django[testing]==3.10.0 -pytest-mock==3.14.1 -pytz==2025.2 -pyyaml==6.0.3 -requests==2.32.4 -six==1.17.0 -sortedcontainers==2.4.0 -sqlparse==0.5.3 -tomli==2.3.0 -typing-extensions==4.13.2 -urllib3==2.2.3 -zope-event==5.0 -zope-interface==7.2 - -# The following packages are considered to be unsafe in a requirements file: -setuptools==75.3.2 diff --git a/.riot/requirements/132915c.txt b/.riot/requirements/132915c.txt deleted file mode 100644 index 7b85f7727d7..00000000000 --- a/.riot/requirements/132915c.txt +++ /dev/null @@ -1,27 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate --resolver=backtracking .riot/requirements/132915c.in -# -attrs==25.3.0 -coverage[toml]==7.6.1 -exceptiongroup==1.2.2 -googleapis-common-protos==1.70.0 -grpcio==1.59.5 -hypothesis==6.45.0 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -mock==5.2.0 -opentracing==2.4.0 -packaging==24.2 -pluggy==1.5.0 -protobuf==5.29.4 -pytest==8.3.5 -pytest-asyncio==0.23.7 
-pytest-cov==5.0.0 -pytest-mock==3.14.0 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.2.1 -zipp==3.20.2 diff --git a/.riot/requirements/13342d2.txt b/.riot/requirements/13342d2.txt deleted file mode 100644 index bca1e8dc140..00000000000 --- a/.riot/requirements/13342d2.txt +++ /dev/null @@ -1,36 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/13342d2.in -# -anyio==4.5.2 -attrs==25.3.0 -certifi==2025.8.3 -coverage==7.6.1 -exceptiongroup==1.3.0 -execnet==2.1.1 -h11==0.16.0 -httpcore==1.0.9 -httpx==0.27.2 -hypothesis==6.45.0 -idna==3.10 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -mock==5.2.0 -more-itertools==8.10.0 -msgpack==1.1.1 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -py==1.11.0 -pytest==6.2.5 -pytest-cov==2.9.0 -pytest-mock==2.0.0 -pytest-randomly==3.15.0 -pytest-xdist==3.5.0 -sniffio==1.3.1 -sortedcontainers==2.4.0 -toml==0.10.2 -typing-extensions==4.13.2 -zipp==3.20.2 diff --git a/.riot/requirements/1337ee3.txt b/.riot/requirements/1337ee3.txt deleted file mode 100644 index 7a2b39ce1e6..00000000000 --- a/.riot/requirements/1337ee3.txt +++ /dev/null @@ -1,29 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/1337ee3.in -# -attrs==25.3.0 -azure-functions==1.23.0 -certifi==2025.8.3 -charset-normalizer==3.4.3 -coverage[toml]==7.6.1 -exceptiongroup==1.3.0 -hypothesis==6.45.0 -idna==3.10 -iniconfig==2.1.0 -markupsafe==2.1.5 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -requests==2.32.4 -sortedcontainers==2.4.0 -tomli==2.2.1 -typing-extensions==4.13.2 -urllib3==2.2.3 -werkzeug==3.0.6 diff --git a/.riot/requirements/1344329.txt b/.riot/requirements/1344329.txt deleted file mode 100644 index cf2e4583b0a..00000000000 --- 
a/.riot/requirements/1344329.txt +++ /dev/null @@ -1,25 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/1344329.in -# -attrs==23.1.0 -coverage[toml]==7.3.4 -elasticsearch5==5.5.6 -exceptiongroup==1.2.0 -hypothesis==6.45.0 -importlib-metadata==7.0.0 -iniconfig==2.0.0 -mock==5.1.0 -opentracing==2.4.0 -packaging==23.2 -pluggy==1.3.0 -pytest==7.4.3 -pytest-cov==4.1.0 -pytest-mock==3.12.0 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.0.1 -urllib3==2.1.0 -zipp==3.17.0 diff --git a/.riot/requirements/134a53d.txt b/.riot/requirements/134a53d.txt deleted file mode 100644 index 1473061d7c1..00000000000 --- a/.riot/requirements/134a53d.txt +++ /dev/null @@ -1,25 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate --resolver=backtracking .riot/requirements/134a53d.in -# -attrs==25.3.0 -coverage[toml]==7.6.1 -exceptiongroup==1.3.0 -hypothesis==6.45.0 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -pyyaml==6.0.2 -sortedcontainers==2.4.0 -tomli==2.2.1 -typing-extensions==4.13.2 -zipp==3.20.2 diff --git a/.riot/requirements/134e77a.txt b/.riot/requirements/134e77a.txt deleted file mode 100644 index da96e381bb6..00000000000 --- a/.riot/requirements/134e77a.txt +++ /dev/null @@ -1,41 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/134e77a.in -# -amqp==5.3.1 -attrs==25.3.0 -backports-zoneinfo[tzdata]==0.2.1 -billiard==4.2.1 -celery==5.5.3 -click==8.1.8 -click-didyoumean==0.3.1 -click-plugins==1.1.1.2 -click-repl==0.3.0 -coverage[toml]==7.6.1 -exceptiongroup==1.3.0 -hypothesis==6.45.0 -importlib-metadata==8.5.0 
-iniconfig==2.1.0 -kombu==5.5.4 -mock==5.2.0 -more-itertools==8.10.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -prompt-toolkit==3.0.51 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -python-dateutil==2.9.0.post0 -redis==3.5.3 -six==1.17.0 -sortedcontainers==2.4.0 -tomli==2.2.1 -typing-extensions==4.13.2 -tzdata==2025.2 -vine==5.1.0 -wcwidth==0.2.13 -zipp==3.20.2 diff --git a/.riot/requirements/1356251.txt b/.riot/requirements/1356251.txt deleted file mode 100644 index 0b3c927d4fb..00000000000 --- a/.riot/requirements/1356251.txt +++ /dev/null @@ -1,35 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/1356251.in -# -aiohttp==3.9.5 -aiohttp-jinja2==1.6 -aiosignal==1.3.1 -async-timeout==4.0.3 -attrs==23.2.0 -coverage[toml]==7.5.4 -exceptiongroup==1.2.1 -frozenlist==1.4.1 -hypothesis==6.45.0 -idna==3.7 -importlib-metadata==8.0.0 -iniconfig==2.0.0 -jinja2==3.1.4 -markupsafe==2.1.5 -mock==5.1.0 -multidict==6.0.5 -opentracing==2.4.0 -packaging==24.1 -pluggy==1.5.0 -pytest==8.2.2 -pytest-aiohttp==1.0.5 -pytest-asyncio==0.23.7 -pytest-cov==5.0.0 -pytest-mock==3.14.0 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.0.1 -yarl==1.9.4 -zipp==3.19.2 diff --git a/.riot/requirements/1367a0e.txt b/.riot/requirements/1367a0e.txt deleted file mode 100644 index 10a489ee4f0..00000000000 --- a/.riot/requirements/1367a0e.txt +++ /dev/null @@ -1,24 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/1367a0e.in -# -attrs==23.1.0 -coverage[toml]==7.3.4 -exceptiongroup==1.2.0 -hypothesis==6.45.0 -importlib-metadata==7.0.0 -iniconfig==2.0.0 -mock==5.1.0 -opentracing==2.4.0 -packaging==23.2 -pluggy==1.3.0 -pytest==7.4.3 -pytest-cov==4.1.0 -pytest-mock==3.12.0 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.0.1 -tornado==5.1.1 
-zipp==3.17.0 diff --git a/.riot/requirements/137cba1.txt b/.riot/requirements/137cba1.txt deleted file mode 100644 index 4ce4b48c527..00000000000 --- a/.riot/requirements/137cba1.txt +++ /dev/null @@ -1,28 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/137cba1.in -# -attrs==23.1.0 -coverage[toml]==7.3.4 -decorator==5.1.1 -dogpile-cache==1.3.0 -exceptiongroup==1.2.0 -hypothesis==6.45.0 -importlib-metadata==7.0.0 -iniconfig==2.0.0 -mock==5.1.0 -opentracing==2.4.0 -packaging==23.2 -pbr==6.0.0 -pluggy==1.3.0 -pytest==7.4.3 -pytest-cov==4.1.0 -pytest-mock==3.12.0 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -stevedore==5.1.0 -tomli==2.0.1 -typing-extensions==4.9.0 -zipp==3.17.0 diff --git a/.riot/requirements/138886e.txt b/.riot/requirements/138886e.txt deleted file mode 100644 index 480cd22178b..00000000000 --- a/.riot/requirements/138886e.txt +++ /dev/null @@ -1,35 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/138886e.in -# -aiohappyeyeballs==2.4.4 -aiohttp==3.10.11 -aiosignal==1.3.1 -async-timeout==5.0.1 -attrs==25.3.0 -coverage[toml]==7.6.1 -exceptiongroup==1.3.0 -frozenlist==1.5.0 -hypothesis==6.45.0 -idna==3.10 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -mock==5.2.0 -multidict==6.1.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -propcache==0.2.0 -pytest==8.3.5 -pytest-aiohttp==1.0.5 -pytest-asyncio==0.23.7 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.2.1 -typing-extensions==4.13.2 -yarl==1.15.2 -zipp==3.20.2 diff --git a/.riot/requirements/13bb925.txt b/.riot/requirements/13bb925.txt deleted file mode 100644 index f87641d20cc..00000000000 --- a/.riot/requirements/13bb925.txt +++ /dev/null @@ -1,24 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by 
the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/13bb925.in -# -attrs==24.3.0 -coverage[toml]==7.6.1 -exceptiongroup==1.2.2 -falcon==3.0.1 -hypothesis==6.45.0 -importlib-metadata==8.5.0 -iniconfig==2.0.0 -mock==5.1.0 -opentracing==2.4.0 -packaging==24.2 -pluggy==1.5.0 -pytest==8.3.4 -pytest-cov==5.0.0 -pytest-mock==3.14.0 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.2.1 -zipp==3.20.2 diff --git a/.riot/requirements/13c380c.txt b/.riot/requirements/13c380c.txt deleted file mode 100644 index bea29a1b8ab..00000000000 --- a/.riot/requirements/13c380c.txt +++ /dev/null @@ -1,24 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate --resolver=backtracking .riot/requirements/13c380c.in -# -attrs==25.3.0 -coverage[toml]==7.6.1 -exceptiongroup==1.2.2 -hypothesis==6.45.0 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -mock==5.2.0 -opentracing==2.4.0 -packaging==24.2 -pluggy==1.5.0 -psycopg2-binary==2.9.10 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-mock==3.14.0 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.2.1 -zipp==3.20.2 diff --git a/.riot/requirements/13c42e3.txt b/.riot/requirements/13c42e3.txt deleted file mode 100644 index 82838d89360..00000000000 --- a/.riot/requirements/13c42e3.txt +++ /dev/null @@ -1,54 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/13c42e3.in -# -annotated-types==0.7.0 -anyio==4.5.2 -attrs==25.3.0 -certifi==2025.4.26 -coverage[toml]==7.6.1 -distro==1.9.0 -exceptiongroup==1.3.0 -h11==0.16.0 -httpcore==1.0.9 -httpx==0.27.2 -hypothesis==6.45.0 -idna==3.10 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -mock==5.2.0 -multidict==6.1.0 -numpy==1.24.4 -openai[datalib,embeddings]==1.30.1 -opentracing==2.4.0 -packaging==25.0 -pandas==2.0.3 -pandas-stubs==2.0.3.230814 -pillow==9.5.0 
-pluggy==1.5.0 -propcache==0.2.0 -pydantic==2.10.6 -pydantic-core==2.27.2 -pytest==8.3.5 -pytest-asyncio==0.21.1 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -python-dateutil==2.9.0.post0 -pytz==2025.2 -pyyaml==6.0.2 -six==1.17.0 -sniffio==1.3.1 -sortedcontainers==2.4.0 -tomli==2.2.1 -tqdm==4.67.1 -types-pytz==2024.2.0.20241221 -typing-extensions==4.13.2 -tzdata==2025.2 -urllib3==1.26.20 -vcrpy==6.0.2 -wrapt==1.17.2 -yarl==1.15.2 -zipp==3.20.2 diff --git a/.riot/requirements/13f5237.txt b/.riot/requirements/13f5237.txt deleted file mode 100644 index a9f480d16ae..00000000000 --- a/.riot/requirements/13f5237.txt +++ /dev/null @@ -1,60 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/13f5237.in -# -attrs==25.3.0 -backoff==2.2.1 -certifi==2025.6.15 -charset-normalizer==2.1.1 -click==8.1.8 -coverage[toml]==7.6.1 -deprecated==1.2.18 -exceptiongroup==1.3.0 -flask==2.1.3 -gevent==24.2.1 -googleapis-common-protos==1.70.0 -greenlet==3.1.1 -grpcio==1.70.0 -hypothesis==6.45.0 -idna==3.10 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -itsdangerous==2.2.0 -jinja2==3.1.6 -markupsafe==2.0.1 -mock==5.2.0 -opentelemetry-api==1.33.1 -opentelemetry-exporter-otlp==1.15.0 -opentelemetry-exporter-otlp-proto-grpc==1.15.0 -opentelemetry-exporter-otlp-proto-http==1.15.0 -opentelemetry-instrumentation==0.54b1 -opentelemetry-instrumentation-flask==0.54b1 -opentelemetry-instrumentation-wsgi==0.54b1 -opentelemetry-proto==1.15.0 -opentelemetry-sdk==1.33.1 -opentelemetry-semantic-conventions==0.54b1 -opentelemetry-util-http==0.54b1 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -protobuf==4.25.8 -pytest==8.3.5 -pytest-asyncio==0.21.1 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -requests==2.28.1 -sortedcontainers==2.4.0 -tomli==2.2.1 -typing-extensions==4.13.2 -urllib3==1.26.20 -werkzeug==2.1.2 -wrapt==1.17.2 -zipp==3.20.2 
-zope-event==5.0 -zope-interface==7.2 - -# The following packages are considered to be unsafe in a requirements file: -setuptools==75.3.2 diff --git a/.riot/requirements/13f6818.txt b/.riot/requirements/13f6818.txt deleted file mode 100644 index 11bbbf63862..00000000000 --- a/.riot/requirements/13f6818.txt +++ /dev/null @@ -1,35 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/13f6818.in -# -aiohappyeyeballs==2.4.4 -aiohttp==3.10.11 -aiosignal==1.3.1 -async-timeout==5.0.1 -attrs==25.3.0 -coverage[toml]==7.6.1 -exceptiongroup==1.3.0 -frozenlist==1.5.0 -hypothesis==6.45.0 -idna==3.10 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -mock==5.2.0 -multidict==6.1.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -propcache==0.2.0 -pytest==8.3.5 -pytest-aiohttp==1.0.5 -pytest-asyncio==0.23.7 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.2.1 -typing-extensions==4.13.2 -yarl==1.15.2 -zipp==3.20.2 diff --git a/.riot/requirements/13f7c51.txt b/.riot/requirements/13f7c51.txt deleted file mode 100644 index caf600998bb..00000000000 --- a/.riot/requirements/13f7c51.txt +++ /dev/null @@ -1,34 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/13f7c51.in -# -attrs==25.3.0 -certifi==2025.7.9 -charset-normalizer==3.4.2 -click==8.1.8 -coverage[toml]==7.6.1 -exceptiongroup==1.3.0 -flask==2.1.3 -hypothesis==6.113.0 -idna==3.10 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -itsdangerous==2.2.0 -jinja2==3.1.6 -markupsafe==2.1.5 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -requests==2.32.4 -sortedcontainers==2.4.0 -tomli==2.2.1 -typing-extensions==4.13.2 -urllib3==2.2.3 -werkzeug==2.3.8 -zipp==3.20.2 diff --git 
a/.riot/requirements/140ec91.txt b/.riot/requirements/140ec91.txt deleted file mode 100644 index 2c62a8d4b92..00000000000 --- a/.riot/requirements/140ec91.txt +++ /dev/null @@ -1,34 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/140ec91.in -# -attrs==23.2.0 -blinker==1.7.0 -cachelib==0.9.0 -click==8.1.7 -coverage[toml]==7.4.2 -exceptiongroup==1.2.0 -flask==3.0.2 -flask-caching==2.1.0 -hypothesis==6.45.0 -importlib-metadata==7.0.1 -iniconfig==2.0.0 -itsdangerous==2.1.2 -jinja2==3.1.3 -markupsafe==2.1.5 -mock==5.1.0 -opentracing==2.4.0 -packaging==23.2 -pluggy==1.4.0 -pytest==8.0.1 -pytest-cov==4.1.0 -pytest-mock==3.12.0 -pytest-randomly==3.15.0 -python-memcached==1.62 -redis==2.10.6 -sortedcontainers==2.4.0 -tomli==2.0.1 -werkzeug==3.0.1 -zipp==3.17.0 diff --git a/.riot/requirements/1413039.txt b/.riot/requirements/1413039.txt deleted file mode 100644 index 82340d380e3..00000000000 --- a/.riot/requirements/1413039.txt +++ /dev/null @@ -1,30 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/1413039.in -# -attrs==24.3.0 -coverage[toml]==7.6.1 -exceptiongroup==1.2.2 -gevent==24.2.1 -greenlet==3.1.1 -hypothesis==6.45.0 -importlib-metadata==8.5.0 -iniconfig==2.0.0 -mock==5.1.0 -opentracing==2.4.0 -packaging==24.2 -pluggy==1.5.0 -pytest==8.3.4 -pytest-cov==5.0.0 -pytest-mock==3.14.0 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.2.1 -zipp==3.20.2 -zope-event==5.0 -zope-interface==7.2 - -# The following packages are considered to be unsafe in a requirements file: -setuptools==75.3.0 diff --git a/.riot/requirements/1415ef8.txt b/.riot/requirements/1415ef8.txt deleted file mode 100644 index 24cd0a250b4..00000000000 --- a/.riot/requirements/1415ef8.txt +++ /dev/null @@ -1,40 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 
-# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/1415ef8.in -# -annotated-types==0.7.0 -attrs==25.3.0 -blinker==1.8.2 -certifi==2025.10.5 -charset-normalizer==3.4.3 -click==8.1.8 -coverage[toml]==7.6.1 -exceptiongroup==1.3.0 -flask==3.0.3 -flask-openapi3==4.0.3 -hypothesis==6.45.0 -idna==3.10 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -itsdangerous==2.2.0 -jinja2==3.1.6 -markupsafe==2.1.5 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pydantic==2.10.6 -pydantic-core==2.27.2 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -requests==2.32.4 -sortedcontainers==2.4.0 -tomli==2.2.1 -typing-extensions==4.13.2 -urllib3==1.26.20 -werkzeug==3.0.6 -zipp==3.20.2 diff --git a/.riot/requirements/1424e42.txt b/.riot/requirements/1424e42.txt deleted file mode 100644 index f58bbb22bd6..00000000000 --- a/.riot/requirements/1424e42.txt +++ /dev/null @@ -1,22 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.12 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/1424e42.in -# -attrs==24.2.0 -coverage[toml]==7.6.1 -dnspython==2.6.1 -hypothesis==6.45.0 -iniconfig==2.0.0 -mock==5.1.0 -mongoengine==0.29.1 -opentracing==2.4.0 -packaging==24.1 -pluggy==1.5.0 -pymongo==4.8.0 -pytest==8.3.3 -pytest-cov==5.0.0 -pytest-mock==3.14.0 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 diff --git a/.riot/requirements/1429dec.txt b/.riot/requirements/1429dec.txt deleted file mode 100644 index 2abb9987472..00000000000 --- a/.riot/requirements/1429dec.txt +++ /dev/null @@ -1,38 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/1429dec.in -# -attrs==25.3.0 -coverage[toml]==7.6.1 -exceptiongroup==1.3.0 -gunicorn==23.0.0 -hypothesis==6.45.0 -importlib-metadata==8.5.0 -importlib-resources==6.4.5 -iniconfig==2.1.0 -jsonschema==4.23.0 
-jsonschema-specifications==2023.12.1 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pkgutil-resolve-name==1.3.10 -pluggy==1.5.0 -protobuf==5.29.5 -py-cpuinfo==8.0.0 -pytest==8.3.5 -pytest-asyncio==0.21.1 -pytest-benchmark==4.0.0 -pytest-cov==5.0.0 -pytest-cpp==2.6.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -referencing==0.35.1 -rpds-py==0.20.1 -sortedcontainers==2.4.0 -tomli==2.3.0 -typing-extensions==4.13.2 -uwsgi==2.0.29 -zipp==3.20.2 -zstandard==0.23.0 diff --git a/.riot/requirements/14395e9.txt b/.riot/requirements/14395e9.txt deleted file mode 100644 index 55ad6e69192..00000000000 --- a/.riot/requirements/14395e9.txt +++ /dev/null @@ -1,45 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/14395e9.in -# -asgiref==3.8.1 -attrs==25.3.0 -backports-zoneinfo==0.2.1 -bcrypt==4.2.1 -certifi==2025.8.3 -charset-normalizer==3.4.3 -coverage[toml]==7.6.1 -dill==0.4.0 -django==4.2.24 -django-configurations==2.5.1 -exceptiongroup==1.3.0 -gevent==24.2.1 -greenlet==3.1.1 -gunicorn==23.0.0 -hypothesis==6.45.0 -idna==3.10 -iniconfig==2.1.0 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pylibmc==1.6.3 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-django[testing]==3.10.0 -pytest-mock==3.14.1 -pyyaml==6.0.2 -requests==2.32.4 -six==1.17.0 -sortedcontainers==2.4.0 -sqlparse==0.5.3 -tomli==2.2.1 -typing-extensions==4.13.2 -urllib3==2.2.3 -zope-event==5.0 -zope-interface==7.2 - -# The following packages are considered to be unsafe in a requirements file: -setuptools==75.3.2 diff --git a/.riot/requirements/144ad1a.txt b/.riot/requirements/144ad1a.txt deleted file mode 100644 index 2a1b6cd94b3..00000000000 --- a/.riot/requirements/144ad1a.txt +++ /dev/null @@ -1,42 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/144ad1a.in -# 
-annotated-types==0.7.0 -anthropic==0.67.0 -anyio==4.5.2 -attrs==25.3.0 -certifi==2025.8.3 -coverage[toml]==7.6.1 -distro==1.9.0 -exceptiongroup==1.3.0 -h11==0.16.0 -httpcore==1.0.9 -httpx==0.27.2 -hypothesis==6.45.0 -idna==3.10 -iniconfig==2.1.0 -jiter==0.9.1 -mock==5.2.0 -multidict==6.1.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -propcache==0.2.0 -pydantic==2.10.6 -pydantic-core==2.27.2 -pytest==8.3.5 -pytest-asyncio==0.24.0 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pyyaml==6.0.2 -sniffio==1.3.1 -sortedcontainers==2.4.0 -tomli==2.2.1 -typing-extensions==4.13.2 -urllib3==1.26.20 -vcrpy==6.0.2 -wrapt==1.17.3 -yarl==1.15.2 diff --git a/.riot/requirements/14676df.txt b/.riot/requirements/14676df.txt deleted file mode 100644 index 055678228c9..00000000000 --- a/.riot/requirements/14676df.txt +++ /dev/null @@ -1,26 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.10 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/14676df.in -# -attrs==25.3.0 -coverage[toml]==7.8.2 -exceptiongroup==1.3.0 -freezegun==1.5.2 -hypothesis==6.45.0 -iniconfig==2.1.0 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.6.0 -pygments==2.19.1 -pytest==8.4.0 -pytest-cov==6.1.1 -pytest-mock==3.14.1 -pytest-randomly==3.16.0 -python-dateutil==2.9.0.post0 -six==1.17.0 -sortedcontainers==2.4.0 -tomli==2.2.1 -typing-extensions==4.14.0 diff --git a/.riot/requirements/1467f24.txt b/.riot/requirements/1467f24.txt deleted file mode 100644 index a59bd2ed545..00000000000 --- a/.riot/requirements/1467f24.txt +++ /dev/null @@ -1,34 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/1467f24.in -# -asgiref==3.8.1 -attrs==25.3.0 -backports-zoneinfo==0.2.1 -coverage[toml]==7.6.1 -django==4.2.21 -django-configurations==2.5.1 -djangorestframework==3.15.2 -exceptiongroup==1.3.0 -execnet==2.1.1 -hypothesis==6.45.0 
-importlib-metadata==8.5.0 -iniconfig==2.1.0 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-django[testing]==3.10.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -pytest-xdist==3.6.1 -six==1.17.0 -sortedcontainers==2.4.0 -sqlparse==0.5.3 -tomli==2.2.1 -typing-extensions==4.13.2 -zipp==3.20.2 diff --git a/.riot/requirements/1468cf5.txt b/.riot/requirements/1468cf5.txt deleted file mode 100644 index 6b90ac2ac97..00000000000 --- a/.riot/requirements/1468cf5.txt +++ /dev/null @@ -1,23 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.14 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/1468cf5.in -# -attrs==25.3.0 -coverage[toml]==7.10.5 -dnspython==2.7.0 -hypothesis==6.45.0 -iniconfig==2.1.0 -mock==5.2.0 -mongoengine==0.29.1 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.6.0 -pygments==2.19.2 -pymongo==4.8.0 -pytest==8.4.1 -pytest-cov==6.2.1 -pytest-mock==3.14.1 -pytest-randomly==3.16.0 -sortedcontainers==2.4.0 diff --git a/.riot/requirements/14767b5.txt b/.riot/requirements/14767b5.txt deleted file mode 100644 index 0bb110811df..00000000000 --- a/.riot/requirements/14767b5.txt +++ /dev/null @@ -1,23 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/14767b5.in -# -attrs==23.1.0 -coverage[toml]==7.3.4 -exceptiongroup==1.2.0 -hypothesis==6.45.0 -importlib-metadata==7.0.0 -iniconfig==2.0.0 -mock==5.1.0 -opentracing==2.4.0 -packaging==23.2 -pluggy==1.3.0 -pytest==7.4.3 -pytest-cov==4.1.0 -pytest-mock==3.12.0 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.0.1 -zipp==3.17.0 diff --git a/.riot/requirements/14e85f3.txt b/.riot/requirements/14e85f3.txt deleted file mode 100644 index 44ce4a54256..00000000000 --- a/.riot/requirements/14e85f3.txt +++ /dev/null @@ -1,22 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.11 -# 
by the following command: -# -# pip-compile --no-annotate --resolver=backtracking .riot/requirements/14e85f3.in -# -attrs==24.2.0 -coverage[toml]==7.6.1 -dnspython==2.6.1 -hypothesis==6.45.0 -iniconfig==2.0.0 -mock==5.1.0 -mongoengine==0.29.1 -opentracing==2.4.0 -packaging==24.1 -pluggy==1.5.0 -pymongo==4.8.0 -pytest==8.3.3 -pytest-cov==5.0.0 -pytest-mock==3.14.0 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 diff --git a/.riot/requirements/14e9a3d.txt b/.riot/requirements/14e9a3d.txt deleted file mode 100644 index ffbb95edc30..00000000000 --- a/.riot/requirements/14e9a3d.txt +++ /dev/null @@ -1,34 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/14e9a3d.in -# -asgiref==3.8.1 -attrs==25.3.0 -coverage[toml]==7.6.1 -django==3.2.25 -django-configurations==2.5.1 -djangorestframework==3.11.2 -exceptiongroup==1.3.0 -execnet==2.1.1 -hypothesis==6.45.0 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-django[testing]==3.10.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -pytest-xdist==3.6.1 -pytz==2025.2 -six==1.17.0 -sortedcontainers==2.4.0 -sqlparse==0.5.3 -tomli==2.2.1 -typing-extensions==4.13.2 -zipp==3.20.2 diff --git a/.riot/requirements/14effbf.txt b/.riot/requirements/14effbf.txt deleted file mode 100644 index 0fcf733c893..00000000000 --- a/.riot/requirements/14effbf.txt +++ /dev/null @@ -1,24 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/14effbf.in -# -attrs==23.1.0 -coverage[toml]==7.3.4 -exceptiongroup==1.2.0 -hypothesis==6.45.0 -importlib-metadata==7.0.0 -iniconfig==2.0.0 -mock==5.1.0 -opentracing==2.4.0 -packaging==23.2 -pluggy==1.3.0 -pylibmc==1.6.3 -pytest==7.4.3 -pytest-cov==4.1.0 -pytest-mock==3.12.0 -pytest-randomly==3.15.0 
-sortedcontainers==2.4.0 -tomli==2.0.1 -zipp==3.17.0 diff --git a/.riot/requirements/15199f6.txt b/.riot/requirements/15199f6.txt deleted file mode 100644 index 039082c9342..00000000000 --- a/.riot/requirements/15199f6.txt +++ /dev/null @@ -1,30 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/15199f6.in -# -attrs==25.3.0 -azure-core==1.33.0 -azure-eventhub==5.12.2 -certifi==2025.8.3 -charset-normalizer==3.4.3 -coverage[toml]==7.6.1 -exceptiongroup==1.3.0 -hypothesis==6.45.0 -idna==3.10 -iniconfig==2.1.0 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pytest==8.3.5 -pytest-asyncio==0.23.7 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -requests==2.32.4 -six==1.17.0 -sortedcontainers==2.4.0 -tomli==2.2.1 -typing-extensions==4.13.2 -urllib3==2.2.3 diff --git a/.riot/requirements/151e533.txt b/.riot/requirements/151e533.txt deleted file mode 100644 index 6f9a56bd894..00000000000 --- a/.riot/requirements/151e533.txt +++ /dev/null @@ -1,25 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/151e533.in -# -attrs==25.3.0 -coverage[toml]==7.6.1 -exceptiongroup==1.3.0 -hypothesis==6.45.0 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -logbook==1.7.0.post0 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.2.1 -typing-extensions==4.13.2 -zipp==3.20.2 diff --git a/.riot/requirements/1522cb8.txt b/.riot/requirements/1522cb8.txt deleted file mode 100644 index fb583577f6d..00000000000 --- a/.riot/requirements/1522cb8.txt +++ /dev/null @@ -1,51 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate 
.riot/requirements/1522cb8.in -# -attrs==25.3.0 -certifi==2025.6.15 -charset-normalizer==2.1.1 -click==8.1.8 -coverage[toml]==7.6.1 -deprecated==1.2.18 -exceptiongroup==1.3.0 -flask==2.1.3 -gevent==24.2.1 -greenlet==3.1.1 -hypothesis==6.45.0 -idna==3.10 -importlib-metadata==8.0.0 -iniconfig==2.1.0 -itsdangerous==2.2.0 -jinja2==3.1.6 -markupsafe==2.0.1 -mock==5.2.0 -opentelemetry-api==1.26.0 -opentelemetry-instrumentation==0.47b0 -opentelemetry-instrumentation-flask==0.47b0 -opentelemetry-instrumentation-wsgi==0.47b0 -opentelemetry-semantic-conventions==0.47b0 -opentelemetry-util-http==0.47b0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pytest==8.3.5 -pytest-asyncio==0.21.1 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -requests==2.28.1 -sortedcontainers==2.4.0 -tomli==2.2.1 -typing-extensions==4.13.2 -urllib3==1.26.20 -werkzeug==2.1.2 -wrapt==1.17.2 -zipp==3.20.2 -zope-event==5.0 -zope-interface==7.2 - -# The following packages are considered to be unsafe in a requirements file: -setuptools==75.3.2 diff --git a/.riot/requirements/153a274.txt b/.riot/requirements/153a274.txt deleted file mode 100644 index 9832f760415..00000000000 --- a/.riot/requirements/153a274.txt +++ /dev/null @@ -1,38 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/153a274.in -# -attrs==25.3.0 -coverage[toml]==7.6.1 -exceptiongroup==1.3.0 -gunicorn==23.0.0 -hypothesis==6.45.0 -importlib-metadata==8.5.0 -importlib-resources==6.4.5 -iniconfig==2.1.0 -jsonschema==4.23.0 -jsonschema-specifications==2023.12.1 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pkgutil-resolve-name==1.3.10 -pluggy==1.5.0 -protobuf==5.29.5 -py-cpuinfo==8.0.0 -pytest==8.3.5 -pytest-asyncio==0.21.1 -pytest-benchmark==4.0.0 -pytest-cov==5.0.0 -pytest-cpp==2.6.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -referencing==0.35.1 -rpds-py==0.20.1 -sortedcontainers==2.4.0 
-tomli==2.3.0 -typing-extensions==4.13.2 -uwsgi==2.0.31 -zipp==3.20.2 -zstandard==0.23.0 diff --git a/.riot/requirements/1560ba9.txt b/.riot/requirements/1560ba9.txt deleted file mode 100644 index e7f12e49d80..00000000000 --- a/.riot/requirements/1560ba9.txt +++ /dev/null @@ -1,24 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate --resolver=backtracking .riot/requirements/1560ba9.in -# -attrs==25.3.0 -coverage[toml]==7.6.1 -exceptiongroup==1.2.2 -hypothesis==6.45.0 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -mock==5.2.0 -opentracing==2.4.0 -packaging==24.2 -pluggy==1.5.0 -psycopg2-binary==2.9.10 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-mock==3.14.0 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.2.1 -zipp==3.20.2 diff --git a/.riot/requirements/156272b.txt b/.riot/requirements/156272b.txt deleted file mode 100644 index 5bea558cda4..00000000000 --- a/.riot/requirements/156272b.txt +++ /dev/null @@ -1,27 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.13 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/156272b.in -# -attrs==25.4.0 -coverage[toml]==7.11.0 -gunicorn==23.0.0 -hypothesis==6.45.0 -iniconfig==2.3.0 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.6.0 -protobuf==6.33.0 -py-cpuinfo==8.0.0 -pygments==2.19.2 -pytest==8.4.2 -pytest-asyncio==0.21.1 -pytest-benchmark==5.2.1 -pytest-cov==7.0.0 -pytest-mock==3.15.1 -pytest-randomly==4.0.1 -sortedcontainers==2.4.0 -uwsgi==2.0.31 -zstandard==0.25.0 diff --git a/.riot/requirements/15ba505.txt b/.riot/requirements/15ba505.txt deleted file mode 100644 index 0de23cc2c0a..00000000000 --- a/.riot/requirements/15ba505.txt +++ /dev/null @@ -1,26 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/15ba505.in -# -attrs==24.2.0 
-coverage[toml]==7.6.1 -exceptiongroup==1.2.2 -googleapis-common-protos==1.65.0 -grpcio==1.66.1 -hypothesis==6.45.0 -importlib-metadata==8.4.0 -iniconfig==2.0.0 -mock==5.1.0 -opentracing==2.4.0 -packaging==24.1 -pluggy==1.5.0 -protobuf==5.28.0 -pytest==8.3.2 -pytest-cov==5.0.0 -pytest-mock==3.14.0 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.0.1 -zipp==3.20.1 diff --git a/.riot/requirements/15c5dd6.txt b/.riot/requirements/15c5dd6.txt deleted file mode 100644 index a015618b336..00000000000 --- a/.riot/requirements/15c5dd6.txt +++ /dev/null @@ -1,28 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/15c5dd6.in -# -attrs==23.1.0 -beautifulsoup4==4.12.2 -coverage[toml]==7.3.4 -exceptiongroup==1.2.0 -hypothesis==6.45.0 -importlib-metadata==7.0.0 -iniconfig==2.0.0 -mock==5.1.0 -opentracing==2.4.0 -packaging==23.2 -pluggy==1.3.0 -pytest==7.4.3 -pytest-cov==4.1.0 -pytest-mock==3.12.0 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -soupsieve==2.5 -tomli==2.0.1 -waitress==2.1.2 -webob==1.8.7 -webtest==3.0.0 -zipp==3.17.0 diff --git a/.riot/requirements/15de642.txt b/.riot/requirements/15de642.txt deleted file mode 100644 index 9e138c07de8..00000000000 --- a/.riot/requirements/15de642.txt +++ /dev/null @@ -1,23 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.12 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/15de642.in -# -attrs==25.3.0 -coverage[toml]==7.8.2 -freezegun==1.5.2 -hypothesis==6.45.0 -iniconfig==2.1.0 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.6.0 -pygments==2.19.1 -pytest==8.4.0 -pytest-cov==6.1.1 -pytest-mock==3.14.1 -pytest-randomly==3.16.0 -python-dateutil==2.9.0.post0 -six==1.17.0 -sortedcontainers==2.4.0 diff --git a/.riot/requirements/15eba42.txt b/.riot/requirements/15eba42.txt deleted file mode 100644 index e815da238a7..00000000000 --- 
a/.riot/requirements/15eba42.txt +++ /dev/null @@ -1,25 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/15eba42.in -# -attrs==23.1.0 -coverage[toml]==7.3.4 -exceptiongroup==1.2.0 -graphql-core==3.2.3 -hypothesis==6.45.0 -importlib-metadata==7.0.1 -iniconfig==2.0.0 -mock==5.1.0 -opentracing==2.4.0 -packaging==23.2 -pluggy==1.3.0 -pytest==7.4.3 -pytest-asyncio==0.21.1 -pytest-cov==4.1.0 -pytest-mock==3.12.0 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.0.1 -zipp==3.17.0 diff --git a/.riot/requirements/15eea13.txt b/.riot/requirements/15eea13.txt deleted file mode 100644 index 882c470efc7..00000000000 --- a/.riot/requirements/15eea13.txt +++ /dev/null @@ -1,34 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/15eea13.in -# -asgiref==3.8.1 -attrs==25.3.0 -backports-zoneinfo==0.2.1 -coverage[toml]==7.6.1 -django==4.2.21 -django-configurations==2.5.1 -djangorestframework==3.15.2 -exceptiongroup==1.3.0 -execnet==2.1.1 -hypothesis==6.45.0 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-django[testing]==3.10.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -pytest-xdist==3.6.1 -six==1.17.0 -sortedcontainers==2.4.0 -sqlparse==0.5.3 -tomli==2.2.1 -typing-extensions==4.13.2 -zipp==3.20.2 diff --git a/.riot/requirements/15eebc1.txt b/.riot/requirements/15eebc1.txt deleted file mode 100644 index 04325d6e406..00000000000 --- a/.riot/requirements/15eebc1.txt +++ /dev/null @@ -1,30 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/15eebc1.in -# -attrs==25.3.0 -coverage[toml]==7.6.1 -exceptiongroup==1.3.0 -graphene==3.4.3 
-graphql-core==3.2.6 -graphql-relay==3.2.0 -hypothesis==6.45.0 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pytest==8.3.5 -pytest-asyncio==0.21.1 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -python-dateutil==2.9.0.post0 -six==1.17.0 -sortedcontainers==2.4.0 -tomli==2.2.1 -typing-extensions==4.13.2 -zipp==3.20.2 diff --git a/.riot/requirements/2a6bd8f.txt b/.riot/requirements/160a7fd.txt similarity index 95% rename from .riot/requirements/2a6bd8f.txt rename to .riot/requirements/160a7fd.txt index b9eead1c7cb..d317f3f13ca 100644 --- a/.riot/requirements/2a6bd8f.txt +++ b/.riot/requirements/160a7fd.txt @@ -2,7 +2,7 @@ # This file is autogenerated by pip-compile with Python 3.9 # by the following command: # -# pip-compile --allow-unsafe --no-annotate .riot/requirements/2a6bd8f.in +# pip-compile --allow-unsafe --no-annotate .riot/requirements/160a7fd.in # attrs==25.4.0 coverage[toml]==7.10.7 @@ -22,7 +22,7 @@ py-cpuinfo==8.0.0 pygments==2.19.2 pytest==8.4.2 pytest-asyncio==0.21.1 -pytest-benchmark==5.2.1 +pytest-benchmark==5.2.3 pytest-cov==7.0.0 pytest-cpp==2.6.0 pytest-mock==3.15.1 diff --git a/.riot/requirements/1631653.txt b/.riot/requirements/1631653.txt deleted file mode 100644 index 2f8bf49a9df..00000000000 --- a/.riot/requirements/1631653.txt +++ /dev/null @@ -1,23 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/1631653.in -# -attrs==23.1.0 -coverage[toml]==7.3.4 -exceptiongroup==1.2.0 -hypothesis==6.45.0 -importlib-metadata==7.0.0 -iniconfig==2.0.0 -mock==5.1.0 -opentracing==2.4.0 -packaging==23.2 -pluggy==1.3.0 -pytest==7.4.3 -pytest-cov==4.1.0 -pytest-mock==3.12.0 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.0.1 -zipp==3.17.0 diff --git a/.riot/requirements/1632ff5.txt b/.riot/requirements/1632ff5.txt deleted file mode 100644 index e382438fc6a..00000000000 
--- a/.riot/requirements/1632ff5.txt +++ /dev/null @@ -1,29 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/1632ff5.in -# -attrs==25.3.0 -coverage[toml]==7.6.1 -exceptiongroup==1.3.0 -future==1.0.0 -hypothesis==6.45.0 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -python-dateutil==2.9.0.post0 -pytz==2025.2 -six==1.17.0 -sortedcontainers==2.4.0 -tomli==2.2.1 -typing-extensions==4.13.2 -vertica-python==0.7.4 -zipp==3.20.2 diff --git a/.riot/requirements/1634f79.txt b/.riot/requirements/1634f79.txt deleted file mode 100644 index b9cc3be1e5f..00000000000 --- a/.riot/requirements/1634f79.txt +++ /dev/null @@ -1,38 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/1634f79.in -# -attrs==25.1.0 -blinker==1.8.2 -certifi==2025.1.31 -charset-normalizer==3.4.1 -click==7.1.2 -coverage[toml]==7.6.1 -exceptiongroup==1.2.2 -flask==1.1.4 -flask-openapi3==1.1.5 -hypothesis==6.45.0 -idna==3.10 -importlib-metadata==8.5.0 -iniconfig==2.0.0 -itsdangerous==1.1.0 -jinja2==2.11.3 -markupsafe==1.1.1 -mock==5.1.0 -opentracing==2.4.0 -packaging==24.2 -pluggy==1.5.0 -pydantic==1.10.21 -pytest==8.3.4 -pytest-cov==5.0.0 -pytest-mock==3.14.0 -pytest-randomly==3.15.0 -requests==2.32.3 -sortedcontainers==2.4.0 -tomli==2.2.1 -typing-extensions==4.12.2 -urllib3==1.26.20 -werkzeug==1.0.1 -zipp==3.20.2 diff --git a/.riot/requirements/163a963.txt b/.riot/requirements/163a963.txt deleted file mode 100644 index 68e73bd43c0..00000000000 --- a/.riot/requirements/163a963.txt +++ /dev/null @@ -1,30 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe 
--no-annotate .riot/requirements/163a963.in -# -attrs==24.3.0 -coverage[toml]==7.6.1 -exceptiongroup==1.2.2 -gevent==24.2.1 -greenlet==3.1.1 -hypothesis==6.45.0 -importlib-metadata==8.5.0 -iniconfig==2.0.0 -mock==5.1.0 -opentracing==2.4.0 -packaging==24.2 -pluggy==1.5.0 -pytest==8.3.4 -pytest-cov==5.0.0 -pytest-mock==3.14.0 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.2.1 -zipp==3.20.2 -zope-event==5.0 -zope-interface==7.2 - -# The following packages are considered to be unsafe in a requirements file: -setuptools==75.3.0 diff --git a/.riot/requirements/164cf92.txt b/.riot/requirements/164cf92.txt deleted file mode 100644 index 83dfe13f9e2..00000000000 --- a/.riot/requirements/164cf92.txt +++ /dev/null @@ -1,40 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/164cf92.in -# -aiofiles==24.1.0 -anyio==4.5.2 -attrs==25.3.0 -certifi==2025.8.3 -charset-normalizer==3.4.3 -coverage[toml]==7.6.1 -exceptiongroup==1.3.0 -fastapi==0.64.0 -h11==0.16.0 -httpcore==1.0.9 -httpx==0.27.2 -hypothesis==6.45.0 -idna==3.10 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pydantic==1.10.22 -pytest==8.3.5 -pytest-asyncio==0.21.1 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -python-multipart==0.0.20 -requests==2.32.4 -sniffio==1.3.1 -sortedcontainers==2.4.0 -starlette==0.13.6 -tomli==2.2.1 -typing-extensions==4.13.2 -urllib3==2.2.3 -zipp==3.20.2 diff --git a/.riot/requirements/164d658.txt b/.riot/requirements/164d658.txt deleted file mode 100644 index cd0bb3c4f4c..00000000000 --- a/.riot/requirements/164d658.txt +++ /dev/null @@ -1,27 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/164d658.in -# -attrs==23.1.0 -cassandra-driver==3.28.0 -click==8.1.7 -coverage[toml]==7.3.4 
-exceptiongroup==1.2.0 -geomet==0.2.1.post1 -hypothesis==6.45.0 -importlib-metadata==7.0.0 -iniconfig==2.0.0 -mock==5.1.0 -opentracing==2.4.0 -packaging==23.2 -pluggy==1.3.0 -pytest==7.4.3 -pytest-cov==4.1.0 -pytest-mock==3.12.0 -pytest-randomly==3.15.0 -six==1.16.0 -sortedcontainers==2.4.0 -tomli==2.0.1 -zipp==3.17.0 diff --git a/.riot/requirements/165cb23.txt b/.riot/requirements/165cb23.txt deleted file mode 100644 index c33d090c510..00000000000 --- a/.riot/requirements/165cb23.txt +++ /dev/null @@ -1,64 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.13 -# by the following command: -# -# pip-compile --allow-unsafe --cert=None --client-cert=None --index-url=None --no-annotate --pip-args=None .riot/requirements/165cb23.in -# -annotated-types==0.7.0 -attrs==25.3.0 -cachetools==5.5.2 -certifi==2025.8.3 -charset-normalizer==3.4.3 -coverage[toml]==7.10.7 -docstring-parser==0.17.0 -google-ai-generativelanguage==0.6.6 -google-api-core[grpc]==2.25.1 -google-api-python-client==2.183.0 -google-auth==2.40.3 -google-auth-httplib2==0.2.0 -google-cloud-aiplatform[all]==1.71.1 -google-cloud-bigquery==3.38.0 -google-cloud-core==2.4.3 -google-cloud-resource-manager==1.14.2 -google-cloud-storage==2.19.0 -google-crc32c==1.7.1 -google-generativeai==0.7.2 -google-resumable-media==2.7.2 -googleapis-common-protos[grpc]==1.70.0 -grpc-google-iam-v1==0.14.2 -grpcio==1.75.1 -grpcio-status==1.62.3 -httplib2==0.31.0 -hypothesis==6.45.0 -idna==3.10 -iniconfig==2.1.0 -mock==5.2.0 -numpy==2.3.3 -opentracing==2.4.0 -packaging==25.0 -pillow==11.3.0 -pluggy==1.6.0 -proto-plus==1.26.1 -protobuf==4.25.8 -pyasn1==0.6.1 -pyasn1-modules==0.4.2 -pydantic==2.11.9 -pydantic-core==2.33.2 -pygments==2.19.2 -pyparsing==3.2.5 -pytest==8.4.2 -pytest-asyncio==1.2.0 -pytest-cov==7.0.0 -pytest-mock==3.15.1 -python-dateutil==2.9.0.post0 -requests==2.32.5 -rsa==4.9.1 -shapely==2.1.2 -six==1.17.0 -sortedcontainers==2.4.0 -tqdm==4.67.1 -typing-extensions==4.15.0 -typing-inspection==0.4.1 
-uritemplate==4.2.0 -urllib3==2.5.0 -vertexai==1.71.1 diff --git a/.riot/requirements/166f21a.txt b/.riot/requirements/166f21a.txt deleted file mode 100644 index 7b53f8a5926..00000000000 --- a/.riot/requirements/166f21a.txt +++ /dev/null @@ -1,64 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.11 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/166f21a.in -# -annotated-types==0.7.0 -attrs==25.3.0 -cachetools==5.5.2 -certifi==2025.4.26 -charset-normalizer==3.4.2 -coverage[toml]==7.8.2 -docstring-parser==0.16 -google-ai-generativelanguage==0.6.6 -google-api-core[grpc]==2.25.0 -google-api-python-client==2.171.0 -google-auth==2.40.3 -google-auth-httplib2==0.2.0 -google-cloud-aiplatform[all]==1.71.1 -google-cloud-bigquery==3.34.0 -google-cloud-core==2.4.3 -google-cloud-resource-manager==1.14.2 -google-cloud-storage==2.19.0 -google-crc32c==1.7.1 -google-generativeai==0.7.2 -google-resumable-media==2.7.2 -googleapis-common-protos[grpc]==1.70.0 -grpc-google-iam-v1==0.14.2 -grpcio==1.73.0 -grpcio-status==1.62.3 -httplib2==0.22.0 -hypothesis==6.45.0 -idna==3.10 -iniconfig==2.1.0 -mock==5.2.0 -numpy==2.3.0 -opentracing==2.4.0 -packaging==25.0 -pillow==11.2.1 -pluggy==1.6.0 -proto-plus==1.26.1 -protobuf==4.25.8 -pyasn1==0.6.1 -pyasn1-modules==0.4.2 -pydantic==2.11.5 -pydantic-core==2.33.2 -pygments==2.19.1 -pyparsing==3.2.3 -pytest==8.4.0 -pytest-asyncio==1.0.0 -pytest-cov==6.1.1 -pytest-mock==3.14.1 -python-dateutil==2.9.0.post0 -requests==2.32.4 -rsa==4.9.1 -shapely==2.1.1 -six==1.17.0 -sortedcontainers==2.4.0 -tqdm==4.67.1 -typing-extensions==4.14.0 -typing-inspection==0.4.1 -uritemplate==4.2.0 -urllib3==2.4.0 -vertexai==1.71.1 diff --git a/.riot/requirements/167d6de.txt b/.riot/requirements/167d6de.txt deleted file mode 100644 index 295e09e1410..00000000000 --- a/.riot/requirements/167d6de.txt +++ /dev/null @@ -1,26 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following 
command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/167d6de.in -# -attrs==25.3.0 -coverage[toml]==7.6.1 -exceptiongroup==1.3.0 -hypothesis==6.45.0 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -mock==5.2.0 -mongoengine==0.29.1 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pymongo==3.8.0 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.2.1 -typing-extensions==4.13.2 -zipp==3.20.2 diff --git a/.riot/requirements/168e13d.txt b/.riot/requirements/168e13d.txt deleted file mode 100644 index 5161e01c8a3..00000000000 --- a/.riot/requirements/168e13d.txt +++ /dev/null @@ -1,23 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.14 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/168e13d.in -# -attrs==25.3.0 -coverage[toml]==7.10.5 -dnspython==2.7.0 -hypothesis==6.45.0 -iniconfig==2.1.0 -mock==5.2.0 -mongoengine==0.24.2 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.6.0 -pygments==2.19.2 -pymongo==4.8.0 -pytest==8.4.1 -pytest-cov==6.2.1 -pytest-mock==3.14.1 -pytest-randomly==3.16.0 -sortedcontainers==2.4.0 diff --git a/.riot/requirements/169a623.txt b/.riot/requirements/169a623.txt deleted file mode 100644 index 3b56c7174fb..00000000000 --- a/.riot/requirements/169a623.txt +++ /dev/null @@ -1,24 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/169a623.in -# -attrs==23.1.0 -coverage[toml]==7.3.4 -exceptiongroup==1.2.0 -hypothesis==6.45.0 -importlib-metadata==7.0.0 -iniconfig==2.0.0 -mock==5.1.0 -opentracing==2.4.0 -packaging==23.2 -pluggy==1.3.0 -pytest==7.4.3 -pytest-asyncio==0.21.1 -pytest-cov==4.1.0 -pytest-mock==3.12.0 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.0.1 -zipp==3.17.0 diff --git a/.riot/requirements/169d13a.txt b/.riot/requirements/169d13a.txt deleted file mode 100644 index 
c1004ad1952..00000000000 --- a/.riot/requirements/169d13a.txt +++ /dev/null @@ -1,47 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/169d13a.in -# -annotated-types==0.7.0 -anyio==4.5.2 -attrs==25.3.0 -certifi==2025.10.5 -coverage[toml]==7.6.1 -distro==1.9.0 -exceptiongroup==1.3.0 -h11==0.16.0 -httpcore==1.0.9 -httpx==0.28.1 -hypothesis==6.45.0 -idna==3.11 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -jiter==0.9.1 -mock==5.2.0 -multidict==6.1.0 -openai==1.76.2 -opentracing==2.4.0 -packaging==25.0 -pillow==10.4.0 -pluggy==1.5.0 -propcache==0.2.0 -pydantic==2.10.6 -pydantic-core==2.27.2 -pytest==8.3.5 -pytest-asyncio==0.21.1 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -pyyaml==6.0.3 -sniffio==1.3.1 -sortedcontainers==2.4.0 -tomli==2.3.0 -tqdm==4.67.1 -typing-extensions==4.13.2 -urllib3==1.26.20 -vcrpy==6.0.2 -wrapt==2.0.0 -yarl==1.15.2 -zipp==3.20.2 diff --git a/.riot/requirements/16b7aa5.txt b/.riot/requirements/16b7aa5.txt deleted file mode 100644 index 1957b9a5706..00000000000 --- a/.riot/requirements/16b7aa5.txt +++ /dev/null @@ -1,25 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/16b7aa5.in -# -attrs==25.3.0 -coverage[toml]==7.6.1 -exceptiongroup==1.3.0 -hypothesis==6.45.0 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -mariadb==1.0.11 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.2.1 -typing-extensions==4.13.2 -zipp==3.20.2 diff --git a/.riot/requirements/16bdd8d.txt b/.riot/requirements/16bdd8d.txt deleted file mode 100644 index f248df3e158..00000000000 --- a/.riot/requirements/16bdd8d.txt +++ /dev/null @@ -1,30 +0,0 @@ -# -# This file is autogenerated by pip-compile with 
Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/16bdd8d.in -# -attrs==23.2.0 -certifi==2024.2.2 -charset-normalizer==3.3.2 -coverage[toml]==7.4.2 -exceptiongroup==1.2.0 -hypothesis==6.45.0 -idna==3.6 -importlib-metadata==7.0.1 -iniconfig==2.0.0 -mock==5.1.0 -opentracing==2.4.0 -packaging==23.2 -pluggy==1.4.0 -pytest==8.0.1 -pytest-cov==4.1.0 -pytest-mock==3.12.0 -pytest-randomly==3.15.0 -requests==2.31.0 -requests-mock==1.11.0 -six==1.16.0 -sortedcontainers==2.4.0 -tomli==2.0.1 -urllib3==1.26.18 -zipp==3.17.0 diff --git a/.riot/requirements/16c251e.txt b/.riot/requirements/16c251e.txt deleted file mode 100644 index 31796fe5ae4..00000000000 --- a/.riot/requirements/16c251e.txt +++ /dev/null @@ -1,27 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/16c251e.in -# -attrs==25.3.0 -backports-zoneinfo==0.2.1 -coverage[toml]==7.6.1 -exceptiongroup==1.3.0 -hypothesis==6.45.0 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -psycopg==3.2.9 -pytest==8.3.5 -pytest-asyncio==0.21.1 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.2.1 -typing-extensions==4.13.2 -zipp==3.20.2 diff --git a/.riot/requirements/1e8e952.txt b/.riot/requirements/16c526e.txt similarity index 89% rename from .riot/requirements/1e8e952.txt rename to .riot/requirements/16c526e.txt index 9fa41e80bb4..60e6beada29 100644 --- a/.riot/requirements/1e8e952.txt +++ b/.riot/requirements/16c526e.txt @@ -2,10 +2,10 @@ # This file is autogenerated by pip-compile with Python 3.10 # by the following command: # -# pip-compile --allow-unsafe --no-annotate .riot/requirements/1e8e952.in +# pip-compile --allow-unsafe --no-annotate .riot/requirements/16c526e.in # attrs==25.4.0 -coverage[toml]==7.11.0 +coverage[toml]==7.11.3 exceptiongroup==1.3.0 
gunicorn==23.0.0 hypothesis==6.45.0 @@ -19,9 +19,9 @@ pluggy==1.6.0 protobuf==3.19.0 py-cpuinfo==8.0.0 pygments==2.19.2 -pytest==8.4.2 +pytest==9.0.0 pytest-asyncio==0.21.1 -pytest-benchmark==5.2.1 +pytest-benchmark==5.2.3 pytest-cov==7.0.0 pytest-cpp==2.6.0 pytest-mock==3.15.1 diff --git a/.riot/requirements/16eb426.txt b/.riot/requirements/16eb426.txt deleted file mode 100644 index e1072294f88..00000000000 --- a/.riot/requirements/16eb426.txt +++ /dev/null @@ -1,23 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/16eb426.in -# -attrs==23.2.0 -coverage[toml]==7.4.3 -exceptiongroup==1.2.0 -hypothesis==6.45.0 -importlib-metadata==7.0.1 -iniconfig==2.0.0 -mock==5.1.0 -opentracing==2.4.0 -packaging==23.2 -pluggy==1.4.0 -pytest==8.0.2 -pytest-cov==4.1.0 -pytest-mock==3.12.0 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.0.1 -zipp==3.17.0 diff --git a/.riot/requirements/170ff7e.txt b/.riot/requirements/170ff7e.txt deleted file mode 100644 index 64fffcfc4f2..00000000000 --- a/.riot/requirements/170ff7e.txt +++ /dev/null @@ -1,28 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.9 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate --resolver=backtracking .riot/requirements/170ff7e.in -# -attrs==25.3.0 -coverage[toml]==7.8.2 -dnspython==2.7.0 -exceptiongroup==1.3.0 -hypothesis==6.45.0 -importlib-metadata==8.7.0 -iniconfig==2.1.0 -mock==5.2.0 -mongoengine==0.24.2 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.6.0 -pygments==2.19.1 -pymongo==4.8.0 -pytest==8.4.0 -pytest-cov==6.1.1 -pytest-mock==3.14.1 -pytest-randomly==3.16.0 -sortedcontainers==2.4.0 -tomli==2.2.1 -typing-extensions==4.14.0 -zipp==3.23.0 diff --git a/.riot/requirements/172eb1b.txt b/.riot/requirements/172eb1b.txt new file mode 100644 index 00000000000..c242a9d936b --- /dev/null +++ b/.riot/requirements/172eb1b.txt @@ -0,0 +1,37 @@ +# +# This file is 
autogenerated by pip-compile with Python 3.12 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/172eb1b.in +# +attrs==25.4.0 +coverage[toml]==7.11.3 +gevent==25.9.1 +greenlet==3.2.4 +gunicorn[gevent]==23.0.0 +hypothesis==6.45.0 +iniconfig==2.3.0 +jsonschema==4.25.1 +jsonschema-specifications==2025.9.1 +mock==5.2.0 +opentracing==2.4.0 +packaging==25.0 +pluggy==1.6.0 +protobuf==6.33.0 +py-cpuinfo==8.0.0 +pygments==2.19.2 +pytest==9.0.0 +pytest-asyncio==0.21.1 +pytest-benchmark==5.2.3 +pytest-cov==7.0.0 +pytest-cpp==2.6.0 +pytest-mock==3.15.1 +pytest-randomly==4.0.1 +referencing==0.37.0 +rpds-py==0.28.0 +sortedcontainers==2.4.0 +typing-extensions==4.15.0 +uwsgi==2.0.31 +zope-event==6.1 +zope-interface==8.1 +zstandard==0.25.0 diff --git a/.riot/requirements/1732d2c.txt b/.riot/requirements/1732d2c.txt deleted file mode 100644 index 76ee383c7ea..00000000000 --- a/.riot/requirements/1732d2c.txt +++ /dev/null @@ -1,30 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.10 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/1732d2c.in -# -attrs==25.4.0 -coverage[toml]==7.11.0 -exceptiongroup==1.3.0 -gunicorn==23.0.0 -hypothesis==6.45.0 -iniconfig==2.3.0 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.6.0 -protobuf==6.33.0 -py-cpuinfo==8.0.0 -pygments==2.19.2 -pytest==8.4.2 -pytest-asyncio==0.21.1 -pytest-benchmark==5.2.1 -pytest-cov==7.0.0 -pytest-mock==3.15.1 -pytest-randomly==4.0.1 -sortedcontainers==2.4.0 -tomli==2.3.0 -typing-extensions==4.15.0 -uwsgi==2.0.31 -zstandard==0.25.0 diff --git a/.riot/requirements/142cc85.txt b/.riot/requirements/17382eb.txt similarity index 89% rename from .riot/requirements/142cc85.txt rename to .riot/requirements/17382eb.txt index 6c2205dba48..19baa40182e 100644 --- a/.riot/requirements/142cc85.txt +++ b/.riot/requirements/17382eb.txt @@ -2,10 +2,10 @@ # This file is autogenerated by pip-compile with Python 3.12 # by the 
following command: # -# pip-compile --allow-unsafe --no-annotate .riot/requirements/142cc85.in +# pip-compile --allow-unsafe --no-annotate .riot/requirements/17382eb.in # attrs==25.4.0 -coverage[toml]==7.11.0 +coverage[toml]==7.11.3 gunicorn==23.0.0 hypothesis==6.45.0 iniconfig==2.3.0 @@ -18,9 +18,9 @@ pluggy==1.6.0 protobuf==4.22.0 py-cpuinfo==8.0.0 pygments==2.19.2 -pytest==8.4.2 +pytest==9.0.0 pytest-asyncio==0.21.1 -pytest-benchmark==5.2.1 +pytest-benchmark==5.2.3 pytest-cov==7.0.0 pytest-cpp==2.6.0 pytest-mock==3.15.1 diff --git a/.riot/requirements/174cced.txt b/.riot/requirements/174cced.txt deleted file mode 100644 index 61ba59f5372..00000000000 --- a/.riot/requirements/174cced.txt +++ /dev/null @@ -1,26 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/174cced.in -# -attrs==23.1.0 -certifi==2023.11.17 -coverage[toml]==7.3.4 -elasticsearch==7.13.4 -exceptiongroup==1.2.0 -hypothesis==6.45.0 -importlib-metadata==7.0.0 -iniconfig==2.0.0 -mock==5.1.0 -opentracing==2.4.0 -packaging==23.2 -pluggy==1.3.0 -pytest==7.4.3 -pytest-cov==4.1.0 -pytest-mock==3.12.0 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.0.1 -urllib3==1.26.18 -zipp==3.17.0 diff --git a/.riot/requirements/177f4da.txt b/.riot/requirements/177f4da.txt deleted file mode 100644 index 09614cde509..00000000000 --- a/.riot/requirements/177f4da.txt +++ /dev/null @@ -1,44 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate --resolver=backtracking .riot/requirements/177f4da.in -# -aiobotocore==1.0.7 -aiohappyeyeballs==2.4.4 -aiohttp==3.10.11 -aioitertools==0.12.0 -aiosignal==1.3.1 -async-generator==1.10 -async-timeout==5.0.1 -attrs==25.3.0 -botocore==1.15.32 -coverage[toml]==7.6.1 -docutils==0.15.2 -exceptiongroup==1.3.0 -frozenlist==1.5.0 -hypothesis==6.45.0 -idna==3.10 -importlib-metadata==8.5.0 
-iniconfig==2.1.0 -jmespath==0.10.0 -mock==5.2.0 -multidict==6.1.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -propcache==0.2.0 -pytest==8.3.5 -pytest-asyncio==0.21.1 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -python-dateutil==2.9.0.post0 -six==1.17.0 -sortedcontainers==2.4.0 -tomli==2.2.1 -typing-extensions==4.13.2 -urllib3==1.25.11 -wrapt==1.17.2 -yarl==1.15.2 -zipp==3.20.2 diff --git a/.riot/requirements/178cd30.txt b/.riot/requirements/178cd30.txt deleted file mode 100644 index 635350b856e..00000000000 --- a/.riot/requirements/178cd30.txt +++ /dev/null @@ -1,29 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/178cd30.in -# -attrs==25.3.0 -coverage[toml]==7.6.1 -exceptiongroup==1.3.0 -future==1.0.0 -hypothesis==6.45.0 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -python-dateutil==2.9.0.post0 -pytz==2025.2 -six==1.17.0 -sortedcontainers==2.4.0 -tomli==2.2.1 -typing-extensions==4.13.2 -vertica-python==0.6.14 -zipp==3.20.2 diff --git a/.riot/requirements/17b0130.txt b/.riot/requirements/17b0130.txt deleted file mode 100644 index c893b33f3ff..00000000000 --- a/.riot/requirements/17b0130.txt +++ /dev/null @@ -1,31 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/17b0130.in -# -attrs==25.3.0 -azure-core==1.33.0 -azure-functions==1.10.1 -azure-servicebus==7.14.2 -certifi==2025.8.3 -charset-normalizer==3.4.3 -coverage[toml]==7.6.1 -exceptiongroup==1.3.0 -hypothesis==6.45.0 -idna==3.10 -iniconfig==2.1.0 -isodate==0.7.2 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -requests==2.32.4 
-six==1.17.0 -sortedcontainers==2.4.0 -tomli==2.2.1 -typing-extensions==4.13.2 -urllib3==2.2.3 diff --git a/.riot/requirements/17c09be.txt b/.riot/requirements/17c09be.txt deleted file mode 100644 index 232f0a3a355..00000000000 --- a/.riot/requirements/17c09be.txt +++ /dev/null @@ -1,24 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/17c09be.in -# -attrs==23.1.0 -coverage[toml]==7.3.4 -exceptiongroup==1.2.0 -hypothesis==6.45.0 -importlib-metadata==7.0.0 -iniconfig==2.0.0 -mock==5.1.0 -mysqlclient==2.2.1 -opentracing==2.4.0 -packaging==23.2 -pluggy==1.3.0 -pytest==7.4.3 -pytest-cov==4.1.0 -pytest-mock==3.12.0 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.0.1 -zipp==3.17.0 diff --git a/.riot/requirements/17d317e.txt b/.riot/requirements/17d317e.txt deleted file mode 100644 index 819553cb0e3..00000000000 --- a/.riot/requirements/17d317e.txt +++ /dev/null @@ -1,25 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/17d317e.in -# -attrs==23.1.0 -coverage[toml]==7.3.4 -elasticsearch6==6.8.2 -exceptiongroup==1.2.0 -hypothesis==6.45.0 -importlib-metadata==7.0.0 -iniconfig==2.0.0 -mock==5.1.0 -opentracing==2.4.0 -packaging==23.2 -pluggy==1.3.0 -pytest==7.4.3 -pytest-cov==4.1.0 -pytest-mock==3.12.0 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.0.1 -urllib3==2.1.0 -zipp==3.17.0 diff --git a/.riot/requirements/17dacc9.txt b/.riot/requirements/17dacc9.txt deleted file mode 100644 index ebc39a6a11b..00000000000 --- a/.riot/requirements/17dacc9.txt +++ /dev/null @@ -1,27 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.11 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/17dacc9.in -# -attrs==25.4.0 -coverage[toml]==7.11.0 -gunicorn==23.0.0 -hypothesis==6.45.0 -iniconfig==2.3.0 
-mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.6.0 -protobuf==6.33.0 -py-cpuinfo==8.0.0 -pygments==2.19.2 -pytest==8.4.2 -pytest-asyncio==0.21.1 -pytest-benchmark==5.2.1 -pytest-cov==7.0.0 -pytest-mock==3.15.1 -pytest-randomly==4.0.1 -sortedcontainers==2.4.0 -uwsgi==2.0.31 -zstandard==0.25.0 diff --git a/.riot/requirements/17ec5eb.txt b/.riot/requirements/17ec5eb.txt deleted file mode 100644 index 40b68f0c906..00000000000 --- a/.riot/requirements/17ec5eb.txt +++ /dev/null @@ -1,35 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/17ec5eb.in -# -aiohttp==3.9.5 -aiohttp-jinja2==1.5.1 -aiosignal==1.3.1 -async-timeout==4.0.3 -attrs==23.2.0 -coverage[toml]==7.5.4 -exceptiongroup==1.2.1 -frozenlist==1.4.1 -hypothesis==6.45.0 -idna==3.7 -importlib-metadata==8.0.0 -iniconfig==2.0.0 -jinja2==3.1.4 -markupsafe==2.1.5 -mock==5.1.0 -multidict==6.0.5 -opentracing==2.4.0 -packaging==24.1 -pluggy==1.5.0 -pytest==8.2.2 -pytest-aiohttp==1.0.5 -pytest-asyncio==0.23.7 -pytest-cov==5.0.0 -pytest-mock==3.14.0 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.0.1 -yarl==1.9.4 -zipp==3.19.2 diff --git a/.riot/requirements/180a9be.txt b/.riot/requirements/180a9be.txt deleted file mode 100644 index ed0a3c11f03..00000000000 --- a/.riot/requirements/180a9be.txt +++ /dev/null @@ -1,31 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate --resolver=backtracking .riot/requirements/180a9be.in -# -attrs==25.3.0 -certifi==2025.4.26 -chardet==3.0.4 -coverage[toml]==7.6.1 -exceptiongroup==1.3.0 -hypothesis==6.45.0 -idna==2.7 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -requests==2.20.1 -requests-mock==1.11.0 -six==1.17.0 
-sortedcontainers==2.4.0 -tomli==2.2.1 -typing-extensions==4.13.2 -urllib3==1.24.3 -zipp==3.20.2 diff --git a/.riot/requirements/1810da7.txt b/.riot/requirements/1810da7.txt deleted file mode 100644 index 020c016edce..00000000000 --- a/.riot/requirements/1810da7.txt +++ /dev/null @@ -1,24 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/1810da7.in -# -attrs==24.2.0 -coverage[toml]==7.6.1 -exceptiongroup==1.2.2 -hypothesis==6.45.0 -importlib-metadata==8.5.0 -iniconfig==2.0.0 -mock==5.1.0 -opentracing==2.4.0 -packaging==24.2 -pluggy==1.5.0 -pyodbc==4.0.39 -pytest==8.3.4 -pytest-cov==5.0.0 -pytest-mock==3.14.0 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.2.1 -zipp==3.20.2 diff --git a/.riot/requirements/181c98f.txt b/.riot/requirements/181c98f.txt deleted file mode 100644 index b89a5382948..00000000000 --- a/.riot/requirements/181c98f.txt +++ /dev/null @@ -1,24 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/181c98f.in -# -attrs==23.1.0 -coverage[toml]==7.3.4 -exceptiongroup==1.2.0 -hypothesis==6.45.0 -importlib-metadata==7.0.0 -iniconfig==2.0.0 -mock==5.1.0 -msgpack==1.0.7 -opentracing==2.4.0 -packaging==23.2 -pluggy==1.3.0 -pytest==7.4.3 -pytest-cov==4.1.0 -pytest-mock==3.12.0 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.0.1 -zipp==3.17.0 diff --git a/.riot/requirements/1828aa7.txt b/.riot/requirements/1828aa7.txt deleted file mode 100644 index 8a7d96d3a0e..00000000000 --- a/.riot/requirements/1828aa7.txt +++ /dev/null @@ -1,26 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/1828aa7.in -# -attrs==23.2.0 -certifi==2024.2.2 -charset-normalizer==3.3.2 -coverage[toml]==7.4.4 -docker==7.0.0 -exceptiongroup==1.2.1 
-hypothesis==6.45.0 -idna==3.7 -iniconfig==2.0.0 -mock==5.1.0 -opentracing==2.4.0 -packaging==24.0 -pluggy==1.4.0 -pytest==8.1.1 -pytest-cov==5.0.0 -pytest-mock==3.14.0 -requests==2.31.0 -sortedcontainers==2.4.0 -tomli==2.0.1 -urllib3==2.2.1 diff --git a/.riot/requirements/18474a9.txt b/.riot/requirements/18474a9.txt deleted file mode 100644 index 8fcd85fe4fe..00000000000 --- a/.riot/requirements/18474a9.txt +++ /dev/null @@ -1,40 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/18474a9.in -# -anyio==3.7.1 -attrs==25.3.0 -certifi==2025.10.5 -charset-normalizer==3.4.4 -click==8.1.8 -coverage[toml]==7.6.1 -exceptiongroup==1.3.0 -fastapi==0.86.0 -h11==0.16.0 -httpcore==1.0.9 -httpx==0.27.2 -hypothesis==6.45.0 -idna==3.11 -iniconfig==2.1.0 -jinja2==3.1.6 -markupsafe==2.1.5 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pydantic==1.10.24 -pytest==8.3.5 -pytest-asyncio==0.24.0 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -python-multipart==0.0.20 -requests==2.32.4 -sniffio==1.3.1 -sortedcontainers==2.4.0 -starlette==0.20.4 -tomli==2.3.0 -typing-extensions==4.13.2 -urllib3==2.2.3 -uvicorn==0.33.0 diff --git a/.riot/requirements/4c3fba8.txt b/.riot/requirements/184f4e7.txt similarity index 88% rename from .riot/requirements/4c3fba8.txt rename to .riot/requirements/184f4e7.txt index 7a4b02df2b9..bdc0a7186de 100644 --- a/.riot/requirements/4c3fba8.txt +++ b/.riot/requirements/184f4e7.txt @@ -2,10 +2,10 @@ # This file is autogenerated by pip-compile with Python 3.13 # by the following command: # -# pip-compile --allow-unsafe --no-annotate .riot/requirements/4c3fba8.in +# pip-compile --allow-unsafe --no-annotate .riot/requirements/184f4e7.in # attrs==25.4.0 -coverage[toml]==7.11.0 +coverage[toml]==7.11.3 gunicorn==23.0.0 hypothesis==6.45.0 iniconfig==2.3.0 @@ -18,9 +18,9 @@ pluggy==1.6.0 protobuf==6.33.0 py-cpuinfo==8.0.0 pygments==2.19.2 
-pytest==8.4.2 +pytest==9.0.0 pytest-asyncio==0.21.1 -pytest-benchmark==5.2.1 +pytest-benchmark==5.2.3 pytest-cov==7.0.0 pytest-cpp==2.6.0 pytest-mock==3.15.1 diff --git a/.riot/requirements/185fc1c.txt b/.riot/requirements/185fc1c.txt deleted file mode 100644 index f593ce365a6..00000000000 --- a/.riot/requirements/185fc1c.txt +++ /dev/null @@ -1,26 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/185fc1c.in -# -attrs==25.3.0 -coverage[toml]==7.6.1 -exceptiongroup==1.3.0 -hypothesis==6.45.0 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -mock==5.2.0 -mongoengine==0.29.1 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pymongo==3.13.0 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.2.1 -typing-extensions==4.13.2 -zipp==3.20.2 diff --git a/.riot/requirements/1878fa7.txt b/.riot/requirements/1878fa7.txt deleted file mode 100644 index db98927c9c0..00000000000 --- a/.riot/requirements/1878fa7.txt +++ /dev/null @@ -1,29 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate --resolver=backtracking .riot/requirements/1878fa7.in -# -attrs==25.3.0 -certifi==2025.1.31 -charset-normalizer==3.4.1 -coverage[toml]==7.6.1 -exceptiongroup==1.2.2 -hypothesis==6.45.0 -idna==3.10 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -mock==5.2.0 -opensearch-py[requests]==2.0.1 -opentracing==2.4.0 -packaging==24.2 -pluggy==1.5.0 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-mock==3.14.0 -pytest-randomly==3.15.0 -requests==2.32.3 -sortedcontainers==2.4.0 -tomli==2.2.1 -urllib3==1.26.20 -zipp==3.20.2 diff --git a/.riot/requirements/18829ea.txt b/.riot/requirements/18829ea.txt deleted file mode 100644 index 6038e99395a..00000000000 --- a/.riot/requirements/18829ea.txt +++ /dev/null @@ -1,47 +0,0 @@ -# -# This file is 
autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/18829ea.in -# -annotated-types==0.7.0 -anyio==4.5.2 -attrs==25.3.0 -certifi==2025.10.5 -coverage[toml]==7.6.1 -distro==1.9.0 -exceptiongroup==1.3.0 -h11==0.16.0 -httpcore==1.0.9 -httpx==0.28.1 -hypothesis==6.45.0 -idna==3.11 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -jiter==0.9.1 -mock==5.2.0 -multidict==6.1.0 -openai==2.2.0 -opentracing==2.4.0 -packaging==25.0 -pillow==10.4.0 -pluggy==1.5.0 -propcache==0.2.0 -pydantic==2.10.6 -pydantic-core==2.27.2 -pytest==8.3.5 -pytest-asyncio==0.21.1 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -pyyaml==6.0.3 -sniffio==1.3.1 -sortedcontainers==2.4.0 -tomli==2.3.0 -tqdm==4.67.1 -typing-extensions==4.13.2 -urllib3==1.26.20 -vcrpy==6.0.2 -wrapt==2.0.0 -yarl==1.15.2 -zipp==3.20.2 diff --git a/.riot/requirements/189128e.txt b/.riot/requirements/189128e.txt deleted file mode 100644 index a90089d09b4..00000000000 --- a/.riot/requirements/189128e.txt +++ /dev/null @@ -1,24 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/189128e.in -# -attrs==24.2.0 -coverage[toml]==7.6.1 -exceptiongroup==1.2.2 -hypothesis==6.45.0 -importlib-metadata==8.4.0 -iniconfig==2.0.0 -mock==5.1.0 -opentracing==2.4.0 -packaging==24.1 -pluggy==1.5.0 -pymysql==0.10.1 -pytest==8.3.2 -pytest-cov==5.0.0 -pytest-mock==3.14.0 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.0.1 -zipp==3.20.1 diff --git a/.riot/requirements/4609657.txt b/.riot/requirements/1896c2f.txt similarity index 89% rename from .riot/requirements/4609657.txt rename to .riot/requirements/1896c2f.txt index 2afe1d17388..51eac42cd27 100644 --- a/.riot/requirements/4609657.txt +++ b/.riot/requirements/1896c2f.txt @@ -2,10 +2,10 @@ # This file is autogenerated by pip-compile with Python 3.11 # by the following 
command: # -# pip-compile --allow-unsafe --no-annotate .riot/requirements/4609657.in +# pip-compile --allow-unsafe --no-annotate .riot/requirements/1896c2f.in # attrs==25.4.0 -coverage[toml]==7.11.0 +coverage[toml]==7.11.3 gunicorn==23.0.0 hypothesis==6.45.0 iniconfig==2.3.0 @@ -18,9 +18,9 @@ pluggy==1.6.0 protobuf==6.33.0 py-cpuinfo==8.0.0 pygments==2.19.2 -pytest==8.4.2 +pytest==9.0.0 pytest-asyncio==0.21.1 -pytest-benchmark==5.2.1 +pytest-benchmark==5.2.3 pytest-cov==7.0.0 pytest-cpp==2.6.0 pytest-mock==3.15.1 diff --git a/.riot/requirements/18abddb.txt b/.riot/requirements/18abddb.txt deleted file mode 100644 index cf90d7073c4..00000000000 --- a/.riot/requirements/18abddb.txt +++ /dev/null @@ -1,77 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/18abddb.in -# -arrow==1.3.0 -asgiref==3.8.1 -attrs==25.3.0 -autobahn==23.1.2 -automat==24.8.1 -bcrypt==4.2.1 -blessed==1.21.0 -certifi==2025.4.26 -cffi==1.17.1 -channels==3.0.5 -charset-normalizer==3.4.2 -constantly==23.10.4 -coverage[toml]==7.6.1 -cryptography==45.0.3 -daphne==3.0.2 -django==2.2.28 -django-configurations==2.3.2 -django-picklefield==3.0.1 -django-pylibmc==0.6.1 -django-q==1.3.6 -django-redis==4.5.0 -exceptiongroup==1.3.0 -hyperlink==21.0.0 -hypothesis==6.45.0 -idna==3.10 -importlib-metadata==8.5.0 -incremental==24.7.2 -iniconfig==2.1.0 -isodate==0.7.2 -lxml==5.4.0 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -platformdirs==4.3.6 -pluggy==1.5.0 -psycopg2-binary==2.9.10 -pyasn1==0.6.1 -pyasn1-modules==0.4.2 -pycparser==2.22 -pylibmc==1.6.3 -pyopenssl==25.1.0 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-django[testing]==3.10.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -python-dateutil==2.9.0.post0 -python-memcached==1.62 -pytz==2025.2 -redis==2.10.6 -requests==2.32.3 -requests-file==2.1.0 -requests-toolbelt==1.0.0 -service-identity==24.2.0 -six==1.17.0 -sortedcontainers==2.4.0 
-spyne==2.14.0 -sqlparse==0.5.3 -tomli==2.2.1 -twisted[tls]==24.11.0 -txaio==23.1.1 -types-python-dateutil==2.9.0.20241206 -typing-extensions==4.13.2 -urllib3==2.2.3 -wcwidth==0.2.13 -zeep==4.3.1 -zipp==3.20.2 -zope-interface==7.2 - -# The following packages are considered to be unsafe in a requirements file: -setuptools==75.3.2 diff --git a/.riot/requirements/18c9043.txt b/.riot/requirements/18c9043.txt deleted file mode 100644 index 93b2a354491..00000000000 --- a/.riot/requirements/18c9043.txt +++ /dev/null @@ -1,24 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/18c9043.in -# -attrs==23.1.0 -coverage[toml]==7.3.4 -exceptiongroup==1.2.0 -hypothesis==6.45.0 -importlib-metadata==7.0.0 -iniconfig==2.0.0 -mock==5.1.0 -opentracing==2.4.0 -packaging==23.2 -pluggy==1.3.0 -pytest==7.4.3 -pytest-cov==4.1.0 -pytest-mock==3.12.0 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.0.1 -tornado==4.5.3 -zipp==3.17.0 diff --git a/.riot/requirements/18caf61.txt b/.riot/requirements/18caf61.txt deleted file mode 100644 index 21f16fd526e..00000000000 --- a/.riot/requirements/18caf61.txt +++ /dev/null @@ -1,66 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.9 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate --resolver=backtracking .riot/requirements/18caf61.in -# -annotated-types==0.7.0 -attrs==25.3.0 -cachetools==5.5.2 -certifi==2025.4.26 -charset-normalizer==3.4.2 -coverage[toml]==7.8.2 -docstring-parser==0.16 -exceptiongroup==1.3.0 -google-ai-generativelanguage==0.6.6 -google-api-core[grpc]==2.25.0 -google-api-python-client==2.171.0 -google-auth==2.40.3 -google-auth-httplib2==0.2.0 -google-cloud-aiplatform[all]==1.71.1 -google-cloud-bigquery==3.34.0 -google-cloud-core==2.4.3 -google-cloud-resource-manager==1.14.2 -google-cloud-storage==2.19.0 -google-crc32c==1.7.1 -google-generativeai==0.7.2 
-google-resumable-media==2.7.2 -googleapis-common-protos[grpc]==1.70.0 -grpc-google-iam-v1==0.14.2 -grpcio==1.73.0 -grpcio-status==1.62.3 -httplib2==0.22.0 -hypothesis==6.45.0 -idna==3.10 -iniconfig==2.1.0 -mock==5.2.0 -numpy==2.0.2 -opentracing==2.4.0 -packaging==25.0 -pillow==11.2.1 -pluggy==1.6.0 -proto-plus==1.26.1 -protobuf==4.25.8 -pyasn1==0.6.1 -pyasn1-modules==0.4.2 -pydantic==2.11.5 -pydantic-core==2.33.2 -pygments==2.19.1 -pyparsing==3.2.3 -pytest==8.4.0 -pytest-asyncio==1.0.0 -pytest-cov==6.1.1 -pytest-mock==3.14.1 -python-dateutil==2.9.0.post0 -requests==2.32.4 -rsa==4.9.1 -shapely==2.0.7 -six==1.17.0 -sortedcontainers==2.4.0 -tomli==2.2.1 -tqdm==4.67.1 -typing-extensions==4.14.0 -typing-inspection==0.4.1 -uritemplate==4.2.0 -urllib3==2.4.0 -vertexai==1.71.1 diff --git a/.riot/requirements/18f25af.txt b/.riot/requirements/18f25af.txt deleted file mode 100644 index f60e2fd1d12..00000000000 --- a/.riot/requirements/18f25af.txt +++ /dev/null @@ -1,27 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.13 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/18f25af.in -# -attrs==25.4.0 -coverage[toml]==7.11.0 -gunicorn==23.0.0 -hypothesis==6.45.0 -iniconfig==2.3.0 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.6.0 -protobuf==4.22.0 -py-cpuinfo==8.0.0 -pygments==2.19.2 -pytest==8.4.2 -pytest-asyncio==0.21.1 -pytest-benchmark==5.2.1 -pytest-cov==7.0.0 -pytest-mock==3.15.1 -pytest-randomly==4.0.1 -sortedcontainers==2.4.0 -uwsgi==2.0.31 -zstandard==0.25.0 diff --git a/.riot/requirements/192e4d0.txt b/.riot/requirements/192e4d0.txt deleted file mode 100644 index a2835589432..00000000000 --- a/.riot/requirements/192e4d0.txt +++ /dev/null @@ -1,46 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/192e4d0.in -# -attrs==23.2.0 -beautifulsoup4==4.12.3 -certifi==2024.7.4 
-charset-normalizer==3.3.2 -coverage[toml]==7.6.0 -exceptiongroup==1.2.2 -hupper==1.12.1 -hypothesis==6.45.0 -idna==3.7 -importlib-metadata==8.2.0 -iniconfig==2.0.0 -mock==5.1.0 -opentracing==2.4.0 -packaging==24.1 -pastedeploy==3.1.0 -plaster==1.1.2 -plaster-pastedeploy==1.0.1 -pluggy==1.5.0 -pserve-test-app @ file:///home/bits/project/tests/contrib/pyramid/pserve_app -pyramid==2.0.2 -pytest==8.3.1 -pytest-cov==5.0.0 -pytest-mock==3.14.0 -pytest-randomly==3.15.0 -requests==2.32.3 -sortedcontainers==2.4.0 -soupsieve==2.5 -tomli==2.0.1 -translationstring==1.4 -urllib3==2.2.2 -venusian==3.1.0 -waitress==3.0.0 -webob==1.8.7 -webtest==3.0.0 -zipp==3.19.2 -zope-deprecation==5.0 -zope-interface==6.4.post2 - -# The following packages are considered to be unsafe in a requirements file: -setuptools==71.1.0 diff --git a/.riot/requirements/1951a77.txt b/.riot/requirements/1951a77.txt deleted file mode 100644 index 384b84d06d0..00000000000 --- a/.riot/requirements/1951a77.txt +++ /dev/null @@ -1,25 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/1951a77.in -# -attrs==23.1.0 -coverage[toml]==7.3.4 -exceptiongroup==1.2.0 -hypothesis==6.45.0 -importlib-metadata==7.0.0 -iniconfig==2.0.0 -jinja2==3.1.2 -markupsafe==2.1.3 -mock==5.1.0 -opentracing==2.4.0 -packaging==23.2 -pluggy==1.3.0 -pytest==7.4.3 -pytest-cov==4.1.0 -pytest-mock==3.12.0 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.0.1 -zipp==3.17.0 diff --git a/.riot/requirements/195a93b.txt b/.riot/requirements/195a93b.txt deleted file mode 100644 index 418997b2e76..00000000000 --- a/.riot/requirements/195a93b.txt +++ /dev/null @@ -1,35 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate --resolver=backtracking .riot/requirements/195a93b.in -# -asgiref==3.8.1 -attrs==25.3.0 -backports-zoneinfo==0.2.1 
-coverage[toml]==7.6.1 -django==4.2.20 -django-configurations==2.5.1 -django-hosts==6.0 -exceptiongroup==1.2.2 -hypothesis==6.45.0 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -mock==5.2.0 -opentracing==2.4.0 -packaging==24.2 -pluggy==1.5.0 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-django[testing]==3.10.0 -pytest-mock==3.14.0 -pytest-randomly==3.15.0 -six==1.17.0 -sortedcontainers==2.4.0 -sqlparse==0.5.3 -tomli==2.2.1 -typing-extensions==4.13.2 -zipp==3.20.2 - -# The following packages are considered to be unsafe in a requirements file: -setuptools==75.3.2 diff --git a/.riot/requirements/198266a.txt b/.riot/requirements/198266a.txt deleted file mode 100644 index a0a7c21269e..00000000000 --- a/.riot/requirements/198266a.txt +++ /dev/null @@ -1,45 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/198266a.in -# -asgiref==3.8.1 -attrs==25.3.0 -backports-zoneinfo==0.2.1 -bcrypt==4.2.1 -certifi==2025.8.3 -charset-normalizer==3.4.3 -coverage[toml]==7.6.1 -dill==0.4.0 -django==4.0.10 -django-configurations==2.5.1 -exceptiongroup==1.3.0 -gevent==24.2.1 -greenlet==3.1.1 -gunicorn==23.0.0 -hypothesis==6.45.0 -idna==3.10 -iniconfig==2.1.0 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pylibmc==1.6.3 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-django[testing]==3.10.0 -pytest-mock==3.14.1 -pyyaml==6.0.2 -requests==2.32.4 -six==1.17.0 -sortedcontainers==2.4.0 -sqlparse==0.5.3 -tomli==2.2.1 -typing-extensions==4.13.2 -urllib3==2.2.3 -zope-event==5.0 -zope-interface==7.2 - -# The following packages are considered to be unsafe in a requirements file: -setuptools==75.3.2 diff --git a/.riot/requirements/19a46f0.txt b/.riot/requirements/19a46f0.txt deleted file mode 100644 index 5310868d43e..00000000000 --- a/.riot/requirements/19a46f0.txt +++ /dev/null @@ -1,32 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.9 -# by the following 
command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/19a46f0.in -# -attrs==25.4.0 -coverage[toml]==7.10.7 -exceptiongroup==1.3.0 -gunicorn==23.0.0 -hypothesis==6.45.0 -importlib-metadata==8.7.0 -iniconfig==2.1.0 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.6.0 -protobuf==6.33.0 -py-cpuinfo==8.0.0 -pygments==2.19.2 -pytest==8.4.2 -pytest-asyncio==0.21.1 -pytest-benchmark==5.2.1 -pytest-cov==7.0.0 -pytest-mock==3.15.1 -pytest-randomly==4.0.1 -sortedcontainers==2.4.0 -tomli==2.3.0 -typing-extensions==4.15.0 -uwsgi==2.0.31 -zipp==3.23.0 -zstandard==0.25.0 diff --git a/.riot/requirements/19aab60.txt b/.riot/requirements/19aab60.txt deleted file mode 100644 index 0bf2d25d3a2..00000000000 --- a/.riot/requirements/19aab60.txt +++ /dev/null @@ -1,24 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/19aab60.in -# -attrs==24.3.0 -coverage[toml]==7.6.1 -exceptiongroup==1.2.2 -falcon==4.0.2 -hypothesis==6.45.0 -importlib-metadata==8.5.0 -iniconfig==2.0.0 -mock==5.1.0 -opentracing==2.4.0 -packaging==24.2 -pluggy==1.5.0 -pytest==8.3.4 -pytest-cov==5.0.0 -pytest-mock==3.14.0 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.2.1 -zipp==3.20.2 diff --git a/.riot/requirements/19aba18.txt b/.riot/requirements/19aba18.txt deleted file mode 100644 index 752af632f91..00000000000 --- a/.riot/requirements/19aba18.txt +++ /dev/null @@ -1,27 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/19aba18.in -# -attrs==25.3.0 -coverage[toml]==7.6.1 -dnspython==2.6.1 -exceptiongroup==1.3.0 -hypothesis==6.45.0 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -mock==5.2.0 -mongoengine==0.29.1 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pymongo==4.10.1 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-mock==3.14.1 
-pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.2.1 -typing-extensions==4.13.2 -zipp==3.20.2 diff --git a/.riot/requirements/19aeb31.txt b/.riot/requirements/19aeb31.txt deleted file mode 100644 index 148e9a30091..00000000000 --- a/.riot/requirements/19aeb31.txt +++ /dev/null @@ -1,34 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/19aeb31.in -# -anyio==4.5.2 -asgiref==3.0.0 -async-timeout==3.0.1 -attrs==25.3.0 -certifi==2025.8.3 -coverage[toml]==7.6.1 -exceptiongroup==1.3.0 -h11==0.16.0 -httpcore==1.0.9 -httpx==0.27.2 -hypothesis==6.45.0 -idna==3.10 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pytest==8.3.5 -pytest-asyncio==0.21.1 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -sniffio==1.3.1 -sortedcontainers==2.4.0 -tomli==2.2.1 -typing-extensions==4.13.2 -zipp==3.20.2 diff --git a/.riot/requirements/19dd610.txt b/.riot/requirements/19dd610.txt deleted file mode 100644 index 6c6db530273..00000000000 --- a/.riot/requirements/19dd610.txt +++ /dev/null @@ -1,32 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.9 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/19dd610.in -# -attrs==25.4.0 -coverage[toml]==7.10.7 -exceptiongroup==1.3.0 -gunicorn==23.0.0 -hypothesis==6.45.0 -importlib-metadata==8.7.0 -iniconfig==2.1.0 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.6.0 -protobuf==6.33.0 -py-cpuinfo==8.0.0 -pygments==2.19.2 -pytest==8.4.2 -pytest-asyncio==0.21.1 -pytest-benchmark==5.2.1 -pytest-cov==7.0.0 -pytest-mock==3.15.1 -pytest-randomly==4.0.1 -sortedcontainers==2.4.0 -tomli==2.3.0 -typing-extensions==4.15.0 -uwsgi==2.0.31 -zipp==3.23.0 -zstandard==0.25.0 diff --git a/.riot/requirements/1a21c9f.txt b/.riot/requirements/1a21c9f.txt deleted file mode 100644 index 
cf8b0fcebdf..00000000000 --- a/.riot/requirements/1a21c9f.txt +++ /dev/null @@ -1,34 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.11 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/1a21c9f.in -# -attrs==25.4.0 -coverage[toml]==7.11.0 -gevent==25.9.1 -greenlet==3.2.4 -gunicorn[gevent]==23.0.0 -hypothesis==6.45.0 -iniconfig==2.3.0 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.6.0 -protobuf==6.33.0 -py-cpuinfo==8.0.0 -pygments==2.19.2 -pytest==8.4.2 -pytest-asyncio==0.21.1 -pytest-benchmark==5.2.1 -pytest-cov==7.0.0 -pytest-mock==3.15.1 -pytest-randomly==4.0.1 -sortedcontainers==2.4.0 -uwsgi==2.0.31 -zope-event==6.0 -zope-interface==8.0.1 -zstandard==0.25.0 - -# The following packages are considered to be unsafe in a requirements file: -setuptools==80.9.0 diff --git a/.riot/requirements/1a2c79e.txt b/.riot/requirements/1a2c79e.txt deleted file mode 100644 index 9edb41d3df2..00000000000 --- a/.riot/requirements/1a2c79e.txt +++ /dev/null @@ -1,28 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/1a2c79e.in -# -attrs==23.1.0 -coverage[toml]==7.3.4 -decorator==5.1.1 -dogpile-cache==1.3.0 -exceptiongroup==1.2.0 -hypothesis==6.45.0 -importlib-metadata==7.0.0 -iniconfig==2.0.0 -mock==5.1.0 -opentracing==2.4.0 -packaging==23.2 -pbr==6.0.0 -pluggy==1.3.0 -pytest==7.4.3 -pytest-cov==4.1.0 -pytest-mock==3.12.0 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -stevedore==5.1.0 -tomli==2.0.1 -typing-extensions==4.9.0 -zipp==3.17.0 diff --git a/.riot/requirements/1a3a39d.txt b/.riot/requirements/1a3a39d.txt deleted file mode 100644 index 6ba873d2190..00000000000 --- a/.riot/requirements/1a3a39d.txt +++ /dev/null @@ -1,42 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/1a3a39d.in 
-# -aiobotocore==2.0.1 -aiohappyeyeballs==2.4.0 -aiohttp==3.10.5 -aioitertools==0.11.0 -aiosignal==1.3.1 -async-generator==1.10 -async-timeout==4.0.3 -attrs==24.2.0 -botocore==1.22.8 -coverage[toml]==7.6.1 -exceptiongroup==1.2.2 -frozenlist==1.4.1 -hypothesis==6.45.0 -idna==3.8 -importlib-metadata==8.4.0 -iniconfig==2.0.0 -jmespath==0.10.0 -mock==5.1.0 -multidict==6.0.5 -opentracing==2.4.0 -packaging==24.1 -pluggy==1.5.0 -pytest==8.3.2 -pytest-asyncio==0.21.1 -pytest-cov==5.0.0 -pytest-mock==3.14.0 -pytest-randomly==3.15.0 -python-dateutil==2.9.0.post0 -six==1.16.0 -sortedcontainers==2.4.0 -tomli==2.0.1 -typing-extensions==4.12.2 -urllib3==1.26.19 -wrapt==1.16.0 -yarl==1.9.4 -zipp==3.20.0 diff --git a/.riot/requirements/1a6e6c0.txt b/.riot/requirements/1a6e6c0.txt deleted file mode 100644 index b7d1ec2eb01..00000000000 --- a/.riot/requirements/1a6e6c0.txt +++ /dev/null @@ -1,32 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/1a6e6c0.in -# -anyio==4.2.0 -attrs==23.1.0 -certifi==2023.11.17 -coverage[toml]==7.3.4 -exceptiongroup==1.2.0 -h11==0.14.0 -httpcore==1.0.2 -httpx==0.26.0 -hypothesis==6.45.0 -idna==3.6 -importlib-metadata==7.0.0 -iniconfig==2.0.0 -mock==5.1.0 -opentracing==2.4.0 -packaging==23.2 -pluggy==1.3.0 -pytest==7.4.3 -pytest-asyncio==0.21.1 -pytest-cov==4.1.0 -pytest-mock==3.12.0 -pytest-randomly==3.15.0 -sniffio==1.3.0 -sortedcontainers==2.4.0 -tomli==2.0.1 -typing-extensions==4.9.0 -zipp==3.17.0 diff --git a/.riot/requirements/1a84cc2.txt b/.riot/requirements/1a84cc2.txt deleted file mode 100644 index beb0cbbdbc9..00000000000 --- a/.riot/requirements/1a84cc2.txt +++ /dev/null @@ -1,23 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate --resolver=backtracking .riot/requirements/1a84cc2.in -# -attrs==25.3.0 -coverage[toml]==7.6.1 -exceptiongroup==1.2.2 
-hypothesis==6.45.0 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -mock==5.2.0 -opentracing==2.4.0 -packaging==24.2 -pluggy==1.5.0 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-mock==3.14.0 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.2.1 -zipp==3.20.2 diff --git a/.riot/requirements/1ac9ec1.txt b/.riot/requirements/1ac9ec1.txt deleted file mode 100644 index a491beef90c..00000000000 --- a/.riot/requirements/1ac9ec1.txt +++ /dev/null @@ -1,23 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/1ac9ec1.in -# -attrs==23.1.0 -coverage[toml]==7.3.4 -exceptiongroup==1.2.0 -hypothesis==6.45.0 -importlib-metadata==7.0.0 -iniconfig==2.0.0 -mock==5.1.0 -opentracing==2.4.0 -packaging==23.2 -pluggy==1.3.0 -pytest==7.4.3 -pytest-cov==4.1.0 -pytest-mock==3.12.0 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.0.1 -zipp==3.17.0 diff --git a/.riot/requirements/1aca748.txt b/.riot/requirements/1aca748.txt deleted file mode 100644 index 3dac8924a3d..00000000000 --- a/.riot/requirements/1aca748.txt +++ /dev/null @@ -1,30 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/1aca748.in -# -attrs==23.1.0 -certifi==2023.11.17 -charset-normalizer==3.3.2 -coverage[toml]==7.3.4 -exceptiongroup==1.2.0 -hypothesis==6.45.0 -idna==3.6 -importlib-metadata==7.0.0 -iniconfig==2.0.0 -mock==5.1.0 -opentracing==2.4.0 -packaging==23.2 -pluggy==1.3.0 -pytest==7.4.3 -pytest-cov==4.1.0 -pytest-mock==3.12.0 -pytest-randomly==3.15.0 -python-consul==1.1.0 -requests==2.31.0 -six==1.16.0 -sortedcontainers==2.4.0 -tomli==2.0.1 -urllib3==2.1.0 -zipp==3.17.0 diff --git a/.riot/requirements/1adbb5d.txt b/.riot/requirements/1adbb5d.txt deleted file mode 100644 index efa8a19a752..00000000000 --- a/.riot/requirements/1adbb5d.txt +++ /dev/null @@ -1,42 +0,0 @@ -# -# This file is autogenerated by 
pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/1adbb5d.in -# -aiofiles==24.1.0 -aiosqlite==0.20.0 -anyio==3.7.1 -attrs==25.3.0 -certifi==2025.8.3 -charset-normalizer==3.4.3 -coverage[toml]==7.6.1 -databases==0.8.0 -exceptiongroup==1.3.0 -greenlet==3.1.1 -h11==0.12.0 -httpcore==0.14.7 -httpx==0.22.0 -hypothesis==6.45.0 -idna==3.10 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pytest==8.3.5 -pytest-asyncio==0.21.1 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -requests==2.32.4 -rfc3986[idna2008]==1.5.0 -sniffio==1.3.1 -sortedcontainers==2.4.0 -sqlalchemy==1.4.54 -starlette==0.14.2 -tomli==2.2.1 -typing-extensions==4.13.2 -urllib3==2.2.3 -zipp==3.20.2 diff --git a/.riot/requirements/1ae2797.txt b/.riot/requirements/1ae2797.txt deleted file mode 100644 index b1170153af9..00000000000 --- a/.riot/requirements/1ae2797.txt +++ /dev/null @@ -1,35 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/1ae2797.in -# -aiohttp==3.9.5 -aiohttp-jinja2==1.5.1 -aiosignal==1.3.1 -async-timeout==4.0.3 -attrs==23.2.0 -coverage[toml]==7.5.4 -exceptiongroup==1.2.1 -frozenlist==1.4.1 -hypothesis==6.45.0 -idna==3.7 -importlib-metadata==8.0.0 -iniconfig==2.0.0 -jinja2==3.1.4 -markupsafe==2.1.5 -mock==5.1.0 -multidict==6.0.5 -opentracing==2.4.0 -packaging==24.1 -pluggy==1.5.0 -pytest==8.2.2 -pytest-aiohttp==1.0.5 -pytest-asyncio==0.23.7 -pytest-cov==5.0.0 -pytest-mock==3.14.0 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.0.1 -yarl==1.9.4 -zipp==3.19.2 diff --git a/.riot/requirements/1af4fe2.txt b/.riot/requirements/1af4fe2.txt deleted file mode 100644 index 3a4761a3456..00000000000 --- a/.riot/requirements/1af4fe2.txt +++ /dev/null @@ -1,35 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the 
following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/1af4fe2.in -# -anyio==4.5.2 -attrs==25.3.0 -certifi==2025.8.3 -coverage[toml]==7.6.1 -exceptiongroup==1.3.0 -execnet==2.1.1 -h11==0.16.0 -httpcore==1.0.9 -httpx==0.27.2 -hypothesis==6.45.0 -idna==3.10 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -mock==5.2.0 -more-itertools==8.10.0 -msgpack==1.1.1 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pytest==7.4.4 -pytest-cov==2.12.0 -pytest-mock==2.0.0 -pytest-randomly==3.15.0 -pytest-xdist==3.6.1 -sniffio==1.3.1 -sortedcontainers==2.4.0 -tomli==2.2.1 -typing-extensions==4.13.2 -zipp==3.20.2 diff --git a/.riot/requirements/1b02ea2.txt b/.riot/requirements/1b02ea2.txt deleted file mode 100644 index 73847ef6b54..00000000000 --- a/.riot/requirements/1b02ea2.txt +++ /dev/null @@ -1,34 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/1b02ea2.in -# -anyio==3.7.1 -attrs==25.3.0 -certifi==2025.7.9 -charset-normalizer==3.4.2 -coverage[toml]==7.6.1 -exceptiongroup==1.3.0 -fastapi==0.86.0 -h11==0.16.0 -httpcore==1.0.9 -httpx==0.27.2 -hypothesis==6.113.0 -idna==3.10 -iniconfig==2.1.0 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pydantic==1.10.22 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -requests==2.32.4 -sniffio==1.3.1 -sortedcontainers==2.4.0 -starlette==0.20.4 -tomli==2.2.1 -typing-extensions==4.13.2 -urllib3==2.2.3 diff --git a/.riot/requirements/1b19707.txt b/.riot/requirements/1b19707.txt deleted file mode 100644 index 5a50cb0f571..00000000000 --- a/.riot/requirements/1b19707.txt +++ /dev/null @@ -1,25 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/1b19707.in -# -attrs==24.3.0 -coverage[toml]==7.6.1 -exceptiongroup==1.2.2 -hypothesis==6.45.0 -importlib-metadata==8.5.0 
-iniconfig==2.0.0 -mako==1.3.8 -markupsafe==2.1.5 -mock==5.1.0 -opentracing==2.4.0 -packaging==24.2 -pluggy==1.5.0 -pytest==8.3.4 -pytest-cov==5.0.0 -pytest-mock==3.14.0 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.2.1 -zipp==3.20.2 diff --git a/.riot/requirements/1b4f196.txt b/.riot/requirements/1b4f196.txt index 3234f36194a..d9ca0210e41 100644 --- a/.riot/requirements/1b4f196.txt +++ b/.riot/requirements/1b4f196.txt @@ -9,7 +9,7 @@ attrs==25.4.0 bcrypt==4.2.1 certifi==2025.10.5 charset-normalizer==3.4.4 -coverage[toml]==7.11.0 +coverage[toml]==7.11.1 dill==0.4.0 django==5.2.8 django-configurations==2.5.1 @@ -36,8 +36,5 @@ six==1.17.0 sortedcontainers==2.4.0 sqlparse==0.5.3 urllib3==2.5.0 -zope-event==6.0 +zope-event==6.1 zope-interface==8.0.1 - -# The following packages are considered to be unsafe in a requirements file: -setuptools==80.9.0 diff --git a/.riot/requirements/1b6f5be.txt b/.riot/requirements/1b6f5be.txt deleted file mode 100644 index 30ccd368628..00000000000 --- a/.riot/requirements/1b6f5be.txt +++ /dev/null @@ -1,24 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/1b6f5be.in -# -attrs==23.1.0 -coverage[toml]==7.3.4 -exceptiongroup==1.2.0 -hypothesis==6.45.0 -importlib-metadata==7.0.0 -iniconfig==2.0.0 -mock==5.1.0 -msgpack==1.0.7 -opentracing==2.4.0 -packaging==23.2 -pluggy==1.3.0 -pytest==7.4.3 -pytest-cov==4.1.0 -pytest-mock==3.12.0 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.0.1 -zipp==3.17.0 diff --git a/.riot/requirements/1ba4b57.txt b/.riot/requirements/1ba4b57.txt deleted file mode 100644 index 18b24da31a7..00000000000 --- a/.riot/requirements/1ba4b57.txt +++ /dev/null @@ -1,24 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/1ba4b57.in -# -attrs==23.2.0 -coverage[toml]==7.4.1 -exceptiongroup==1.2.0 
-hypothesis==6.45.0 -importlib-metadata==7.0.1 -iniconfig==2.0.0 -mock==5.1.0 -opentracing==2.4.0 -packaging==23.2 -pluggy==1.4.0 -pytest==8.0.0 -pytest-cov==4.1.0 -pytest-mock==3.12.0 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.0.1 -urllib3==2.2.0 -zipp==3.17.0 diff --git a/.riot/requirements/1bceb88.txt b/.riot/requirements/1bceb88.txt deleted file mode 100644 index 2c50572f098..00000000000 --- a/.riot/requirements/1bceb88.txt +++ /dev/null @@ -1,56 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/1bceb88.in -# -aiobotocore==2.3.1 -aiohappyeyeballs==2.4.4 -aiohttp==3.10.11 -aioitertools==0.12.0 -aiosignal==1.3.1 -async-timeout==5.0.1 -attrs==24.3.0 -botocore==1.24.21 -certifi==2024.12.14 -charset-normalizer==3.4.1 -coverage[toml]==7.6.1 -elastic-transport==8.15.1 -elasticsearch==8.17.0 -events==0.5 -exceptiongroup==1.2.2 -frozenlist==1.5.0 -gevent==20.12.1 -greenlet==1.0.0 -hypothesis==6.45.0 -idna==3.10 -importlib-metadata==8.5.0 -iniconfig==2.0.0 -jmespath==1.0.1 -mock==5.1.0 -multidict==6.1.0 -opensearch-py==2.8.0 -opentracing==2.4.0 -packaging==24.2 -pluggy==1.5.0 -propcache==0.2.0 -pynamodb==5.5.1 -pytest==8.3.4 -pytest-cov==5.0.0 -pytest-mock==3.14.0 -pytest-randomly==3.15.0 -python-dateutil==2.9.0.post0 -requests==2.32.3 -six==1.17.0 -sortedcontainers==2.4.0 -tomli==2.2.1 -typing-extensions==4.12.2 -urllib3==1.26.20 -wrapt==1.17.0 -yarl==1.15.2 -zipp==3.20.2 -zope-event==5.0 -zope-interface==7.2 - -# The following packages are considered to be unsafe in a requirements file: -setuptools==75.3.0 diff --git a/.riot/requirements/1bee666.txt b/.riot/requirements/1bee666.txt deleted file mode 100644 index 70c923d2825..00000000000 --- a/.riot/requirements/1bee666.txt +++ /dev/null @@ -1,64 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.9 -# by the following command: -# -# pip-compile --allow-unsafe 
--no-annotate .riot/requirements/1bee666.in -# -annotated-types==0.7.0 -attrs==24.2.0 -cachetools==5.5.0 -certifi==2024.8.30 -charset-normalizer==3.4.0 -coverage[toml]==7.6.8 -docstring-parser==0.16 -exceptiongroup==1.2.2 -google-ai-generativelanguage==0.6.10 -google-api-core[grpc]==2.23.0 -google-api-python-client==2.154.0 -google-auth==2.36.0 -google-auth-httplib2==0.2.0 -google-cloud-aiplatform[all]==1.71.1 -google-cloud-bigquery==3.27.0 -google-cloud-core==2.4.1 -google-cloud-resource-manager==1.13.1 -google-cloud-storage==2.18.2 -google-crc32c==1.6.0 -google-generativeai==0.8.3 -google-resumable-media==2.7.2 -googleapis-common-protos[grpc]==1.66.0 -grpc-google-iam-v1==0.13.1 -grpcio==1.68.0 -grpcio-status==1.68.0 -httplib2==0.22.0 -hypothesis==6.45.0 -idna==3.10 -iniconfig==2.0.0 -mock==5.1.0 -numpy==2.0.2 -opentracing==2.4.0 -packaging==24.2 -pillow==11.0.0 -pluggy==1.5.0 -proto-plus==1.25.0 -protobuf==5.28.3 -pyasn1==0.6.1 -pyasn1-modules==0.4.1 -pydantic==2.10.2 -pydantic-core==2.27.1 -pyparsing==3.2.0 -pytest==8.3.3 -pytest-asyncio==0.24.0 -pytest-cov==6.0.0 -pytest-mock==3.14.0 -python-dateutil==2.9.0.post0 -requests==2.32.3 -rsa==4.9 -shapely==2.0.6 -six==1.16.0 -sortedcontainers==2.4.0 -tomli==2.1.0 -tqdm==4.67.1 -typing-extensions==4.12.2 -uritemplate==4.1.1 -urllib3==2.2.3 -vertexai==1.71.1 diff --git a/.riot/requirements/1bf3da5.txt b/.riot/requirements/1bf3da5.txt deleted file mode 100644 index da379d432f9..00000000000 --- a/.riot/requirements/1bf3da5.txt +++ /dev/null @@ -1,25 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/1bf3da5.in -# -attrs==25.3.0 -coverage[toml]==7.6.1 -exceptiongroup==1.3.0 -hypothesis==6.45.0 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -mariadb==1.1.13 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 
-sortedcontainers==2.4.0 -tomli==2.2.1 -typing-extensions==4.13.2 -zipp==3.20.2 diff --git a/.riot/requirements/16cbfe2.txt b/.riot/requirements/1c00026.txt similarity index 89% rename from .riot/requirements/16cbfe2.txt rename to .riot/requirements/1c00026.txt index 8308002727c..88e25af94ad 100644 --- a/.riot/requirements/16cbfe2.txt +++ b/.riot/requirements/1c00026.txt @@ -2,10 +2,10 @@ # This file is autogenerated by pip-compile with Python 3.10 # by the following command: # -# pip-compile --allow-unsafe --no-annotate .riot/requirements/16cbfe2.in +# pip-compile --allow-unsafe --no-annotate .riot/requirements/1c00026.in # attrs==25.4.0 -coverage[toml]==7.11.0 +coverage[toml]==7.11.3 exceptiongroup==1.3.0 gunicorn==23.0.0 hypothesis==6.45.0 @@ -19,9 +19,9 @@ pluggy==1.6.0 protobuf==6.33.0 py-cpuinfo==8.0.0 pygments==2.19.2 -pytest==8.4.2 +pytest==9.0.0 pytest-asyncio==0.21.1 -pytest-benchmark==5.2.1 +pytest-benchmark==5.2.3 pytest-cov==7.0.0 pytest-cpp==2.6.0 pytest-mock==3.15.1 diff --git a/.riot/requirements/1c0509d.txt b/.riot/requirements/1c0509d.txt deleted file mode 100644 index e08e98db570..00000000000 --- a/.riot/requirements/1c0509d.txt +++ /dev/null @@ -1,28 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/1c0509d.in -# -async-timeout==4.0.3 -attrs==23.1.0 -click==7.1.2 -coverage[toml]==7.3.4 -exceptiongroup==1.2.0 -hypothesis==6.45.0 -importlib-metadata==7.0.1 -iniconfig==2.0.0 -mock==5.1.0 -opentracing==2.4.0 -packaging==23.2 -pluggy==1.3.0 -pytest==7.4.3 -pytest-asyncio==0.21.1 -pytest-cov==4.1.0 -pytest-mock==3.12.0 -pytest-randomly==3.15.0 -redis==5.0.1 -rq==1.8.1 -sortedcontainers==2.4.0 -tomli==2.0.1 -zipp==3.17.0 diff --git a/.riot/requirements/1c0ccc9.txt b/.riot/requirements/1c0ccc9.txt deleted file mode 100644 index 8a49d5e9d54..00000000000 --- a/.riot/requirements/1c0ccc9.txt +++ /dev/null @@ -1,26 +0,0 @@ -# -# This file is autogenerated 
by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/1c0ccc9.in -# -attrs==23.1.0 -certifi==2023.11.17 -coverage[toml]==7.3.4 -elasticsearch==7.17.9 -exceptiongroup==1.2.0 -hypothesis==6.45.0 -importlib-metadata==7.0.0 -iniconfig==2.0.0 -mock==5.1.0 -opentracing==2.4.0 -packaging==23.2 -pluggy==1.3.0 -pytest==7.4.3 -pytest-cov==4.1.0 -pytest-mock==3.12.0 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.0.1 -urllib3==1.26.18 -zipp==3.17.0 diff --git a/.riot/requirements/1c1da8c.txt b/.riot/requirements/1c1da8c.txt deleted file mode 100644 index 090dda34995..00000000000 --- a/.riot/requirements/1c1da8c.txt +++ /dev/null @@ -1,25 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/1c1da8c.in -# -attrs==23.2.0 -coverage[toml]==7.5.4 -exceptiongroup==1.2.1 -hypothesis==6.45.0 -importlib-metadata==8.0.0 -iniconfig==2.0.0 -mock==5.1.0 -opentracing==2.4.0 -packaging==24.1 -pluggy==1.5.0 -pytest==8.2.2 -pytest-cov==5.0.0 -pytest-mock==3.14.0 -pytest-randomly==3.15.0 -redis==3.5.3 -redis-py-cluster==2.1.3 -sortedcontainers==2.4.0 -tomli==2.0.1 -zipp==3.19.2 diff --git a/.riot/requirements/198d33e.txt b/.riot/requirements/1c300f5.txt similarity index 89% rename from .riot/requirements/198d33e.txt rename to .riot/requirements/1c300f5.txt index 5cc143507d6..e0339899180 100644 --- a/.riot/requirements/198d33e.txt +++ b/.riot/requirements/1c300f5.txt @@ -2,10 +2,10 @@ # This file is autogenerated by pip-compile with Python 3.12 # by the following command: # -# pip-compile --allow-unsafe --no-annotate .riot/requirements/198d33e.in +# pip-compile --allow-unsafe --no-annotate .riot/requirements/1c300f5.in # attrs==25.4.0 -coverage[toml]==7.11.0 +coverage[toml]==7.11.3 gunicorn==23.0.0 hypothesis==6.45.0 iniconfig==2.3.0 @@ -18,9 +18,9 @@ pluggy==1.6.0 protobuf==6.33.0 py-cpuinfo==8.0.0 pygments==2.19.2 -pytest==8.4.2 
+pytest==9.0.0 pytest-asyncio==0.21.1 -pytest-benchmark==5.2.1 +pytest-benchmark==5.2.3 pytest-cov==7.0.0 pytest-cpp==2.6.0 pytest-mock==3.15.1 diff --git a/.riot/requirements/1c31001.txt b/.riot/requirements/1c31001.txt deleted file mode 100644 index 0aa511b0f41..00000000000 --- a/.riot/requirements/1c31001.txt +++ /dev/null @@ -1,50 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/1c31001.in -# -annotated-types==0.7.0 -anthropic==0.28.1 -anyio==4.5.2 -attrs==25.3.0 -certifi==2025.4.26 -charset-normalizer==3.4.2 -coverage[toml]==7.6.1 -distro==1.9.0 -exceptiongroup==1.3.0 -filelock==3.16.1 -fsspec==2025.3.0 -h11==0.16.0 -hf-xet==1.1.3 -httpcore==1.0.9 -httpx==0.27.2 -huggingface-hub==0.32.4 -hypothesis==6.45.0 -idna==3.10 -iniconfig==2.1.0 -jiter==0.9.1 -mock==5.2.0 -multidict==6.1.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -propcache==0.2.0 -pydantic==2.10.6 -pydantic-core==2.27.2 -pytest==8.3.5 -pytest-asyncio==0.24.0 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pyyaml==6.0.2 -requests==2.32.3 -sniffio==1.3.1 -sortedcontainers==2.4.0 -tokenizers==0.21.0 -tomli==2.2.1 -tqdm==4.67.1 -typing-extensions==4.13.2 -urllib3==1.26.20 -vcrpy==6.0.2 -wrapt==1.17.2 -yarl==1.15.2 diff --git a/.riot/requirements/1c3d896.txt b/.riot/requirements/1c3d896.txt deleted file mode 100644 index 8efc222d6ee..00000000000 --- a/.riot/requirements/1c3d896.txt +++ /dev/null @@ -1,42 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/1c3d896.in -# -annotated-types==0.7.0 -anyio==4.5.2 -attrs==25.3.0 -certifi==2025.10.5 -charset-normalizer==3.4.4 -click==8.1.8 -coverage[toml]==7.6.1 -exceptiongroup==1.3.0 -fastapi==0.114.2 -h11==0.16.0 -httpcore==1.0.9 -httpx==0.27.2 -hypothesis==6.45.0 -idna==3.11 -iniconfig==2.1.0 -jinja2==3.1.6 -markupsafe==2.1.5 
-mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pydantic==2.10.6 -pydantic-core==2.27.2 -pytest==8.3.5 -pytest-asyncio==0.24.0 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -python-multipart==0.0.20 -requests==2.32.4 -sniffio==1.3.1 -sortedcontainers==2.4.0 -starlette==0.38.6 -tomli==2.3.0 -typing-extensions==4.13.2 -urllib3==2.2.3 -uvicorn==0.33.0 diff --git a/.riot/requirements/1c4e625.txt b/.riot/requirements/1c4e625.txt deleted file mode 100644 index 4ee880ebb56..00000000000 --- a/.riot/requirements/1c4e625.txt +++ /dev/null @@ -1,26 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate --resolver=backtracking .riot/requirements/1c4e625.in -# -attrs==25.3.0 -coverage[toml]==7.6.1 -exceptiongroup==1.3.0 -hypothesis==6.45.0 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -mock==5.2.0 -mongoengine==0.23.1 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pymongo==3.13.0 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.2.1 -typing-extensions==4.13.2 -zipp==3.20.2 diff --git a/.riot/requirements/1c56cf0.txt b/.riot/requirements/1c56cf0.txt deleted file mode 100644 index d292b56cb3d..00000000000 --- a/.riot/requirements/1c56cf0.txt +++ /dev/null @@ -1,74 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/1c56cf0.in -# -annotated-types==0.7.0 -attrs==25.3.0 -aws-sam-translator==1.98.0 -aws-xray-sdk==2.14.0 -boto==2.49.0 -boto3==1.22.0 -botocore==1.25.0 -certifi==2025.4.26 -cffi==1.17.1 -cfn-lint==0.53.1 -charset-normalizer==3.4.2 -coverage[toml]==7.6.1 -cryptography==45.0.3 -docker==7.1.0 -ecdsa==0.14.1 -exceptiongroup==1.3.0 -execnet==2.1.1 -hypothesis==6.45.0 -idna==2.10 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -jinja2==2.10.3 -jmespath==1.0.1 -jsondiff==2.2.1 -jsonpatch==1.33 
-jsonpointer==3.0.0 -jsonschema==3.2.0 -junit-xml==1.9 -markupsafe==1.1.1 -mock==5.2.0 -more-itertools==10.5.0 -moto==1.3.16 -networkx==2.8.8 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pyasn1==0.4.8 -pycparser==2.22 -pydantic==2.10.6 -pydantic-core==2.27.2 -pynamodb==5.0.3 -pyrsistent==0.20.0 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -pytest-xdist==3.6.1 -python-dateutil==2.9.0.post0 -python-jose[cryptography]==3.4.0 -pytz==2025.2 -pyyaml==6.0.2 -requests==2.32.4 -responses==0.25.7 -rsa==4.9.1 -s3transfer==0.5.2 -six==1.17.0 -sortedcontainers==2.4.0 -sshpubkeys==3.3.1 -tomli==2.2.1 -typing-extensions==4.13.2 -urllib3==1.26.20 -werkzeug==2.1.2 -wrapt==1.17.2 -xmltodict==0.14.2 -zipp==3.20.2 - -# The following packages are considered to be unsafe in a requirements file: -setuptools==75.3.2 diff --git a/.riot/requirements/1c5f254.txt b/.riot/requirements/1c5f254.txt deleted file mode 100644 index 0bd81f9fe7a..00000000000 --- a/.riot/requirements/1c5f254.txt +++ /dev/null @@ -1,38 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/1c5f254.in -# -attrs==25.3.0 -coverage[toml]==7.6.1 -exceptiongroup==1.3.0 -gevent==22.10.2 -greenlet==3.1.1 -gunicorn[gevent]==23.0.0 -hypothesis==6.45.0 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -protobuf==5.29.5 -py-cpuinfo==8.0.0 -pytest==8.3.5 -pytest-asyncio==0.21.1 -pytest-benchmark==4.0.0 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.3.0 -typing-extensions==4.13.2 -uwsgi==2.0.31 -zipp==3.20.2 -zope-event==5.0 -zope-interface==7.2 -zstandard==0.23.0 - -# The following packages are considered to be unsafe in a requirements file: -setuptools==75.3.2 diff --git a/.riot/requirements/1c84e93.txt b/.riot/requirements/1c84e93.txt deleted file mode 100644 index 
66fc775af92..00000000000 --- a/.riot/requirements/1c84e93.txt +++ /dev/null @@ -1,37 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/1c84e93.in -# -attrs==25.3.0 -azure-core==1.33.0 -azure-eventhub==5.15.0 -azure-functions==1.23.0 -azure-storage-blob==12.26.0 -certifi==2025.8.3 -cffi==1.17.1 -charset-normalizer==3.4.3 -coverage[toml]==7.6.1 -cryptography==46.0.1 -exceptiongroup==1.3.0 -hypothesis==6.45.0 -idna==3.10 -iniconfig==2.1.0 -isodate==0.7.2 -markupsafe==2.1.5 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pycparser==2.23 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -requests==2.32.4 -six==1.17.0 -sortedcontainers==2.4.0 -tomli==2.2.1 -typing-extensions==4.13.2 -urllib3==2.2.3 -werkzeug==3.0.6 diff --git a/.riot/requirements/1c87bc4.txt b/.riot/requirements/1c87bc4.txt deleted file mode 100644 index bc50f51cc3a..00000000000 --- a/.riot/requirements/1c87bc4.txt +++ /dev/null @@ -1,24 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/1c87bc4.in -# -attrs==23.1.0 -coverage[toml]==7.3.4 -exceptiongroup==1.2.0 -hypothesis==6.45.0 -importlib-metadata==7.0.0 -iniconfig==2.0.0 -mock==5.1.0 -mysql-connector-python==8.0.5 -opentracing==2.4.0 -packaging==23.2 -pluggy==1.3.0 -pytest==7.4.3 -pytest-cov==4.1.0 -pytest-mock==3.12.0 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.0.1 -zipp==3.17.0 diff --git a/.riot/requirements/1cc7b0e.txt b/.riot/requirements/1cc7b0e.txt deleted file mode 100644 index adb8f71e30b..00000000000 --- a/.riot/requirements/1cc7b0e.txt +++ /dev/null @@ -1,25 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/1cc7b0e.in -# -attrs==23.1.0 -coverage[toml]==7.3.4 -exceptiongroup==1.2.0 
-hypothesis==6.45.0 -importlib-metadata==7.0.0 -iniconfig==2.0.0 -mock==5.1.0 -opentracing==2.4.0 -packaging==23.2 -pluggy==1.3.0 -pytest==7.4.3 -pytest-asyncio==0.21.1 -pytest-cov==4.1.0 -pytest-mock==3.12.0 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.0.1 -yaaredis==2.0.4 -zipp==3.17.0 diff --git a/.riot/requirements/1cda235.txt b/.riot/requirements/1cda235.txt deleted file mode 100644 index 5b372bb3fec..00000000000 --- a/.riot/requirements/1cda235.txt +++ /dev/null @@ -1,28 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate --resolver=backtracking .riot/requirements/1cda235.in -# -aiopg==1.4.0 -async-timeout==4.0.3 -attrs==25.3.0 -coverage[toml]==7.6.1 -exceptiongroup==1.3.0 -hypothesis==6.45.0 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -psycopg2-binary==2.9.10 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -sqlalchemy==2.0.41 -tomli==2.2.1 -typing-extensions==4.13.2 -zipp==3.20.2 diff --git a/.riot/requirements/1ce28f4.txt b/.riot/requirements/1ce28f4.txt deleted file mode 100644 index 51fc553928e..00000000000 --- a/.riot/requirements/1ce28f4.txt +++ /dev/null @@ -1,37 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/1ce28f4.in -# -astunparse==1.6.3 -attrs==25.3.0 -certifi==2025.10.5 -cffi==1.17.1 -charset-normalizer==3.4.4 -coverage[toml]==7.6.1 -cryptography==46.0.3 -exceptiongroup==1.3.0 -grpcio==1.70.0 -hypothesis==6.45.0 -idna==3.11 -iniconfig==2.1.0 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -protobuf==5.29.5 -pycparser==2.23 -pycryptodome==3.23.0 -pytest==8.3.5 -pytest-asyncio==0.24.0 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -requests==2.32.4 -simplejson==3.20.2 -six==1.17.0 
-sortedcontainers==2.4.0 -tomli==2.3.0 -typing-extensions==4.13.2 -urllib3==2.2.3 -wheel==0.45.1 diff --git a/.riot/requirements/1ce3412.txt b/.riot/requirements/1ce3412.txt deleted file mode 100644 index 2013dc5e8b8..00000000000 --- a/.riot/requirements/1ce3412.txt +++ /dev/null @@ -1,31 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/1ce3412.in -# -attrs==23.1.0 -certifi==2023.11.17 -coverage[toml]==7.3.4 -exceptiongroup==1.2.0 -h11==0.14.0 -httpcore==0.12.3 -httpx==0.17.1 -hypothesis==6.45.0 -idna==3.6 -importlib-metadata==7.0.0 -iniconfig==2.0.0 -mock==5.1.0 -opentracing==2.4.0 -packaging==23.2 -pluggy==1.3.0 -pytest==7.4.3 -pytest-asyncio==0.21.1 -pytest-cov==4.1.0 -pytest-mock==3.12.0 -pytest-randomly==3.15.0 -rfc3986[idna2008]==1.5.0 -sniffio==1.3.0 -sortedcontainers==2.4.0 -tomli==2.0.1 -zipp==3.17.0 diff --git a/.riot/requirements/1ce93b3.txt b/.riot/requirements/1ce93b3.txt deleted file mode 100644 index a0edba9ffd0..00000000000 --- a/.riot/requirements/1ce93b3.txt +++ /dev/null @@ -1,22 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.13 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/1ce93b3.in -# -attrs==24.2.0 -coverage[toml]==7.6.1 -dnspython==2.7.0 -hypothesis==6.45.0 -iniconfig==2.0.0 -mock==5.1.0 -mongoengine==0.29.1 -opentracing==2.4.0 -packaging==24.1 -pluggy==1.5.0 -pymongo==4.8.0 -pytest==8.3.3 -pytest-cov==5.0.0 -pytest-mock==3.14.0 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 diff --git a/.riot/requirements/1cef696.txt b/.riot/requirements/1cef696.txt deleted file mode 100644 index 7a7725cdf1a..00000000000 --- a/.riot/requirements/1cef696.txt +++ /dev/null @@ -1,27 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/1cef696.in -# -attrs==24.2.0 -coverage[toml]==7.6.1 
-exceptiongroup==1.2.2 -googleapis-common-protos==1.65.0 -grpcio==1.34.1 -hypothesis==6.45.0 -importlib-metadata==8.4.0 -iniconfig==2.0.0 -mock==5.1.0 -opentracing==2.4.0 -packaging==24.1 -pluggy==1.5.0 -protobuf==5.28.0 -pytest==8.3.2 -pytest-cov==5.0.0 -pytest-mock==3.14.0 -pytest-randomly==3.15.0 -six==1.16.0 -sortedcontainers==2.4.0 -tomli==2.0.1 -zipp==3.20.1 diff --git a/.riot/requirements/1d19e24.txt b/.riot/requirements/1d19e24.txt deleted file mode 100644 index f30e8e479df..00000000000 --- a/.riot/requirements/1d19e24.txt +++ /dev/null @@ -1,37 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/1d19e24.in -# -attrs==25.3.0 -coverage[toml]==7.6.1 -exceptiongroup==1.3.0 -gunicorn==23.0.0 -hypothesis==6.45.0 -importlib-metadata==8.5.0 -importlib-resources==6.4.5 -iniconfig==2.1.0 -jsonschema==4.23.0 -jsonschema-specifications==2023.12.1 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pkgutil-resolve-name==1.3.10 -pluggy==1.5.0 -protobuf==5.29.5 -py-cpuinfo==8.0.0 -pytest==8.3.5 -pytest-asyncio==0.21.1 -pytest-benchmark==4.0.0 -pytest-cov==5.0.0 -pytest-cpp==2.6.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -referencing==0.35.1 -rpds-py==0.20.1 -sortedcontainers==2.4.0 -tomli==2.3.0 -typing-extensions==4.13.2 -zipp==3.20.2 -zstandard==0.23.0 diff --git a/.riot/requirements/1d1dbc1.txt b/.riot/requirements/1d1dbc1.txt deleted file mode 100644 index 179f45bf156..00000000000 --- a/.riot/requirements/1d1dbc1.txt +++ /dev/null @@ -1,26 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.10 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/1d1dbc1.in -# -attrs==25.3.0 -coverage[toml]==7.8.2 -exceptiongroup==1.3.0 -freezegun==1.3.1 -hypothesis==6.45.0 -iniconfig==2.1.0 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.6.0 -pygments==2.19.1 -pytest==8.4.0 -pytest-cov==6.1.1 
-pytest-mock==3.14.1 -pytest-randomly==3.16.0 -python-dateutil==2.9.0.post0 -six==1.17.0 -sortedcontainers==2.4.0 -tomli==2.2.1 -typing-extensions==4.14.0 diff --git a/.riot/requirements/1d23fbc.txt b/.riot/requirements/1d23fbc.txt deleted file mode 100644 index ba4db809a86..00000000000 --- a/.riot/requirements/1d23fbc.txt +++ /dev/null @@ -1,25 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/1d23fbc.in -# -attrs==24.2.0 -coverage[toml]==7.6.1 -exceptiongroup==1.2.2 -hypothesis==6.45.0 -importlib-metadata==8.5.0 -iniconfig==2.0.0 -jinja2==2.10.3 -markupsafe==1.1.1 -mock==5.1.0 -opentracing==2.4.0 -packaging==24.2 -pluggy==1.5.0 -pytest==8.3.3 -pytest-cov==5.0.0 -pytest-mock==3.14.0 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.1.0 -zipp==3.20.2 diff --git a/.riot/requirements/1d38b9f.txt b/.riot/requirements/1d38b9f.txt deleted file mode 100644 index 2be422fecc0..00000000000 --- a/.riot/requirements/1d38b9f.txt +++ /dev/null @@ -1,26 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/1d38b9f.in -# -attrs==25.3.0 -certifi==2025.8.3 -charset-normalizer==3.4.3 -coverage[toml]==7.6.1 -exceptiongroup==1.3.0 -hypothesis==6.45.0 -idna==3.10 -iniconfig==2.1.0 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -requests==2.32.4 -sortedcontainers==2.4.0 -tomli==2.2.1 -typing-extensions==4.13.2 -urllib3==2.2.3 diff --git a/.riot/requirements/1d390e8.txt b/.riot/requirements/1d390e8.txt deleted file mode 100644 index e288067465c..00000000000 --- a/.riot/requirements/1d390e8.txt +++ /dev/null @@ -1,42 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/1d390e8.in -# 
-aiobotocore==2.13.3 -aiohappyeyeballs==2.4.0 -aiohttp==3.10.5 -aioitertools==0.11.0 -aiosignal==1.3.1 -async-generator==1.10 -async-timeout==4.0.3 -attrs==24.2.0 -botocore==1.34.162 -coverage[toml]==7.6.1 -exceptiongroup==1.2.2 -frozenlist==1.4.1 -hypothesis==6.45.0 -idna==3.8 -importlib-metadata==8.4.0 -iniconfig==2.0.0 -jmespath==1.0.1 -mock==5.1.0 -multidict==6.0.5 -opentracing==2.4.0 -packaging==24.1 -pluggy==1.5.0 -pytest==8.3.2 -pytest-asyncio==0.21.1 -pytest-cov==5.0.0 -pytest-mock==3.14.0 -pytest-randomly==3.15.0 -python-dateutil==2.9.0.post0 -six==1.16.0 -sortedcontainers==2.4.0 -tomli==2.0.1 -typing-extensions==4.12.2 -urllib3==1.26.19 -wrapt==1.16.0 -yarl==1.9.4 -zipp==3.20.0 diff --git a/.riot/requirements/1d788df.txt b/.riot/requirements/1d788df.txt deleted file mode 100644 index 29c6cd06a17..00000000000 --- a/.riot/requirements/1d788df.txt +++ /dev/null @@ -1,32 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/1d788df.in -# -attrs==25.3.0 -certifi==2025.7.9 -charset-normalizer==3.4.2 -click==7.1.2 -coverage[toml]==7.6.1 -exceptiongroup==1.3.0 -flask==1.1.4 -hypothesis==6.113.0 -idna==3.10 -iniconfig==2.1.0 -itsdangerous==1.1.0 -jinja2==2.11.3 -markupsafe==1.1.1 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -requests==2.32.4 -sortedcontainers==2.4.0 -tomli==2.2.1 -typing-extensions==4.13.2 -urllib3==2.2.3 -werkzeug==1.0.1 diff --git a/.riot/requirements/1db8cf2.txt b/.riot/requirements/1db8cf2.txt deleted file mode 100644 index e84a492d5e7..00000000000 --- a/.riot/requirements/1db8cf2.txt +++ /dev/null @@ -1,28 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/1db8cf2.in -# -attrs==25.3.0 -coverage[toml]==7.6.1 -exceptiongroup==1.2.2 
-greenlet==3.0.3 -hypothesis==6.45.0 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -mock==5.2.0 -mysql-connector-python==9.0.0 -opentracing==2.4.0 -packaging==24.2 -pluggy==1.5.0 -psycopg2-binary==2.9.10 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-mock==3.14.0 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -sqlalchemy==2.0.40 -tomli==2.2.1 -typing-extensions==4.13.1 -zipp==3.20.2 diff --git a/.riot/requirements/1dcf37e.txt b/.riot/requirements/1dcf37e.txt deleted file mode 100644 index 458a62f2355..00000000000 --- a/.riot/requirements/1dcf37e.txt +++ /dev/null @@ -1,27 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate --resolver=backtracking .riot/requirements/1dcf37e.in -# -aiopg==0.16.0 -attrs==25.3.0 -coverage[toml]==7.6.1 -exceptiongroup==1.3.0 -hypothesis==6.45.0 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -psycopg2-binary==2.9.10 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -sqlalchemy==2.0.41 -tomli==2.2.1 -typing-extensions==4.13.2 -zipp==3.20.2 diff --git a/.riot/requirements/1dd7f62.txt b/.riot/requirements/1dd7f62.txt deleted file mode 100644 index a9e66451ce7..00000000000 --- a/.riot/requirements/1dd7f62.txt +++ /dev/null @@ -1,29 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/1dd7f62.in -# -amqp==5.3.1 -attrs==25.3.0 -backports-zoneinfo[tzdata]==0.2.1 -coverage[toml]==7.6.1 -exceptiongroup==1.3.0 -hypothesis==6.45.0 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -kombu==5.5.4 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.2.1 -typing-extensions==4.13.2 -tzdata==2025.2 -vine==5.1.0 
-zipp==3.20.2 diff --git a/.riot/requirements/1df8347.txt b/.riot/requirements/1df8347.txt deleted file mode 100644 index ca1c3a6ec3f..00000000000 --- a/.riot/requirements/1df8347.txt +++ /dev/null @@ -1,36 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/1df8347.in -# -attrs==24.3.0 -certifi==2024.12.14 -charset-normalizer==3.4.1 -coverage[toml]==7.6.1 -exceptiongroup==1.2.2 -gevent==24.2.1 -greenlet==3.1.1 -gunicorn==23.0.0 -hypothesis==6.45.0 -idna==3.10 -importlib-metadata==8.5.0 -iniconfig==2.0.0 -mock==5.1.0 -opentracing==2.4.0 -packaging==24.2 -pluggy==1.5.0 -pytest==8.3.4 -pytest-cov==5.0.0 -pytest-mock==3.14.0 -pytest-randomly==3.15.0 -requests==2.32.3 -sortedcontainers==2.4.0 -tomli==2.2.1 -urllib3==2.2.3 -zipp==3.20.2 -zope-event==5.0 -zope-interface==7.2 - -# The following packages are considered to be unsafe in a requirements file: -setuptools==75.3.0 diff --git a/.riot/requirements/1dfd438.txt b/.riot/requirements/1dfd438.txt deleted file mode 100644 index 32ced73b7f7..00000000000 --- a/.riot/requirements/1dfd438.txt +++ /dev/null @@ -1,24 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/1dfd438.in -# -attrs==24.2.0 -coverage[toml]==7.6.1 -exceptiongroup==1.2.2 -hypothesis==6.45.0 -importlib-metadata==8.4.0 -iniconfig==2.0.0 -mock==5.1.0 -opentracing==2.4.0 -packaging==24.1 -pluggy==1.5.0 -pymysql==1.1.1 -pytest==8.3.2 -pytest-cov==5.0.0 -pytest-mock==3.14.0 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.0.1 -zipp==3.20.1 diff --git a/.riot/requirements/1e08b64.txt b/.riot/requirements/1e08b64.txt deleted file mode 100644 index 1145707ee4c..00000000000 --- a/.riot/requirements/1e08b64.txt +++ /dev/null @@ -1,25 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following 
command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/1e08b64.in -# -attrs==25.3.0 -coverage[toml]==7.6.1 -exceptiongroup==1.3.0 -hypothesis==6.45.0 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -mock==5.2.0 -msgpack==1.0.8 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.2.1 -typing-extensions==4.13.2 -zipp==3.20.2 diff --git a/.riot/requirements/1e0e29e.txt b/.riot/requirements/1e0e29e.txt deleted file mode 100644 index 2ba80bbb6ef..00000000000 --- a/.riot/requirements/1e0e29e.txt +++ /dev/null @@ -1,24 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/1e0e29e.in -# -attrs==23.1.0 -coverage[toml]==7.3.4 -exceptiongroup==1.2.0 -hypothesis==6.45.0 -importlib-metadata==7.0.0 -iniconfig==2.0.0 -loguru==0.7.2 -mock==5.1.0 -opentracing==2.4.0 -packaging==23.2 -pluggy==1.3.0 -pytest==7.4.3 -pytest-cov==4.1.0 -pytest-mock==3.12.0 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.0.1 -zipp==3.17.0 diff --git a/.riot/requirements/1e3534f.txt b/.riot/requirements/1e3534f.txt deleted file mode 100644 index 6f5850a6d4f..00000000000 --- a/.riot/requirements/1e3534f.txt +++ /dev/null @@ -1,36 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/1e3534f.in -# -attrs==23.2.0 -cheroot==10.0.1 -cherrypy==17.4.2 -contextlib2==21.6.0 -coverage[toml]==7.6.0 -exceptiongroup==1.2.2 -hypothesis==6.45.0 -importlib-metadata==8.2.0 -iniconfig==2.0.0 -jaraco-functools==4.0.1 -mock==5.1.0 -more-itertools==8.10.0 -opentracing==2.4.0 -packaging==24.1 -pluggy==1.5.0 -portend==3.2.0 -pytest==8.3.1 -pytest-cov==5.0.0 -pytest-mock==3.14.0 -pytest-randomly==3.15.0 -six==1.16.0 -sortedcontainers==2.4.0 -tempora==5.6.0 -tomli==2.0.1 
-typing-extensions==4.12.2 -zc-lockfile==3.0.post1 -zipp==3.19.2 - -# The following packages are considered to be unsafe in a requirements file: -setuptools==71.1.0 diff --git a/.riot/requirements/1e649b4.txt b/.riot/requirements/1e649b4.txt deleted file mode 100644 index 964238d148f..00000000000 --- a/.riot/requirements/1e649b4.txt +++ /dev/null @@ -1,55 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate --resolver=backtracking .riot/requirements/1e649b4.in -# -amqp==5.3.1 -attrs==25.3.0 -backports-zoneinfo[tzdata]==0.2.1 -billiard==4.2.1 -celery==5.5.3 -certifi==2025.4.26 -charset-normalizer==3.4.2 -click==8.1.8 -click-didyoumean==0.3.1 -click-plugins==1.1.1 -click-repl==0.3.0 -coverage[toml]==7.6.1 -django==2.2.28 -exceptiongroup==1.3.0 -gevent==24.2.1 -greenlet==3.1.1 -hypothesis==6.45.0 -idna==3.10 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -kombu==5.5.4 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -prompt-toolkit==3.0.51 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -python-dateutil==2.9.0.post0 -pytz==2025.2 -requests==2.32.4 -six==1.17.0 -sortedcontainers==2.4.0 -sqlalchemy==1.2.19 -sqlparse==0.5.3 -tomli==2.2.1 -typing-extensions==4.13.2 -tzdata==2025.2 -urllib3==2.2.3 -vine==5.1.0 -wcwidth==0.2.13 -zipp==3.20.2 -zope-event==5.0 -zope-interface==7.2 - -# The following packages are considered to be unsafe in a requirements file: -setuptools==75.3.2 diff --git a/.riot/requirements/1e8124b.txt b/.riot/requirements/1e8124b.txt deleted file mode 100644 index e9d4b404a12..00000000000 --- a/.riot/requirements/1e8124b.txt +++ /dev/null @@ -1,54 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/1e8124b.in -# -annotated-types==0.7.0 -anyio==3.7.1 -attrs==25.3.0 -certifi==2025.4.26 
-coverage[toml]==7.6.1 -distro==1.9.0 -exceptiongroup==1.3.0 -h11==0.16.0 -httpcore==1.0.9 -httpx==0.27.2 -hypothesis==6.45.0 -idna==3.10 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -mock==5.2.0 -multidict==6.1.0 -numpy==1.24.4 -openai[datalib,embeddings]==1.0.0 -opentracing==2.4.0 -packaging==25.0 -pandas==2.0.3 -pandas-stubs==2.0.3.230814 -pillow==9.5.0 -pluggy==1.5.0 -propcache==0.2.0 -pydantic==2.10.6 -pydantic-core==2.27.2 -pytest==8.3.5 -pytest-asyncio==0.21.1 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -python-dateutil==2.9.0.post0 -pytz==2025.2 -pyyaml==6.0.2 -six==1.17.0 -sniffio==1.3.1 -sortedcontainers==2.4.0 -tomli==2.2.1 -tqdm==4.67.1 -types-pytz==2024.2.0.20241221 -typing-extensions==4.13.2 -tzdata==2025.2 -urllib3==1.26.20 -vcrpy==6.0.2 -wrapt==1.17.2 -yarl==1.15.2 -zipp==3.20.2 diff --git a/.riot/requirements/1ea308d.txt b/.riot/requirements/1ea308d.txt deleted file mode 100644 index 8c8ddfcc11f..00000000000 --- a/.riot/requirements/1ea308d.txt +++ /dev/null @@ -1,30 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/1ea308d.in -# -asgiref==3.8.1 -attrs==25.3.0 -backports-zoneinfo==0.2.1 -certifi==2025.6.15 -charset-normalizer==3.4.2 -coverage[toml]==7.6.1 -django==4.0.10 -exceptiongroup==1.3.0 -hypothesis==6.45.0 -idna==3.10 -iniconfig==2.1.0 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -requests==2.32.4 -sortedcontainers==2.4.0 -sqlparse==0.5.3 -tomli==2.2.1 -typing-extensions==4.13.2 -urllib3==2.2.3 diff --git a/.riot/requirements/1eb29d6.txt b/.riot/requirements/1eb29d6.txt deleted file mode 100644 index 2de32e68a6d..00000000000 --- a/.riot/requirements/1eb29d6.txt +++ /dev/null @@ -1,24 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/1eb29d6.in -# 
-attrs==23.2.0 -coverage[toml]==7.4.2 -exceptiongroup==1.2.0 -hypothesis==6.45.0 -importlib-metadata==7.0.1 -iniconfig==2.0.0 -mock==5.1.0 -opentracing==2.4.0 -packaging==23.2 -pluggy==1.4.0 -pytest==8.0.1 -pytest-cov==4.1.0 -pytest-mock==3.12.0 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.0.1 -urllib3==1.25 -zipp==3.17.0 diff --git a/.riot/requirements/1ef7371.txt b/.riot/requirements/1ef7371.txt deleted file mode 100644 index c94f76cedcb..00000000000 --- a/.riot/requirements/1ef7371.txt +++ /dev/null @@ -1,25 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/1ef7371.in -# -attrs==23.2.0 -coverage[toml]==7.5.4 -exceptiongroup==1.2.1 -hypothesis==6.45.0 -importlib-metadata==8.0.0 -iniconfig==2.0.0 -mock==5.1.0 -opentracing==2.4.0 -packaging==24.1 -pluggy==1.5.0 -pytest==8.2.2 -pytest-cov==5.0.0 -pytest-mock==3.14.0 -pytest-randomly==3.15.0 -redis==3.0.1 -redis-py-cluster==2.0.0 -sortedcontainers==2.4.0 -tomli==2.0.1 -zipp==3.19.2 diff --git a/.riot/requirements/1efb912.txt b/.riot/requirements/1efb912.txt deleted file mode 100644 index 50742922dd8..00000000000 --- a/.riot/requirements/1efb912.txt +++ /dev/null @@ -1,47 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/1efb912.in -# -attrs==25.3.0 -certifi==2025.4.26 -cffi==1.17.1 -charset-normalizer==3.4.2 -coverage[toml]==7.6.1 -cryptography==45.0.3 -exceptiongroup==1.3.0 -hypothesis==6.45.0 -idna==3.10 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -jinja2==3.1.6 -linkify-it-py==2.0.3 -markdown-it-py[linkify,plugins]==3.0.0 -markupsafe==2.1.5 -mdit-py-plugins==0.4.2 -mdurl==0.1.2 -memray==1.17.2 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -platformdirs==4.3.6 -pluggy==1.5.0 -psycopg2-binary==2.9.10 -pycparser==2.22 -pycryptodome==3.23.0 -pygments==2.19.1 -pytest==8.3.5 
-pytest-cov==5.0.0 -pytest-memray==1.7.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -requests==2.32.3 -rich==14.0.0 -sortedcontainers==2.4.0 -textual==3.2.0 -tomli==2.2.1 -typing-extensions==4.13.2 -uc-micro-py==1.0.3 -urllib3==2.2.3 -zipp==3.20.2 diff --git a/.riot/requirements/1f27e33.txt b/.riot/requirements/1f27e33.txt deleted file mode 100644 index c4a6c126e7f..00000000000 --- a/.riot/requirements/1f27e33.txt +++ /dev/null @@ -1,24 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/1f27e33.in -# -attrs==23.1.0 -confluent-kafka==2.3.0 -coverage[toml]==7.3.4 -exceptiongroup==1.2.0 -hypothesis==6.45.0 -importlib-metadata==7.0.0 -iniconfig==2.0.0 -mock==5.1.0 -opentracing==2.4.0 -packaging==23.2 -pluggy==1.3.0 -pytest==7.4.3 -pytest-cov==4.1.0 -pytest-mock==3.12.0 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.0.1 -zipp==3.17.0 diff --git a/.riot/requirements/1f2ab25.txt b/.riot/requirements/1f2ab25.txt deleted file mode 100644 index ee70e55666e..00000000000 --- a/.riot/requirements/1f2ab25.txt +++ /dev/null @@ -1,26 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/1f2ab25.in -# -async-timeout==5.0.1 -asyncpg==0.30.0 -attrs==24.2.0 -coverage[toml]==7.6.1 -exceptiongroup==1.2.2 -hypothesis==6.45.0 -importlib-metadata==8.5.0 -iniconfig==2.0.0 -mock==5.1.0 -opentracing==2.4.0 -packaging==24.2 -pluggy==1.5.0 -pytest==8.3.4 -pytest-asyncio==0.21.2 -pytest-cov==5.0.0 -pytest-mock==3.14.0 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.2.1 -zipp==3.20.2 diff --git a/.riot/requirements/1f540f4.txt b/.riot/requirements/1f540f4.txt deleted file mode 100644 index 46b38265655..00000000000 --- a/.riot/requirements/1f540f4.txt +++ /dev/null @@ -1,49 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the 
following command: -# -# pip-compile --no-annotate --resolver=backtracking .riot/requirements/1f540f4.in -# -attrs==24.2.0 -boto3==1.35.45 -botocore==1.35.45 -bytecode==0.15.1 -cattrs==23.2.3 -certifi==2024.7.4 -charset-normalizer==3.3.2 -coverage[toml]==7.5.4 -datadog==0.51.0 -datadog-lambda==6.105.0 -ddsketch==3.0.1 -ddtrace==2.20.0 -deprecated==1.2.14 -envier==0.6.1 -exceptiongroup==1.2.2 -hypothesis==6.45.0 -idna==3.10 -importlib-metadata==8.4.0 -iniconfig==2.0.0 -jmespath==1.0.1 -mock==5.1.0 -opentelemetry-api==1.27.0 -opentracing==2.4.0 -packaging==24.1 -pluggy==1.5.0 -protobuf==5.28.2 -pytest==8.3.3 -pytest-asyncio==0.21.1 -pytest-cov==5.0.0 -pytest-mock==3.14.0 -pytest-randomly==3.15.0 -python-dateutil==2.9.0.post0 -requests==2.32.3 -s3transfer==0.10.3 -six==1.16.0 -sortedcontainers==2.4.0 -tomli==2.0.2 -typing-extensions==4.12.2 -ujson==5.10.0 -urllib3==1.26.20 -wrapt==1.16.0 -xmltodict==0.14.2 -zipp==3.20.2 diff --git a/.riot/requirements/1f77a44.txt b/.riot/requirements/1f77a44.txt deleted file mode 100644 index 6068e633bbc..00000000000 --- a/.riot/requirements/1f77a44.txt +++ /dev/null @@ -1,25 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/1f77a44.in -# -attrs==25.3.0 -coverage[toml]==7.6.1 -exceptiongroup==1.3.0 -hypothesis==6.45.0 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -mock==5.2.0 -openfeature-sdk==0.7.5 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.3.0 -typing-extensions==4.13.2 -zipp==3.20.2 diff --git a/.riot/requirements/1f9c58a.txt b/.riot/requirements/1f9c58a.txt deleted file mode 100644 index 141f6723214..00000000000 --- a/.riot/requirements/1f9c58a.txt +++ /dev/null @@ -1,38 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile 
--allow-unsafe --no-annotate .riot/requirements/1f9c58a.in -# -attrs==25.3.0 -coverage[toml]==7.6.1 -exceptiongroup==1.3.0 -gevent==24.2.1 -greenlet==3.1.1 -httpretty==1.1.4 -hypothesis==6.45.0 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -py-cpuinfo==9.0.0 -pyfakefs==5.10.0 -pytest==8.3.5 -pytest-asyncio==0.23.8 -pytest-benchmark==4.0.0 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -python-json-logger==2.0.7 -sortedcontainers==2.4.0 -tomli==2.3.0 -typing-extensions==4.13.2 -wrapt==1.17.3 -zipp==3.20.2 -zope-event==5.0 -zope-interface==7.2 - -# The following packages are considered to be unsafe in a requirements file: -setuptools==75.3.2 diff --git a/.riot/requirements/1fb1389.txt b/.riot/requirements/1fb1389.txt deleted file mode 100644 index 6006e992b98..00000000000 --- a/.riot/requirements/1fb1389.txt +++ /dev/null @@ -1,25 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/1fb1389.in -# -attrs==23.1.0 -coverage[toml]==7.3.4 -exceptiongroup==1.2.0 -hypothesis==6.45.0 -importlib-metadata==7.0.0 -iniconfig==2.0.0 -mock==5.1.0 -opentracing==2.4.0 -packaging==23.2 -pluggy==1.3.0 -pymemcache==3.5.2 -pytest==7.4.3 -pytest-cov==4.1.0 -pytest-mock==3.12.0 -pytest-randomly==3.15.0 -six==1.16.0 -sortedcontainers==2.4.0 -tomli==2.0.1 -zipp==3.17.0 diff --git a/.riot/requirements/1fcb05f.txt b/.riot/requirements/1fcb05f.txt deleted file mode 100644 index a9332da417c..00000000000 --- a/.riot/requirements/1fcb05f.txt +++ /dev/null @@ -1,27 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/1fcb05f.in -# -amqp==2.6.1 -attrs==25.3.0 -coverage[toml]==7.6.1 -exceptiongroup==1.3.0 -hypothesis==6.45.0 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -kombu==4.6.11 -mock==5.2.0 
-opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.2.1 -typing-extensions==4.13.2 -vine==1.3.0 -zipp==3.20.2 diff --git a/.riot/requirements/1fd3342.txt b/.riot/requirements/1fd3342.txt deleted file mode 100644 index c703d4437cf..00000000000 --- a/.riot/requirements/1fd3342.txt +++ /dev/null @@ -1,25 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/1fd3342.in -# -attrs==25.3.0 -coverage[toml]==7.6.1 -exceptiongroup==1.3.0 -hypothesis==6.45.0 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -mock==5.2.0 -openfeature-sdk==0.5.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.3.0 -typing-extensions==4.13.2 -zipp==3.20.2 diff --git a/.riot/requirements/1fd4b6f.txt b/.riot/requirements/1fd4b6f.txt deleted file mode 100644 index 34a88ea876c..00000000000 --- a/.riot/requirements/1fd4b6f.txt +++ /dev/null @@ -1,47 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/1fd4b6f.in -# -annotated-types==0.7.0 -anyio==4.5.2 -attrs==25.3.0 -certifi==2025.10.5 -coverage[toml]==7.6.1 -distro==1.9.0 -exceptiongroup==1.3.0 -h11==0.16.0 -httpcore==1.0.9 -httpx==0.28.1 -hypothesis==6.45.0 -idna==3.11 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -jiter==0.9.1 -mock==5.2.0 -multidict==6.1.0 -openai==1.109.1 -opentracing==2.4.0 -packaging==25.0 -pillow==10.4.0 -pluggy==1.5.0 -propcache==0.2.0 -pydantic==2.10.6 -pydantic-core==2.27.2 -pytest==8.3.5 -pytest-asyncio==0.21.1 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -pyyaml==6.0.3 -sniffio==1.3.1 -sortedcontainers==2.4.0 -tomli==2.3.0 -tqdm==4.67.1 
-typing-extensions==4.13.2 -urllib3==1.26.20 -vcrpy==6.0.2 -wrapt==2.0.0 -yarl==1.15.2 -zipp==3.20.2 diff --git a/.riot/requirements/18c1062.txt b/.riot/requirements/1fdae65.txt similarity index 88% rename from .riot/requirements/18c1062.txt rename to .riot/requirements/1fdae65.txt index c0e14e9e73b..97d1346de89 100644 --- a/.riot/requirements/18c1062.txt +++ b/.riot/requirements/1fdae65.txt @@ -2,10 +2,10 @@ # This file is autogenerated by pip-compile with Python 3.13 # by the following command: # -# pip-compile --allow-unsafe --no-annotate .riot/requirements/18c1062.in +# pip-compile --allow-unsafe --no-annotate .riot/requirements/1fdae65.in # attrs==25.4.0 -coverage[toml]==7.11.0 +coverage[toml]==7.11.3 gunicorn==23.0.0 hypothesis==6.45.0 iniconfig==2.3.0 @@ -18,9 +18,9 @@ pluggy==1.6.0 protobuf==6.33.0 py-cpuinfo==8.0.0 pygments==2.19.2 -pytest==8.4.2 +pytest==9.0.0 pytest-asyncio==0.21.1 -pytest-benchmark==5.2.1 +pytest-benchmark==5.2.3 pytest-cov==7.0.0 pytest-cpp==2.6.0 pytest-mock==3.15.1 diff --git a/.riot/requirements/1fe5c31.txt b/.riot/requirements/1fe5c31.txt deleted file mode 100644 index 106cb794d61..00000000000 --- a/.riot/requirements/1fe5c31.txt +++ /dev/null @@ -1,24 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/1fe5c31.in -# -attrs==23.1.0 -coverage[toml]==7.3.4 -exceptiongroup==1.2.0 -hypothesis==6.45.0 -importlib-metadata==7.0.0 -iniconfig==2.0.0 -mock==5.1.0 -opentracing==2.4.0 -packaging==23.2 -pluggy==1.3.0 -pytest==7.4.3 -pytest-cov==4.1.0 -pytest-mock==3.12.0 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -structlog==23.2.0 -tomli==2.0.1 -zipp==3.17.0 diff --git a/.riot/requirements/1ffebce.txt b/.riot/requirements/1ffebce.txt deleted file mode 100644 index 5b613bc5d30..00000000000 --- a/.riot/requirements/1ffebce.txt +++ /dev/null @@ -1,35 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following 
command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/1ffebce.in -# -anyio==4.5.2 -attrs==25.3.0 -certifi==2025.8.3 -coverage[toml]==7.6.1 -exceptiongroup==1.3.0 -execnet==2.1.1 -h11==0.16.0 -httpcore==1.0.9 -httpx==0.27.2 -hypothesis==6.45.0 -idna==3.10 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -mock==5.2.0 -more-itertools==8.10.0 -msgpack==1.1.1 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pytest==8.3.5 -pytest-cov==2.12.0 -pytest-mock==2.0.0 -pytest-randomly==3.15.0 -pytest-xdist==3.6.1 -sniffio==1.3.1 -sortedcontainers==2.4.0 -tomli==2.2.1 -typing-extensions==4.13.2 -zipp==3.20.2 diff --git a/.riot/requirements/20699e5.txt b/.riot/requirements/20699e5.txt deleted file mode 100644 index 75d6b416d16..00000000000 --- a/.riot/requirements/20699e5.txt +++ /dev/null @@ -1,26 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate --resolver=backtracking .riot/requirements/20699e5.in -# -asyncpg==0.22.0 -attrs==25.3.0 -coverage[toml]==7.6.1 -exceptiongroup==1.3.0 -hypothesis==6.45.0 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pytest==8.3.5 -pytest-asyncio==0.21.2 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.2.1 -typing-extensions==4.13.2 -zipp==3.20.2 diff --git a/.riot/requirements/206be6b.txt b/.riot/requirements/206be6b.txt deleted file mode 100644 index 2b2d3633eca..00000000000 --- a/.riot/requirements/206be6b.txt +++ /dev/null @@ -1,36 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/206be6b.in -# -annotated-types==0.7.0 -anyio==4.5.2 -attrs==25.3.0 -certifi==2025.7.9 -charset-normalizer==3.4.2 -coverage[toml]==7.6.1 -exceptiongroup==1.3.0 -fastapi==0.114.2 -h11==0.16.0 -httpcore==1.0.9 -httpx==0.27.2 
-hypothesis==6.113.0 -idna==3.10 -iniconfig==2.1.0 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pydantic==2.10.6 -pydantic-core==2.27.2 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -requests==2.32.4 -sniffio==1.3.1 -sortedcontainers==2.4.0 -starlette==0.38.6 -tomli==2.2.1 -typing-extensions==4.13.2 -urllib3==2.2.3 diff --git a/.riot/requirements/21bc53e.txt b/.riot/requirements/21bc53e.txt deleted file mode 100644 index d7a646e282d..00000000000 --- a/.riot/requirements/21bc53e.txt +++ /dev/null @@ -1,25 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/21bc53e.in -# -attrs==25.3.0 -coverage[toml]==7.6.1 -exceptiongroup==1.3.0 -hypothesis==6.45.0 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -mock==5.2.0 -openfeature-sdk==0.6.1 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.3.0 -typing-extensions==4.13.2 -zipp==3.20.2 diff --git a/.riot/requirements/24618e2.txt b/.riot/requirements/24618e2.txt deleted file mode 100644 index 2481c88634f..00000000000 --- a/.riot/requirements/24618e2.txt +++ /dev/null @@ -1,42 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/24618e2.in -# -aiofiles==24.1.0 -aiosqlite==0.20.0 -anyio==3.7.1 -attrs==25.3.0 -certifi==2025.8.3 -charset-normalizer==3.4.3 -coverage[toml]==7.6.1 -databases==0.8.0 -exceptiongroup==1.3.0 -greenlet==3.1.1 -h11==0.12.0 -httpcore==0.14.7 -httpx==0.22.0 -hypothesis==6.45.0 -idna==3.10 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pytest==8.3.5 -pytest-asyncio==0.21.1 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -requests==2.32.4 -rfc3986[idna2008]==1.5.0 
-sniffio==1.3.1 -sortedcontainers==2.4.0 -sqlalchemy==1.4.54 -starlette==0.20.4 -tomli==2.2.1 -typing-extensions==4.13.2 -urllib3==2.2.3 -zipp==3.20.2 diff --git a/.riot/requirements/260ead7.txt b/.riot/requirements/260ead7.txt deleted file mode 100644 index f006fcd4f84..00000000000 --- a/.riot/requirements/260ead7.txt +++ /dev/null @@ -1,48 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/260ead7.in -# -aiofiles==23.2.1 -aiohttp==3.9.1 -aiosignal==1.3.1 -async-generator==1.10 -async-timeout==4.0.3 -attrs==23.1.0 -certifi==2023.11.17 -charset-normalizer==3.3.2 -coverage[toml]==7.3.4 -exceptiongroup==1.2.0 -frozenlist==1.4.1 -h11==0.9.0 -httpcore==0.11.1 -httptools==0.6.1 -httpx==0.15.4 -hypothesis==6.45.0 -idna==3.6 -importlib-metadata==7.0.0 -iniconfig==2.0.0 -mock==5.1.0 -multidict==5.2.0 -opentracing==2.4.0 -packaging==23.2 -pluggy==1.3.0 -pytest==7.4.3 -pytest-asyncio==0.21.1 -pytest-cov==4.1.0 -pytest-mock==3.12.0 -pytest-randomly==3.15.0 -pytest-sanic==1.6.2 -requests==2.31.0 -rfc3986[idna2008]==1.5.0 -sanic==20.12.7 -sniffio==1.3.0 -sortedcontainers==2.4.0 -tomli==2.0.1 -ujson==5.9.0 -urllib3==2.1.0 -uvloop==0.19.0 -websockets==9.1 -yarl==1.9.4 -zipp==3.17.0 diff --git a/.riot/requirements/2715c88.txt b/.riot/requirements/2715c88.txt deleted file mode 100644 index ed246768e2e..00000000000 --- a/.riot/requirements/2715c88.txt +++ /dev/null @@ -1,42 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/2715c88.in -# -aiofiles==24.1.0 -aiosqlite==0.20.0 -anyio==3.7.1 -attrs==25.3.0 -certifi==2025.8.3 -charset-normalizer==3.4.3 -coverage[toml]==7.6.1 -databases==0.8.0 -exceptiongroup==1.3.0 -greenlet==3.1.1 -h11==0.12.0 -httpcore==0.14.7 -httpx==0.22.0 -hypothesis==6.45.0 -idna==3.10 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -mock==5.2.0 
-opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pytest==8.3.5 -pytest-asyncio==0.21.1 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -requests==2.32.4 -rfc3986[idna2008]==1.5.0 -sniffio==1.3.1 -sortedcontainers==2.4.0 -sqlalchemy==1.4.54 -starlette==0.33.0 -tomli==2.2.1 -typing-extensions==4.13.2 -urllib3==2.2.3 -zipp==3.20.2 diff --git a/.riot/requirements/273fcaf.txt b/.riot/requirements/273fcaf.txt deleted file mode 100644 index eb4ea0f7ab0..00000000000 --- a/.riot/requirements/273fcaf.txt +++ /dev/null @@ -1,26 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate --resolver=backtracking .riot/requirements/273fcaf.in -# -attrs==25.3.0 -coverage[toml]==7.6.1 -exceptiongroup==1.2.2 -hypothesis==6.45.0 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -mock==5.2.0 -msgpack==1.1.0 -opentracing==2.4.0 -packaging==24.2 -pluggy==1.5.0 -py-cpuinfo==9.0.0 -pytest==8.3.5 -pytest-benchmark==4.0.0 -pytest-cov==5.0.0 -pytest-mock==3.14.0 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.2.1 -zipp==3.20.2 diff --git a/.riot/requirements/2bcce4e.txt b/.riot/requirements/2bcce4e.txt deleted file mode 100644 index c444938efa6..00000000000 --- a/.riot/requirements/2bcce4e.txt +++ /dev/null @@ -1,23 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.12 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/2bcce4e.in -# -attrs==25.3.0 -coverage[toml]==7.8.2 -freezegun==1.3.1 -hypothesis==6.45.0 -iniconfig==2.1.0 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.6.0 -pygments==2.19.1 -pytest==8.4.0 -pytest-cov==6.1.1 -pytest-mock==3.14.1 -pytest-randomly==3.16.0 -python-dateutil==2.9.0.post0 -six==1.17.0 -sortedcontainers==2.4.0 diff --git a/.riot/requirements/2be0e27.txt b/.riot/requirements/2be0e27.txt deleted file mode 100644 index da5795c27eb..00000000000 --- a/.riot/requirements/2be0e27.txt +++ 
/dev/null @@ -1,45 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/2be0e27.in -# -aiofiles==23.2.1 -anyio==4.2.0 -attrs==23.1.0 -certifi==2023.11.17 -charset-normalizer==3.3.2 -coverage[toml]==7.3.4 -exceptiongroup==1.2.0 -h11==0.14.0 -httpcore==0.16.3 -httptools==0.6.1 -httpx==0.23.3 -hypothesis==6.45.0 -idna==3.6 -importlib-metadata==7.0.0 -iniconfig==2.0.0 -mock==5.1.0 -multidict==5.2.0 -opentracing==2.4.0 -packaging==23.2 -pluggy==1.3.0 -pytest==7.4.3 -pytest-asyncio==0.21.1 -pytest-cov==4.1.0 -pytest-mock==3.12.0 -pytest-randomly==3.15.0 -requests==2.31.0 -rfc3986[idna2008]==1.5.0 -sanic==21.12.2 -sanic-routing==0.7.2 -sanic-testing==0.8.3 -sniffio==1.3.0 -sortedcontainers==2.4.0 -tomli==2.0.1 -typing-extensions==4.9.0 -ujson==5.9.0 -urllib3==2.1.0 -uvloop==0.19.0 -websockets==10.4 -zipp==3.17.0 diff --git a/.riot/requirements/2d3b0ef.txt b/.riot/requirements/2d3b0ef.txt deleted file mode 100644 index d99a88b036e..00000000000 --- a/.riot/requirements/2d3b0ef.txt +++ /dev/null @@ -1,25 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/2d3b0ef.in -# -attrs==23.1.0 -coverage[toml]==7.3.4 -exceptiongroup==1.2.0 -graphql-core==3.2.3 -hypothesis==6.45.0 -importlib-metadata==7.0.1 -iniconfig==2.0.0 -mock==5.1.0 -opentracing==2.4.0 -packaging==23.2 -pluggy==1.3.0 -pytest==7.4.3 -pytest-asyncio==0.21.1 -pytest-cov==4.1.0 -pytest-mock==3.12.0 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.0.1 -zipp==3.17.0 diff --git a/.riot/requirements/2f7da3e.txt b/.riot/requirements/2f7da3e.txt deleted file mode 100644 index 362060f9ca9..00000000000 --- a/.riot/requirements/2f7da3e.txt +++ /dev/null @@ -1,88 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate 
.riot/requirements/2f7da3e.in -# -annotated-types==0.7.0 -attrs==25.3.0 -aws-sam-translator==1.100.0 -aws-xray-sdk==2.14.0 -boto3==1.34.49 -botocore==1.34.49 -certifi==2025.8.3 -cffi==1.17.1 -cfn-lint==1.26.1 -charset-normalizer==3.4.3 -coverage[toml]==7.6.1 -cryptography==45.0.7 -docker==7.1.0 -ecdsa==0.19.1 -exceptiongroup==1.3.0 -graphql-core==3.2.6 -hypothesis==6.45.0 -idna==3.10 -importlib-metadata==8.5.0 -importlib-resources==6.4.5 -iniconfig==2.1.0 -jinja2==3.1.6 -jmespath==1.0.1 -jsondiff==2.2.1 -jsonpatch==1.33 -jsonpointer==3.0.0 -jsonschema==4.23.0 -jsonschema-path==0.3.4 -jsonschema-specifications==2023.12.1 -lazy-object-proxy==1.10.0 -markupsafe==2.1.5 -mock==5.2.0 -moto[all]==4.2.14 -mpmath==1.3.0 -multidict==6.1.0 -multipart==1.3.0 -networkx==3.1 -openapi-schema-validator==0.6.3 -openapi-spec-validator==0.7.2 -opentracing==2.4.0 -packaging==25.0 -pathable==0.4.4 -pkgutil-resolve-name==1.3.10 -pluggy==1.5.0 -propcache==0.2.0 -py-partiql-parser==0.5.0 -pyasn1==0.4.8 -pycparser==2.23 -pydantic==2.10.6 -pydantic-core==2.27.2 -pyparsing==3.1.4 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -python-dateutil==2.9.0.post0 -python-jose[cryptography]==3.4.0 -pyyaml==6.0.2 -referencing==0.35.1 -regex==2024.11.6 -requests==2.32.4 -responses==0.25.8 -rfc3339-validator==0.1.4 -rpds-py==0.20.1 -rsa==4.9.1 -s3transfer==0.10.4 -six==1.17.0 -sortedcontainers==2.4.0 -sshpubkeys==3.3.1 -sympy==1.13.3 -tomli==2.2.1 -typing-extensions==4.13.2 -urllib3==1.26.20 -vcrpy==6.0.1 -werkzeug==3.0.6 -wrapt==1.17.3 -xmltodict==0.15.0 -yarl==1.15.2 -zipp==3.20.2 - -# The following packages are considered to be unsafe in a requirements file: -setuptools==75.3.2 diff --git a/.riot/requirements/3007b59.txt b/.riot/requirements/3007b59.txt deleted file mode 100644 index ae662d03dd9..00000000000 --- a/.riot/requirements/3007b59.txt +++ /dev/null @@ -1,25 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: 
-# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/3007b59.in -# -attrs==25.3.0 -coverage[toml]==7.6.1 -exceptiongroup==1.3.0 -hypothesis==6.45.0 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -mock==5.2.0 -msgpack==1.1.1 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.2.1 -typing-extensions==4.13.2 -zipp==3.20.2 diff --git a/.riot/requirements/30641af.txt b/.riot/requirements/30641af.txt deleted file mode 100644 index 407ecbf61ed..00000000000 --- a/.riot/requirements/30641af.txt +++ /dev/null @@ -1,29 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/30641af.in -# -attrs==25.3.0 -certifi==2025.6.15 -charset-normalizer==3.4.2 -coverage[toml]==7.6.1 -django==2.2.28 -exceptiongroup==1.3.0 -hypothesis==6.45.0 -idna==3.10 -iniconfig==2.1.0 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pytz==2025.2 -requests==2.32.4 -sortedcontainers==2.4.0 -sqlparse==0.5.3 -tomli==2.2.1 -typing-extensions==4.13.2 -urllib3==2.2.3 diff --git a/.riot/requirements/30b2227.txt b/.riot/requirements/30b2227.txt deleted file mode 100644 index 11938ffc708..00000000000 --- a/.riot/requirements/30b2227.txt +++ /dev/null @@ -1,35 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/30b2227.in -# -aiohttp==3.9.5 -aiohttp-jinja2==1.6 -aiosignal==1.3.1 -async-timeout==4.0.3 -attrs==23.2.0 -coverage[toml]==7.5.4 -exceptiongroup==1.2.1 -frozenlist==1.4.1 -hypothesis==6.45.0 -idna==3.7 -importlib-metadata==8.0.0 -iniconfig==2.0.0 -jinja2==3.1.4 -markupsafe==2.1.5 -mock==5.1.0 -multidict==6.0.5 -opentracing==2.4.0 -packaging==24.1 -pluggy==1.5.0 -pytest==8.2.2 -pytest-aiohttp==1.0.5 
-pytest-asyncio==0.23.7 -pytest-cov==5.0.0 -pytest-mock==3.14.0 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.0.1 -yarl==1.9.4 -zipp==3.19.2 diff --git a/.riot/requirements/30d009a.txt b/.riot/requirements/30d009a.txt deleted file mode 100644 index 44259583d11..00000000000 --- a/.riot/requirements/30d009a.txt +++ /dev/null @@ -1,40 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/30d009a.in -# -anyio==4.5.2 -attrs==25.3.0 -certifi==2025.10.5 -charset-normalizer==3.4.4 -click==8.1.8 -coverage[toml]==7.6.1 -exceptiongroup==1.3.0 -fastapi==0.94.1 -h11==0.16.0 -httpcore==1.0.9 -httpx==0.27.2 -hypothesis==6.45.0 -idna==3.11 -iniconfig==2.1.0 -jinja2==3.1.6 -markupsafe==2.1.5 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pydantic==1.10.24 -pytest==8.3.5 -pytest-asyncio==0.24.0 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -python-multipart==0.0.20 -requests==2.32.4 -sniffio==1.3.1 -sortedcontainers==2.4.0 -starlette==0.26.1 -tomli==2.3.0 -typing-extensions==4.13.2 -urllib3==2.2.3 -uvicorn==0.33.0 diff --git a/.riot/requirements/315c2cb.txt b/.riot/requirements/315c2cb.txt deleted file mode 100644 index 8a45f9b13fe..00000000000 --- a/.riot/requirements/315c2cb.txt +++ /dev/null @@ -1,26 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/315c2cb.in -# -async-timeout==5.0.1 -attrs==24.2.0 -coverage[toml]==7.6.1 -exceptiongroup==1.2.2 -hypothesis==6.45.0 -importlib-metadata==8.5.0 -iniconfig==2.0.0 -mock==5.1.0 -opentracing==2.4.0 -packaging==24.2 -pluggy==1.5.0 -pytest==8.3.3 -pytest-asyncio==0.23.7 -pytest-cov==5.0.0 -pytest-mock==3.14.0 -pytest-randomly==3.15.0 -redis==4.6.0 -sortedcontainers==2.4.0 -tomli==2.1.0 -zipp==3.20.2 diff --git a/.riot/requirements/328b28c.txt b/.riot/requirements/328b28c.txt deleted file 
mode 100644 index 38eac9651b9..00000000000 --- a/.riot/requirements/328b28c.txt +++ /dev/null @@ -1,24 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.10 -# by the following command: -# -# pip-compile --no-annotate --resolver=backtracking .riot/requirements/328b28c.in -# -attrs==24.2.0 -coverage[toml]==7.6.1 -dnspython==2.6.1 -exceptiongroup==1.2.2 -hypothesis==6.45.0 -iniconfig==2.0.0 -mock==5.1.0 -mongoengine==0.29.1 -opentracing==2.4.0 -packaging==24.1 -pluggy==1.5.0 -pymongo==4.8.0 -pytest==8.3.3 -pytest-cov==5.0.0 -pytest-mock==3.14.0 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.0.1 diff --git a/.riot/requirements/3348fe3.txt b/.riot/requirements/3348fe3.txt deleted file mode 100644 index 956b6f44882..00000000000 --- a/.riot/requirements/3348fe3.txt +++ /dev/null @@ -1,46 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/3348fe3.in -# -attrs==23.2.0 -beautifulsoup4==4.12.3 -certifi==2024.7.4 -charset-normalizer==3.3.2 -coverage[toml]==7.6.0 -exceptiongroup==1.2.2 -hupper==1.12.1 -hypothesis==6.45.0 -idna==3.7 -importlib-metadata==8.2.0 -iniconfig==2.0.0 -mock==5.1.0 -opentracing==2.4.0 -packaging==24.1 -pastedeploy==3.1.0 -plaster==1.1.2 -plaster-pastedeploy==1.0.1 -pluggy==1.5.0 -pserve-test-app @ file:///home/bits/project/tests/contrib/pyramid/pserve_app -pyramid==2.0.2 -pytest==8.3.1 -pytest-cov==5.0.0 -pytest-mock==3.14.0 -pytest-randomly==3.15.0 -requests==2.32.3 -sortedcontainers==2.4.0 -soupsieve==2.5 -tomli==2.0.1 -translationstring==1.4 -urllib3==2.2.2 -venusian==3.1.0 -waitress==3.0.0 -webob==1.8.7 -webtest==3.0.0 -zipp==3.19.2 -zope-deprecation==5.0 -zope-interface==6.4.post2 - -# The following packages are considered to be unsafe in a requirements file: -setuptools==71.1.0 diff --git a/.riot/requirements/33ce309.txt b/.riot/requirements/33ce309.txt deleted file mode 100644 index 
ba7c81d2662..00000000000 --- a/.riot/requirements/33ce309.txt +++ /dev/null @@ -1,27 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.12 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/33ce309.in -# -attrs==25.4.0 -coverage[toml]==7.11.0 -gunicorn==23.0.0 -hypothesis==6.45.0 -iniconfig==2.3.0 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.6.0 -protobuf==4.22.0 -py-cpuinfo==8.0.0 -pygments==2.19.2 -pytest==8.4.2 -pytest-asyncio==0.21.1 -pytest-benchmark==5.2.1 -pytest-cov==7.0.0 -pytest-mock==3.15.1 -pytest-randomly==4.0.1 -sortedcontainers==2.4.0 -uwsgi==2.0.31 -zstandard==0.25.0 diff --git a/.riot/requirements/34a1fc3.txt b/.riot/requirements/34a1fc3.txt deleted file mode 100644 index c5a744026e8..00000000000 --- a/.riot/requirements/34a1fc3.txt +++ /dev/null @@ -1,30 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.10 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/34a1fc3.in -# -attrs==25.4.0 -coverage[toml]==7.11.0 -exceptiongroup==1.3.0 -gunicorn==23.0.0 -hypothesis==6.45.0 -iniconfig==2.3.0 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.6.0 -protobuf==6.33.0 -py-cpuinfo==8.0.0 -pygments==2.19.2 -pytest==8.4.2 -pytest-asyncio==0.21.1 -pytest-benchmark==5.2.1 -pytest-cov==7.0.0 -pytest-mock==3.15.1 -pytest-randomly==4.0.1 -sortedcontainers==2.4.0 -tomli==2.3.0 -typing-extensions==4.15.0 -uwsgi==2.0.31 -zstandard==0.25.0 diff --git a/.riot/requirements/95a8551.txt b/.riot/requirements/377066a.txt similarity index 89% rename from .riot/requirements/95a8551.txt rename to .riot/requirements/377066a.txt index e233f1861cd..48a37a86b98 100644 --- a/.riot/requirements/95a8551.txt +++ b/.riot/requirements/377066a.txt @@ -2,10 +2,10 @@ # This file is autogenerated by pip-compile with Python 3.10 # by the following command: # -# pip-compile --allow-unsafe --no-annotate .riot/requirements/95a8551.in +# 
pip-compile --allow-unsafe --no-annotate .riot/requirements/377066a.in # attrs==25.4.0 -coverage[toml]==7.11.0 +coverage[toml]==7.11.3 exceptiongroup==1.3.0 gunicorn==23.0.0 hypothesis==6.45.0 @@ -19,9 +19,9 @@ pluggy==1.6.0 protobuf==6.33.0 py-cpuinfo==8.0.0 pygments==2.19.2 -pytest==8.4.2 +pytest==9.0.0 pytest-asyncio==0.21.1 -pytest-benchmark==5.2.1 +pytest-benchmark==5.2.3 pytest-cov==7.0.0 pytest-cpp==2.6.0 pytest-mock==3.15.1 diff --git a/.riot/requirements/3aa457c.txt b/.riot/requirements/3aa457c.txt deleted file mode 100644 index 0f35c37a47a..00000000000 --- a/.riot/requirements/3aa457c.txt +++ /dev/null @@ -1,42 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/3aa457c.in -# -asgiref==3.8.1 -attrs==25.3.0 -backports-zoneinfo==0.2.1 -certifi==2025.8.3 -charset-normalizer==3.4.3 -click==7.1.2 -coverage[toml]==7.6.1 -django==4.2.24 -exceptiongroup==1.3.0 -flask==1.1.4 -gunicorn==23.0.0 -httpretty==1.0.5 -hypothesis==6.45.0 -idna==3.10 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -itsdangerous==1.1.0 -jinja2==2.11.3 -markupsafe==1.1.1 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -requests==2.32.4 -sortedcontainers==2.4.0 -sqlparse==0.5.3 -tomli==2.2.1 -typing-extensions==4.13.2 -urllib3==2.2.3 -werkzeug==1.0.1 -xmltodict==0.15.0 -zipp==3.20.2 diff --git a/.riot/requirements/3b65323.txt b/.riot/requirements/3b65323.txt deleted file mode 100644 index 6e7fb0a5c7f..00000000000 --- a/.riot/requirements/3b65323.txt +++ /dev/null @@ -1,24 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/3b65323.in -# -attrs==23.1.0 -coverage[toml]==7.3.4 -exceptiongroup==1.2.0 -hypothesis==6.45.0 -importlib-metadata==7.0.0 -iniconfig==2.0.0 -mock==5.1.0 
-mysqlclient==2.2.1 -opentracing==2.4.0 -packaging==23.2 -pluggy==1.3.0 -pytest==7.4.3 -pytest-cov==4.1.0 -pytest-mock==3.12.0 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.0.1 -zipp==3.17.0 diff --git a/.riot/requirements/3ba7e37.txt b/.riot/requirements/3ba7e37.txt deleted file mode 100644 index 3dbb32d1178..00000000000 --- a/.riot/requirements/3ba7e37.txt +++ /dev/null @@ -1,33 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/3ba7e37.in -# -attrs==25.3.0 -coverage[toml]==7.6.1 -exceptiongroup==1.2.2 -glob2==0.7 -hypothesis==6.45.0 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -mako==1.3.10 -markupsafe==2.1.5 -mock==5.2.0 -more-itertools==8.10.0 -msgpack==1.1.0 -opentracing==2.4.0 -packaging==25.0 -parse==1.20.2 -parse-type==0.6.4 -pluggy==1.5.0 -py==1.11.0 -pytest==7.4.4 -pytest-bdd==6.0.1 -pytest-cov==5.0.0 -pytest-mock==3.14.0 -pytest-randomly==3.15.0 -six==1.17.0 -sortedcontainers==2.4.0 -tomli==2.2.1 -zipp==3.20.2 diff --git a/.riot/requirements/3c0f573.txt b/.riot/requirements/3c0f573.txt deleted file mode 100644 index fb0db0675f4..00000000000 --- a/.riot/requirements/3c0f573.txt +++ /dev/null @@ -1,37 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.10 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/3c0f573.in -# -attrs==25.4.0 -coverage[toml]==7.11.0 -exceptiongroup==1.3.0 -gevent==25.9.1 -greenlet==3.2.4 -gunicorn[gevent]==23.0.0 -hypothesis==6.45.0 -iniconfig==2.3.0 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.6.0 -protobuf==6.33.0 -py-cpuinfo==8.0.0 -pygments==2.19.2 -pytest==8.4.2 -pytest-asyncio==0.21.1 -pytest-benchmark==5.2.1 -pytest-cov==7.0.0 -pytest-mock==3.15.1 -pytest-randomly==4.0.1 -sortedcontainers==2.4.0 -tomli==2.3.0 -typing-extensions==4.15.0 -uwsgi==2.0.31 -zope-event==6.0 -zope-interface==8.0.1 -zstandard==0.25.0 - -# The 
following packages are considered to be unsafe in a requirements file: -setuptools==80.9.0 diff --git a/.riot/requirements/3dd53da.txt b/.riot/requirements/3dd53da.txt deleted file mode 100644 index 088ac0ddd7e..00000000000 --- a/.riot/requirements/3dd53da.txt +++ /dev/null @@ -1,22 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.13 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/3dd53da.in -# -attrs==24.2.0 -coverage[toml]==7.6.1 -dnspython==2.7.0 -hypothesis==6.45.0 -iniconfig==2.0.0 -mock==5.1.0 -mongoengine==0.29.1 -opentracing==2.4.0 -packaging==24.1 -pluggy==1.5.0 -pymongo==4.8.0 -pytest==8.3.3 -pytest-cov==5.0.0 -pytest-mock==3.14.0 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 diff --git a/.riot/requirements/3f2ebdc.txt b/.riot/requirements/3f2ebdc.txt deleted file mode 100644 index a8cdfd63d33..00000000000 --- a/.riot/requirements/3f2ebdc.txt +++ /dev/null @@ -1,40 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/3f2ebdc.in -# -annotated-types==0.7.0 -attrs==25.3.0 -blinker==1.8.2 -certifi==2025.10.5 -charset-normalizer==3.4.3 -click==8.1.8 -coverage[toml]==7.6.1 -exceptiongroup==1.3.0 -flask==2.3.3 -flask-openapi3==4.0.3 -hypothesis==6.45.0 -idna==3.10 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -itsdangerous==2.2.0 -jinja2==3.1.6 -markupsafe==2.1.5 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pydantic==2.10.6 -pydantic-core==2.27.2 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -requests==2.32.4 -sortedcontainers==2.4.0 -tomli==2.2.1 -typing-extensions==4.13.2 -urllib3==1.26.20 -werkzeug==3.0.6 -zipp==3.20.2 diff --git a/.riot/requirements/3f3ce6e.txt b/.riot/requirements/3f3ce6e.txt deleted file mode 100644 index 15223e399f6..00000000000 --- a/.riot/requirements/3f3ce6e.txt +++ /dev/null @@ -1,30 +0,0 @@ -# -# 
This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/3f3ce6e.in -# -attrs==25.3.0 -azure-core==1.33.0 -azure-eventhub==5.15.0 -certifi==2025.8.3 -charset-normalizer==3.4.3 -coverage[toml]==7.6.1 -exceptiongroup==1.3.0 -hypothesis==6.45.0 -idna==3.10 -iniconfig==2.1.0 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pytest==8.3.5 -pytest-asyncio==0.23.7 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -requests==2.32.4 -six==1.17.0 -sortedcontainers==2.4.0 -tomli==2.2.1 -typing-extensions==4.13.2 -urllib3==2.2.3 diff --git a/.riot/requirements/3f40530.txt b/.riot/requirements/3f40530.txt deleted file mode 100644 index 125f04e194c..00000000000 --- a/.riot/requirements/3f40530.txt +++ /dev/null @@ -1,24 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/3f40530.in -# -attrs==23.1.0 -coverage[toml]==7.3.4 -exceptiongroup==1.2.0 -hypothesis==6.45.0 -importlib-metadata==7.0.0 -iniconfig==2.0.0 -loguru==0.4.1 -mock==5.1.0 -opentracing==2.4.0 -packaging==23.2 -pluggy==1.3.0 -pytest==7.4.3 -pytest-cov==4.1.0 -pytest-mock==3.12.0 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.0.1 -zipp==3.17.0 diff --git a/.riot/requirements/40a41fd.txt b/.riot/requirements/40a41fd.txt deleted file mode 100644 index 9f9034b3892..00000000000 --- a/.riot/requirements/40a41fd.txt +++ /dev/null @@ -1,23 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.13 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/40a41fd.in -# -attrs==25.3.0 -coverage[toml]==7.8.2 -dnspython==2.7.0 -hypothesis==6.45.0 -iniconfig==2.1.0 -mock==5.2.0 -mongoengine==0.24.2 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.6.0 -pygments==2.19.1 -pymongo==4.8.0 -pytest==8.4.0 -pytest-cov==6.1.1 -pytest-mock==3.14.1 -pytest-randomly==3.16.0 
-sortedcontainers==2.4.0 diff --git a/.riot/requirements/40adc31.txt b/.riot/requirements/40adc31.txt deleted file mode 100644 index 21dbd2582a2..00000000000 --- a/.riot/requirements/40adc31.txt +++ /dev/null @@ -1,34 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/40adc31.in -# -anyio==4.5.2 -attrs==25.3.0 -certifi==2025.7.9 -charset-normalizer==3.4.2 -coverage[toml]==7.6.1 -exceptiongroup==1.3.0 -fastapi==0.94.1 -h11==0.16.0 -httpcore==1.0.9 -httpx==0.27.2 -hypothesis==6.113.0 -idna==3.10 -iniconfig==2.1.0 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pydantic==1.10.22 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -requests==2.32.4 -sniffio==1.3.1 -sortedcontainers==2.4.0 -starlette==0.26.1 -tomli==2.2.1 -typing-extensions==4.13.2 -urllib3==2.2.3 diff --git a/.riot/requirements/44339c7.txt b/.riot/requirements/44339c7.txt deleted file mode 100644 index 2aa39fbf0f5..00000000000 --- a/.riot/requirements/44339c7.txt +++ /dev/null @@ -1,56 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/44339c7.in -# -anyio==4.2.0 -asn1crypto==1.5.1 -attrs==23.1.0 -azure-common==1.1.28 -azure-core==1.29.6 -azure-storage-blob==12.19.0 -boto3==1.34.6 -botocore==1.34.6 -certifi==2020.12.5 -cffi==1.16.0 -chardet==3.0.4 -charset-normalizer==3.3.2 -coverage[toml]==7.3.4 -cryptography==3.4.8 -exceptiongroup==1.2.0 -hypothesis==6.45.0 -idna==2.10 -importlib-metadata==7.0.0 -iniconfig==2.0.0 -isodate==0.6.1 -jmespath==1.0.1 -mock==5.1.0 -opentracing==2.4.0 -oscrypto==1.3.0 -packaging==23.2 -pluggy==1.3.0 -pycparser==2.21 -pycryptodomex==3.19.0 -pyjwt==2.8.0 -pyopenssl==19.1.0 -pytest==7.4.3 -pytest-cov==4.1.0 -pytest-mock==3.12.0 -pytest-randomly==3.15.0 -python-dateutil==2.8.2 -pytz==2020.5 -requests==2.31.0 -responses==0.16.0 
-s3transfer==0.10.0 -six==1.16.0 -sniffio==1.3.0 -snowflake-connector-python==2.3.10 -sortedcontainers==2.4.0 -tomli==2.0.1 -typing-extensions==4.9.0 -urllib3==1.26.18 -zipp==3.17.0 - -# The following packages are considered to be unsafe in a requirements file: -setuptools==71.1.0 diff --git a/.riot/requirements/470a446.txt b/.riot/requirements/470a446.txt new file mode 100644 index 00000000000..4ff3f39a628 --- /dev/null +++ b/.riot/requirements/470a446.txt @@ -0,0 +1,36 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/470a446.in +# +attrs==25.4.0 +coverage[toml]==7.11.3 +gevent==25.9.1 +greenlet==3.2.4 +gunicorn[gevent]==23.0.0 +hypothesis==6.45.0 +iniconfig==2.3.0 +jsonschema==4.25.1 +jsonschema-specifications==2025.9.1 +mock==5.2.0 +opentracing==2.4.0 +packaging==25.0 +pluggy==1.6.0 +protobuf==6.33.0 +py-cpuinfo==8.0.0 +pygments==2.19.2 +pytest==9.0.0 +pytest-asyncio==0.21.1 +pytest-benchmark==5.2.3 +pytest-cov==7.0.0 +pytest-cpp==2.6.0 +pytest-mock==3.15.1 +pytest-randomly==4.0.1 +referencing==0.37.0 +rpds-py==0.28.0 +sortedcontainers==2.4.0 +uwsgi==2.0.31 +zope-event==6.1 +zope-interface==8.1 +zstandard==0.25.0 diff --git a/.riot/requirements/11f199b.txt b/.riot/requirements/492b83f.txt similarity index 55% rename from .riot/requirements/11f199b.txt rename to .riot/requirements/492b83f.txt index 288e0e3aee0..659390d6b9d 100644 --- a/.riot/requirements/11f199b.txt +++ b/.riot/requirements/492b83f.txt @@ -2,33 +2,38 @@ # This file is autogenerated by pip-compile with Python 3.13 # by the following command: # -# pip-compile --allow-unsafe --no-annotate .riot/requirements/11f199b.in +# pip-compile --allow-unsafe --no-annotate .riot/requirements/492b83f.in # +asgiref==3.10.0 attrs==25.4.0 -coverage[toml]==7.11.0 +bcrypt==4.2.1 +certifi==2025.10.5 +charset-normalizer==3.4.4 +coverage[toml]==7.11.1 +dill==0.4.0 +django==4.2.26 
+django-configurations==2.5.1 gevent==25.9.1 greenlet==3.2.4 -gunicorn[gevent]==23.0.0 +gunicorn==23.0.0 hypothesis==6.45.0 +idna==3.11 iniconfig==2.3.0 mock==5.2.0 opentracing==2.4.0 packaging==25.0 pluggy==1.6.0 -protobuf==6.33.0 -py-cpuinfo==8.0.0 pygments==2.19.2 +pylibmc==1.6.3 pytest==8.4.2 -pytest-asyncio==0.21.1 -pytest-benchmark==5.2.1 pytest-cov==7.0.0 +pytest-django[testing]==3.10.0 pytest-mock==3.15.1 -pytest-randomly==4.0.1 +pyyaml==6.0.3 +requests==2.32.5 +six==1.17.0 sortedcontainers==2.4.0 -uwsgi==2.0.31 -zope-event==6.0 +sqlparse==0.5.3 +urllib3==2.5.0 +zope-event==6.1 zope-interface==8.0.1 -zstandard==0.25.0 - -# The following packages are considered to be unsafe in a requirements file: -setuptools==80.9.0 diff --git a/.riot/requirements/4ad5317.txt b/.riot/requirements/4ad5317.txt deleted file mode 100644 index 9d6cecbc8e5..00000000000 --- a/.riot/requirements/4ad5317.txt +++ /dev/null @@ -1,24 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate --resolver=backtracking .riot/requirements/4ad5317.in -# -attrs==25.3.0 -coverage[toml]==7.6.1 -exceptiongroup==1.2.2 -hypothesis==6.45.0 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -mock==5.2.0 -opentracing==2.4.0 -packaging==24.2 -pluggy==1.5.0 -psycopg2-binary==2.8.6 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-mock==3.14.0 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.2.1 -zipp==3.20.2 diff --git a/.riot/requirements/1ed98b0.txt b/.riot/requirements/4d59dd3.txt similarity index 89% rename from .riot/requirements/1ed98b0.txt rename to .riot/requirements/4d59dd3.txt index 1ae59d863d1..d5cd256d540 100644 --- a/.riot/requirements/1ed98b0.txt +++ b/.riot/requirements/4d59dd3.txt @@ -2,10 +2,10 @@ # This file is autogenerated by pip-compile with Python 3.10 # by the following command: # -# pip-compile --allow-unsafe --no-annotate .riot/requirements/1ed98b0.in +# pip-compile --allow-unsafe 
--no-annotate .riot/requirements/4d59dd3.in # attrs==25.4.0 -coverage[toml]==7.11.0 +coverage[toml]==7.11.3 exceptiongroup==1.3.0 gunicorn==23.0.0 hypothesis==6.45.0 @@ -19,9 +19,9 @@ pluggy==1.6.0 protobuf==6.33.0 py-cpuinfo==8.0.0 pygments==2.19.2 -pytest==8.4.2 +pytest==9.0.0 pytest-asyncio==0.21.1 -pytest-benchmark==5.2.1 +pytest-benchmark==5.2.3 pytest-cov==7.0.0 pytest-cpp==2.6.0 pytest-mock==3.15.1 diff --git a/.riot/requirements/4de03a5.txt b/.riot/requirements/4de03a5.txt deleted file mode 100644 index 8fa32aa29f3..00000000000 --- a/.riot/requirements/4de03a5.txt +++ /dev/null @@ -1,79 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/4de03a5.in -# -arrow==1.3.0 -asgiref==3.8.1 -attrs==24.3.0 -autobahn==23.1.2 -automat==24.8.1 -backports-zoneinfo==0.2.1 -bcrypt==4.2.1 -blessed==1.20.0 -certifi==2024.12.14 -cffi==1.17.1 -channels==4.2.0 -charset-normalizer==3.4.0 -constantly==23.10.4 -coverage[toml]==7.6.1 -cryptography==44.0.0 -daphne==4.1.2 -django==4.2.17 -django-configurations==2.5.1 -django-picklefield==3.2 -django-pylibmc==0.6.1 -django-q==1.3.6 -django-redis==4.5.0 -exceptiongroup==1.2.2 -hyperlink==21.0.0 -hypothesis==6.45.0 -idna==3.10 -importlib-metadata==8.5.0 -incremental==24.7.2 -iniconfig==2.0.0 -isodate==0.7.2 -lxml==5.3.0 -mock==5.1.0 -opentracing==2.4.0 -packaging==24.2 -platformdirs==4.3.6 -pluggy==1.5.0 -psycopg==3.2.3 -psycopg2-binary==2.9.10 -pyasn1==0.6.1 -pyasn1-modules==0.4.1 -pycparser==2.22 -pylibmc==1.6.3 -pyopenssl==24.3.0 -pytest==8.3.4 -pytest-cov==5.0.0 -pytest-django[testing]==3.10.0 -pytest-mock==3.14.0 -pytest-randomly==3.15.0 -python-dateutil==2.9.0.post0 -python-memcached==1.62 -pytz==2024.2 -redis==2.10.6 -requests==2.32.3 -requests-file==2.1.0 -requests-toolbelt==1.0.0 -service-identity==24.2.0 -six==1.17.0 -sortedcontainers==2.4.0 -spyne==2.14.0 -sqlparse==0.5.3 -tomli==2.2.1 -twisted[tls]==24.11.0 
-txaio==23.1.1 -types-python-dateutil==2.9.0.20241206 -typing-extensions==4.12.2 -urllib3==2.2.3 -wcwidth==0.2.13 -zeep==4.3.1 -zipp==3.20.2 -zope-interface==7.2 - -# The following packages are considered to be unsafe in a requirements file: -setuptools==75.3.0 diff --git a/.riot/requirements/4ef6c1c.txt b/.riot/requirements/4ef6c1c.txt deleted file mode 100644 index b17633d8a1a..00000000000 --- a/.riot/requirements/4ef6c1c.txt +++ /dev/null @@ -1,47 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate --resolver=backtracking .riot/requirements/4ef6c1c.in -# -attrs==25.1.0 -boto3==1.36.19 -botocore==1.36.19 -bytecode==0.16.1 -certifi==2025.1.31 -charset-normalizer==3.4.1 -coverage[toml]==7.6.1 -datadog==0.51.0 -datadog-lambda==6.105.0 -ddtrace==2.20.1 -deprecated==1.2.18 -envier==0.6.1 -exceptiongroup==1.2.2 -hypothesis==6.45.0 -idna==3.10 -importlib-metadata==8.5.0 -iniconfig==2.0.0 -jmespath==1.0.1 -mock==5.1.0 -opentelemetry-api==1.30.0 -opentracing==2.4.0 -packaging==24.2 -pluggy==1.5.0 -protobuf==5.29.3 -pytest==8.3.4 -pytest-asyncio==0.21.1 -pytest-cov==5.0.0 -pytest-mock==3.14.0 -pytest-randomly==3.15.0 -python-dateutil==2.9.0.post0 -requests==2.32.3 -s3transfer==0.11.2 -six==1.17.0 -sortedcontainers==2.4.0 -tomli==2.2.1 -typing-extensions==4.12.2 -ujson==5.10.0 -urllib3==1.26.20 -wrapt==1.17.2 -xmltodict==0.14.2 -zipp==3.20.2 diff --git a/.riot/requirements/4f441db.txt b/.riot/requirements/4f441db.txt deleted file mode 100644 index 8bcbc844c30..00000000000 --- a/.riot/requirements/4f441db.txt +++ /dev/null @@ -1,25 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate --resolver=backtracking .riot/requirements/4f441db.in -# -attrs==25.3.0 -coverage[toml]==7.6.1 -dogpile-cache==0.6.8 -exceptiongroup==1.3.0 -hypothesis==6.45.0 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -mock==5.2.0 
-opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.2.1 -typing-extensions==4.13.2 -zipp==3.20.2 diff --git a/.riot/requirements/4f4caf8.txt b/.riot/requirements/4f4caf8.txt deleted file mode 100644 index 7441d0631a9..00000000000 --- a/.riot/requirements/4f4caf8.txt +++ /dev/null @@ -1,45 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/4f4caf8.in -# -attrs==25.3.0 -babel==2.17.0 -certifi==2025.8.3 -charset-normalizer==3.4.3 -coverage[toml]==7.6.1 -exceptiongroup==1.3.0 -execnet==2.1.1 -gevent==24.2.1 -greenlet==3.1.1 -hypothesis==6.45.0 -idna==3.10 -iniconfig==2.1.0 -markupsafe==2.1.5 -mock==5.2.0 -mysql-connector-python==9.0.0 -mysqlclient==2.1.1 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -psycopg2-binary==2.9.10 -pymysql==1.1.2 -pytest==8.3.5 -pytest-asyncio==0.24.0 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pytest-xdist==3.6.1 -pytz==2025.2 -requests==2.32.4 -sortedcontainers==2.4.0 -sqlalchemy==2.0.43 -tomli==2.2.1 -typing-extensions==4.13.2 -urllib3==2.2.3 -werkzeug==3.0.6 -zope-event==5.0 -zope-interface==7.2 - -# The following packages are considered to be unsafe in a requirements file: -setuptools==75.3.2 diff --git a/.riot/requirements/4f9be04.txt b/.riot/requirements/4f9be04.txt deleted file mode 100644 index 1bc07ce87aa..00000000000 --- a/.riot/requirements/4f9be04.txt +++ /dev/null @@ -1,25 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/4f9be04.in -# -attrs==23.1.0 -coverage[toml]==7.3.4 -elasticsearch2==2.5.1 -exceptiongroup==1.2.0 -hypothesis==6.45.0 -importlib-metadata==7.0.0 -iniconfig==2.0.0 -mock==5.1.0 -opentracing==2.4.0 -packaging==23.2 -pluggy==1.3.0 -pytest==7.4.3 -pytest-cov==4.1.0 -pytest-mock==3.12.0 
-pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.0.1 -urllib3==1.26.18 -zipp==3.17.0 diff --git a/.riot/requirements/50b70d9.txt b/.riot/requirements/50b70d9.txt deleted file mode 100644 index 8bedee18d6e..00000000000 --- a/.riot/requirements/50b70d9.txt +++ /dev/null @@ -1,35 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate --resolver=backtracking .riot/requirements/50b70d9.in -# -asgiref==3.8.1 -attrs==25.3.0 -coverage[toml]==7.6.1 -django==3.2.25 -django-configurations==2.5.1 -django-hosts==4.0 -exceptiongroup==1.2.2 -hypothesis==6.45.0 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -mock==5.2.0 -opentracing==2.4.0 -packaging==24.2 -pluggy==1.5.0 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-django[testing]==3.10.0 -pytest-mock==3.14.0 -pytest-randomly==3.15.0 -pytz==2025.2 -six==1.17.0 -sortedcontainers==2.4.0 -sqlparse==0.5.3 -tomli==2.2.1 -typing-extensions==4.13.2 -zipp==3.20.2 - -# The following packages are considered to be unsafe in a requirements file: -setuptools==75.3.2 diff --git a/.riot/requirements/55b2430.txt b/.riot/requirements/55b2430.txt deleted file mode 100644 index 9e5c9096838..00000000000 --- a/.riot/requirements/55b2430.txt +++ /dev/null @@ -1,30 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/55b2430.in -# -attrs==25.3.0 -cattrs==22.2.0 -coverage[toml]==7.6.1 -exceptiongroup==1.2.2 -execnet==2.1.1 -hypothesis==6.45.0 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -mock==5.2.0 -molten==1.0.2 -mypy-extensions==1.1.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -pytest-xdist==3.6.1 -sortedcontainers==2.4.0 -tomli==2.2.1 -typing-extensions==3.10.0.2 -typing-inspect==0.6.0 -zipp==3.20.2 diff --git a/.riot/requirements/55b8536.txt 
b/.riot/requirements/55b8536.txt deleted file mode 100644 index ed6036adcd1..00000000000 --- a/.riot/requirements/55b8536.txt +++ /dev/null @@ -1,62 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.12 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/55b8536.in -# -annotated-types==0.7.0 -attrs==24.2.0 -cachetools==5.5.0 -certifi==2024.8.30 -charset-normalizer==3.4.0 -coverage[toml]==7.6.8 -docstring-parser==0.16 -google-ai-generativelanguage==0.6.10 -google-api-core[grpc]==2.23.0 -google-api-python-client==2.154.0 -google-auth==2.36.0 -google-auth-httplib2==0.2.0 -google-cloud-aiplatform[all]==1.71.1 -google-cloud-bigquery==3.27.0 -google-cloud-core==2.4.1 -google-cloud-resource-manager==1.13.1 -google-cloud-storage==2.18.2 -google-crc32c==1.6.0 -google-generativeai==0.8.3 -google-resumable-media==2.7.2 -googleapis-common-protos[grpc]==1.66.0 -grpc-google-iam-v1==0.13.1 -grpcio==1.68.0 -grpcio-status==1.68.0 -httplib2==0.22.0 -hypothesis==6.45.0 -idna==3.10 -iniconfig==2.0.0 -mock==5.1.0 -numpy==2.1.3 -opentracing==2.4.0 -packaging==24.2 -pillow==11.0.0 -pluggy==1.5.0 -proto-plus==1.25.0 -protobuf==5.28.3 -pyasn1==0.6.1 -pyasn1-modules==0.4.1 -pydantic==2.10.2 -pydantic-core==2.27.1 -pyparsing==3.2.0 -pytest==8.3.3 -pytest-asyncio==0.24.0 -pytest-cov==6.0.0 -pytest-mock==3.14.0 -python-dateutil==2.9.0.post0 -requests==2.32.3 -rsa==4.9 -shapely==2.0.6 -six==1.16.0 -sortedcontainers==2.4.0 -tqdm==4.67.1 -typing-extensions==4.12.2 -uritemplate==4.1.1 -urllib3==2.2.3 -vertexai==1.71.1 diff --git a/.riot/requirements/57ce041.txt b/.riot/requirements/57ce041.txt deleted file mode 100644 index 2debe686006..00000000000 --- a/.riot/requirements/57ce041.txt +++ /dev/null @@ -1,27 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.11 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/57ce041.in -# -attrs==25.4.0 -coverage[toml]==7.11.0 
-gunicorn==23.0.0 -hypothesis==6.45.0 -iniconfig==2.3.0 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.6.0 -protobuf==4.22.0 -py-cpuinfo==8.0.0 -pygments==2.19.2 -pytest==8.4.2 -pytest-asyncio==0.21.1 -pytest-benchmark==5.2.1 -pytest-cov==7.0.0 -pytest-mock==3.15.1 -pytest-randomly==4.0.1 -sortedcontainers==2.4.0 -uwsgi==2.0.31 -zstandard==0.25.0 diff --git a/.riot/requirements/59a4721.txt b/.riot/requirements/59a4721.txt deleted file mode 100644 index f41c79474d7..00000000000 --- a/.riot/requirements/59a4721.txt +++ /dev/null @@ -1,25 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/59a4721.in -# -attrs==25.3.0 -coverage[toml]==7.6.1 -exceptiongroup==1.3.0 -hypothesis==6.45.0 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -mock==5.2.0 -openfeature-sdk==0.7.5 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.3.0 -typing-extensions==4.13.2 -zipp==3.20.2 diff --git a/.riot/requirements/59df9d0.txt b/.riot/requirements/59df9d0.txt deleted file mode 100644 index 45196ed8d7e..00000000000 --- a/.riot/requirements/59df9d0.txt +++ /dev/null @@ -1,26 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.11 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/59df9d0.in -# -attrs==23.1.0 -austin-python==1.7.0 -coverage[toml]==7.3.2 -hypothesis==6.45.0 -iniconfig==2.0.0 -markdown-it-py==3.0.0 -mdurl==0.1.2 -mock==5.1.0 -opentracing==2.4.0 -packaging==23.2 -pluggy==1.3.0 -protobuf==3.20.3 -psutil==5.9.6 -pygments==2.16.1 -pytest==7.4.3 -pytest-cov==4.1.0 -pytest-mock==3.12.0 -rich==13.6.0 -sortedcontainers==2.4.0 -toml==0.10.2 diff --git a/.riot/requirements/5ac9b4e.txt b/.riot/requirements/5ac9b4e.txt deleted file mode 100644 index 046c13e20b2..00000000000 --- 
a/.riot/requirements/5ac9b4e.txt +++ /dev/null @@ -1,64 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.13 -# by the following command: -# -# pip-compile --allow-unsafe --cert=None --client-cert=None --index-url=None --no-annotate --pip-args=None .riot/requirements/5ac9b4e.in -# -annotated-types==0.7.0 -attrs==25.3.0 -cachetools==5.5.2 -certifi==2025.8.3 -charset-normalizer==3.4.3 -coverage[toml]==7.10.7 -docstring-parser==0.17.0 -google-ai-generativelanguage==0.6.15 -google-api-core[grpc]==2.25.1 -google-api-python-client==2.183.0 -google-auth==2.40.3 -google-auth-httplib2==0.2.0 -google-cloud-aiplatform[all]==1.71.1 -google-cloud-bigquery==3.38.0 -google-cloud-core==2.4.3 -google-cloud-resource-manager==1.14.2 -google-cloud-storage==2.19.0 -google-crc32c==1.7.1 -google-generativeai==0.8.5 -google-resumable-media==2.7.2 -googleapis-common-protos[grpc]==1.70.0 -grpc-google-iam-v1==0.14.2 -grpcio==1.75.1 -grpcio-status==1.71.2 -httplib2==0.31.0 -hypothesis==6.45.0 -idna==3.10 -iniconfig==2.1.0 -mock==5.2.0 -numpy==2.3.3 -opentracing==2.4.0 -packaging==25.0 -pillow==11.3.0 -pluggy==1.6.0 -proto-plus==1.26.1 -protobuf==5.29.5 -pyasn1==0.6.1 -pyasn1-modules==0.4.2 -pydantic==2.11.9 -pydantic-core==2.33.2 -pygments==2.19.2 -pyparsing==3.2.5 -pytest==8.4.2 -pytest-asyncio==1.2.0 -pytest-cov==7.0.0 -pytest-mock==3.15.1 -python-dateutil==2.9.0.post0 -requests==2.32.5 -rsa==4.9.1 -shapely==2.1.2 -six==1.17.0 -sortedcontainers==2.4.0 -tqdm==4.67.1 -typing-extensions==4.15.0 -typing-inspection==0.4.1 -uritemplate==4.2.0 -urllib3==2.5.0 -vertexai==1.71.1 diff --git a/.riot/requirements/5b339ac.txt b/.riot/requirements/5b339ac.txt deleted file mode 100644 index 9c52400986b..00000000000 --- a/.riot/requirements/5b339ac.txt +++ /dev/null @@ -1,26 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/5b339ac.in -# -attrs==23.1.0 -certifi==2023.11.17 
-coverage[toml]==7.3.4 -elasticsearch7==7.17.9 -exceptiongroup==1.2.0 -hypothesis==6.45.0 -importlib-metadata==7.0.0 -iniconfig==2.0.0 -mock==5.1.0 -opentracing==2.4.0 -packaging==23.2 -pluggy==1.3.0 -pytest==7.4.3 -pytest-cov==4.1.0 -pytest-mock==3.12.0 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.0.1 -urllib3==1.26.18 -zipp==3.17.0 diff --git a/.riot/requirements/5b55f2d.txt b/.riot/requirements/5b55f2d.txt deleted file mode 100644 index 4d502d83648..00000000000 --- a/.riot/requirements/5b55f2d.txt +++ /dev/null @@ -1,27 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/5b55f2d.in -# -attrs==23.1.0 -certifi==2023.11.17 -coverage[toml]==7.3.4 -elastic-transport==8.11.0 -elasticsearch8==8.11.1 -exceptiongroup==1.2.0 -hypothesis==6.45.0 -importlib-metadata==7.0.0 -iniconfig==2.0.0 -mock==5.1.0 -opentracing==2.4.0 -packaging==23.2 -pluggy==1.3.0 -pytest==7.4.3 -pytest-cov==4.1.0 -pytest-mock==3.12.0 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.0.1 -urllib3==2.1.0 -zipp==3.17.0 diff --git a/.riot/requirements/5e79012.txt b/.riot/requirements/5e79012.txt deleted file mode 100644 index 22de4e112ba..00000000000 --- a/.riot/requirements/5e79012.txt +++ /dev/null @@ -1,44 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate --resolver=backtracking .riot/requirements/5e79012.in -# -aiohappyeyeballs==2.4.4 -aiohttp==3.10.11 -aiosignal==1.3.1 -async-timeout==5.0.1 -attrs==25.3.0 -certifi==2025.1.31 -charset-normalizer==3.4.1 -coverage[toml]==7.6.1 -elastic-transport==8.17.1 -elasticsearch[async]==9.0.0 -elasticsearch7[async]==7.17.12 -events==0.5 -exceptiongroup==1.2.2 -frozenlist==1.5.0 -hypothesis==6.45.0 -idna==3.10 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -mock==5.2.0 -multidict==6.1.0 -opensearch-py[async]==2.8.0 -opentracing==2.4.0 
-packaging==24.2 -pluggy==1.5.0 -propcache==0.2.0 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-mock==3.14.0 -pytest-randomly==3.15.0 -python-dateutil==2.9.0.post0 -requests==2.32.3 -six==1.17.0 -sortedcontainers==2.4.0 -tomli==2.2.1 -typing-extensions==4.13.2 -urllib3==1.26.20 -yarl==1.15.2 -zipp==3.20.2 diff --git a/.riot/requirements/5ed7bed.txt b/.riot/requirements/5ed7bed.txt deleted file mode 100644 index 8d62589d83b..00000000000 --- a/.riot/requirements/5ed7bed.txt +++ /dev/null @@ -1,24 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/5ed7bed.in -# -attrs==23.1.0 -confluent-kafka==1.9.2 -coverage[toml]==7.3.4 -exceptiongroup==1.2.0 -hypothesis==6.45.0 -importlib-metadata==7.0.0 -iniconfig==2.0.0 -mock==5.1.0 -opentracing==2.4.0 -packaging==23.2 -pluggy==1.3.0 -pytest==7.4.3 -pytest-cov==4.1.0 -pytest-mock==3.12.0 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.0.1 -zipp==3.17.0 diff --git a/.riot/requirements/610b7cb.txt b/.riot/requirements/610b7cb.txt deleted file mode 100644 index 59a69bc25a6..00000000000 --- a/.riot/requirements/610b7cb.txt +++ /dev/null @@ -1,27 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/610b7cb.in -# -attrs==23.1.0 -certifi==2023.11.17 -coverage[toml]==7.3.4 -elastic-transport==8.11.0 -elasticsearch==8.11.1 -exceptiongroup==1.2.0 -hypothesis==6.45.0 -importlib-metadata==7.0.0 -iniconfig==2.0.0 -mock==5.1.0 -opentracing==2.4.0 -packaging==23.2 -pluggy==1.3.0 -pytest==7.4.3 -pytest-cov==4.1.0 -pytest-mock==3.12.0 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.0.1 -urllib3==2.1.0 -zipp==3.17.0 diff --git a/.riot/requirements/6724bb2.txt b/.riot/requirements/6724bb2.txt deleted file mode 100644 index 8962a3db440..00000000000 --- a/.riot/requirements/6724bb2.txt +++ /dev/null @@ -1,30 +0,0 @@ -# -# This file is 
autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/6724bb2.in -# -asgiref==3.8.1 -attrs==25.3.0 -backports-zoneinfo==0.2.1 -certifi==2025.6.15 -charset-normalizer==3.4.2 -coverage[toml]==7.6.1 -django==4.2.23 -exceptiongroup==1.3.0 -hypothesis==6.45.0 -idna==3.10 -iniconfig==2.1.0 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -requests==2.32.4 -sortedcontainers==2.4.0 -sqlparse==0.5.3 -tomli==2.2.1 -typing-extensions==4.13.2 -urllib3==2.2.3 diff --git a/.riot/requirements/6820ef2.txt b/.riot/requirements/6820ef2.txt deleted file mode 100644 index 2db99b509e5..00000000000 --- a/.riot/requirements/6820ef2.txt +++ /dev/null @@ -1,62 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.11 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/6820ef2.in -# -annotated-types==0.7.0 -attrs==24.2.0 -cachetools==5.5.0 -certifi==2024.8.30 -charset-normalizer==3.4.0 -coverage[toml]==7.6.8 -docstring-parser==0.16 -google-ai-generativelanguage==0.6.10 -google-api-core[grpc]==2.23.0 -google-api-python-client==2.154.0 -google-auth==2.36.0 -google-auth-httplib2==0.2.0 -google-cloud-aiplatform[all]==1.71.1 -google-cloud-bigquery==3.27.0 -google-cloud-core==2.4.1 -google-cloud-resource-manager==1.13.1 -google-cloud-storage==2.18.2 -google-crc32c==1.6.0 -google-generativeai==0.8.3 -google-resumable-media==2.7.2 -googleapis-common-protos[grpc]==1.66.0 -grpc-google-iam-v1==0.13.1 -grpcio==1.68.0 -grpcio-status==1.68.0 -httplib2==0.22.0 -hypothesis==6.45.0 -idna==3.10 -iniconfig==2.0.0 -mock==5.1.0 -numpy==2.1.3 -opentracing==2.4.0 -packaging==24.2 -pillow==11.0.0 -pluggy==1.5.0 -proto-plus==1.25.0 -protobuf==5.28.3 -pyasn1==0.6.1 -pyasn1-modules==0.4.1 -pydantic==2.10.2 -pydantic-core==2.27.1 -pyparsing==3.2.0 -pytest==8.3.3 -pytest-asyncio==0.24.0 -pytest-cov==6.0.0 
-pytest-mock==3.14.0 -python-dateutil==2.9.0.post0 -requests==2.32.3 -rsa==4.9 -shapely==2.0.6 -six==1.16.0 -sortedcontainers==2.4.0 -tqdm==4.67.1 -typing-extensions==4.12.2 -uritemplate==4.1.1 -urllib3==2.2.3 -vertexai==1.71.1 diff --git a/.riot/requirements/685a359.txt b/.riot/requirements/685a359.txt deleted file mode 100644 index 72c9d9c6554..00000000000 --- a/.riot/requirements/685a359.txt +++ /dev/null @@ -1,45 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/685a359.in -# -aiofiles==23.2.1 -anyio==4.2.0 -attrs==23.1.0 -certifi==2023.11.17 -charset-normalizer==3.3.2 -coverage[toml]==7.3.4 -exceptiongroup==1.2.0 -h11==0.14.0 -httpcore==0.16.3 -httptools==0.6.1 -httpx==0.23.3 -hypothesis==6.45.0 -idna==3.6 -importlib-metadata==7.0.0 -iniconfig==2.0.0 -mock==5.1.0 -multidict==5.2.0 -opentracing==2.4.0 -packaging==23.2 -pluggy==1.3.0 -pytest==7.4.3 -pytest-asyncio==0.21.1 -pytest-cov==4.1.0 -pytest-mock==3.12.0 -pytest-randomly==3.15.0 -requests==2.31.0 -rfc3986[idna2008]==1.5.0 -sanic==21.12.2 -sanic-routing==0.7.2 -sanic-testing==0.8.3 -sniffio==1.3.0 -sortedcontainers==2.4.0 -tomli==2.0.1 -typing-extensions==4.9.0 -ujson==5.9.0 -urllib3==2.1.0 -uvloop==0.19.0 -websockets==10.4 -zipp==3.17.0 diff --git a/.riot/requirements/696c125.txt b/.riot/requirements/696c125.txt deleted file mode 100644 index 6dfb1e7605d..00000000000 --- a/.riot/requirements/696c125.txt +++ /dev/null @@ -1,74 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/696c125.in -# -annotated-types==0.7.0 -attrs==25.3.0 -aws-sam-translator==1.97.0 -aws-xray-sdk==2.14.0 -boto==2.49.0 -boto3==1.37.38 -botocore==1.37.38 -certifi==2025.4.26 -cffi==1.17.1 -cfn-lint==0.53.1 -charset-normalizer==3.4.2 -coverage[toml]==7.6.1 -cryptography==45.0.3 -docker==7.1.0 -ecdsa==0.14.1 
-exceptiongroup==1.3.0 -execnet==2.1.1 -hypothesis==6.45.0 -idna==2.10 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -jinja2==2.10.3 -jmespath==1.0.1 -jsondiff==2.2.1 -jsonpatch==1.33 -jsonpointer==3.0.0 -jsonschema==3.2.0 -junit-xml==1.9 -markupsafe==1.1.1 -mock==5.2.0 -more-itertools==10.5.0 -moto==1.3.16 -networkx==2.8.8 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pyasn1==0.4.8 -pycparser==2.22 -pydantic==2.10.6 -pydantic-core==2.27.2 -pynamodb==5.5.1 -pyrsistent==0.20.0 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -pytest-xdist==3.6.1 -python-dateutil==2.9.0.post0 -python-jose[cryptography]==3.4.0 -pytz==2025.2 -pyyaml==6.0.2 -requests==2.32.3 -responses==0.25.7 -rsa==4.9.1 -s3transfer==0.11.5 -six==1.17.0 -sortedcontainers==2.4.0 -sshpubkeys==3.3.1 -tomli==2.2.1 -typing-extensions==4.13.2 -urllib3==1.26.20 -werkzeug==2.1.2 -wrapt==1.17.2 -xmltodict==0.14.2 -zipp==3.20.2 - -# The following packages are considered to be unsafe in a requirements file: -setuptools==75.3.2 diff --git a/.riot/requirements/6a87378.txt b/.riot/requirements/6a87378.txt deleted file mode 100644 index 8e0eabae4ad..00000000000 --- a/.riot/requirements/6a87378.txt +++ /dev/null @@ -1,35 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/6a87378.in -# -attrs==25.3.0 -blinker==1.8.2 -certifi==2025.7.9 -charset-normalizer==3.4.2 -click==8.1.8 -coverage[toml]==7.6.1 -exceptiongroup==1.3.0 -flask==3.0.3 -hypothesis==6.113.0 -idna==3.10 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -itsdangerous==2.2.0 -jinja2==3.1.6 -markupsafe==2.1.5 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -requests==2.32.4 -sortedcontainers==2.4.0 -tomli==2.2.1 -typing-extensions==4.13.2 -urllib3==2.2.3 -werkzeug==3.0.6 -zipp==3.20.2 diff --git a/.riot/requirements/6acdecb.txt 
b/.riot/requirements/6acdecb.txt deleted file mode 100644 index 3045a6a286d..00000000000 --- a/.riot/requirements/6acdecb.txt +++ /dev/null @@ -1,31 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/6acdecb.in -# -attrs==25.3.0 -coverage[toml]==7.6.1 -exceptiongroup==1.3.0 -gunicorn==23.0.0 -hypothesis==6.45.0 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -protobuf==5.29.5 -py-cpuinfo==8.0.0 -pytest==8.3.5 -pytest-asyncio==0.21.1 -pytest-benchmark==4.0.0 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.3.0 -typing-extensions==4.13.2 -uwsgi==2.0.31 -zipp==3.20.2 -zstandard==0.23.0 diff --git a/.riot/requirements/f8baf7c.txt b/.riot/requirements/6add1e1.txt similarity index 89% rename from .riot/requirements/f8baf7c.txt rename to .riot/requirements/6add1e1.txt index d0f146012b2..a47847d95ce 100644 --- a/.riot/requirements/f8baf7c.txt +++ b/.riot/requirements/6add1e1.txt @@ -2,10 +2,10 @@ # This file is autogenerated by pip-compile with Python 3.12 # by the following command: # -# pip-compile --allow-unsafe --no-annotate .riot/requirements/f8baf7c.in +# pip-compile --allow-unsafe --no-annotate .riot/requirements/6add1e1.in # attrs==25.4.0 -coverage[toml]==7.11.0 +coverage[toml]==7.11.3 gunicorn==23.0.0 hypothesis==6.45.0 iniconfig==2.3.0 @@ -18,9 +18,9 @@ pluggy==1.6.0 protobuf==6.33.0 py-cpuinfo==8.0.0 pygments==2.19.2 -pytest==8.4.2 +pytest==9.0.0 pytest-asyncio==0.21.1 -pytest-benchmark==5.2.1 +pytest-benchmark==5.2.3 pytest-cov==7.0.0 pytest-cpp==2.6.0 pytest-mock==3.15.1 diff --git a/.riot/requirements/6bec1ec.txt b/.riot/requirements/6bec1ec.txt deleted file mode 100644 index 3e128a77c79..00000000000 --- a/.riot/requirements/6bec1ec.txt +++ /dev/null @@ -1,31 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# 
by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/6bec1ec.in -# -attrs==24.3.0 -coverage[toml]==7.6.1 -exceptiongroup==1.2.2 -gevent==24.2.1 -greenlet==3.1.0 -hypothesis==6.45.0 -importlib-metadata==8.5.0 -iniconfig==2.0.0 -mock==5.1.0 -msgpack==1.1.0 -opentracing==2.4.0 -packaging==24.2 -pluggy==1.5.0 -pytest==8.3.4 -pytest-cov==5.0.0 -pytest-mock==3.14.0 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.2.1 -zipp==3.20.2 -zope-event==5.0 -zope-interface==7.2 - -# The following packages are considered to be unsafe in a requirements file: -setuptools==75.3.0 diff --git a/.riot/requirements/f7d20cb.txt b/.riot/requirements/6c2a4d1.txt similarity index 95% rename from .riot/requirements/f7d20cb.txt rename to .riot/requirements/6c2a4d1.txt index ef9107d6c5e..a3ca91e8993 100644 --- a/.riot/requirements/f7d20cb.txt +++ b/.riot/requirements/6c2a4d1.txt @@ -2,7 +2,7 @@ # This file is autogenerated by pip-compile with Python 3.9 # by the following command: # -# pip-compile --allow-unsafe --no-annotate .riot/requirements/f7d20cb.in +# pip-compile --allow-unsafe --no-annotate .riot/requirements/6c2a4d1.in # attrs==25.4.0 coverage[toml]==7.10.7 @@ -22,7 +22,7 @@ py-cpuinfo==8.0.0 pygments==2.19.2 pytest==8.4.2 pytest-asyncio==0.21.1 -pytest-benchmark==5.2.1 +pytest-benchmark==5.2.3 pytest-cov==7.0.0 pytest-cpp==2.6.0 pytest-mock==3.15.1 diff --git a/.riot/requirements/bebf559.txt b/.riot/requirements/6c3e5ec.txt similarity index 66% rename from .riot/requirements/bebf559.txt rename to .riot/requirements/6c3e5ec.txt index c88dcfdf8da..ce24be968f5 100644 --- a/.riot/requirements/bebf559.txt +++ b/.riot/requirements/6c3e5ec.txt @@ -2,14 +2,14 @@ # This file is autogenerated by pip-compile with Python 3.12 # by the following command: # -# pip-compile --allow-unsafe --no-annotate .riot/requirements/bebf559.in +# pip-compile --allow-unsafe --no-annotate .riot/requirements/6c3e5ec.in # -asgiref==3.9.1 -attrs==25.3.0 
+asgiref==3.10.0 +attrs==25.4.0 bcrypt==4.2.1 -certifi==2025.8.3 -charset-normalizer==3.4.3 -coverage[toml]==7.10.7 +certifi==2025.10.5 +charset-normalizer==3.4.4 +coverage[toml]==7.11.1 dill==0.4.0 django==4.0.10 django-configurations==2.5.1 @@ -17,8 +17,9 @@ gevent==25.9.1 greenlet==3.2.4 gunicorn==23.0.0 hypothesis==6.45.0 -idna==3.10 -iniconfig==2.1.0 +idna==3.11 +iniconfig==2.3.0 +legacy-cgi==2.6.4 mock==5.2.0 opentracing==2.4.0 packaging==25.0 @@ -29,14 +30,11 @@ pytest==8.4.2 pytest-cov==7.0.0 pytest-django[testing]==3.10.0 pytest-mock==3.15.1 -pyyaml==6.0.2 +pyyaml==6.0.3 requests==2.32.5 six==1.17.0 sortedcontainers==2.4.0 sqlparse==0.5.3 urllib3==2.5.0 -zope-event==6.0 -zope-interface==8.0 - -# The following packages are considered to be unsafe in a requirements file: -setuptools==80.9.0 +zope-event==6.1 +zope-interface==8.0.1 diff --git a/.riot/requirements/6c7321b.txt b/.riot/requirements/6c7321b.txt deleted file mode 100644 index 95dbc79d252..00000000000 --- a/.riot/requirements/6c7321b.txt +++ /dev/null @@ -1,30 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/6c7321b.in -# -attrs==25.3.0 -cattrs==22.2.0 -coverage[toml]==7.6.1 -exceptiongroup==1.2.2 -execnet==2.1.1 -hypothesis==6.45.0 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -mock==5.2.0 -molten==1.0.2 -mypy-extensions==1.1.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -pytest-xdist==3.6.1 -sortedcontainers==2.4.0 -tomli==2.2.1 -typing-extensions==3.10.0.2 -typing-inspect==0.6.0 -zipp==3.20.2 diff --git a/.riot/requirements/c48b250.txt b/.riot/requirements/6c76bd7.txt similarity index 66% rename from .riot/requirements/c48b250.txt rename to .riot/requirements/6c76bd7.txt index 2d957b44797..1e823950d21 100644 --- a/.riot/requirements/c48b250.txt +++ b/.riot/requirements/6c76bd7.txt @@ -2,14 +2,14 
@@ # This file is autogenerated by pip-compile with Python 3.11 # by the following command: # -# pip-compile --allow-unsafe --no-annotate .riot/requirements/c48b250.in +# pip-compile --allow-unsafe --no-annotate .riot/requirements/6c76bd7.in # -asgiref==3.9.1 -attrs==25.3.0 +asgiref==3.10.0 +attrs==25.4.0 bcrypt==4.2.1 -certifi==2025.8.3 -charset-normalizer==3.4.3 -coverage[toml]==7.10.7 +certifi==2025.10.5 +charset-normalizer==3.4.4 +coverage[toml]==7.11.1 dill==0.4.0 django==4.0.10 django-configurations==2.5.1 @@ -17,8 +17,9 @@ gevent==25.9.1 greenlet==3.2.4 gunicorn==23.0.0 hypothesis==6.45.0 -idna==3.10 -iniconfig==2.1.0 +idna==3.11 +iniconfig==2.3.0 +legacy-cgi==2.6.4 mock==5.2.0 opentracing==2.4.0 packaging==25.0 @@ -29,14 +30,11 @@ pytest==8.4.2 pytest-cov==7.0.0 pytest-django[testing]==3.10.0 pytest-mock==3.15.1 -pyyaml==6.0.2 +pyyaml==6.0.3 requests==2.32.5 six==1.17.0 sortedcontainers==2.4.0 sqlparse==0.5.3 urllib3==2.5.0 -zope-event==6.0 -zope-interface==8.0 - -# The following packages are considered to be unsafe in a requirements file: -setuptools==80.9.0 +zope-event==6.1 +zope-interface==8.0.1 diff --git a/.riot/requirements/6c872ab.txt b/.riot/requirements/6c872ab.txt deleted file mode 100644 index e5434a6da08..00000000000 --- a/.riot/requirements/6c872ab.txt +++ /dev/null @@ -1,27 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/6c872ab.in -# -attrs==23.1.0 -certifi==2023.11.17 -coverage[toml]==7.3.4 -elastic-transport==8.11.0 -elasticsearch8==8.0.1 -exceptiongroup==1.2.0 -hypothesis==6.45.0 -importlib-metadata==7.0.0 -iniconfig==2.0.0 -mock==5.1.0 -opentracing==2.4.0 -packaging==23.2 -pluggy==1.3.0 -pytest==7.4.3 -pytest-cov==4.1.0 -pytest-mock==3.12.0 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.0.1 -urllib3==2.1.0 -zipp==3.17.0 diff --git a/.riot/requirements/6d67b0b.txt b/.riot/requirements/6d67b0b.txt deleted file mode 100644 
index d701321ec5c..00000000000 --- a/.riot/requirements/6d67b0b.txt +++ /dev/null @@ -1,42 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/6d67b0b.in -# -asn1crypto==1.5.1 -attrs==23.1.0 -certifi==2023.11.17 -cffi==1.16.0 -charset-normalizer==3.3.2 -coverage[toml]==7.3.4 -cryptography==38.0.4 -exceptiongroup==1.2.0 -filelock==3.13.1 -hypothesis==6.45.0 -idna==3.6 -importlib-metadata==7.0.0 -iniconfig==2.0.0 -mock==5.1.0 -opentracing==2.4.0 -packaging==23.2 -platformdirs==3.11.0 -pluggy==1.3.0 -pycparser==2.21 -pyjwt==2.8.0 -pyopenssl==23.2.0 -pytest==7.4.3 -pytest-cov==4.1.0 -pytest-mock==3.12.0 -pytest-randomly==3.15.0 -pytz==2023.3.post1 -requests==2.31.0 -responses==0.16.0 -six==1.16.0 -snowflake-connector-python==3.6.0 -sortedcontainers==2.4.0 -tomli==2.0.1 -tomlkit==0.12.3 -typing-extensions==4.9.0 -urllib3==1.26.18 -zipp==3.17.0 diff --git a/.riot/requirements/6da0824.txt b/.riot/requirements/6da0824.txt deleted file mode 100644 index 72d3c32244c..00000000000 --- a/.riot/requirements/6da0824.txt +++ /dev/null @@ -1,27 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/6da0824.in -# -attrs==25.3.0 -azure-functions==1.10.1 -certifi==2025.8.3 -charset-normalizer==3.4.3 -coverage[toml]==7.6.1 -exceptiongroup==1.3.0 -hypothesis==6.45.0 -idna==3.10 -iniconfig==2.1.0 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -requests==2.32.4 -sortedcontainers==2.4.0 -tomli==2.2.1 -typing-extensions==4.13.2 -urllib3==2.2.3 diff --git a/.riot/requirements/6e26af7.txt b/.riot/requirements/6e26af7.txt deleted file mode 100644 index 96aa8bbb8ad..00000000000 --- a/.riot/requirements/6e26af7.txt +++ /dev/null @@ -1,51 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by 
the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/6e26af7.in -# -annotated-types==0.7.0 -anyio==4.5.2 -attrs==25.3.0 -boto3==1.37.38 -botocore==1.37.38 -certifi==2025.8.3 -coverage[toml]==7.6.1 -exceptiongroup==1.3.0 -fastapi==0.116.1 -freezegun==1.5.5 -h11==0.16.0 -httpcore==1.0.9 -httpretty==1.1.4 -httpx==0.27.2 -hypothesis==6.45.0 -idna==3.10 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -jmespath==1.0.1 -mock==5.2.0 -msgpack==1.1.1 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pydantic==2.10.6 -pydantic-core==2.27.2 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -python-dateutil==2.9.0.post0 -s3transfer==0.11.5 -six==1.17.0 -sniffio==1.3.1 -sortedcontainers==2.4.0 -starlette==0.44.0 -structlog==25.4.0 -tomli==2.2.1 -typing-extensions==4.13.2 -urllib3==1.26.20 -wheel==0.45.1 -zipp==3.20.2 - -# The following packages are considered to be unsafe in a requirements file: -setuptools==75.3.2 diff --git a/.riot/requirements/6f7b1a1.txt b/.riot/requirements/6f7b1a1.txt deleted file mode 100644 index a22644251aa..00000000000 --- a/.riot/requirements/6f7b1a1.txt +++ /dev/null @@ -1,37 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.10 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/6f7b1a1.in -# -attrs==25.4.0 -coverage[toml]==7.11.0 -exceptiongroup==1.3.0 -gevent==25.9.1 -greenlet==3.2.4 -gunicorn[gevent]==23.0.0 -hypothesis==6.45.0 -iniconfig==2.3.0 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.6.0 -protobuf==6.33.0 -py-cpuinfo==8.0.0 -pygments==2.19.2 -pytest==8.4.2 -pytest-asyncio==0.21.1 -pytest-benchmark==5.2.1 -pytest-cov==7.0.0 -pytest-mock==3.15.1 -pytest-randomly==4.0.1 -sortedcontainers==2.4.0 -tomli==2.3.0 -typing-extensions==4.15.0 -uwsgi==2.0.31 -zope-event==6.0 -zope-interface==8.0.1 -zstandard==0.25.0 - -# The following packages are considered to be unsafe in a requirements file: 
-setuptools==80.9.0 diff --git a/.riot/requirements/70dec77.txt b/.riot/requirements/70dec77.txt deleted file mode 100644 index 16751370567..00000000000 --- a/.riot/requirements/70dec77.txt +++ /dev/null @@ -1,35 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate --resolver=backtracking .riot/requirements/70dec77.in -# -asgiref==3.8.1 -attrs==25.3.0 -backports-zoneinfo==0.2.1 -coverage[toml]==7.6.1 -django==4.2.20 -django-configurations==2.5.1 -django-hosts==5.2 -exceptiongroup==1.2.2 -hypothesis==6.45.0 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -mock==5.2.0 -opentracing==2.4.0 -packaging==24.2 -pluggy==1.5.0 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-django[testing]==3.10.0 -pytest-mock==3.14.0 -pytest-randomly==3.15.0 -six==1.17.0 -sortedcontainers==2.4.0 -sqlparse==0.5.3 -tomli==2.2.1 -typing-extensions==4.13.2 -zipp==3.20.2 - -# The following packages are considered to be unsafe in a requirements file: -setuptools==75.3.2 diff --git a/.riot/requirements/7341bd9.txt b/.riot/requirements/7341bd9.txt deleted file mode 100644 index 95fd932c141..00000000000 --- a/.riot/requirements/7341bd9.txt +++ /dev/null @@ -1,25 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/7341bd9.in -# -attrs==23.1.0 -coverage[toml]==7.3.4 -exceptiongroup==1.2.0 -hypothesis==6.45.0 -importlib-metadata==7.0.0 -iniconfig==2.0.0 -mock==5.1.0 -opentracing==2.4.0 -packaging==23.2 -pluggy==1.3.0 -pymemcache==3.4.4 -pytest==7.4.3 -pytest-cov==4.1.0 -pytest-mock==3.12.0 -pytest-randomly==3.15.0 -six==1.16.0 -sortedcontainers==2.4.0 -tomli==2.0.1 -zipp==3.17.0 diff --git a/.riot/requirements/73d37c5.txt b/.riot/requirements/73d37c5.txt deleted file mode 100644 index af1be13fd5b..00000000000 --- a/.riot/requirements/73d37c5.txt +++ /dev/null @@ -1,25 +0,0 @@ -# -# This file is autogenerated by 
pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate --resolver=backtracking .riot/requirements/73d37c5.in -# -attrs==25.3.0 -coverage[toml]==7.6.1 -exceptiongroup==1.3.0 -hypothesis==6.45.0 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pytest==8.0.0 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.2.1 -tornado==6.4.2 -typing-extensions==4.13.2 -zipp==3.20.2 diff --git a/.riot/requirements/75dda93.txt b/.riot/requirements/75dda93.txt deleted file mode 100644 index 8d2df273f73..00000000000 --- a/.riot/requirements/75dda93.txt +++ /dev/null @@ -1,34 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/75dda93.in -# -attrs==23.2.0 -blinker==1.7.0 -cachelib==0.9.0 -click==7.1.2 -coverage[toml]==7.4.2 -exceptiongroup==1.2.0 -flask==1.1.4 -flask-caching==2.1.0 -hypothesis==6.45.0 -importlib-metadata==7.0.1 -iniconfig==2.0.0 -itsdangerous==1.1.0 -jinja2==2.11.3 -markupsafe==1.1.1 -mock==5.1.0 -opentracing==2.4.0 -packaging==23.2 -pluggy==1.4.0 -pytest==8.0.1 -pytest-cov==4.1.0 -pytest-mock==3.12.0 -pytest-randomly==3.15.0 -python-memcached==1.62 -redis==2.10.6 -sortedcontainers==2.4.0 -tomli==2.0.1 -werkzeug==1.0.1 -zipp==3.17.0 diff --git a/.riot/requirements/7613d04.txt b/.riot/requirements/7613d04.txt deleted file mode 100644 index af4b5537dd7..00000000000 --- a/.riot/requirements/7613d04.txt +++ /dev/null @@ -1,32 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate --resolver=backtracking .riot/requirements/7613d04.in -# -attrs==25.3.0 -certifi==2025.1.31 -charset-normalizer==3.4.1 -coverage[toml]==7.6.1 -events==0.5 -exceptiongroup==1.2.2 -hypothesis==6.45.0 -idna==3.10 -importlib-metadata==8.5.0 
-iniconfig==2.1.0 -mock==5.2.0 -opensearch-py[requests]==2.8.0 -opentracing==2.4.0 -packaging==24.2 -pluggy==1.5.0 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-mock==3.14.0 -pytest-randomly==3.15.0 -python-dateutil==2.9.0.post0 -requests==2.32.3 -six==1.17.0 -sortedcontainers==2.4.0 -tomli==2.2.1 -urllib3==1.26.20 -zipp==3.20.2 diff --git a/.riot/requirements/768e5b9.txt b/.riot/requirements/768e5b9.txt deleted file mode 100644 index 23ad150ce63..00000000000 --- a/.riot/requirements/768e5b9.txt +++ /dev/null @@ -1,27 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.12 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/768e5b9.in -# -attrs==25.4.0 -coverage[toml]==7.11.0 -gunicorn==23.0.0 -hypothesis==6.45.0 -iniconfig==2.3.0 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.6.0 -protobuf==6.33.0 -py-cpuinfo==8.0.0 -pygments==2.19.2 -pytest==8.4.2 -pytest-asyncio==0.21.1 -pytest-benchmark==5.2.1 -pytest-cov==7.0.0 -pytest-mock==3.15.1 -pytest-randomly==4.0.1 -sortedcontainers==2.4.0 -uwsgi==2.0.31 -zstandard==0.25.0 diff --git a/.riot/requirements/77db507.txt b/.riot/requirements/77db507.txt deleted file mode 100644 index bc4ac6664eb..00000000000 --- a/.riot/requirements/77db507.txt +++ /dev/null @@ -1,38 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/77db507.in -# -astunparse==1.6.3 -attrs==25.3.0 -blinker==1.8.2 -certifi==2025.4.26 -charset-normalizer==3.4.2 -click==8.1.8 -coverage[toml]==7.6.1 -exceptiongroup==1.2.2 -flask==3.0.3 -hypothesis==6.45.0 -idna==3.10 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -itsdangerous==2.2.0 -jinja2==3.1.6 -markupsafe==2.1.5 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-mock==3.14.0 -requests==2.32.3 -six==1.17.0 -sortedcontainers==2.4.0 -tomli==2.2.1 -urllib3==2.2.3 -virtualenv-clone==0.5.7 
-werkzeug==3.0.6 -wheel==0.45.1 -zipp==3.20.2 diff --git a/.riot/requirements/79deb5b.txt b/.riot/requirements/79deb5b.txt deleted file mode 100644 index 22e9b499ea7..00000000000 --- a/.riot/requirements/79deb5b.txt +++ /dev/null @@ -1,42 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/79deb5b.in -# -aiobotocore==1.4.2 -aiohappyeyeballs==2.4.0 -aiohttp==3.10.5 -aioitertools==0.11.0 -aiosignal==1.3.1 -async-generator==1.10 -async-timeout==4.0.3 -attrs==24.2.0 -botocore==1.20.106 -coverage[toml]==7.6.1 -exceptiongroup==1.2.2 -frozenlist==1.4.1 -hypothesis==6.45.0 -idna==3.8 -importlib-metadata==8.4.0 -iniconfig==2.0.0 -jmespath==0.10.0 -mock==5.1.0 -multidict==6.0.5 -opentracing==2.4.0 -packaging==24.1 -pluggy==1.5.0 -pytest==8.3.2 -pytest-asyncio==0.21.1 -pytest-cov==5.0.0 -pytest-mock==3.14.0 -pytest-randomly==3.15.0 -python-dateutil==2.9.0.post0 -six==1.16.0 -sortedcontainers==2.4.0 -tomli==2.0.1 -typing-extensions==4.12.2 -urllib3==1.26.19 -wrapt==1.16.0 -yarl==1.9.4 -zipp==3.20.0 diff --git a/.riot/requirements/7b02bf5.txt b/.riot/requirements/7b02bf5.txt deleted file mode 100644 index 399b31b7be8..00000000000 --- a/.riot/requirements/7b02bf5.txt +++ /dev/null @@ -1,31 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/7b02bf5.in -# -attrs==25.3.0 -azure-core==1.33.0 -azure-servicebus==7.14.2 -certifi==2025.8.3 -charset-normalizer==3.4.3 -coverage[toml]==7.6.1 -exceptiongroup==1.3.0 -hypothesis==6.45.0 -idna==3.10 -iniconfig==2.1.0 -isodate==0.7.2 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pytest==8.3.5 -pytest-asyncio==0.23.7 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -requests==2.32.4 -six==1.17.0 -sortedcontainers==2.4.0 -tomli==2.2.1 -typing-extensions==4.13.2 -urllib3==2.2.3 diff --git 
a/.riot/requirements/7fc5d79.txt b/.riot/requirements/7fc5d79.txt deleted file mode 100644 index 3b3c5a35f65..00000000000 --- a/.riot/requirements/7fc5d79.txt +++ /dev/null @@ -1,49 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/7fc5d79.in -# -attrs==25.3.0 -babel==2.17.0 -blinker==1.8.2 -certifi==2025.8.3 -charset-normalizer==3.4.3 -click==8.1.8 -coverage[toml]==7.6.1 -exceptiongroup==1.3.0 -flask==3.0.3 -flask-babel==4.0.0 -gevent==24.2.1 -greenlet==3.1.1 -gunicorn==23.0.0 -hypothesis==6.45.0 -idna==3.10 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -itsdangerous==2.2.0 -jinja2==3.1.6 -markupsafe==2.1.5 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -psycopg2-binary==2.9.10 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -pytz==2025.2 -requests==2.32.4 -sortedcontainers==2.4.0 -sqlalchemy==2.0.43 -tomli==2.2.1 -typing-extensions==4.13.2 -urllib3==2.2.3 -werkzeug==3.0.6 -zipp==3.20.2 -zope-event==5.0 -zope-interface==7.2 - -# The following packages are considered to be unsafe in a requirements file: -setuptools==75.3.2 diff --git a/.riot/requirements/7ffd29a.txt b/.riot/requirements/7ffd29a.txt deleted file mode 100644 index 447a518853d..00000000000 --- a/.riot/requirements/7ffd29a.txt +++ /dev/null @@ -1,20 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/7ffd29a.in -# -attrs==25.1.0 -coverage[toml]==7.6.1 -exceptiongroup==1.2.2 -hypothesis==6.45.0 -iniconfig==2.0.0 -mock==5.1.0 -opentracing==2.4.0 -packaging==24.2 -pluggy==1.5.0 -pytest==8.3.4 -pytest-cov==5.0.0 -pytest-mock==3.14.0 -sortedcontainers==2.4.0 -tomli==2.2.1 diff --git a/.riot/requirements/8005ddd.txt b/.riot/requirements/8005ddd.txt deleted file mode 100644 index 21b76718581..00000000000 --- a/.riot/requirements/8005ddd.txt +++ 
/dev/null @@ -1,40 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.11 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/8005ddd.in -# -attrs==25.4.0 -coverage[toml]==7.11.0 -gevent==25.9.1 -greenlet==3.2.4 -gunicorn[gevent]==23.0.0 -hypothesis==6.45.0 -iniconfig==2.3.0 -jsonschema==4.25.1 -jsonschema-specifications==2025.9.1 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.6.0 -protobuf==6.33.0 -py-cpuinfo==8.0.0 -pygments==2.19.2 -pytest==8.4.2 -pytest-asyncio==0.21.1 -pytest-benchmark==5.2.1 -pytest-cov==7.0.0 -pytest-cpp==2.6.0 -pytest-mock==3.15.1 -pytest-randomly==4.0.1 -referencing==0.37.0 -rpds-py==0.28.0 -sortedcontainers==2.4.0 -typing-extensions==4.15.0 -uwsgi==2.0.31 -zope-event==6.0 -zope-interface==8.0.1 -zstandard==0.25.0 - -# The following packages are considered to be unsafe in a requirements file: -setuptools==80.9.0 diff --git a/.riot/requirements/82fb241.txt b/.riot/requirements/82fb241.txt deleted file mode 100644 index 269b0fb3b8b..00000000000 --- a/.riot/requirements/82fb241.txt +++ /dev/null @@ -1,33 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/82fb241.in -# -aiohttp==3.7.4.post0 -async-timeout==3.0.1 -attrs==25.3.0 -chardet==4.0.0 -coverage[toml]==7.6.1 -exceptiongroup==1.3.0 -hypothesis==6.45.0 -idna==3.10 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -mock==5.2.0 -multidict==6.1.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -propcache==0.2.0 -pytest==8.3.5 -pytest-aiohttp==0.3.0 -pytest-asyncio==0.23.7 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.2.1 -typing-extensions==4.13.2 -yarl==1.15.2 -zipp==3.20.2 diff --git a/.riot/requirements/83c892b.txt b/.riot/requirements/83c892b.txt deleted file mode 100644 index 9728e45e7fc..00000000000 --- a/.riot/requirements/83c892b.txt +++ 
/dev/null @@ -1,34 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.12 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/83c892b.in -# -attrs==25.4.0 -coverage[toml]==7.11.0 -gevent==25.9.1 -greenlet==3.2.4 -gunicorn[gevent]==23.0.0 -hypothesis==6.45.0 -iniconfig==2.3.0 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.6.0 -protobuf==6.33.0 -py-cpuinfo==8.0.0 -pygments==2.19.2 -pytest==8.4.2 -pytest-asyncio==0.21.1 -pytest-benchmark==5.2.1 -pytest-cov==7.0.0 -pytest-mock==3.15.1 -pytest-randomly==4.0.1 -sortedcontainers==2.4.0 -uwsgi==2.0.31 -zope-event==6.0 -zope-interface==8.0.1 -zstandard==0.25.0 - -# The following packages are considered to be unsafe in a requirements file: -setuptools==80.9.0 diff --git a/.riot/requirements/853ba9f.txt b/.riot/requirements/853ba9f.txt deleted file mode 100644 index 53ad0794ccf..00000000000 --- a/.riot/requirements/853ba9f.txt +++ /dev/null @@ -1,45 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/853ba9f.in -# -attrs==25.3.0 -coverage[toml]==7.6.1 -exceptiongroup==1.3.0 -gevent==22.10.2 -greenlet==3.1.1 -gunicorn[gevent]==23.0.0 -hypothesis==6.45.0 -importlib-metadata==8.5.0 -importlib-resources==6.4.5 -iniconfig==2.1.0 -jsonschema==4.23.0 -jsonschema-specifications==2023.12.1 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pkgutil-resolve-name==1.3.10 -pluggy==1.5.0 -protobuf==5.29.5 -py-cpuinfo==8.0.0 -pytest==8.3.5 -pytest-asyncio==0.21.1 -pytest-benchmark==4.0.0 -pytest-cov==5.0.0 -pytest-cpp==2.6.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -referencing==0.35.1 -rpds-py==0.20.1 -sortedcontainers==2.4.0 -tomli==2.3.0 -typing-extensions==4.13.2 -uwsgi==2.0.31 -zipp==3.20.2 -zope-event==5.0 -zope-interface==7.2 -zstandard==0.23.0 - -# The following packages are considered to be unsafe in a requirements file: -setuptools==75.3.2 
diff --git a/.riot/requirements/85e923f.txt b/.riot/requirements/85e923f.txt deleted file mode 100644 index dc94da04908..00000000000 --- a/.riot/requirements/85e923f.txt +++ /dev/null @@ -1,36 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/85e923f.in -# -attrs==24.3.0 -certifi==2024.12.14 -charset-normalizer==3.4.1 -coverage[toml]==7.6.1 -exceptiongroup==1.2.2 -gevent==24.2.1 -greenlet==3.1.1 -gunicorn==20.0.4 -hypothesis==6.45.0 -idna==3.10 -importlib-metadata==8.5.0 -iniconfig==2.0.0 -mock==5.1.0 -opentracing==2.4.0 -packaging==24.2 -pluggy==1.5.0 -pytest==8.3.4 -pytest-cov==5.0.0 -pytest-mock==3.14.0 -pytest-randomly==3.15.0 -requests==2.32.3 -sortedcontainers==2.4.0 -tomli==2.2.1 -urllib3==2.2.3 -zipp==3.20.2 -zope-event==5.0 -zope-interface==7.2 - -# The following packages are considered to be unsafe in a requirements file: -setuptools==75.3.0 diff --git a/.riot/requirements/8733595.txt b/.riot/requirements/8733595.txt deleted file mode 100644 index e921c950132..00000000000 --- a/.riot/requirements/8733595.txt +++ /dev/null @@ -1,38 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/8733595.in -# -attrs==25.3.0 -coverage[toml]==7.6.1 -exceptiongroup==1.3.0 -gevent==24.2.1 -greenlet==3.1.1 -httpretty==1.1.4 -hypothesis==6.45.0 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -py-cpuinfo==9.0.0 -pyfakefs==5.10.0 -pytest==8.3.5 -pytest-asyncio==0.23.8 -pytest-benchmark==4.0.0 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -python-json-logger==2.0.7 -sortedcontainers==2.4.0 -tomli==2.3.0 -typing-extensions==4.13.2 -wrapt==2.0.0 -zipp==3.20.2 -zope-event==5.0 -zope-interface==7.2 - -# The following packages are considered to be unsafe in a requirements 
file: -setuptools==75.3.2 diff --git a/.riot/requirements/8a17cb2.txt b/.riot/requirements/8a17cb2.txt deleted file mode 100644 index c692572e88b..00000000000 --- a/.riot/requirements/8a17cb2.txt +++ /dev/null @@ -1,25 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/8a17cb2.in -# -attrs==25.3.0 -coverage[toml]==7.6.1 -exceptiongroup==1.3.0 -hypothesis==6.45.0 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -mariadb==1.1.13 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.2.1 -typing-extensions==4.13.2 -zipp==3.20.2 diff --git a/.riot/requirements/8c110bf.txt b/.riot/requirements/8c110bf.txt deleted file mode 100644 index 7ad7d4b82f0..00000000000 --- a/.riot/requirements/8c110bf.txt +++ /dev/null @@ -1,30 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/8c110bf.in -# -attrs==25.3.0 -beautifulsoup4==4.14.2 -bottle==0.12.25 -coverage[toml]==7.6.1 -exceptiongroup==1.3.0 -hypothesis==6.45.0 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -soupsieve==2.7 -tomli==2.3.0 -typing-extensions==4.13.2 -waitress==3.0.0 -webob==1.8.9 -webtest==3.0.1 -zipp==3.20.2 diff --git a/.riot/requirements/9029977.txt b/.riot/requirements/9029977.txt deleted file mode 100644 index e320a67d9a0..00000000000 --- a/.riot/requirements/9029977.txt +++ /dev/null @@ -1,23 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/9029977.in -# -attrs==23.1.0 -coverage[toml]==7.3.4 
-exceptiongroup==1.2.0 -hypothesis==6.45.0 -importlib-metadata==7.0.0 -iniconfig==2.0.0 -mock==5.1.0 -opentracing==2.4.0 -packaging==23.2 -pluggy==1.3.0 -pytest==7.4.3 -pytest-cov==4.1.0 -pytest-mock==3.12.0 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.0.1 -zipp==3.17.0 diff --git a/.riot/requirements/921b9fb.txt b/.riot/requirements/921b9fb.txt deleted file mode 100644 index 8ec138a215a..00000000000 --- a/.riot/requirements/921b9fb.txt +++ /dev/null @@ -1,51 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/921b9fb.in -# -attrs==25.3.0 -certifi==2025.6.15 -charset-normalizer==2.1.1 -click==8.1.8 -coverage[toml]==7.6.1 -deprecated==1.2.18 -exceptiongroup==1.3.0 -flask==2.1.3 -gevent==24.2.1 -greenlet==3.1.1 -hypothesis==6.45.0 -idna==3.10 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -itsdangerous==2.2.0 -jinja2==3.1.6 -markupsafe==2.0.1 -mock==5.2.0 -opentelemetry-api==1.15.0 -opentelemetry-instrumentation==0.45b0 -opentelemetry-instrumentation-flask==0.45b0 -opentelemetry-instrumentation-wsgi==0.45b0 -opentelemetry-semantic-conventions==0.45b0 -opentelemetry-util-http==0.45b0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pytest==8.3.5 -pytest-asyncio==0.21.1 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -requests==2.28.1 -sortedcontainers==2.4.0 -tomli==2.2.1 -typing-extensions==4.13.2 -urllib3==1.26.20 -werkzeug==2.1.2 -wrapt==1.17.2 -zipp==3.20.2 -zope-event==5.0 -zope-interface==7.2 - -# The following packages are considered to be unsafe in a requirements file: -setuptools==75.3.2 diff --git a/.riot/requirements/95f5020.txt b/.riot/requirements/95f5020.txt deleted file mode 100644 index ba732f47c55..00000000000 --- a/.riot/requirements/95f5020.txt +++ /dev/null @@ -1,25 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe 
--no-annotate .riot/requirements/95f5020.in -# -attrs==25.3.0 -coverage[toml]==7.6.1 -exceptiongroup==1.3.0 -hypothesis==6.45.0 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -mock==5.2.0 -msgpack==1.1.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.2.1 -typing-extensions==4.13.2 -zipp==3.20.2 diff --git a/.riot/requirements/9777f3d.txt b/.riot/requirements/9777f3d.txt deleted file mode 100644 index 4dddf9cd5d5..00000000000 --- a/.riot/requirements/9777f3d.txt +++ /dev/null @@ -1,24 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/9777f3d.in -# -attrs==24.2.0 -coverage[toml]==7.6.1 -exceptiongroup==1.2.2 -hypothesis==6.45.0 -importlib-metadata==8.4.0 -iniconfig==2.0.0 -mock==5.1.0 -opentracing==2.4.0 -packaging==24.1 -pluggy==1.5.0 -pymysql==1.1.1 -pytest==8.3.2 -pytest-cov==5.0.0 -pytest-mock==3.14.0 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.0.1 -zipp==3.20.1 diff --git a/.riot/requirements/97f1328.txt b/.riot/requirements/97f1328.txt deleted file mode 100644 index 68875c403e1..00000000000 --- a/.riot/requirements/97f1328.txt +++ /dev/null @@ -1,88 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/97f1328.in -# -aiohappyeyeballs==2.4.4 -aiohttp==3.10.11 -aiosignal==1.3.1 -annotated-types==0.7.0 -anyio==4.5.2 -appdirs==1.4.4 -async-timeout==4.0.3 -attrs==25.3.0 -certifi==2025.7.14 -charset-normalizer==3.4.2 -coverage[toml]==7.6.1 -dataclasses-json==0.6.7 -datasets==3.1.0 -dill==0.3.8 -distro==1.9.0 -exceptiongroup==1.3.0 -filelock==3.16.1 -frozenlist==1.5.0 -fsspec[http]==2024.9.0 -greenlet==3.1.1 -h11==0.16.0 -hf-xet==1.1.5 -httpcore==1.0.9 -httpx==0.28.1 -huggingface-hub==0.33.4 -hypothesis==6.45.0 
-idna==3.10 -iniconfig==2.1.0 -jiter==0.9.1 -jsonpatch==1.33 -jsonpointer==3.0.0 -langchain==0.2.17 -langchain-community==0.2.19 -langchain-core==0.2.43 -langchain-openai==0.1.25 -langchain-text-splitters==0.2.4 -langsmith==0.1.147 -marshmallow==3.22.0 -mock==5.2.0 -multidict==6.1.0 -multiprocess==0.70.16 -mypy-extensions==1.1.0 -nest-asyncio==1.6.0 -numpy==1.24.4 -openai==1.97.1 -opentracing==2.4.0 -orjson==3.10.15 -packaging==24.2 -pandas==2.0.3 -pluggy==1.5.0 -propcache==0.2.0 -pyarrow==17.0.0 -pydantic==2.10.6 -pydantic-core==2.27.2 -pysbd==0.3.4 -pytest==8.3.5 -pytest-asyncio==0.21.1 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -python-dateutil==2.9.0.post0 -pytz==2025.2 -pyyaml==6.0.2 -ragas==0.1.21 -regex==2024.11.6 -requests==2.32.4 -requests-toolbelt==1.0.0 -six==1.17.0 -sniffio==1.3.1 -sortedcontainers==2.4.0 -sqlalchemy==2.0.41 -tenacity==8.5.0 -tiktoken==0.7.0 -tomli==2.2.1 -tqdm==4.67.1 -typing-extensions==4.13.2 -typing-inspect==0.9.0 -tzdata==2025.2 -urllib3==1.26.20 -vcrpy==6.0.2 -wrapt==1.17.2 -xxhash==3.5.0 -yarl==1.15.2 diff --git a/.riot/requirements/9a6a8b9.txt b/.riot/requirements/9a6a8b9.txt deleted file mode 100644 index fc4a1144f87..00000000000 --- a/.riot/requirements/9a6a8b9.txt +++ /dev/null @@ -1,30 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate --resolver=backtracking .riot/requirements/9a6a8b9.in -# -algoliasearch==2.5.0 -attrs==25.3.0 -certifi==2025.4.26 -charset-normalizer==3.4.2 -coverage[toml]==7.6.1 -exceptiongroup==1.3.0 -hypothesis==6.45.0 -idna==3.10 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -requests==2.32.3 -sortedcontainers==2.4.0 -tomli==2.2.1 -typing-extensions==4.13.2 -urllib3==1.26.20 -zipp==3.20.2 diff --git a/.riot/requirements/47bd83d.txt b/.riot/requirements/9b46157.txt 
similarity index 89% rename from .riot/requirements/47bd83d.txt rename to .riot/requirements/9b46157.txt index a1fe8535aa0..cd593d67f42 100644 --- a/.riot/requirements/47bd83d.txt +++ b/.riot/requirements/9b46157.txt @@ -2,10 +2,10 @@ # This file is autogenerated by pip-compile with Python 3.11 # by the following command: # -# pip-compile --allow-unsafe --no-annotate .riot/requirements/47bd83d.in +# pip-compile --allow-unsafe --no-annotate .riot/requirements/9b46157.in # attrs==25.4.0 -coverage[toml]==7.11.0 +coverage[toml]==7.11.3 gunicorn==23.0.0 hypothesis==6.45.0 iniconfig==2.3.0 @@ -18,9 +18,9 @@ pluggy==1.6.0 protobuf==6.33.0 py-cpuinfo==8.0.0 pygments==2.19.2 -pytest==8.4.2 +pytest==9.0.0 pytest-asyncio==0.21.1 -pytest-benchmark==5.2.1 +pytest-benchmark==5.2.3 pytest-cov==7.0.0 pytest-cpp==2.6.0 pytest-mock==3.15.1 diff --git a/.riot/requirements/9b8251b.txt b/.riot/requirements/9b8251b.txt deleted file mode 100644 index c6c4004b105..00000000000 --- a/.riot/requirements/9b8251b.txt +++ /dev/null @@ -1,25 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/9b8251b.in -# -attrs==23.1.0 -coverage[toml]==7.3.4 -exceptiongroup==1.2.0 -hypothesis==6.45.0 -importlib-metadata==7.0.0 -iniconfig==2.0.0 -mock==5.1.0 -opentracing==2.4.0 -packaging==23.2 -pluggy==1.3.0 -pytest==7.4.3 -pytest-asyncio==0.21.1 -pytest-cov==4.1.0 -pytest-mock==3.12.0 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.0.1 -yaaredis==3.0.0 -zipp==3.17.0 diff --git a/.riot/requirements/9d50a6f.txt b/.riot/requirements/9d50a6f.txt deleted file mode 100644 index e09d60c42d8..00000000000 --- a/.riot/requirements/9d50a6f.txt +++ /dev/null @@ -1,33 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/9d50a6f.in -# -attrs==25.3.0 -coverage[toml]==7.6.1 -exceptiongroup==1.2.2 
-glob2==0.7 -hypothesis==6.45.0 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -mako==1.3.10 -markupsafe==2.1.5 -mock==5.2.0 -more-itertools==8.10.0 -msgpack==1.1.0 -opentracing==2.4.0 -packaging==25.0 -parse==1.20.2 -parse-type==0.6.4 -pluggy==1.5.0 -py==1.11.0 -pytest==7.4.4 -pytest-bdd==4.1.0 -pytest-cov==5.0.0 -pytest-mock==3.14.0 -pytest-randomly==3.15.0 -six==1.17.0 -sortedcontainers==2.4.0 -tomli==2.2.1 -zipp==3.20.2 diff --git a/.riot/requirements/9d72125.txt b/.riot/requirements/9d72125.txt deleted file mode 100644 index 7b0be1b80c9..00000000000 --- a/.riot/requirements/9d72125.txt +++ /dev/null @@ -1,24 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/9d72125.in -# -attrs==23.1.0 -coverage[toml]==7.3.4 -exceptiongroup==1.2.0 -hypothesis==6.45.0 -importlib-metadata==7.0.0 -iniconfig==2.0.0 -mock==5.1.0 -mysqlclient==2.2.1 -opentracing==2.4.0 -packaging==23.2 -pluggy==1.3.0 -pytest==7.4.3 -pytest-cov==4.1.0 -pytest-mock==3.12.0 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.0.1 -zipp==3.17.0 diff --git a/.riot/requirements/9e76fdf.txt b/.riot/requirements/9e76fdf.txt deleted file mode 100644 index c4d483c9361..00000000000 --- a/.riot/requirements/9e76fdf.txt +++ /dev/null @@ -1,33 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/9e76fdf.in -# -attrs==25.3.0 -coverage[toml]==7.6.1 -django==2.2.28 -django-configurations==2.3.2 -djangorestframework==3.12.4 -exceptiongroup==1.3.0 -execnet==2.1.1 -hypothesis==6.45.0 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-django[testing]==3.10.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -pytest-xdist==3.6.1 -pytz==2025.2 -six==1.17.0 -sortedcontainers==2.4.0 -sqlparse==0.5.3 
-tomli==2.2.1 -typing-extensions==4.13.2 -zipp==3.20.2 diff --git a/.riot/requirements/9eedbc0.txt b/.riot/requirements/9eedbc0.txt deleted file mode 100644 index 7d4ef3d6baf..00000000000 --- a/.riot/requirements/9eedbc0.txt +++ /dev/null @@ -1,42 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/9eedbc0.in -# -attrs==23.2.0 -autocommand==2.2.2 -backports-tarfile==1.2.0 -cheroot==10.0.1 -cherrypy==18.10.0 -coverage[toml]==7.6.0 -exceptiongroup==1.2.2 -hypothesis==6.45.0 -importlib-metadata==8.2.0 -importlib-resources==6.4.0 -inflect==7.3.1 -iniconfig==2.0.0 -jaraco-collections==5.0.1 -jaraco-context==5.3.0 -jaraco-functools==4.0.1 -jaraco-text==3.14.0 -mock==5.1.0 -more-itertools==8.10.0 -opentracing==2.4.0 -packaging==24.1 -pluggy==1.5.0 -portend==3.2.0 -pytest==8.3.1 -pytest-cov==5.0.0 -pytest-mock==3.14.0 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tempora==5.6.0 -tomli==2.0.1 -typeguard==4.3.0 -typing-extensions==4.12.2 -zc-lockfile==3.0.post1 -zipp==3.19.2 - -# The following packages are considered to be unsafe in a requirements file: -setuptools==71.1.0 diff --git a/.riot/requirements/a0b94b1.txt b/.riot/requirements/a0b94b1.txt deleted file mode 100644 index 71d76a09e22..00000000000 --- a/.riot/requirements/a0b94b1.txt +++ /dev/null @@ -1,23 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.12 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/a0b94b1.in -# -attrs==25.3.0 -coverage[toml]==7.8.2 -dnspython==2.7.0 -hypothesis==6.45.0 -iniconfig==2.1.0 -mock==5.2.0 -mongoengine==0.24.2 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.6.0 -pygments==2.19.1 -pymongo==4.8.0 -pytest==8.4.0 -pytest-cov==6.1.1 -pytest-mock==3.14.1 -pytest-randomly==3.16.0 -sortedcontainers==2.4.0 diff --git a/.riot/requirements/a25912e.txt b/.riot/requirements/a25912e.txt deleted file mode 100644 
index e08d10b7a83..00000000000 --- a/.riot/requirements/a25912e.txt +++ /dev/null @@ -1,26 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/a25912e.in -# -attrs==25.3.0 -certifi==2025.1.31 -charset-normalizer==3.4.1 -coverage[toml]==7.6.1 -ddtrace-api==0.0.1 -exceptiongroup==1.2.2 -hypothesis==6.45.0 -idna==3.10 -iniconfig==2.1.0 -mock==5.2.0 -opentracing==2.4.0 -packaging==24.2 -pluggy==1.5.0 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-mock==3.14.0 -requests==2.32.3 -sortedcontainers==2.4.0 -tomli==2.2.1 -urllib3==2.2.3 diff --git a/.riot/requirements/a3adb9c.txt b/.riot/requirements/a3adb9c.txt deleted file mode 100644 index 39528f00e9d..00000000000 --- a/.riot/requirements/a3adb9c.txt +++ /dev/null @@ -1,25 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/a3adb9c.in -# -attrs==25.3.0 -coverage[toml]==7.6.1 -exceptiongroup==1.3.0 -hypothesis==6.45.0 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -mock==5.2.0 -msgpack==1.1.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.2.1 -typing-extensions==4.13.2 -zipp==3.20.2 diff --git a/.riot/requirements/a582736.txt b/.riot/requirements/a582736.txt deleted file mode 100644 index 4f6a4e7e1a2..00000000000 --- a/.riot/requirements/a582736.txt +++ /dev/null @@ -1,28 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate --resolver=backtracking .riot/requirements/a582736.in -# -aiopg==1.4.0 -async-timeout==4.0.3 -attrs==25.3.0 -coverage[toml]==7.6.1 -exceptiongroup==1.3.0 -hypothesis==6.45.0 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 
-psycopg2-binary==2.9.10 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -sqlalchemy==2.0.41 -tomli==2.2.1 -typing-extensions==4.13.2 -zipp==3.20.2 diff --git a/.riot/requirements/a6f9342.txt b/.riot/requirements/a6f9342.txt deleted file mode 100644 index bd9fa7ad268..00000000000 --- a/.riot/requirements/a6f9342.txt +++ /dev/null @@ -1,51 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/a6f9342.in -# -attrs==25.3.0 -certifi==2025.6.15 -charset-normalizer==2.1.1 -click==8.1.8 -coverage[toml]==7.6.1 -deprecated==1.2.18 -exceptiongroup==1.3.0 -flask==2.1.3 -gevent==24.2.1 -greenlet==3.1.1 -hypothesis==6.45.0 -idna==3.10 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -itsdangerous==2.2.0 -jinja2==3.1.6 -markupsafe==2.0.1 -mock==5.2.0 -opentelemetry-api==1.33.1 -opentelemetry-instrumentation==0.54b1 -opentelemetry-instrumentation-flask==0.54b1 -opentelemetry-instrumentation-wsgi==0.54b1 -opentelemetry-semantic-conventions==0.54b1 -opentelemetry-util-http==0.54b1 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pytest==8.3.5 -pytest-asyncio==0.21.1 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -requests==2.28.1 -sortedcontainers==2.4.0 -tomli==2.2.1 -typing-extensions==4.13.2 -urllib3==1.26.20 -werkzeug==2.1.2 -wrapt==1.17.2 -zipp==3.20.2 -zope-event==5.0 -zope-interface==7.2 - -# The following packages are considered to be unsafe in a requirements file: -setuptools==75.3.2 diff --git a/.riot/requirements/aa2ebfa.txt b/.riot/requirements/aa2ebfa.txt deleted file mode 100644 index 8cefc17f634..00000000000 --- a/.riot/requirements/aa2ebfa.txt +++ /dev/null @@ -1,48 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/aa2ebfa.in -# -attrs==25.3.0 -babel==2.17.0 
-certifi==2025.8.3 -charset-normalizer==3.4.3 -click==8.1.8 -coverage[toml]==7.6.1 -exceptiongroup==1.3.0 -flask==1.1.2 -flask-babel==2.0.0 -gevent==24.2.1 -greenlet==3.1.1 -gunicorn==23.0.0 -hypothesis==6.45.0 -idna==3.10 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -itsdangerous==2.0.1 -jinja2==2.11.3 -markupsafe==1.1.1 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -psycopg2-binary==2.9.10 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -pytz==2025.2 -requests==2.32.4 -sortedcontainers==2.4.0 -sqlalchemy==2.0.43 -tomli==2.2.1 -typing-extensions==4.13.2 -urllib3==2.2.3 -werkzeug==2.0.3 -zipp==3.20.2 -zope-event==5.0 -zope-interface==7.2 - -# The following packages are considered to be unsafe in a requirements file: -setuptools==75.3.2 diff --git a/.riot/requirements/ab2f587.txt b/.riot/requirements/ab2f587.txt deleted file mode 100644 index 29fd2375edd..00000000000 --- a/.riot/requirements/ab2f587.txt +++ /dev/null @@ -1,64 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.10 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/ab2f587.in -# -annotated-types==0.7.0 -attrs==24.2.0 -cachetools==5.5.0 -certifi==2024.8.30 -charset-normalizer==3.4.0 -coverage[toml]==7.6.8 -docstring-parser==0.16 -exceptiongroup==1.2.2 -google-ai-generativelanguage==0.6.10 -google-api-core[grpc]==2.23.0 -google-api-python-client==2.154.0 -google-auth==2.36.0 -google-auth-httplib2==0.2.0 -google-cloud-aiplatform[all]==1.71.1 -google-cloud-bigquery==3.27.0 -google-cloud-core==2.4.1 -google-cloud-resource-manager==1.13.1 -google-cloud-storage==2.18.2 -google-crc32c==1.6.0 -google-generativeai==0.8.3 -google-resumable-media==2.7.2 -googleapis-common-protos[grpc]==1.66.0 -grpc-google-iam-v1==0.13.1 -grpcio==1.68.0 -grpcio-status==1.68.0 -httplib2==0.22.0 -hypothesis==6.45.0 -idna==3.10 -iniconfig==2.0.0 -mock==5.1.0 -numpy==2.1.3 -opentracing==2.4.0 -packaging==24.2 
-pillow==11.0.0 -pluggy==1.5.0 -proto-plus==1.25.0 -protobuf==5.28.3 -pyasn1==0.6.1 -pyasn1-modules==0.4.1 -pydantic==2.10.2 -pydantic-core==2.27.1 -pyparsing==3.2.0 -pytest==8.3.3 -pytest-asyncio==0.24.0 -pytest-cov==6.0.0 -pytest-mock==3.14.0 -python-dateutil==2.9.0.post0 -requests==2.32.3 -rsa==4.9 -shapely==2.0.6 -six==1.16.0 -sortedcontainers==2.4.0 -tomli==2.1.0 -tqdm==4.67.1 -typing-extensions==4.12.2 -uritemplate==4.1.1 -urllib3==2.2.3 -vertexai==1.71.1 diff --git a/.riot/requirements/abc0b46.txt b/.riot/requirements/abc0b46.txt deleted file mode 100644 index 64f004d6b5b..00000000000 --- a/.riot/requirements/abc0b46.txt +++ /dev/null @@ -1,26 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/abc0b46.in -# -aiomysql==0.2.0 -attrs==23.1.0 -coverage[toml]==7.3.4 -exceptiongroup==1.2.0 -hypothesis==6.45.0 -importlib-metadata==7.0.0 -iniconfig==2.0.0 -mock==5.1.0 -opentracing==2.4.0 -packaging==23.2 -pluggy==1.3.0 -pymysql==1.1.0 -pytest==7.4.3 -pytest-asyncio==0.21.1 -pytest-cov==4.1.0 -pytest-mock==3.12.0 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.0.1 -zipp==3.17.0 diff --git a/.riot/requirements/ac01b32.txt b/.riot/requirements/ac01b32.txt deleted file mode 100644 index 8668be156c8..00000000000 --- a/.riot/requirements/ac01b32.txt +++ /dev/null @@ -1,33 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/ac01b32.in -# -attrs==25.3.0 -coverage[toml]==7.6.1 -django==2.2.28 -django-configurations==2.3.2 -djangorestframework==3.13.1 -exceptiongroup==1.3.0 -execnet==2.1.1 -hypothesis==6.45.0 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-django[testing]==3.10.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -pytest-xdist==3.6.1 
-pytz==2025.2 -six==1.17.0 -sortedcontainers==2.4.0 -sqlparse==0.5.3 -tomli==2.2.1 -typing-extensions==4.13.2 -zipp==3.20.2 diff --git a/.riot/requirements/ac28820.txt b/.riot/requirements/ac28820.txt deleted file mode 100644 index 22f151f0679..00000000000 --- a/.riot/requirements/ac28820.txt +++ /dev/null @@ -1,29 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/ac28820.in -# -aniso8601==9.0.1 -attrs==25.3.0 -coverage[toml]==7.6.1 -exceptiongroup==1.3.0 -graphene==3.0 -graphql-core==3.1.7 -graphql-relay==3.1.5 -hypothesis==6.45.0 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pytest==8.3.5 -pytest-asyncio==0.21.1 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.2.1 -typing-extensions==4.13.2 -zipp==3.20.2 diff --git a/.riot/requirements/ac77620.txt b/.riot/requirements/ac77620.txt deleted file mode 100644 index adb4c0a9955..00000000000 --- a/.riot/requirements/ac77620.txt +++ /dev/null @@ -1,66 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.10 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/ac77620.in -# -annotated-types==0.7.0 -attrs==25.3.0 -cachetools==5.5.2 -certifi==2025.4.26 -charset-normalizer==3.4.2 -coverage[toml]==7.8.2 -docstring-parser==0.16 -exceptiongroup==1.3.0 -google-ai-generativelanguage==0.6.6 -google-api-core[grpc]==2.25.0 -google-api-python-client==2.171.0 -google-auth==2.40.3 -google-auth-httplib2==0.2.0 -google-cloud-aiplatform[all]==1.71.1 -google-cloud-bigquery==3.34.0 -google-cloud-core==2.4.3 -google-cloud-resource-manager==1.14.2 -google-cloud-storage==2.19.0 -google-crc32c==1.7.1 -google-generativeai==0.7.2 -google-resumable-media==2.7.2 -googleapis-common-protos[grpc]==1.70.0 -grpc-google-iam-v1==0.14.2 -grpcio==1.73.0 
-grpcio-status==1.62.3 -httplib2==0.22.0 -hypothesis==6.45.0 -idna==3.10 -iniconfig==2.1.0 -mock==5.2.0 -numpy==2.2.6 -opentracing==2.4.0 -packaging==25.0 -pillow==11.2.1 -pluggy==1.6.0 -proto-plus==1.26.1 -protobuf==4.25.8 -pyasn1==0.6.1 -pyasn1-modules==0.4.2 -pydantic==2.11.5 -pydantic-core==2.33.2 -pygments==2.19.1 -pyparsing==3.2.3 -pytest==8.4.0 -pytest-asyncio==1.0.0 -pytest-cov==6.1.1 -pytest-mock==3.14.1 -python-dateutil==2.9.0.post0 -requests==2.32.4 -rsa==4.9.1 -shapely==2.1.1 -six==1.17.0 -sortedcontainers==2.4.0 -tomli==2.2.1 -tqdm==4.67.1 -typing-extensions==4.14.0 -typing-inspection==0.4.1 -uritemplate==4.2.0 -urllib3==2.4.0 -vertexai==1.71.1 diff --git a/.riot/requirements/ad1bcb5.txt b/.riot/requirements/ad1bcb5.txt deleted file mode 100644 index 701deb13fa4..00000000000 --- a/.riot/requirements/ad1bcb5.txt +++ /dev/null @@ -1,28 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/ad1bcb5.in -# -async-timeout==4.0.3 -attrs==23.1.0 -click==7.1.2 -coverage[toml]==7.3.4 -exceptiongroup==1.2.0 -hypothesis==6.45.0 -importlib-metadata==7.0.1 -iniconfig==2.0.0 -mock==5.1.0 -opentracing==2.4.0 -packaging==23.2 -pluggy==1.3.0 -pytest==7.4.3 -pytest-asyncio==0.21.1 -pytest-cov==4.1.0 -pytest-mock==3.12.0 -pytest-randomly==3.15.0 -redis==5.0.1 -rq==1.15.1 -sortedcontainers==2.4.0 -tomli==2.0.1 -zipp==3.17.0 diff --git a/.riot/requirements/ad40916.txt b/.riot/requirements/ad40916.txt deleted file mode 100644 index 853f497ee9e..00000000000 --- a/.riot/requirements/ad40916.txt +++ /dev/null @@ -1,22 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.11 -# by the following command: -# -# pip-compile --no-annotate --resolver=backtracking .riot/requirements/ad40916.in -# -attrs==24.2.0 -coverage[toml]==7.6.1 -dnspython==2.6.1 -hypothesis==6.45.0 -iniconfig==2.0.0 -mock==5.1.0 -mongoengine==0.29.1 -opentracing==2.4.0 -packaging==24.1 -pluggy==1.5.0 
-pymongo==4.8.0 -pytest==8.3.3 -pytest-cov==5.0.0 -pytest-mock==3.14.0 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 diff --git a/.riot/requirements/b089663.txt b/.riot/requirements/b089663.txt deleted file mode 100644 index 956c6d73e92..00000000000 --- a/.riot/requirements/b089663.txt +++ /dev/null @@ -1,26 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.9 -# by the following command: -# -# pip-compile --no-annotate --resolver=backtracking .riot/requirements/b089663.in -# -attrs==24.2.0 -coverage[toml]==7.6.1 -dnspython==2.6.1 -exceptiongroup==1.2.2 -hypothesis==6.45.0 -importlib-metadata==8.5.0 -iniconfig==2.0.0 -mock==5.1.0 -mongoengine==0.29.1 -opentracing==2.4.0 -packaging==24.1 -pluggy==1.5.0 -pymongo==4.8.0 -pytest==8.3.3 -pytest-cov==5.0.0 -pytest-mock==3.14.0 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.0.1 -zipp==3.20.2 diff --git a/.riot/requirements/b344fed.txt b/.riot/requirements/b344fed.txt deleted file mode 100644 index 73e61eb69f9..00000000000 --- a/.riot/requirements/b344fed.txt +++ /dev/null @@ -1,22 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.12 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/b344fed.in -# -attrs==24.2.0 -coverage[toml]==7.6.1 -dnspython==2.6.1 -hypothesis==6.45.0 -iniconfig==2.0.0 -mock==5.1.0 -mongoengine==0.29.1 -opentracing==2.4.0 -packaging==24.1 -pluggy==1.5.0 -pymongo==4.8.0 -pytest==8.3.3 -pytest-cov==5.0.0 -pytest-mock==3.14.0 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 diff --git a/.riot/requirements/b39e5f7.txt b/.riot/requirements/b39e5f7.txt deleted file mode 100644 index e0845dfc719..00000000000 --- a/.riot/requirements/b39e5f7.txt +++ /dev/null @@ -1,33 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/b39e5f7.in -# -attrs==23.2.0 -blinker==1.7.0 -click==7.1.2 -coverage[toml]==7.4.2 -exceptiongroup==1.2.0 
-flask==1.1.4 -flask-caching==1.10.1 -hypothesis==6.45.0 -importlib-metadata==7.0.1 -iniconfig==2.0.0 -itsdangerous==1.1.0 -jinja2==2.11.3 -markupsafe==1.1.1 -mock==5.1.0 -opentracing==2.4.0 -packaging==23.2 -pluggy==1.4.0 -pytest==8.0.1 -pytest-cov==4.1.0 -pytest-mock==3.12.0 -pytest-randomly==3.15.0 -python-memcached==1.62 -redis==2.10.6 -sortedcontainers==2.4.0 -tomli==2.0.1 -werkzeug==1.0.1 -zipp==3.17.0 diff --git a/.riot/requirements/b436a4c.txt b/.riot/requirements/b436a4c.txt deleted file mode 100644 index dddc661ed72..00000000000 --- a/.riot/requirements/b436a4c.txt +++ /dev/null @@ -1,46 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/b436a4c.in -# -attrs==23.2.0 -beautifulsoup4==4.12.3 -certifi==2024.7.4 -charset-normalizer==3.3.2 -coverage[toml]==7.6.0 -exceptiongroup==1.2.2 -hupper==1.12.1 -hypothesis==6.45.0 -idna==3.7 -importlib-metadata==8.2.0 -iniconfig==2.0.0 -mock==5.1.0 -opentracing==2.4.0 -packaging==24.1 -pastedeploy==3.1.0 -plaster==1.1.2 -plaster-pastedeploy==1.0.1 -pluggy==1.5.0 -pserve-test-app @ file:///home/bits/project/tests/contrib/pyramid/pserve_app -pyramid==1.10.8 -pytest==8.3.1 -pytest-cov==5.0.0 -pytest-mock==3.14.0 -pytest-randomly==3.15.0 -requests==2.32.3 -sortedcontainers==2.4.0 -soupsieve==2.5 -tomli==2.0.1 -translationstring==1.4 -urllib3==2.2.2 -venusian==3.1.0 -waitress==3.0.0 -webob==1.8.7 -webtest==3.0.0 -zipp==3.19.2 -zope-deprecation==5.0 -zope-interface==6.4.post2 - -# The following packages are considered to be unsafe in a requirements file: -setuptools==71.1.0 diff --git a/.riot/requirements/ad77105.txt b/.riot/requirements/b635ec9.txt similarity index 95% rename from .riot/requirements/ad77105.txt rename to .riot/requirements/b635ec9.txt index 0f3502a7194..7492cc53a53 100644 --- a/.riot/requirements/ad77105.txt +++ b/.riot/requirements/b635ec9.txt @@ -2,7 +2,7 @@ # This file is autogenerated by 
pip-compile with Python 3.9 # by the following command: # -# pip-compile --allow-unsafe --no-annotate .riot/requirements/ad77105.in +# pip-compile --allow-unsafe --no-annotate .riot/requirements/b635ec9.in # attrs==25.4.0 coverage[toml]==7.10.7 @@ -22,7 +22,7 @@ py-cpuinfo==8.0.0 pygments==2.19.2 pytest==8.4.2 pytest-asyncio==0.21.1 -pytest-benchmark==5.2.1 +pytest-benchmark==5.2.3 pytest-cov==7.0.0 pytest-cpp==2.6.0 pytest-mock==3.15.1 diff --git a/.riot/requirements/b6e9905.txt b/.riot/requirements/b6e9905.txt deleted file mode 100644 index c17865f1eae..00000000000 --- a/.riot/requirements/b6e9905.txt +++ /dev/null @@ -1,79 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/b6e9905.in -# -arrow==1.3.0 -asgiref==3.8.1 -attrs==24.3.0 -autobahn==23.1.2 -automat==24.8.1 -backports-zoneinfo==0.2.1 -bcrypt==4.2.1 -blessed==1.20.0 -certifi==2024.12.14 -cffi==1.17.1 -channels==4.2.0 -charset-normalizer==3.4.0 -constantly==23.10.4 -coverage[toml]==7.6.1 -cryptography==44.0.0 -daphne==4.1.2 -django==4.2.17 -django-configurations==2.5.1 -django-picklefield==3.2 -django-pylibmc==0.6.1 -django-q==1.3.6 -django-redis==4.5.0 -exceptiongroup==1.2.2 -hyperlink==21.0.0 -hypothesis==6.45.0 -idna==3.10 -importlib-metadata==8.5.0 -incremental==24.7.2 -iniconfig==2.0.0 -isodate==0.7.2 -lxml==5.3.0 -mock==5.1.0 -opentracing==2.4.0 -packaging==24.2 -platformdirs==4.3.6 -pluggy==1.5.0 -psycopg==3.2.3 -psycopg2-binary==2.9.10 -pyasn1==0.6.1 -pyasn1-modules==0.4.1 -pycparser==2.22 -pylibmc==1.6.3 -pyopenssl==24.3.0 -pytest==8.3.4 -pytest-cov==5.0.0 -pytest-django[testing]==3.10.0 -pytest-mock==3.14.0 -pytest-randomly==3.15.0 -python-dateutil==2.9.0.post0 -python-memcached==1.62 -pytz==2024.2 -redis==2.10.6 -requests==2.32.3 -requests-file==2.1.0 -requests-toolbelt==1.0.0 -service-identity==24.2.0 -six==1.17.0 -sortedcontainers==2.4.0 -spyne==2.14.0 -sqlparse==0.5.3 
-tomli==2.2.1 -twisted[tls]==24.11.0 -txaio==23.1.1 -types-python-dateutil==2.9.0.20241206 -typing-extensions==4.12.2 -urllib3==2.2.3 -wcwidth==0.2.13 -zeep==4.3.1 -zipp==3.20.2 -zope-interface==7.2 - -# The following packages are considered to be unsafe in a requirements file: -setuptools==75.3.0 diff --git a/.riot/requirements/b786604.txt b/.riot/requirements/b786604.txt deleted file mode 100644 index cb26a822c6d..00000000000 --- a/.riot/requirements/b786604.txt +++ /dev/null @@ -1,25 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/b786604.in -# -attrs==23.1.0 -coverage[toml]==7.3.4 -elasticsearch1==1.10.0 -exceptiongroup==1.2.0 -hypothesis==6.45.0 -importlib-metadata==7.0.0 -iniconfig==2.0.0 -mock==5.1.0 -opentracing==2.4.0 -packaging==23.2 -pluggy==1.3.0 -pytest==7.4.3 -pytest-cov==4.1.0 -pytest-mock==3.12.0 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.0.1 -urllib3==1.26.18 -zipp==3.17.0 diff --git a/.riot/requirements/b7a530f.txt b/.riot/requirements/b7a530f.txt deleted file mode 100644 index 802d6e0593e..00000000000 --- a/.riot/requirements/b7a530f.txt +++ /dev/null @@ -1,24 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/b7a530f.in -# -attrs==25.1.0 -coverage[toml]==7.6.1 -exceptiongroup==1.2.2 -hypothesis==6.45.0 -importlib-metadata==8.5.0 -iniconfig==2.0.0 -mock==5.1.0 -opentracing==2.4.0 -packaging==24.2 -pluggy==1.5.0 -pytest==8.3.4 -pytest-asyncio==0.21.1 -pytest-cov==5.0.0 -pytest-mock==3.14.0 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.2.1 -zipp==3.20.2 diff --git a/.riot/requirements/b80e42b.txt b/.riot/requirements/b80e42b.txt deleted file mode 100644 index 6885e5531e6..00000000000 --- a/.riot/requirements/b80e42b.txt +++ /dev/null @@ -1,29 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# 
by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/b80e42b.in -# -algoliasearch==2.6.3 -attrs==24.2.0 -certifi==2024.7.4 -charset-normalizer==3.3.2 -coverage[toml]==7.6.1 -exceptiongroup==1.2.2 -hypothesis==6.45.0 -idna==3.8 -importlib-metadata==8.4.0 -iniconfig==2.0.0 -mock==5.1.0 -opentracing==2.4.0 -packaging==24.1 -pluggy==1.5.0 -pytest==8.3.2 -pytest-cov==5.0.0 -pytest-mock==3.14.0 -pytest-randomly==3.15.0 -requests==2.32.3 -sortedcontainers==2.4.0 -tomli==2.0.1 -urllib3==1.26.19 -zipp==3.20.0 diff --git a/.riot/requirements/baf46ab.txt b/.riot/requirements/baf46ab.txt deleted file mode 100644 index 5a983e008c5..00000000000 --- a/.riot/requirements/baf46ab.txt +++ /dev/null @@ -1,26 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/baf46ab.in -# -async-timeout==5.0.1 -attrs==24.2.0 -coverage[toml]==7.6.1 -exceptiongroup==1.2.2 -hypothesis==6.45.0 -importlib-metadata==8.5.0 -iniconfig==2.0.0 -mock==5.1.0 -opentracing==2.4.0 -packaging==24.2 -pluggy==1.5.0 -pytest==8.3.3 -pytest-asyncio==0.23.7 -pytest-cov==5.0.0 -pytest-mock==3.14.0 -pytest-randomly==3.15.0 -redis==4.6.0 -sortedcontainers==2.4.0 -tomli==2.1.0 -zipp==3.20.2 diff --git a/.riot/requirements/bb588fd.txt b/.riot/requirements/bb588fd.txt deleted file mode 100644 index 900d23b901a..00000000000 --- a/.riot/requirements/bb588fd.txt +++ /dev/null @@ -1,25 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/bb588fd.in -# -attrs==25.3.0 -coverage[toml]==7.6.1 -exceptiongroup==1.3.0 -hypothesis==6.45.0 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -mock==5.2.0 -msgpack==1.1.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 
-tomli==2.2.1 -typing-extensions==4.13.2 -zipp==3.20.2 diff --git a/.riot/requirements/bdada1a.txt b/.riot/requirements/bdada1a.txt deleted file mode 100644 index 2a394359c49..00000000000 --- a/.riot/requirements/bdada1a.txt +++ /dev/null @@ -1,24 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/bdada1a.in -# -attrs==24.3.0 -coverage[toml]==7.6.1 -exceptiongroup==1.2.2 -falcon==3.1.3 -hypothesis==6.45.0 -importlib-metadata==8.5.0 -iniconfig==2.0.0 -mock==5.1.0 -opentracing==2.4.0 -packaging==24.2 -pluggy==1.5.0 -pytest==8.3.4 -pytest-cov==5.0.0 -pytest-mock==3.14.0 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.2.1 -zipp==3.20.2 diff --git a/.riot/requirements/bfd8366.txt b/.riot/requirements/bfd8366.txt deleted file mode 100644 index 3c91ad7fcca..00000000000 --- a/.riot/requirements/bfd8366.txt +++ /dev/null @@ -1,52 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/bfd8366.in -# -annotated-types==0.7.0 -anyio==4.5.2 -attrs==22.1.0 -boto3==1.37.38 -botocore==1.37.38 -cattrs==23.1.2 -certifi==2025.8.3 -coverage[toml]==7.6.1 -exceptiongroup==1.3.0 -fastapi==0.116.1 -freezegun==1.5.5 -h11==0.16.0 -httpcore==1.0.9 -httpretty==1.1.4 -httpx==0.27.2 -hypothesis==6.45.0 -idna==3.10 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -jmespath==1.0.1 -mock==5.2.0 -msgpack==1.1.1 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pydantic==2.10.6 -pydantic-core==2.27.2 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -python-dateutil==2.9.0.post0 -s3transfer==0.11.5 -six==1.17.0 -sniffio==1.3.1 -sortedcontainers==2.4.0 -starlette==0.44.0 -structlog==25.4.0 -tomli==2.2.1 -typing-extensions==4.13.2 -urllib3==1.26.20 -wheel==0.45.1 -zipp==3.20.2 - -# The following packages are considered to be unsafe in a 
requirements file: -setuptools==75.3.2 diff --git a/.riot/requirements/c10c210.txt b/.riot/requirements/c10c210.txt deleted file mode 100644 index 309fa2b596d..00000000000 --- a/.riot/requirements/c10c210.txt +++ /dev/null @@ -1,33 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/c10c210.in -# -anyio==4.5.2 -asgiref==3.8.1 -attrs==25.3.0 -certifi==2025.8.3 -coverage[toml]==7.6.1 -exceptiongroup==1.3.0 -h11==0.16.0 -httpcore==1.0.9 -httpx==0.27.2 -hypothesis==6.45.0 -idna==3.10 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pytest==8.3.5 -pytest-asyncio==0.21.1 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -sniffio==1.3.1 -sortedcontainers==2.4.0 -tomli==2.2.1 -typing-extensions==4.13.2 -zipp==3.20.2 diff --git a/.riot/requirements/c16273a.txt b/.riot/requirements/c16273a.txt deleted file mode 100644 index 6f97a5d643a..00000000000 --- a/.riot/requirements/c16273a.txt +++ /dev/null @@ -1,64 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.12 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/c16273a.in -# -annotated-types==0.7.0 -attrs==25.3.0 -cachetools==5.5.2 -certifi==2025.4.26 -charset-normalizer==3.4.2 -coverage[toml]==7.8.2 -docstring-parser==0.16 -google-ai-generativelanguage==0.6.6 -google-api-core[grpc]==2.25.0 -google-api-python-client==2.171.0 -google-auth==2.40.3 -google-auth-httplib2==0.2.0 -google-cloud-aiplatform[all]==1.71.1 -google-cloud-bigquery==3.34.0 -google-cloud-core==2.4.3 -google-cloud-resource-manager==1.14.2 -google-cloud-storage==2.19.0 -google-crc32c==1.7.1 -google-generativeai==0.7.2 -google-resumable-media==2.7.2 -googleapis-common-protos[grpc]==1.70.0 -grpc-google-iam-v1==0.14.2 -grpcio==1.73.0 -grpcio-status==1.62.3 -httplib2==0.22.0 -hypothesis==6.45.0 -idna==3.10 
-iniconfig==2.1.0 -mock==5.2.0 -numpy==2.3.0 -opentracing==2.4.0 -packaging==25.0 -pillow==11.2.1 -pluggy==1.6.0 -proto-plus==1.26.1 -protobuf==4.25.8 -pyasn1==0.6.1 -pyasn1-modules==0.4.2 -pydantic==2.11.5 -pydantic-core==2.33.2 -pygments==2.19.1 -pyparsing==3.2.3 -pytest==8.4.0 -pytest-asyncio==1.0.0 -pytest-cov==6.1.1 -pytest-mock==3.14.1 -python-dateutil==2.9.0.post0 -requests==2.32.4 -rsa==4.9.1 -shapely==2.1.1 -six==1.17.0 -sortedcontainers==2.4.0 -tqdm==4.67.1 -typing-extensions==4.14.0 -typing-inspection==0.4.1 -uritemplate==4.2.0 -urllib3==2.4.0 -vertexai==1.71.1 diff --git a/.riot/requirements/c2ee914.txt b/.riot/requirements/c2ee914.txt deleted file mode 100644 index 66ce3c49b64..00000000000 --- a/.riot/requirements/c2ee914.txt +++ /dev/null @@ -1,24 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/c2ee914.in -# -attrs==23.1.0 -coverage[toml]==7.3.0 -exceptiongroup==1.1.3 -httpretty==1.1.4 -hypothesis==6.45.0 -iniconfig==2.0.0 -mock==5.1.0 -msgpack==1.0.5 -opentracing==2.4.0 -packaging==23.1 -pluggy==1.2.0 -pytest==7.4.0 -pytest-asyncio==0.21.1 -pytest-cov==4.1.0 -pytest-mock==3.11.1 -sortedcontainers==2.4.0 -tomli==2.0.1 -typing-extensions==4.7.1 diff --git a/.riot/requirements/c33a085.txt b/.riot/requirements/c33a085.txt new file mode 100644 index 00000000000..2fdd88ffeed --- /dev/null +++ b/.riot/requirements/c33a085.txt @@ -0,0 +1,37 @@ +# +# This file is autogenerated by pip-compile with Python 3.11 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/c33a085.in +# +attrs==25.4.0 +coverage[toml]==7.11.3 +gevent==25.9.1 +greenlet==3.2.4 +gunicorn[gevent]==23.0.0 +hypothesis==6.45.0 +iniconfig==2.3.0 +jsonschema==4.25.1 +jsonschema-specifications==2025.9.1 +mock==5.2.0 +opentracing==2.4.0 +packaging==25.0 +pluggy==1.6.0 +protobuf==6.33.0 +py-cpuinfo==8.0.0 +pygments==2.19.2 +pytest==9.0.0 
+pytest-asyncio==0.21.1 +pytest-benchmark==5.2.3 +pytest-cov==7.0.0 +pytest-cpp==2.6.0 +pytest-mock==3.15.1 +pytest-randomly==4.0.1 +referencing==0.37.0 +rpds-py==0.28.0 +sortedcontainers==2.4.0 +typing-extensions==4.15.0 +uwsgi==2.0.31 +zope-event==6.1 +zope-interface==8.1 +zstandard==0.25.0 diff --git a/.riot/requirements/c482689.txt b/.riot/requirements/c482689.txt deleted file mode 100644 index 4d61b425aa3..00000000000 --- a/.riot/requirements/c482689.txt +++ /dev/null @@ -1,50 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/c482689.in -# -asgiref==3.8.1 -attrs==25.3.0 -certifi==2025.6.15 -charset-normalizer==2.1.1 -click==7.1.2 -coverage[toml]==7.6.1 -exceptiongroup==1.3.0 -flask==1.1.4 -gevent==24.2.1 -greenlet==3.1.1 -hypothesis==6.45.0 -idna==3.10 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -itsdangerous==1.1.0 -jinja2==2.11.3 -markupsafe==2.0.1 -mock==5.2.0 -opentelemetry-api==1.0.0 -opentelemetry-instrumentation==0.19b0 -opentelemetry-instrumentation-flask==0.19b0 -opentelemetry-instrumentation-wsgi==0.19b0 -opentelemetry-util-http==0.19b0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pytest==8.3.5 -pytest-asyncio==0.21.1 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -requests==2.28.1 -sortedcontainers==2.4.0 -tomli==2.2.1 -typing-extensions==4.13.2 -urllib3==1.26.20 -werkzeug==1.0.1 -wrapt==1.17.2 -zipp==3.20.2 -zope-event==5.0 -zope-interface==7.2 - -# The following packages are considered to be unsafe in a requirements file: -setuptools==75.3.2 diff --git a/.riot/requirements/c4dace8.txt b/.riot/requirements/c4dace8.txt deleted file mode 100644 index b828932c4c2..00000000000 --- a/.riot/requirements/c4dace8.txt +++ /dev/null @@ -1,24 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/c4dace8.in 
-# -attrs==24.2.0 -coverage[toml]==7.6.1 -exceptiongroup==1.2.2 -hypothesis==6.45.0 -importlib-metadata==8.5.0 -iniconfig==2.0.0 -mock==5.1.0 -opentracing==2.4.0 -packaging==24.2 -pluggy==1.5.0 -pyodbc==5.2.0 -pytest==8.3.4 -pytest-cov==5.0.0 -pytest-mock==3.14.0 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.2.1 -zipp==3.20.2 diff --git a/.riot/requirements/c74560f.txt b/.riot/requirements/c74560f.txt deleted file mode 100644 index 06136e66715..00000000000 --- a/.riot/requirements/c74560f.txt +++ /dev/null @@ -1,32 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/c74560f.in -# -async-timeout==5.0.1 -attrs==24.3.0 -coverage[toml]==7.6.1 -exceptiongroup==1.2.2 -gevent==24.2.1 -greenlet==3.1.1 -hypothesis==6.45.0 -importlib-metadata==8.5.0 -iniconfig==2.0.0 -mock==5.1.0 -opentracing==2.4.0 -packaging==24.2 -pluggy==1.5.0 -pytest==8.3.4 -pytest-cov==5.0.0 -pytest-mock==3.14.0 -pytest-randomly==3.15.0 -redis==5.2.1 -sortedcontainers==2.4.0 -tomli==2.2.1 -zipp==3.20.2 -zope-event==5.0 -zope-interface==7.2 - -# The following packages are considered to be unsafe in a requirements file: -setuptools==75.3.0 diff --git a/.riot/requirements/c826075.txt b/.riot/requirements/c826075.txt deleted file mode 100644 index 8b37fe1c728..00000000000 --- a/.riot/requirements/c826075.txt +++ /dev/null @@ -1,25 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/c826075.in -# -attrs==25.3.0 -coverage[toml]==7.6.1 -exceptiongroup==1.3.0 -flaky==3.8.1 -hypothesis==6.45.0 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.2.1 -typing-extensions==4.13.2 -zipp==3.20.2 diff --git 
a/.riot/requirements/12f8771.txt b/.riot/requirements/cc3c148.txt similarity index 89% rename from .riot/requirements/12f8771.txt rename to .riot/requirements/cc3c148.txt index 036b13514ad..bf81d4d73ac 100644 --- a/.riot/requirements/12f8771.txt +++ b/.riot/requirements/cc3c148.txt @@ -2,10 +2,10 @@ # This file is autogenerated by pip-compile with Python 3.11 # by the following command: # -# pip-compile --allow-unsafe --no-annotate .riot/requirements/12f8771.in +# pip-compile --allow-unsafe --no-annotate .riot/requirements/cc3c148.in # attrs==25.4.0 -coverage[toml]==7.11.0 +coverage[toml]==7.11.3 gunicorn==23.0.0 hypothesis==6.45.0 iniconfig==2.3.0 @@ -18,9 +18,9 @@ pluggy==1.6.0 protobuf==6.33.0 py-cpuinfo==8.0.0 pygments==2.19.2 -pytest==8.4.2 +pytest==9.0.0 pytest-asyncio==0.21.1 -pytest-benchmark==5.2.1 +pytest-benchmark==5.2.3 pytest-cov==7.0.0 pytest-cpp==2.6.0 pytest-mock==3.15.1 diff --git a/.riot/requirements/60c0a59.txt b/.riot/requirements/cdbf8f3.txt similarity index 89% rename from .riot/requirements/60c0a59.txt rename to .riot/requirements/cdbf8f3.txt index afe3f0d96c6..848015677df 100644 --- a/.riot/requirements/60c0a59.txt +++ b/.riot/requirements/cdbf8f3.txt @@ -2,10 +2,10 @@ # This file is autogenerated by pip-compile with Python 3.12 # by the following command: # -# pip-compile --allow-unsafe --no-annotate .riot/requirements/60c0a59.in +# pip-compile --allow-unsafe --no-annotate .riot/requirements/cdbf8f3.in # attrs==25.4.0 -coverage[toml]==7.11.0 +coverage[toml]==7.11.3 gunicorn==23.0.0 hypothesis==6.45.0 iniconfig==2.3.0 @@ -18,9 +18,9 @@ pluggy==1.6.0 protobuf==6.33.0 py-cpuinfo==8.0.0 pygments==2.19.2 -pytest==8.4.2 +pytest==9.0.0 pytest-asyncio==0.21.1 -pytest-benchmark==5.2.1 +pytest-benchmark==5.2.3 pytest-cov==7.0.0 pytest-cpp==2.6.0 pytest-mock==3.15.1 diff --git a/.riot/requirements/ce26b2c.txt b/.riot/requirements/ce26b2c.txt deleted file mode 100644 index 85a8151acd0..00000000000 --- a/.riot/requirements/ce26b2c.txt +++ /dev/null @@ 
-1,25 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/ce26b2c.in -# -aredis==1.1.8 -attrs==23.1.0 -coverage[toml]==7.3.4 -exceptiongroup==1.2.0 -hypothesis==6.45.0 -importlib-metadata==7.0.0 -iniconfig==2.0.0 -mock==5.1.0 -opentracing==2.4.0 -packaging==23.2 -pluggy==1.3.0 -pytest==7.4.3 -pytest-asyncio==0.21.1 -pytest-cov==4.1.0 -pytest-mock==3.12.0 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.0.1 -zipp==3.17.0 diff --git a/.riot/requirements/ce48624.txt b/.riot/requirements/ce48624.txt deleted file mode 100644 index 7f4fe653b48..00000000000 --- a/.riot/requirements/ce48624.txt +++ /dev/null @@ -1,49 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/ce48624.in -# -attrs==25.3.0 -babel==2.17.0 -blinker==1.8.2 -certifi==2025.8.3 -charset-normalizer==3.4.3 -click==8.1.8 -coverage[toml]==7.6.1 -exceptiongroup==1.3.0 -flask==2.3.3 -flask-babel==4.0.0 -gevent==24.2.1 -greenlet==3.1.1 -gunicorn==23.0.0 -hypothesis==6.45.0 -idna==3.10 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -itsdangerous==2.2.0 -jinja2==3.1.6 -markupsafe==2.1.5 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -psycopg2-binary==2.9.10 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -pytz==2025.2 -requests==2.32.4 -sortedcontainers==2.4.0 -sqlalchemy==2.0.43 -tomli==2.2.1 -typing-extensions==4.13.2 -urllib3==2.2.3 -werkzeug==3.0.6 -zipp==3.20.2 -zope-event==5.0 -zope-interface==7.2 - -# The following packages are considered to be unsafe in a requirements file: -setuptools==75.3.2 diff --git a/.riot/requirements/7ab50e4.txt b/.riot/requirements/cf86081.txt similarity index 55% rename from .riot/requirements/7ab50e4.txt rename to .riot/requirements/cf86081.txt index 6db2d9d7cd9..7d61954e17b 100644 --- 
a/.riot/requirements/7ab50e4.txt +++ b/.riot/requirements/cf86081.txt @@ -2,38 +2,53 @@ # This file is autogenerated by pip-compile with Python 3.9 # by the following command: # -# pip-compile --allow-unsafe --no-annotate .riot/requirements/7ab50e4.in +# pip-compile --allow-unsafe --no-annotate .riot/requirements/cf86081.in # -attrs==25.4.0 +annotated-types==0.7.0 +anyio==4.11.0 +attrs==22.1.0 +boto3==1.40.52 +botocore==1.40.52 +cattrs==23.1.2 +certifi==2025.10.5 coverage[toml]==7.10.7 exceptiongroup==1.3.0 -gevent==22.10.2 -greenlet==3.2.4 -gunicorn[gevent]==23.0.0 +fastapi==0.119.0 +freezegun==1.5.5 +h11==0.16.0 +httpcore==1.0.9 +httpretty==1.1.4 +httpx==0.27.2 hypothesis==6.45.0 +idna==3.11 importlib-metadata==8.7.0 iniconfig==2.1.0 +jmespath==1.0.1 mock==5.2.0 +msgpack==1.1.2 opentracing==2.4.0 packaging==25.0 pluggy==1.6.0 -protobuf==6.33.0 -py-cpuinfo==8.0.0 +pydantic==2.12.2 +pydantic-core==2.41.4 pygments==2.19.2 pytest==8.4.2 -pytest-asyncio==0.21.1 -pytest-benchmark==5.2.1 pytest-cov==7.0.0 pytest-mock==3.15.1 pytest-randomly==4.0.1 +python-dateutil==2.9.0.post0 +s3transfer==0.14.0 +six==1.17.0 +sniffio==1.3.1 sortedcontainers==2.4.0 +starlette==0.48.0 +structlog==25.4.0 tomli==2.3.0 typing-extensions==4.15.0 -uwsgi==2.0.31 +typing-inspection==0.4.2 +urllib3==1.26.20 +wheel==0.45.1 zipp==3.23.0 -zope-event==6.0 -zope-interface==8.0.1 -zstandard==0.25.0 # The following packages are considered to be unsafe in a requirements file: setuptools==80.9.0 diff --git a/.riot/requirements/cfb7b47.txt b/.riot/requirements/cfb7b47.txt deleted file mode 100644 index c6b7817267a..00000000000 --- a/.riot/requirements/cfb7b47.txt +++ /dev/null @@ -1,33 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/cfb7b47.in -# -anyio==4.5.2 -asgiref==3.8.1 -attrs==25.3.0 -certifi==2025.8.3 -coverage[toml]==7.6.1 -exceptiongroup==1.3.0 -h11==0.16.0 -httpcore==1.0.9 
-httpx==0.27.2 -hypothesis==6.45.0 -idna==3.10 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pytest==8.3.5 -pytest-asyncio==0.21.1 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -sniffio==1.3.1 -sortedcontainers==2.4.0 -tomli==2.2.1 -typing-extensions==4.13.2 -zipp==3.20.2 diff --git a/.riot/requirements/d002f87.txt b/.riot/requirements/d002f87.txt deleted file mode 100644 index 54053f21afb..00000000000 --- a/.riot/requirements/d002f87.txt +++ /dev/null @@ -1,24 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate --resolver=backtracking .riot/requirements/d002f87.in -# -attrs==24.2.0 -avro==1.12.0 -coverage[toml]==7.6.1 -exceptiongroup==1.2.2 -hypothesis==6.45.0 -importlib-metadata==8.4.0 -iniconfig==2.0.0 -mock==5.1.0 -opentracing==2.4.0 -packaging==24.1 -pluggy==1.5.0 -pytest==8.3.2 -pytest-cov==5.0.0 -pytest-mock==3.14.0 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.0.1 -zipp==3.20.1 diff --git a/.riot/requirements/d2cb323.txt b/.riot/requirements/d2cb323.txt deleted file mode 100644 index cec5fdb7891..00000000000 --- a/.riot/requirements/d2cb323.txt +++ /dev/null @@ -1,24 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/d2cb323.in -# -attrs==23.1.0 -coverage[toml]==7.3.4 -exceptiongroup==1.2.0 -hypothesis==6.45.0 -importlib-metadata==7.0.0 -iniconfig==2.0.0 -mock==5.1.0 -opentracing==2.4.0 -packaging==23.2 -pluggy==1.3.0 -pytest==7.4.3 -pytest-cov==4.1.0 -pytest-mock==3.12.0 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -structlog==20.2.0 -tomli==2.0.1 -zipp==3.17.0 diff --git a/.riot/requirements/d59e395.txt b/.riot/requirements/d59e395.txt deleted file mode 100644 index b865c214967..00000000000 --- a/.riot/requirements/d59e395.txt +++ /dev/null @@ -1,25 +0,0 @@ -# 
-# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/d59e395.in -# -attrs==23.1.0 -coverage[toml]==7.3.4 -decorator==5.1.1 -dogpile-cache==0.9.2 -exceptiongroup==1.2.0 -hypothesis==6.45.0 -importlib-metadata==7.0.0 -iniconfig==2.0.0 -mock==5.1.0 -opentracing==2.4.0 -packaging==23.2 -pluggy==1.3.0 -pytest==7.4.3 -pytest-cov==4.1.0 -pytest-mock==3.12.0 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.0.1 -zipp==3.17.0 diff --git a/.riot/requirements/d66afaf.txt b/.riot/requirements/d66afaf.txt deleted file mode 100644 index 0b95a2b04d0..00000000000 --- a/.riot/requirements/d66afaf.txt +++ /dev/null @@ -1,26 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/d66afaf.in -# -attrs==25.3.0 -coverage[toml]==7.6.1 -exceptiongroup==1.3.0 -hypothesis==6.45.0 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -mock==5.2.0 -mongoengine==0.29.1 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pymongo==3.9.0 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.2.1 -typing-extensions==4.13.2 -zipp==3.20.2 diff --git a/.riot/requirements/d776a9a.txt b/.riot/requirements/d776a9a.txt deleted file mode 100644 index 07d09e22b12..00000000000 --- a/.riot/requirements/d776a9a.txt +++ /dev/null @@ -1,35 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/d776a9a.in -# -attrs==25.3.0 -azure-core==1.33.0 -azure-eventhub==5.15.0 -azure-functions==1.10.1 -azure-storage-blob==12.26.0 -certifi==2025.8.3 -cffi==1.17.1 -charset-normalizer==3.4.3 -coverage[toml]==7.6.1 -cryptography==46.0.1 -exceptiongroup==1.3.0 -hypothesis==6.45.0 -idna==3.10 -iniconfig==2.1.0 -isodate==0.7.2 -mock==5.2.0 -opentracing==2.4.0 
-packaging==25.0 -pluggy==1.5.0 -pycparser==2.23 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -requests==2.32.4 -six==1.17.0 -sortedcontainers==2.4.0 -tomli==2.2.1 -typing-extensions==4.13.2 -urllib3==2.2.3 diff --git a/.riot/requirements/d84f5ef.txt b/.riot/requirements/d84f5ef.txt deleted file mode 100644 index 57914495970..00000000000 --- a/.riot/requirements/d84f5ef.txt +++ /dev/null @@ -1,51 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/d84f5ef.in -# -annotated-types==0.7.0 -anyio==4.5.2 -attrs==25.3.0 -boto3==1.37.38 -botocore==1.37.38 -certifi==2025.8.3 -coverage[toml]==7.6.1 -exceptiongroup==1.3.0 -fastapi==0.116.1 -freezegun==1.5.5 -h11==0.16.0 -httpcore==1.0.9 -httpretty==1.1.4 -httpx==0.27.2 -hypothesis==6.45.0 -idna==3.10 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -jmespath==1.0.1 -mock==5.2.0 -msgpack==1.1.1 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pydantic==2.10.6 -pydantic-core==2.27.2 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -python-dateutil==2.9.0.post0 -s3transfer==0.11.5 -six==1.17.0 -sniffio==1.3.1 -sortedcontainers==2.4.0 -starlette==0.44.0 -structlog==25.4.0 -tomli==2.2.1 -typing-extensions==4.13.2 -urllib3==1.26.20 -wheel==0.45.1 -zipp==3.20.2 - -# The following packages are considered to be unsafe in a requirements file: -setuptools==75.3.2 diff --git a/.riot/requirements/d8c9ddb.txt b/.riot/requirements/d8c9ddb.txt deleted file mode 100644 index a8703fdfcfe..00000000000 --- a/.riot/requirements/d8c9ddb.txt +++ /dev/null @@ -1,40 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/d8c9ddb.in -# -aiofiles==24.1.0 -anyio==4.5.2 -attrs==25.3.0 -certifi==2025.8.3 -charset-normalizer==3.4.3 -coverage[toml]==7.6.1 -exceptiongroup==1.3.0 -fastapi==0.90.1 
-h11==0.16.0 -httpcore==1.0.9 -httpx==0.27.2 -hypothesis==6.45.0 -idna==3.10 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pydantic==1.10.22 -pytest==8.3.5 -pytest-asyncio==0.21.1 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -python-multipart==0.0.20 -requests==2.32.4 -sniffio==1.3.1 -sortedcontainers==2.4.0 -starlette==0.23.1 -tomli==2.2.1 -typing-extensions==4.13.2 -urllib3==2.2.3 -zipp==3.20.2 diff --git a/.riot/requirements/19167e2.txt b/.riot/requirements/da649f1.txt similarity index 88% rename from .riot/requirements/19167e2.txt rename to .riot/requirements/da649f1.txt index 7abbcc3622f..024581f579b 100644 --- a/.riot/requirements/19167e2.txt +++ b/.riot/requirements/da649f1.txt @@ -2,10 +2,10 @@ # This file is autogenerated by pip-compile with Python 3.13 # by the following command: # -# pip-compile --allow-unsafe --no-annotate .riot/requirements/19167e2.in +# pip-compile --allow-unsafe --no-annotate .riot/requirements/da649f1.in # attrs==25.4.0 -coverage[toml]==7.11.0 +coverage[toml]==7.11.3 gunicorn==23.0.0 hypothesis==6.45.0 iniconfig==2.3.0 @@ -18,9 +18,9 @@ pluggy==1.6.0 protobuf==6.33.0 py-cpuinfo==8.0.0 pygments==2.19.2 -pytest==8.4.2 +pytest==9.0.0 pytest-asyncio==0.21.1 -pytest-benchmark==5.2.1 +pytest-benchmark==5.2.3 pytest-cov==7.0.0 pytest-cpp==2.6.0 pytest-mock==3.15.1 diff --git a/.riot/requirements/db728f4.txt b/.riot/requirements/db728f4.txt new file mode 100644 index 00000000000..c9d62454155 --- /dev/null +++ b/.riot/requirements/db728f4.txt @@ -0,0 +1,39 @@ +# +# This file is autogenerated by pip-compile with Python 3.10 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/db728f4.in +# +attrs==25.4.0 +coverage[toml]==7.11.3 +exceptiongroup==1.3.0 +gevent==25.9.1 +greenlet==3.2.4 +gunicorn[gevent]==23.0.0 +hypothesis==6.45.0 +iniconfig==2.3.0 +jsonschema==4.25.1 +jsonschema-specifications==2025.9.1 +mock==5.2.0 
+opentracing==2.4.0 +packaging==25.0 +pluggy==1.6.0 +protobuf==6.33.0 +py-cpuinfo==8.0.0 +pygments==2.19.2 +pytest==9.0.0 +pytest-asyncio==0.21.1 +pytest-benchmark==5.2.3 +pytest-cov==7.0.0 +pytest-cpp==2.6.0 +pytest-mock==3.15.1 +pytest-randomly==4.0.1 +referencing==0.37.0 +rpds-py==0.28.0 +sortedcontainers==2.4.0 +tomli==2.3.0 +typing-extensions==4.15.0 +uwsgi==2.0.31 +zope-event==6.1 +zope-interface==8.1 +zstandard==0.25.0 diff --git a/.riot/requirements/dbf191e.txt b/.riot/requirements/dbf191e.txt deleted file mode 100644 index 3e34c492c5e..00000000000 --- a/.riot/requirements/dbf191e.txt +++ /dev/null @@ -1,45 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/dbf191e.in -# -asn1crypto==1.5.1 -attrs==23.2.0 -certifi==2024.7.4 -cffi==1.16.0 -charset-normalizer==2.1.1 -coverage[toml]==7.6.0 -cryptography==38.0.4 -exceptiongroup==1.2.2 -filelock==3.15.4 -hypothesis==6.45.0 -idna==3.7 -importlib-metadata==8.2.0 -iniconfig==2.0.0 -mock==5.1.0 -opentracing==2.4.0 -oscrypto==1.3.0 -packaging==24.1 -pluggy==1.5.0 -pycparser==2.22 -pycryptodomex==3.20.0 -pyjwt==2.8.0 -pyopenssl==22.1.0 -pytest==8.3.1 -pytest-cov==5.0.0 -pytest-mock==3.14.0 -pytest-randomly==3.15.0 -pytz==2024.1 -requests==2.32.3 -responses==0.16.0 -six==1.16.0 -snowflake-connector-python==2.9.0 -sortedcontainers==2.4.0 -tomli==2.0.1 -typing-extensions==4.12.2 -urllib3==1.26.19 -zipp==3.19.2 - -# The following packages are considered to be unsafe in a requirements file: -setuptools==71.1.0 diff --git a/.riot/requirements/dc3ecf5.txt b/.riot/requirements/dc3ecf5.txt deleted file mode 100644 index 3a9c449bce0..00000000000 --- a/.riot/requirements/dc3ecf5.txt +++ /dev/null @@ -1,35 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/dc3ecf5.in -# -attrs==25.3.0 -blinker==1.8.2 
-certifi==2025.7.9 -charset-normalizer==3.4.2 -click==8.1.8 -coverage[toml]==7.6.1 -exceptiongroup==1.3.0 -flask==2.3.3 -hypothesis==6.113.0 -idna==3.10 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -itsdangerous==2.2.0 -jinja2==3.1.6 -markupsafe==2.1.5 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -requests==2.32.4 -sortedcontainers==2.4.0 -tomli==2.2.1 -typing-extensions==4.13.2 -urllib3==2.2.3 -werkzeug==3.0.6 -zipp==3.20.2 diff --git a/.riot/requirements/dc9cd92.txt b/.riot/requirements/dc9cd92.txt deleted file mode 100644 index e00e3702a78..00000000000 --- a/.riot/requirements/dc9cd92.txt +++ /dev/null @@ -1,40 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.12 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/dc9cd92.in -# -attrs==25.4.0 -coverage[toml]==7.11.0 -gevent==25.9.1 -greenlet==3.2.4 -gunicorn[gevent]==23.0.0 -hypothesis==6.45.0 -iniconfig==2.3.0 -jsonschema==4.25.1 -jsonschema-specifications==2025.9.1 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.6.0 -protobuf==6.33.0 -py-cpuinfo==8.0.0 -pygments==2.19.2 -pytest==8.4.2 -pytest-asyncio==0.21.1 -pytest-benchmark==5.2.1 -pytest-cov==7.0.0 -pytest-cpp==2.6.0 -pytest-mock==3.15.1 -pytest-randomly==4.0.1 -referencing==0.37.0 -rpds-py==0.28.0 -sortedcontainers==2.4.0 -typing-extensions==4.15.0 -uwsgi==2.0.31 -zope-event==6.0 -zope-interface==8.0.1 -zstandard==0.25.0 - -# The following packages are considered to be unsafe in a requirements file: -setuptools==80.9.0 diff --git a/.riot/requirements/dc9f475.txt b/.riot/requirements/dc9f475.txt deleted file mode 100644 index 163edfe0799..00000000000 --- a/.riot/requirements/dc9f475.txt +++ /dev/null @@ -1,42 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/dc9f475.in -# -attrs==23.2.0 
-autocommand==2.2.2 -backports-tarfile==1.2.0 -cheroot==10.0.1 -cherrypy==18.10.0 -coverage[toml]==7.6.0 -exceptiongroup==1.2.2 -hypothesis==6.45.0 -importlib-metadata==8.2.0 -importlib-resources==6.4.0 -inflect==7.3.1 -iniconfig==2.0.0 -jaraco-collections==5.0.1 -jaraco-context==5.3.0 -jaraco-functools==4.0.1 -jaraco-text==3.14.0 -mock==5.1.0 -more-itertools==8.10.0 -opentracing==2.4.0 -packaging==24.1 -pluggy==1.5.0 -portend==3.2.0 -pytest==8.3.1 -pytest-cov==5.0.0 -pytest-mock==3.14.0 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tempora==5.6.0 -tomli==2.0.1 -typeguard==4.3.0 -typing-extensions==4.12.2 -zc-lockfile==3.0.post1 -zipp==3.19.2 - -# The following packages are considered to be unsafe in a requirements file: -setuptools==71.1.0 diff --git a/.riot/requirements/de53117.txt b/.riot/requirements/de53117.txt deleted file mode 100644 index 1dd3dcf18f2..00000000000 --- a/.riot/requirements/de53117.txt +++ /dev/null @@ -1,26 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.9 -# by the following command: -# -# pip-compile --no-annotate --resolver=backtracking .riot/requirements/de53117.in -# -attrs==24.2.0 -coverage[toml]==7.6.1 -dnspython==2.6.1 -exceptiongroup==1.2.2 -hypothesis==6.45.0 -importlib-metadata==8.5.0 -iniconfig==2.0.0 -mock==5.1.0 -mongoengine==0.29.1 -opentracing==2.4.0 -packaging==24.1 -pluggy==1.5.0 -pymongo==4.8.0 -pytest==8.3.3 -pytest-cov==5.0.0 -pytest-mock==3.14.0 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.0.1 -zipp==3.20.2 diff --git a/.riot/requirements/e1e09c9.txt b/.riot/requirements/e1e09c9.txt deleted file mode 100644 index 9f07d4c2561..00000000000 --- a/.riot/requirements/e1e09c9.txt +++ /dev/null @@ -1,24 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/e1e09c9.in -# -attrs==23.1.0 -coverage[toml]==7.3.4 -exceptiongroup==1.2.0 -hypothesis==6.45.0 -importlib-metadata==7.0.0 
-iniconfig==2.0.0 -mock==5.1.0 -opentracing==2.4.0 -packaging==23.2 -pluggy==1.3.0 -pylibmc==1.6.3 -pytest==7.4.3 -pytest-cov==4.1.0 -pytest-mock==3.12.0 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.0.1 -zipp==3.17.0 diff --git a/.riot/requirements/e222783.txt b/.riot/requirements/e222783.txt deleted file mode 100644 index 9d6fa6e77f5..00000000000 --- a/.riot/requirements/e222783.txt +++ /dev/null @@ -1,24 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/e222783.in -# -attrs==25.3.0 -coverage[toml]==7.6.1 -exceptiongroup==1.2.2 -hypothesis==6.45.0 -importlib-metadata==8.5.0 -iniconfig==2.0.0 -mock==5.2.0 -opentracing==2.4.0 -packaging==24.2 -pluggy==1.5.0 -protobuf==5.29.3 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-mock==3.14.0 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.2.1 -zipp==3.20.2 diff --git a/.riot/requirements/110d00d.txt b/.riot/requirements/e24351a.txt similarity index 89% rename from .riot/requirements/110d00d.txt rename to .riot/requirements/e24351a.txt index 72f5a3650b2..571e0aaebb2 100644 --- a/.riot/requirements/110d00d.txt +++ b/.riot/requirements/e24351a.txt @@ -2,10 +2,10 @@ # This file is autogenerated by pip-compile with Python 3.11 # by the following command: # -# pip-compile --allow-unsafe --no-annotate .riot/requirements/110d00d.in +# pip-compile --allow-unsafe --no-annotate .riot/requirements/e24351a.in # attrs==25.4.0 -coverage[toml]==7.11.0 +coverage[toml]==7.11.3 gunicorn==23.0.0 hypothesis==6.45.0 iniconfig==2.3.0 @@ -18,9 +18,9 @@ pluggy==1.6.0 protobuf==4.22.0 py-cpuinfo==8.0.0 pygments==2.19.2 -pytest==8.4.2 +pytest==9.0.0 pytest-asyncio==0.21.1 -pytest-benchmark==5.2.1 +pytest-benchmark==5.2.3 pytest-cov==7.0.0 pytest-cpp==2.6.0 pytest-mock==3.15.1 diff --git a/.riot/requirements/e2c6900.txt b/.riot/requirements/e2c6900.txt deleted file mode 100644 index f3cb21179d5..00000000000 --- 
a/.riot/requirements/e2c6900.txt +++ /dev/null @@ -1,33 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/e2c6900.in -# -anyio==4.4.0 -attrs==23.2.0 -certifi==2024.6.2 -coverage[toml]==7.5.4 -exceptiongroup==1.2.1 -h11==0.14.0 -httpcore==0.16.3 -httpx==0.23.3 -hypothesis==6.45.0 -idna==3.7 -importlib-metadata==8.0.0 -iniconfig==2.0.0 -mock==5.1.0 -opentracing==2.4.0 -packaging==24.1 -pluggy==1.5.0 -pytest==8.2.2 -pytest-asyncio==0.21.1 -pytest-cov==5.0.0 -pytest-mock==3.14.0 -pytest-randomly==3.15.0 -rfc3986[idna2008]==1.5.0 -sniffio==1.3.1 -sortedcontainers==2.4.0 -tomli==2.0.1 -typing-extensions==4.12.2 -zipp==3.19.2 diff --git a/.riot/requirements/199a155.txt b/.riot/requirements/e712306.txt similarity index 67% rename from .riot/requirements/199a155.txt rename to .riot/requirements/e712306.txt index f190975ed10..d549e4be124 100644 --- a/.riot/requirements/199a155.txt +++ b/.riot/requirements/e712306.txt @@ -2,14 +2,14 @@ # This file is autogenerated by pip-compile with Python 3.10 # by the following command: # -# pip-compile --allow-unsafe --no-annotate .riot/requirements/199a155.in +# pip-compile --allow-unsafe --no-annotate .riot/requirements/e712306.in # -asgiref==3.9.1 -attrs==25.3.0 +asgiref==3.10.0 +attrs==25.4.0 bcrypt==4.2.1 -certifi==2025.8.3 -charset-normalizer==3.4.3 -coverage[toml]==7.10.7 +certifi==2025.10.5 +charset-normalizer==3.4.4 +coverage[toml]==7.11.1 dill==0.4.0 django==4.0.10 django-configurations==2.5.1 @@ -18,8 +18,9 @@ gevent==25.9.1 greenlet==3.2.4 gunicorn==23.0.0 hypothesis==6.45.0 -idna==3.10 -iniconfig==2.1.0 +idna==3.11 +iniconfig==2.3.0 +legacy-cgi==2.6.4 mock==5.2.0 opentracing==2.4.0 packaging==25.0 @@ -30,16 +31,13 @@ pytest==8.4.2 pytest-cov==7.0.0 pytest-django[testing]==3.10.0 pytest-mock==3.15.1 -pyyaml==6.0.2 +pyyaml==6.0.3 requests==2.32.5 six==1.17.0 sortedcontainers==2.4.0 sqlparse==0.5.3 -tomli==2.2.1 +tomli==2.3.0 
typing-extensions==4.15.0 urllib3==2.5.0 -zope-event==6.0 -zope-interface==8.0 - -# The following packages are considered to be unsafe in a requirements file: -setuptools==80.9.0 +zope-event==6.1 +zope-interface==8.0.1 diff --git a/.riot/requirements/e7a63a3.txt b/.riot/requirements/e7a63a3.txt deleted file mode 100644 index 6c1feed2bd3..00000000000 --- a/.riot/requirements/e7a63a3.txt +++ /dev/null @@ -1,29 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate --resolver=backtracking .riot/requirements/e7a63a3.in -# -attrs==25.3.0 -certifi==2025.1.31 -charset-normalizer==3.4.1 -coverage[toml]==7.6.1 -exceptiongroup==1.2.2 -hypothesis==6.45.0 -idna==3.10 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -mock==5.2.0 -opensearch-py[requests]==1.1.0 -opentracing==2.4.0 -packaging==24.2 -pluggy==1.5.0 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-mock==3.14.0 -pytest-randomly==3.15.0 -requests==2.32.3 -sortedcontainers==2.4.0 -tomli==2.2.1 -urllib3==1.26.20 -zipp==3.20.2 diff --git a/.riot/requirements/e8693b9.txt b/.riot/requirements/e8693b9.txt deleted file mode 100644 index 4db2ef78998..00000000000 --- a/.riot/requirements/e8693b9.txt +++ /dev/null @@ -1,77 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate --resolver=backtracking .riot/requirements/e8693b9.in -# -arrow==1.3.0 -asgiref==3.8.1 -attrs==25.3.0 -autobahn==23.1.2 -automat==24.8.1 -bcrypt==4.2.1 -blessed==1.21.0 -certifi==2025.4.26 -cffi==1.17.1 -channels==3.0.5 -charset-normalizer==3.4.2 -constantly==23.10.4 -coverage[toml]==7.6.1 -cryptography==45.0.3 -daphne==3.0.2 -django==3.0.14 -django-configurations==2.3.2 -django-picklefield==3.0.1 -django-pylibmc==0.6.1 -django-q==1.3.6 -django-redis==4.5.0 -exceptiongroup==1.3.0 -hyperlink==21.0.0 -hypothesis==6.45.0 -idna==3.10 -importlib-metadata==8.5.0 -incremental==24.7.2 
-iniconfig==2.1.0 -isodate==0.7.2 -lxml==5.4.0 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -platformdirs==4.3.6 -pluggy==1.5.0 -psycopg2-binary==2.9.10 -pyasn1==0.6.1 -pyasn1-modules==0.4.2 -pycparser==2.22 -pylibmc==1.6.3 -pyopenssl==25.1.0 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-django[testing]==3.10.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -python-dateutil==2.9.0.post0 -python-memcached==1.62 -pytz==2025.2 -redis==2.10.6 -requests==2.32.3 -requests-file==2.1.0 -requests-toolbelt==1.0.0 -service-identity==24.2.0 -six==1.17.0 -sortedcontainers==2.4.0 -spyne==2.14.0 -sqlparse==0.5.3 -tomli==2.2.1 -twisted[tls]==24.11.0 -txaio==23.1.1 -types-python-dateutil==2.9.0.20241206 -typing-extensions==4.13.2 -urllib3==2.2.3 -wcwidth==0.2.13 -zeep==4.3.1 -zipp==3.20.2 -zope-interface==7.2 - -# The following packages are considered to be unsafe in a requirements file: -setuptools==75.3.2 diff --git a/.riot/requirements/e871798.txt b/.riot/requirements/e871798.txt deleted file mode 100644 index 3b8c98da668..00000000000 --- a/.riot/requirements/e871798.txt +++ /dev/null @@ -1,26 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/e871798.in -# -attrs==23.2.0 -coverage[toml]==7.6.0 -exceptiongroup==1.2.2 -hypothesis==6.45.0 -importlib-metadata==8.2.0 -iniconfig==2.0.0 -mock==5.1.0 -opentracing==2.4.0 -packaging==24.1 -pluggy==1.5.0 -pytest==8.3.1 -pytest-cov==5.0.0 -pytest-mock==3.14.0 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.0.1 -zipp==3.19.2 - -# The following packages are considered to be unsafe in a requirements file: -setuptools==71.1.0 diff --git a/.riot/requirements/e87b392.txt b/.riot/requirements/e87b392.txt deleted file mode 100644 index 56eccef1dbf..00000000000 --- a/.riot/requirements/e87b392.txt +++ /dev/null @@ -1,25 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# 
-# pip-compile --no-annotate .riot/requirements/e87b392.in -# -attrs==23.1.0 -coverage[toml]==7.3.4 -exceptiongroup==1.2.0 -hypothesis==6.45.0 -importlib-metadata==7.0.0 -iniconfig==2.0.0 -jinja2==3.0.3 -markupsafe==2.1.3 -mock==5.1.0 -opentracing==2.4.0 -packaging==23.2 -pluggy==1.3.0 -pytest==7.4.3 -pytest-cov==4.1.0 -pytest-mock==3.12.0 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.0.1 -zipp==3.17.0 diff --git a/.riot/requirements/eab5e7a.txt b/.riot/requirements/eab5e7a.txt deleted file mode 100644 index 272838ed70e..00000000000 --- a/.riot/requirements/eab5e7a.txt +++ /dev/null @@ -1,41 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/eab5e7a.in -# -amqp==5.3.1 -attrs==25.3.0 -backports-zoneinfo[tzdata]==0.2.1 -billiard==4.2.1 -celery==5.5.3 -click==8.1.8 -click-didyoumean==0.3.1 -click-plugins==1.1.1.2 -click-repl==0.3.0 -coverage[toml]==7.6.1 -exceptiongroup==1.3.0 -hypothesis==6.45.0 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -kombu==5.5.4 -mock==5.2.0 -more-itertools==8.10.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -prompt-toolkit==3.0.51 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -python-dateutil==2.9.0.post0 -redis==3.5.3 -six==1.17.0 -sortedcontainers==2.4.0 -tomli==2.2.1 -typing-extensions==4.13.2 -tzdata==2025.2 -vine==5.1.0 -wcwidth==0.2.13 -zipp==3.20.2 diff --git a/.riot/requirements/eb355e4.txt b/.riot/requirements/eb355e4.txt deleted file mode 100644 index e20acaa4521..00000000000 --- a/.riot/requirements/eb355e4.txt +++ /dev/null @@ -1,47 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/eb355e4.in -# -annotated-types==0.7.0 -anyio==4.5.2 -attrs==25.3.0 -certifi==2025.10.5 -coverage[toml]==7.6.1 -distro==1.9.0 -exceptiongroup==1.3.0 -h11==0.16.0 
-httpcore==1.0.9 -httpx==0.28.1 -hypothesis==6.45.0 -idna==3.11 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -jiter==0.9.1 -mock==5.2.0 -multidict==6.1.0 -openai==1.66.0 -opentracing==2.4.0 -packaging==25.0 -pillow==10.4.0 -pluggy==1.5.0 -propcache==0.2.0 -pydantic==2.10.6 -pydantic-core==2.27.2 -pytest==8.3.5 -pytest-asyncio==0.21.1 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -pyyaml==6.0.3 -sniffio==1.3.1 -sortedcontainers==2.4.0 -tomli==2.3.0 -tqdm==4.67.1 -typing-extensions==4.13.2 -urllib3==1.26.20 -vcrpy==6.0.2 -wrapt==2.0.0 -yarl==1.15.2 -zipp==3.20.2 diff --git a/.riot/requirements/ed7ef83.txt b/.riot/requirements/ed7ef83.txt deleted file mode 100644 index 14ad4d14504..00000000000 --- a/.riot/requirements/ed7ef83.txt +++ /dev/null @@ -1,42 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.10 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/ed7ef83.in -# -attrs==25.4.0 -coverage[toml]==7.11.0 -exceptiongroup==1.3.0 -gevent==25.9.1 -greenlet==3.2.4 -gunicorn[gevent]==23.0.0 -hypothesis==6.45.0 -iniconfig==2.3.0 -jsonschema==4.25.1 -jsonschema-specifications==2025.9.1 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.6.0 -protobuf==6.33.0 -py-cpuinfo==8.0.0 -pygments==2.19.2 -pytest==8.4.2 -pytest-asyncio==0.21.1 -pytest-benchmark==5.2.1 -pytest-cov==7.0.0 -pytest-cpp==2.6.0 -pytest-mock==3.15.1 -pytest-randomly==4.0.1 -referencing==0.37.0 -rpds-py==0.28.0 -sortedcontainers==2.4.0 -tomli==2.3.0 -typing-extensions==4.15.0 -uwsgi==2.0.31 -zope-event==6.0 -zope-interface==8.0.1 -zstandard==0.25.0 - -# The following packages are considered to be unsafe in a requirements file: -setuptools==80.9.0 diff --git a/.riot/requirements/ee62ebe.txt b/.riot/requirements/ee62ebe.txt deleted file mode 100644 index b0e384be4e5..00000000000 --- a/.riot/requirements/ee62ebe.txt +++ /dev/null @@ -1,24 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the 
following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/ee62ebe.in -# -async-timeout==4.0.3 -attrs==24.2.0 -coverage[toml]==7.6.1 -dramatiq==1.17.0 -exceptiongroup==1.2.2 -hypothesis==6.45.0 -iniconfig==2.0.0 -mock==5.1.0 -opentracing==2.4.0 -packaging==24.1 -pluggy==1.5.0 -prometheus-client==0.20.0 -pytest==8.3.2 -pytest-cov==5.0.0 -pytest-mock==3.14.0 -redis==5.0.8 -sortedcontainers==2.4.0 -tomli==2.0.1 diff --git a/.riot/requirements/ef10d26.txt b/.riot/requirements/ef10d26.txt deleted file mode 100644 index 02b4ccf8a17..00000000000 --- a/.riot/requirements/ef10d26.txt +++ /dev/null @@ -1,27 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/ef10d26.in -# -amqp==5.3.1 -attrs==25.3.0 -coverage[toml]==7.6.1 -exceptiongroup==1.3.0 -hypothesis==6.45.0 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -kombu==5.0.2 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.2.1 -typing-extensions==4.13.2 -vine==5.1.0 -zipp==3.20.2 diff --git a/.riot/requirements/ef24176.txt b/.riot/requirements/ef24176.txt new file mode 100644 index 00000000000..dcc594cef54 --- /dev/null +++ b/.riot/requirements/ef24176.txt @@ -0,0 +1,39 @@ +# +# This file is autogenerated by pip-compile with Python 3.10 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/ef24176.in +# +attrs==25.4.0 +coverage[toml]==7.11.3 +exceptiongroup==1.3.0 +gevent==25.9.1 +greenlet==3.2.4 +gunicorn[gevent]==23.0.0 +hypothesis==6.45.0 +iniconfig==2.3.0 +jsonschema==4.25.1 +jsonschema-specifications==2025.9.1 +mock==5.2.0 +opentracing==2.4.0 +packaging==25.0 +pluggy==1.6.0 +protobuf==6.33.0 +py-cpuinfo==8.0.0 +pygments==2.19.2 +pytest==9.0.0 +pytest-asyncio==0.21.1 +pytest-benchmark==5.2.3 +pytest-cov==7.0.0 
+pytest-cpp==2.6.0 +pytest-mock==3.15.1 +pytest-randomly==4.0.1 +referencing==0.37.0 +rpds-py==0.28.0 +sortedcontainers==2.4.0 +tomli==2.3.0 +typing-extensions==4.15.0 +uwsgi==2.0.31 +zope-event==6.1 +zope-interface==8.1 +zstandard==0.25.0 diff --git a/.riot/requirements/ef66bb3.txt b/.riot/requirements/ef66bb3.txt deleted file mode 100644 index 7e584779306..00000000000 --- a/.riot/requirements/ef66bb3.txt +++ /dev/null @@ -1,25 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/ef66bb3.in -# -asynctest==0.13.0 -attrs==23.1.0 -coverage[toml]==7.3.4 -hypothesis==6.45.0 -importlib-metadata==7.0.0 -iniconfig==2.0.0 -mock==5.1.0 -opentracing==2.4.0 -packaging==23.2 -pluggy==1.3.0 -py==1.11.0 -pytest==6.2.5 -pytest-cov==4.1.0 -pytest-mock==3.12.0 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -toml==0.10.2 -tomli==2.0.1 -zipp==3.17.0 diff --git a/.riot/requirements/f1461b7.txt b/.riot/requirements/f1461b7.txt deleted file mode 100644 index 63023fb4133..00000000000 --- a/.riot/requirements/f1461b7.txt +++ /dev/null @@ -1,38 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/f1461b7.in -# -attrs==25.3.0 -coverage[toml]==7.6.1 -exceptiongroup==1.3.0 -gunicorn==23.0.0 -hypothesis==6.45.0 -importlib-metadata==8.5.0 -importlib-resources==6.4.5 -iniconfig==2.1.0 -jsonschema==4.23.0 -jsonschema-specifications==2023.12.1 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pkgutil-resolve-name==1.3.10 -pluggy==1.5.0 -protobuf==3.19.0 -py-cpuinfo==8.0.0 -pytest==8.3.5 -pytest-asyncio==0.21.1 -pytest-benchmark==4.0.0 -pytest-cov==5.0.0 -pytest-cpp==2.6.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -referencing==0.35.1 -rpds-py==0.20.1 -sortedcontainers==2.4.0 -tomli==2.3.0 -typing-extensions==4.13.2 -uwsgi==2.0.31 -zipp==3.20.2 -zstandard==0.23.0 diff --git 
a/.riot/requirements/6160594.txt b/.riot/requirements/f15701f.txt similarity index 94% rename from .riot/requirements/6160594.txt rename to .riot/requirements/f15701f.txt index e88aca116eb..0facb1ff02f 100644 --- a/.riot/requirements/6160594.txt +++ b/.riot/requirements/f15701f.txt @@ -2,12 +2,12 @@ # This file is autogenerated by pip-compile with Python 3.9 # by the following command: # -# pip-compile --allow-unsafe --no-annotate .riot/requirements/6160594.in +# pip-compile --allow-unsafe --no-annotate .riot/requirements/f15701f.in # attrs==25.4.0 coverage[toml]==7.10.7 exceptiongroup==1.3.0 -gevent==22.10.2 +gevent==25.9.1 greenlet==3.2.4 gunicorn[gevent]==23.0.0 hypothesis==6.45.0 @@ -24,7 +24,7 @@ py-cpuinfo==8.0.0 pygments==2.19.2 pytest==8.4.2 pytest-asyncio==0.21.1 -pytest-benchmark==5.2.1 +pytest-benchmark==5.2.3 pytest-cov==7.0.0 pytest-cpp==2.6.0 pytest-mock==3.15.1 diff --git a/.riot/requirements/f229429.txt b/.riot/requirements/f229429.txt deleted file mode 100644 index 4f0448ccdbb..00000000000 --- a/.riot/requirements/f229429.txt +++ /dev/null @@ -1,27 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/f229429.in -# -attrs==23.1.0 -cassandra-driver==3.24.0 -click==8.1.7 -coverage[toml]==7.3.4 -exceptiongroup==1.2.0 -geomet==0.2.1.post1 -hypothesis==6.45.0 -importlib-metadata==7.0.0 -iniconfig==2.0.0 -mock==5.1.0 -opentracing==2.4.0 -packaging==23.2 -pluggy==1.3.0 -pytest==7.4.3 -pytest-cov==4.1.0 -pytest-mock==3.12.0 -pytest-randomly==3.15.0 -six==1.16.0 -sortedcontainers==2.4.0 -tomli==2.0.1 -zipp==3.17.0 diff --git a/.riot/requirements/f334e66.txt b/.riot/requirements/f334e66.txt deleted file mode 100644 index ba4030e4718..00000000000 --- a/.riot/requirements/f334e66.txt +++ /dev/null @@ -1,25 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate 
.riot/requirements/f334e66.in -# -attrs==25.3.0 -coverage[toml]==7.6.1 -exceptiongroup==1.3.0 -hypothesis==6.45.0 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -mock==5.2.0 -msgpack==1.1.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.2.1 -typing-extensions==4.13.2 -zipp==3.20.2 diff --git a/.riot/requirements/213f995.txt b/.riot/requirements/f3f043e.txt similarity index 95% rename from .riot/requirements/213f995.txt rename to .riot/requirements/f3f043e.txt index 4a94bb00571..a47b929617d 100644 --- a/.riot/requirements/213f995.txt +++ b/.riot/requirements/f3f043e.txt @@ -2,7 +2,7 @@ # This file is autogenerated by pip-compile with Python 3.9 # by the following command: # -# pip-compile --allow-unsafe --no-annotate .riot/requirements/213f995.in +# pip-compile --allow-unsafe --no-annotate .riot/requirements/f3f043e.in # attrs==25.4.0 coverage[toml]==7.10.7 @@ -22,7 +22,7 @@ py-cpuinfo==8.0.0 pygments==2.19.2 pytest==8.4.2 pytest-asyncio==0.21.1 -pytest-benchmark==5.2.1 +pytest-benchmark==5.2.3 pytest-cov==7.0.0 pytest-cpp==2.6.0 pytest-mock==3.15.1 diff --git a/.riot/requirements/f408d1f.txt b/.riot/requirements/f408d1f.txt deleted file mode 100644 index 9a59658b081..00000000000 --- a/.riot/requirements/f408d1f.txt +++ /dev/null @@ -1,38 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/f408d1f.in -# -attrs==25.1.0 -blinker==1.8.2 -certifi==2025.1.31 -charset-normalizer==3.4.1 -click==7.1.2 -coverage[toml]==7.6.1 -exceptiongroup==1.2.2 -flask==1.1.4 -flask-openapi3==1.1.5 -hypothesis==6.45.0 -idna==3.10 -importlib-metadata==8.5.0 -iniconfig==2.0.0 -itsdangerous==1.1.0 -jinja2==2.11.3 -markupsafe==1.1.1 -mock==5.1.0 -opentracing==2.4.0 -packaging==24.2 -pluggy==1.5.0 -pydantic==1.10.21 -pytest==8.3.4 -pytest-cov==5.0.0 
-pytest-mock==3.14.0 -pytest-randomly==3.15.0 -requests==2.32.3 -sortedcontainers==2.4.0 -tomli==2.2.1 -typing-extensions==4.12.2 -urllib3==1.26.20 -werkzeug==1.0.1 -zipp==3.20.2 diff --git a/.riot/requirements/f4b1bd3.txt b/.riot/requirements/f4b1bd3.txt deleted file mode 100644 index da3d86a840f..00000000000 --- a/.riot/requirements/f4b1bd3.txt +++ /dev/null @@ -1,26 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/f4b1bd3.in -# -async-timeout==5.0.1 -attrs==24.2.0 -coverage[toml]==7.6.1 -exceptiongroup==1.2.2 -hypothesis==6.45.0 -importlib-metadata==8.5.0 -iniconfig==2.0.0 -mock==5.1.0 -opentracing==2.4.0 -packaging==24.2 -pluggy==1.5.0 -pytest==8.3.3 -pytest-asyncio==0.23.7 -pytest-cov==5.0.0 -pytest-mock==3.14.0 -pytest-randomly==3.15.0 -redis==5.0.1 -sortedcontainers==2.4.0 -tomli==2.1.0 -zipp==3.20.2 diff --git a/.riot/requirements/1e2d183.txt b/.riot/requirements/f5d4513.txt similarity index 88% rename from .riot/requirements/1e2d183.txt rename to .riot/requirements/f5d4513.txt index 5ddce3bcb7f..a4f3c50120d 100644 --- a/.riot/requirements/1e2d183.txt +++ b/.riot/requirements/f5d4513.txt @@ -2,10 +2,10 @@ # This file is autogenerated by pip-compile with Python 3.13 # by the following command: # -# pip-compile --allow-unsafe --no-annotate .riot/requirements/1e2d183.in +# pip-compile --allow-unsafe --no-annotate .riot/requirements/f5d4513.in # attrs==25.4.0 -coverage[toml]==7.11.0 +coverage[toml]==7.11.3 gunicorn==23.0.0 hypothesis==6.45.0 iniconfig==2.3.0 @@ -18,9 +18,9 @@ pluggy==1.6.0 protobuf==4.22.0 py-cpuinfo==8.0.0 pygments==2.19.2 -pytest==8.4.2 +pytest==9.0.0 pytest-asyncio==0.21.1 -pytest-benchmark==5.2.1 +pytest-benchmark==5.2.3 pytest-cov==7.0.0 pytest-cpp==2.6.0 pytest-mock==3.15.1 diff --git a/.riot/requirements/f61cdff.txt b/.riot/requirements/f61cdff.txt deleted file mode 100644 index 853373c6a43..00000000000 --- 
a/.riot/requirements/f61cdff.txt +++ /dev/null @@ -1,44 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/f61cdff.in -# -attrs==25.3.0 -bcrypt==4.2.1 -certifi==2025.8.3 -charset-normalizer==3.4.3 -coverage[toml]==7.6.1 -dill==0.4.0 -django==2.2.28 -django-configurations==2.3.2 -exceptiongroup==1.3.0 -gevent==24.2.1 -greenlet==3.1.1 -gunicorn==23.0.0 -hypothesis==6.45.0 -idna==3.10 -iniconfig==2.1.0 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pylibmc==1.6.3 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-django[testing]==3.10.0 -pytest-mock==3.14.1 -pytz==2025.2 -pyyaml==6.0.2 -requests==2.32.4 -six==1.17.0 -sortedcontainers==2.4.0 -sqlparse==0.5.3 -tomli==2.2.1 -typing-extensions==4.13.2 -urllib3==2.2.3 -zope-event==5.0 -zope-interface==7.2 - -# The following packages are considered to be unsafe in a requirements file: -setuptools==75.3.2 diff --git a/.riot/requirements/f7e8645.txt b/.riot/requirements/f7e8645.txt deleted file mode 100644 index 3bc220b653d..00000000000 --- a/.riot/requirements/f7e8645.txt +++ /dev/null @@ -1,21 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate --resolver=backtracking .riot/requirements/f7e8645.in -# -attrs==25.3.0 -coverage[toml]==7.6.1 -exceptiongroup==1.3.0 -hypothesis==6.45.0 -iniconfig==2.1.0 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -sortedcontainers==2.4.0 -tomli==2.2.1 -typing-extensions==4.13.2 diff --git a/.riot/requirements/f8e5119.txt b/.riot/requirements/f8e5119.txt deleted file mode 100644 index a06b17c8085..00000000000 --- a/.riot/requirements/f8e5119.txt +++ /dev/null @@ -1,31 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe 
--no-annotate .riot/requirements/f8e5119.in -# -attrs==25.3.0 -azure-core==1.33.0 -azure-servicebus==7.14.2 -certifi==2025.8.3 -charset-normalizer==3.4.3 -coverage[toml]==7.6.1 -exceptiongroup==1.3.0 -hypothesis==6.45.0 -idna==3.10 -iniconfig==2.1.0 -isodate==0.7.2 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pytest==8.3.5 -pytest-asyncio==0.23.7 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -requests==2.32.4 -six==1.17.0 -sortedcontainers==2.4.0 -tomli==2.2.1 -typing-extensions==4.13.2 -urllib3==2.2.3 diff --git a/.riot/requirements/f903257.txt b/.riot/requirements/f903257.txt deleted file mode 100644 index 1822758bfe4..00000000000 --- a/.riot/requirements/f903257.txt +++ /dev/null @@ -1,37 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/f903257.in -# -attrs==25.3.0 -blinker==1.8.2 -click==8.1.8 -coverage[toml]==7.6.1 -exceptiongroup==1.3.0 -flask==0.12.5 -flask-cache==0.13.1 -hypothesis==6.45.0 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -itsdangerous==1.1.0 -jinja2==2.10.3 -markupsafe==1.1.1 -mock==5.2.0 -more-itertools==8.10.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -py==1.11.0 -pytest==6.2.5 -pytest-cov==3.0.0 -pytest-mock==2.0.0 -pytest-randomly==3.15.0 -python-memcached==1.62 -redis==2.10.6 -sortedcontainers==2.4.0 -toml==0.10.2 -tomli==2.2.1 -typing-extensions==4.13.2 -werkzeug==0.16.1 -zipp==3.20.2 diff --git a/.riot/requirements/f9d0e8e.txt b/.riot/requirements/f9d0e8e.txt deleted file mode 100644 index 42bc8937d56..00000000000 --- a/.riot/requirements/f9d0e8e.txt +++ /dev/null @@ -1,26 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/f9d0e8e.in -# -attrs==24.2.0 -coverage[toml]==7.6.1 -dnspython==2.6.1 -exceptiongroup==1.2.2 -hypothesis==6.45.0 -importlib-metadata==8.5.0 -iniconfig==2.0.0 -mock==5.1.0 -mongoengine==0.29.1 
-opentracing==2.4.0 -packaging==24.1 -pluggy==1.5.0 -pymongo==4.8.0 -pytest==8.3.3 -pytest-cov==5.0.0 -pytest-mock==3.14.0 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.0.1 -zipp==3.20.2 diff --git a/.riot/requirements/fadb064.txt b/.riot/requirements/fadb064.txt deleted file mode 100644 index ad51389c99f..00000000000 --- a/.riot/requirements/fadb064.txt +++ /dev/null @@ -1,28 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate --resolver=backtracking .riot/requirements/fadb064.in -# -attrs==25.3.0 -coverage[toml]==7.6.1 -exceptiongroup==1.2.2 -googleapis-common-protos==1.70.0 -grpcio==1.34.1 -hypothesis==6.45.0 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -mock==5.2.0 -opentracing==2.4.0 -packaging==24.2 -pluggy==1.5.0 -protobuf==5.29.4 -pytest==8.3.5 -pytest-asyncio==0.23.7 -pytest-cov==5.0.0 -pytest-mock==3.14.0 -pytest-randomly==3.15.0 -six==1.17.0 -sortedcontainers==2.4.0 -tomli==2.2.1 -zipp==3.20.2 diff --git a/.riot/requirements/fbab99a.txt b/.riot/requirements/fbab99a.txt deleted file mode 100644 index 6351c78934a..00000000000 --- a/.riot/requirements/fbab99a.txt +++ /dev/null @@ -1,28 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/fbab99a.in -# -async-timeout==4.0.3 -attrs==23.1.0 -click==7.1.2 -coverage[toml]==7.3.4 -exceptiongroup==1.2.0 -hypothesis==6.45.0 -importlib-metadata==7.0.1 -iniconfig==2.0.0 -mock==5.1.0 -opentracing==2.4.0 -packaging==23.2 -pluggy==1.3.0 -pytest==7.4.3 -pytest-asyncio==0.21.1 -pytest-cov==4.1.0 -pytest-mock==3.12.0 -pytest-randomly==3.15.0 -redis==5.0.1 -rq==1.10.1 -sortedcontainers==2.4.0 -tomli==2.0.1 -zipp==3.17.0 diff --git a/.riot/requirements/fd2d2d1.txt b/.riot/requirements/fd2d2d1.txt deleted file mode 100644 index 3cdc7c85224..00000000000 --- a/.riot/requirements/fd2d2d1.txt +++ /dev/null @@ -1,25 +0,0 @@ 
-# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/fd2d2d1.in -# -attrs==25.3.0 -coverage[toml]==7.6.1 -exceptiongroup==1.3.0 -hypothesis==6.45.0 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -mock==5.2.0 -msgpack==1.1.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -sortedcontainers==2.4.0 -tomli==2.2.1 -typing-extensions==4.13.2 -zipp==3.20.2 diff --git a/.riot/requirements/ff0c51d.txt b/.riot/requirements/ff0c51d.txt deleted file mode 100644 index 56853212b68..00000000000 --- a/.riot/requirements/ff0c51d.txt +++ /dev/null @@ -1,40 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/ff0c51d.in -# -annotated-types==0.7.0 -attrs==25.3.0 -blinker==1.8.2 -certifi==2025.10.5 -charset-normalizer==3.4.3 -click==8.1.8 -coverage[toml]==7.6.1 -exceptiongroup==1.3.0 -flask==3.0.3 -flask-openapi3==4.0.3 -hypothesis==6.45.0 -idna==3.10 -importlib-metadata==8.5.0 -iniconfig==2.1.0 -itsdangerous==2.2.0 -jinja2==3.1.6 -markupsafe==2.1.5 -mock==5.2.0 -opentracing==2.4.0 -packaging==25.0 -pluggy==1.5.0 -pydantic==2.10.6 -pydantic-core==2.27.2 -pytest==8.3.5 -pytest-cov==5.0.0 -pytest-mock==3.14.1 -pytest-randomly==3.15.0 -requests==2.32.4 -sortedcontainers==2.4.0 -tomli==2.2.1 -typing-extensions==4.13.2 -urllib3==1.26.20 -werkzeug==3.0.6 -zipp==3.20.2 diff --git a/ddtrace/__init__.py b/ddtrace/__init__.py index 3491da5a392..dac0448fb68 100644 --- a/ddtrace/__init__.py +++ b/ddtrace/__init__.py @@ -19,8 +19,8 @@ from ._monkey import patch # noqa: E402 from ._monkey import patch_all # noqa: E402 from .internal.compat import PYTHON_VERSION_INFO # noqa: E402 +from .internal.settings._config import config from .internal.utils.deprecations import DDTraceDeprecationWarning # noqa: E402 
-from .settings._config import config from .version import get_version # noqa: E402 @@ -39,12 +39,12 @@ def check_supported_python_version(): - if PYTHON_VERSION_INFO < (3, 9): + if PYTHON_VERSION_INFO < (3, 10): deprecation_message = ( - "Support for ddtrace with Python version %d.%d is deprecated and will be removed in 4.0.0." + "Support for ddtrace with Python version %d.%d is deprecated and will be removed in 5.0.0." ) - if PYTHON_VERSION_INFO < (3, 8): - deprecation_message = "Support for ddtrace with Python version %d.%d was removed in 3.0.0." + if PYTHON_VERSION_INFO < (3, 9): + deprecation_message = "Support for ddtrace with Python version %d.%d was removed in 4.0.0." debtcollector.deprecate( (deprecation_message % (PYTHON_VERSION_INFO[0], PYTHON_VERSION_INFO[1])), category=DDTraceDeprecationWarning, diff --git a/ddtrace/_logger.py b/ddtrace/_logger.py index 622fcaebd01..511f5516c40 100644 --- a/ddtrace/_logger.py +++ b/ddtrace/_logger.py @@ -17,18 +17,7 @@ DEFAULT_FILE_SIZE_BYTES = 15 << 20 # 15 MB -class LogInjectionState(object): - # Log injection is disabled - DISABLED = "false" - # Log injection is enabled, but not yet configured - ENABLED = "true" - # Log injection is enabled and configured for structured logging - # This value is deprecated, but kept for backwards compatibility - STRUCTURED = "structured" - - -def configure_ddtrace_logger(): - # type: () -> None +def configure_ddtrace_logger() -> None: """Configures ddtrace log levels and file paths. Customization is possible with the environment variables: @@ -110,25 +99,10 @@ def _add_file_handler( return ddtrace_file_handler -def get_log_injection_state(raw_config: Optional[str]) -> bool: - """Returns the current log injection state.""" - if raw_config: - normalized = raw_config.lower().strip() - if normalized == LogInjectionState.STRUCTURED or normalized in ("true", "1"): - return True - elif normalized not in ("false", "0"): - logging.warning( - "Invalid log injection state '%s'. 
Expected 'true', 'false', or 'structured'. Defaulting to 'false'.", - normalized, - ) - return False - - def _configure_ddtrace_native_logger(): try: from ddtrace.internal.native._native import logger - - from .settings._config import config + from ddtrace.internal.settings._config import config if config._trace_writer_native: backend = get_config("_DD_NATIVE_LOGGING_BACKEND") diff --git a/ddtrace/_monkey.py b/ddtrace/_monkey.py index 599a4dae857..f935b8b9f13 100644 --- a/ddtrace/_monkey.py +++ b/ddtrace/_monkey.py @@ -8,8 +8,8 @@ from wrapt.importer import when_imported from ddtrace.internal.compat import Path +from ddtrace.internal.settings._config import config from ddtrace.internal.telemetry.constants import TELEMETRY_NAMESPACE -from ddtrace.settings._config import config from ddtrace.vendor.debtcollector import deprecate from ddtrace.vendor.packaging.specifiers import SpecifierSet from ddtrace.vendor.packaging.version import Version @@ -38,7 +38,6 @@ "boto": True, "botocore": True, "bottle": True, - "cassandra": True, "celery": True, "consul": True, "ddtrace_api": True, @@ -47,9 +46,7 @@ "elasticsearch": True, "algoliasearch": True, "futures": True, - "freezegun": False, # deprecated, to be removed in ddtrace 4.x "google_adk": True, - "google_generativeai": True, "google_genai": True, "gevent": True, "graphql": True, @@ -58,7 +55,6 @@ "kafka": True, "langgraph": True, "litellm": True, - "mongoengine": True, "mysql": True, "mysqldb": True, "pymysql": True, @@ -155,7 +151,6 @@ "psycopg2", ), "snowflake": ("snowflake.connector",), - "cassandra": ("cassandra.cluster",), "dogpile_cache": ("dogpile.cache",), "mysqldb": ("MySQLdb",), "futures": ("concurrent.futures.thread",), @@ -167,7 +162,6 @@ "httplib": ("http.client",), "kafka": ("confluent_kafka",), "google_adk": ("google.adk",), - "google_generativeai": ("google.generativeai",), "google_genai": ("google.genai",), "langchain": ("langchain_core",), "langgraph": ( @@ -334,7 +328,7 @@ def patch_all(**patch_modules: 
bool) -> None: :param dict patch_modules: Override whether particular modules are patched or not. - >>> _patch_all(redis=False, cassandra=False) + >>> _patch_all(redis=False) """ deprecate( "patch_all is deprecated and will be removed in a future version of the tracer.", diff --git a/ddtrace/_trace/context.py b/ddtrace/_trace/context.py index f4362755977..7b07b5bfb62 100644 --- a/ddtrace/_trace/context.py +++ b/ddtrace/_trace/context.py @@ -9,8 +9,6 @@ from typing import Tuple from ddtrace._trace._span_link import SpanLink -from ddtrace._trace.types import _MetaDictType -from ddtrace._trace.types import _MetricDictType from ddtrace.constants import _ORIGIN_KEY from ddtrace.constants import _SAMPLING_PRIORITY_KEY from ddtrace.constants import _USER_ID_KEY @@ -25,8 +23,8 @@ _ContextState = Tuple[ Optional[int], # trace_id Optional[int], # span_id - _MetaDictType, # _meta - _MetricDictType, # _metrics + Dict[str, str], # _meta + Dict[str, NumericType], # _metrics List[SpanLink], # span_links Dict[str, Any], # baggage bool, # is_remote @@ -63,15 +61,15 @@ def __init__( span_id: Optional[int] = None, dd_origin: Optional[str] = None, sampling_priority: Optional[float] = None, - meta: Optional[_MetaDictType] = None, - metrics: Optional[_MetricDictType] = None, + meta: Optional[Dict[str, str]] = None, + metrics: Optional[Dict[str, NumericType]] = None, lock: Optional[threading.RLock] = None, span_links: Optional[List[SpanLink]] = None, baggage: Optional[Dict[str, Any]] = None, is_remote: bool = True, ): - self._meta: _MetaDictType = meta if meta is not None else {} - self._metrics: _MetricDictType = metrics if metrics is not None else {} + self._meta: Dict[str, str] = meta if meta is not None else {} + self._metrics: Dict[str, NumericType] = metrics if metrics is not None else {} self._baggage: Dict[str, Any] = baggage if baggage is not None else {} self.trace_id: Optional[int] = trace_id diff --git a/ddtrace/_trace/pin.py b/ddtrace/_trace/pin.py index 
2850adb4896..4edacd90e05 100644 --- a/ddtrace/_trace/pin.py +++ b/ddtrace/_trace/pin.py @@ -4,7 +4,7 @@ import ddtrace from ddtrace.internal.compat import is_wrapted -from ddtrace.settings.asm import config as asm_config +from ddtrace.internal.settings.asm import config as asm_config from ..internal.logger import get_logger diff --git a/ddtrace/_trace/processor/__init__.py b/ddtrace/_trace/processor/__init__.py index 7b1fcec816e..6e513af6d4f 100644 --- a/ddtrace/_trace/processor/__init__.py +++ b/ddtrace/_trace/processor/__init__.py @@ -26,11 +26,11 @@ from ddtrace.internal.sampling import SpanSamplingRule from ddtrace.internal.sampling import get_span_sampling_rules from ddtrace.internal.service import ServiceStatusError +from ddtrace.internal.settings._config import config +from ddtrace.internal.settings.asm import config as asm_config from ddtrace.internal.telemetry.constants import TELEMETRY_NAMESPACE from ddtrace.internal.writer import AgentResponse from ddtrace.internal.writer import create_trace_writer -from ddtrace.settings._config import config -from ddtrace.settings.asm import config as asm_config log = get_logger(__name__) diff --git a/ddtrace/_trace/processor/resource_renaming.py b/ddtrace/_trace/processor/resource_renaming.py index 6d9b16a7b42..d0647c51e55 100644 --- a/ddtrace/_trace/processor/resource_renaming.py +++ b/ddtrace/_trace/processor/resource_renaming.py @@ -8,7 +8,7 @@ from ddtrace.ext import SpanTypes from ddtrace.ext import http from ddtrace.internal.logger import get_logger -from ddtrace.settings._config import config +from ddtrace.internal.settings._config import config log = get_logger(__name__) @@ -83,4 +83,4 @@ def on_span_finish(self, span: Span): if not is_404 and (not route or config._trace_resource_renaming_always_simplified_endpoint): url = span.get_tag(http.URL) endpoint = self.simplified_endpoint_computer.from_url(url) - span.set_tag_str(http.ENDPOINT, endpoint) + span._set_tag_str(http.ENDPOINT, endpoint) diff --git 
a/ddtrace/_trace/product.py b/ddtrace/_trace/product.py index 1e709c0ac00..ec78dbc2c89 100644 --- a/ddtrace/_trace/product.py +++ b/ddtrace/_trace/product.py @@ -6,11 +6,9 @@ from envier import En from ddtrace.internal.logger import get_logger -from ddtrace.internal.utils.deprecations import DDTraceDeprecationWarning +from ddtrace.internal.settings.http import HttpConfig from ddtrace.internal.utils.formats import asbool from ddtrace.internal.utils.formats import parse_tags_str -from ddtrace.settings.http import HttpConfig -from ddtrace.vendor.debtcollector import deprecate log = get_logger(__name__) @@ -40,27 +38,13 @@ def post_preload(): def start(): if _config.enabled: - from ddtrace.settings._config import config + from ddtrace.internal.settings._config import config if config._trace_methods: from ddtrace.internal.tracemethods import _install_trace_methods _install_trace_methods(config._trace_methods) - if _config.global_tags: - from ddtrace.trace import tracer - - # ddtrace library supports setting tracer tags using both DD_TRACE_GLOBAL_TAGS and DD_TAGS - # moving forward we should only support DD_TRACE_GLOBAL_TAGS. 
- # TODO(munir): Set dd_tags here - deprecate( - "DD_TRACE_GLOBAL_TAGS is deprecated", - message="Please migrate to using DD_TAGS instead", - category=DDTraceDeprecationWarning, - removal_version="4.0.0", - ) - tracer.set_tags(_config.global_tags) - def restart(join=False): from ddtrace.trace import tracer diff --git a/ddtrace/_trace/sampler.py b/ddtrace/_trace/sampler.py index b932d7e71a2..8bd6e0a089c 100644 --- a/ddtrace/_trace/sampler.py +++ b/ddtrace/_trace/sampler.py @@ -10,7 +10,7 @@ from ddtrace._trace.span import Span from ddtrace.constants import _SAMPLING_LIMIT_DECISION -from ddtrace.settings._config import config +from ddtrace.internal.settings._config import config from ..constants import ENV_KEY from ..internal.constants import MAX_UINT_64BITS diff --git a/ddtrace/_trace/span.py b/ddtrace/_trace/span.py index caafc96bfbf..90ac383575d 100644 --- a/ddtrace/_trace/span.py +++ b/ddtrace/_trace/span.py @@ -20,9 +20,6 @@ from ddtrace._trace._span_pointer import _SpanPointerDirection from ddtrace._trace.context import Context from ddtrace._trace.types import _AttributeValueType -from ddtrace._trace.types import _MetaDictType -from ddtrace._trace.types import _MetricDictType -from ddtrace._trace.types import _TagNameType from ddtrace.constants import _SAMPLING_AGENT_DECISION from ddtrace.constants import _SAMPLING_LIMIT_DECISION from ddtrace.constants import _SAMPLING_RULE_DECISION @@ -52,11 +49,8 @@ from ddtrace.internal.constants import SPAN_API_DATADOG from ddtrace.internal.constants import SamplingMechanism from ddtrace.internal.logger import get_logger -from ddtrace.internal.utils.deprecations import DDTraceDeprecationWarning +from ddtrace.internal.settings._config import config from ddtrace.internal.utils.time import Time -from ddtrace.settings._config import config -from ddtrace.vendor.debtcollector import deprecate -from ddtrace.vendor.debtcollector import removals class SpanEvent: @@ -192,9 +186,9 @@ def __init__( self.span_type = span_type 
self._span_api = span_api - self._meta: _MetaDictType = {} + self._meta: Dict[str, str] = {} self.error = 0 - self._metrics: _MetricDictType = {} + self._metrics: Dict[str, NumericType] = {} self._meta_struct: Dict[str, Dict[str, Any]] = {} @@ -275,25 +269,6 @@ def start(self, value: Union[int, float]) -> None: def finished(self) -> bool: return self.duration_ns is not None - @finished.setter - def finished(self, value: bool) -> None: - """Finishes the span if set to a truthy value. - - If the span is already finished and a truthy value is provided - no action will occur. - """ - deprecate( - prefix="The finished setter is deprecated", - message="""Use the finish() method to finish a span.""", - category=DDTraceDeprecationWarning, - removal_version="4.0.0", - ) - if value: - if not self.finished: - self.duration_ns = Time.time_ns() - self.start_ns - else: - self.duration_ns = None - @property def duration(self) -> Optional[float]: """The span duration in seconds.""" @@ -342,7 +317,7 @@ def _set_sampling_decision_maker( self.context._meta[SAMPLING_DECISION_TRACE_TAG_KEY] = value return value - def set_tag(self, key: _TagNameType, value: Any = None) -> None: + def set_tag(self, key: str, value: Optional[str] = None) -> None: """Set a tag key/value pair on the span. Keys must be strings, values must be ``str``-able. @@ -352,11 +327,6 @@ def set_tag(self, key: _TagNameType, value: Any = None) -> None: :param value: Value to assign for the tag :type value: ``str``-able value """ - - if not isinstance(key, str): - log.warning("Ignoring tag pair %s:%s. 
Key must be a string.", key, value) - return - # Special case, force `http.status_code` as a string # DEV: `http.status_code` *has* to be in `meta` for metrics # calculated in the trace agent @@ -371,14 +341,14 @@ def set_tag(self, key: _TagNameType, value: Any = None) -> None: INT_TYPES = (net.TARGET_PORT,) if key in INT_TYPES and not val_is_an_int: try: - value = int(value) + value = int(value) # type: ignore val_is_an_int = True except (ValueError, TypeError): pass # Set integers that are less than equal to 2^53 as metrics - if value is not None and val_is_an_int and abs(value) <= 2**53: - self.set_metric(key, value) + if value is not None and val_is_an_int and abs(value) <= 2**53: # type: ignore + self.set_metric(key, value) # type: ignore return # All floats should be set as a metric @@ -402,8 +372,8 @@ def set_tag(self, key: _TagNameType, value: Any = None) -> None: # Set `_dd.measured` tag as a metric # DEV: `set_metric` will ensure it is an integer 0 or 1 if value is None: - value = 1 - self.set_metric(key, value) + value = 1 # type: ignore + self.set_metric(key, value) # type: ignore return try: @@ -420,29 +390,11 @@ def _set_struct_tag(self, key: str, value: Dict[str, Any]) -> None: """ self._meta_struct[key] = value - @removals.remove(removal_version="4.0.0") - def set_struct_tag(self, key: str, value: Dict[str, Any]) -> None: - """ - DEPRECATED - - Set a tag key/value pair on the span meta_struct - Currently it will only be exported with V4 encoding - """ - self._set_struct_tag(key, value) - def _get_struct_tag(self, key: str) -> Optional[Dict[str, Any]]: """Return the given struct or None if it doesn't exist.""" return self._meta_struct.get(key, None) - @removals.remove(removal_version="4.0.0") - def get_struct_tag(self, key: str) -> Optional[Dict[str, Any]]: - """DEPRECATED - - Return the given struct or None if it doesn't exist. 
- """ - return self._get_struct_tag(key) - - def _set_tag_str(self, key: _TagNameType, value: Text) -> None: + def _set_tag_str(self, key: str, value: str) -> None: """Set a value for a tag. Values are coerced to unicode in Python 2 and str in Python 3, with decoding errors in conversion being replaced with U+FFFD. @@ -454,20 +406,15 @@ def _set_tag_str(self, key: _TagNameType, value: Text) -> None: raise e log.warning("Failed to set text tag '%s'", key, exc_info=True) - @removals.remove(message="use Span.set_tag instead", removal_version="4.0.0") - def set_tag_str(self, key: _TagNameType, value: Text) -> None: - """Deprecated: use `set_tag` instead.""" - self._set_tag_str(key, value) - - def get_tag(self, key: _TagNameType) -> Optional[Text]: + def get_tag(self, key: str) -> Optional[str]: """Return the given tag or None if it doesn't exist.""" return self._meta.get(key, None) - def get_tags(self) -> _MetaDictType: + def get_tags(self) -> Dict[str, str]: """Return all tags.""" return self._meta.copy() - def set_tags(self, tags: Dict[_TagNameType, Any]) -> None: + def set_tags(self, tags: Dict[str, str]) -> None: """Set a dictionary of tags on the given span. Keys and values must be strings (or stringable) """ @@ -475,7 +422,7 @@ def set_tags(self, tags: Dict[_TagNameType, Any]) -> None: for k, v in iter(tags.items()): self.set_tag(k, v) - def set_metric(self, key: _TagNameType, value: NumericType) -> None: + def set_metric(self, key: str, value: NumericType) -> None: """This method sets a numeric tag value for the given key.""" # Enforce a specific constant for `_dd.measured` if key == _SPAN_MEASURED_KEY: @@ -505,7 +452,7 @@ def set_metric(self, key: _TagNameType, value: NumericType) -> None: del self._meta[key] self._metrics[key] = value - def set_metrics(self, metrics: _MetricDictType) -> None: + def set_metrics(self, metrics: Dict[str, NumericType]) -> None: """Set a dictionary of metrics on the given span. Keys must be must be strings (or stringable). 
Values must be numeric. """ @@ -513,7 +460,7 @@ def set_metrics(self, metrics: _MetricDictType) -> None: for k, v in metrics.items(): self.set_metric(k, v) - def get_metric(self, key: _TagNameType) -> Optional[NumericType]: + def get_metric(self, key: str) -> Optional[NumericType]: """Return the given metric or None if it doesn't exist.""" return self._metrics.get(key) @@ -526,7 +473,7 @@ def _add_on_finish_exception_callback(self, callback: Callable[["Span"], None]): """Add an errortracking related callback to the on_finish_callback array""" self._on_finish_callbacks.insert(0, callback) - def get_metrics(self) -> _MetricDictType: + def get_metrics(self) -> Dict[str, NumericType]: """Return all metrics.""" return self._metrics.copy() @@ -636,8 +583,6 @@ def record_exception( self, exception: BaseException, attributes: Optional[Dict[str, _AttributeValueType]] = None, - timestamp: Optional[int] = None, - escaped: bool = False, ) -> None: """ Records an exception as a span event. Multiple exceptions can be recorded on a span. @@ -646,26 +591,7 @@ def record_exception( :param attributes: Optional dictionary of additional attributes to add to the exception event. These attributes will override the default exception attributes if they contain the same keys. Valid attribute values include (homogeneous array of) strings, booleans, integers, floats. - :param timestamp: Deprecated. - :param escaped: Deprecated. 
""" - if escaped: - deprecate( - prefix="The escaped argument is deprecated for record_exception", - message="""If an exception exits the scope of the span, it will automatically be - reported in the span tags.""", - category=DDTraceDeprecationWarning, - removal_version="4.0.0", - ) - if timestamp is not None: - deprecate( - prefix="The timestamp argument is deprecated for record_exception", - message="""The timestamp of the span event should correspond to the time when the - error is recorded which is set automatically.""", - category=DDTraceDeprecationWarning, - removal_version="4.0.0", - ) - tb = self._get_traceback(type(exception), exception, exception.__traceback__) attrs: Dict[str, _AttributeValueType] = { @@ -837,15 +763,6 @@ def _finish_with_ancestors(self) -> None: span.finish() span = span._parent - @removals.remove(removal_version="4.0.0") - def finish_with_ancestors(self) -> None: - """Finish this span along with all (accessible) ancestors of this span. - - This method is useful if a sudden program shutdown is required and finishing - the trace is desired. - """ - self._finish_with_ancestors() - def __enter__(self) -> "Span": return self @@ -862,18 +779,6 @@ def __exit__( except Exception: log.exception("error closing trace") - def _pprint(self) -> str: - # Although Span._pprint has been internal to ddtrace since v1.0.0, it is still - # used to debug spans in the wild. Introducing a deprecation warning here to - # give users a chance to migrate to __repr__ before we remove it. 
- deprecate( - prefix="The _pprint method is deprecated for __repr__", - message="""Use __repr__ instead.""", - category=DDTraceDeprecationWarning, - removal_version="4.0.0", - ) - return self.__repr__() - def __repr__(self) -> str: """Return a detailed string representation of a span.""" return ( diff --git a/ddtrace/_trace/tracer.py b/ddtrace/_trace/tracer.py index 2c348fd29ac..091286187ed 100644 --- a/ddtrace/_trace/tracer.py +++ b/ddtrace/_trace/tracer.py @@ -52,15 +52,13 @@ from ddtrace.internal.processor.endpoint_call_counter import EndpointCallCounterProcessor from ddtrace.internal.runtime import get_runtime_id from ddtrace.internal.schema.processor import BaseServiceProcessor +from ddtrace.internal.settings._config import config +from ddtrace.internal.settings.asm import config as asm_config +from ddtrace.internal.settings.peer_service import _ps_config from ddtrace.internal.utils import _get_metas_to_propagate -from ddtrace.internal.utils.deprecations import DDTraceDeprecationWarning from ddtrace.internal.utils.formats import format_trace_id from ddtrace.internal.writer import AgentWriterInterface from ddtrace.internal.writer import HTTPWriter -from ddtrace.settings._config import config -from ddtrace.settings.asm import config as asm_config -from ddtrace.settings.peer_service import _ps_config -from ddtrace.vendor.debtcollector.removals import remove from ddtrace.version import get_version @@ -201,37 +199,6 @@ def _atexit(self) -> None: ) self.shutdown(timeout=self.SHUTDOWN_TIMEOUT) - @remove( - message="on_start_span is being removed with no replacement", - removal_version="4.0.0", - category=DDTraceDeprecationWarning, - ) - def on_start_span(self, func: Callable[[Span], None]) -> Callable[[Span], None]: - """Register a function to execute when a span start. - - Can be used as a decorator. - - :param func: The function to call when starting a span. - The started span will be passed as argument. 
- """ - core.on("trace.span_start", callback=func) - return func - - @remove( - message="deregister_on_start_span is being removed with no replacement", - removal_version="4.0.0", - category=DDTraceDeprecationWarning, - ) - def deregister_on_start_span(self, func: Callable[[Span], None]) -> Callable[[Span], None]: - """Unregister a function registered to execute when a span starts. - - Can be used as a decorator. - - :param func: The function to stop calling when starting a span. - """ - core.reset_listeners("trace.span_start", callback=func) - return func - def sample(self, span): self._sampler.sample(span) diff --git a/ddtrace/_trace/types.py b/ddtrace/_trace/types.py index f021420acde..21b2fc5e7af 100644 --- a/ddtrace/_trace/types.py +++ b/ddtrace/_trace/types.py @@ -1,14 +1,7 @@ -from typing import Dict from typing import Sequence -from typing import Text from typing import Union -from ddtrace.internal.compat import NumericType - -_TagNameType = Union[Text, bytes] -_MetaDictType = Dict[_TagNameType, Text] -_MetricDictType = Dict[_TagNameType, NumericType] _AttributeValueType = Union[ str, bool, diff --git a/ddtrace/_trace/utils_botocore/aws_payload_tagging.py b/ddtrace/_trace/utils_botocore/aws_payload_tagging.py index 12aeaf94346..af8cee7a833 100644 --- a/ddtrace/_trace/utils_botocore/aws_payload_tagging.py +++ b/ddtrace/_trace/utils_botocore/aws_payload_tagging.py @@ -198,7 +198,7 @@ def _tag_object(self, span: Span, key: str, obj: Any, depth: int = 0) -> None: """ # if we've hit the maximum allowed tags, mark the expansion as incomplete if self.current_tag_count >= config.botocore.get("payload_tagging_max_tags"): - span.set_tag(self._INCOMPLETE_TAG, True) + span.set_tag(self._INCOMPLETE_TAG, "True") return if obj is None: self.current_tag_count += 1 diff --git a/ddtrace/_version.py b/ddtrace/_version.py new file mode 100644 index 00000000000..9fc278ef97a --- /dev/null +++ b/ddtrace/_version.py @@ -0,0 +1,34 @@ +# file generated by setuptools-scm +# don't 
change, don't track in version control + +__all__ = [ + "__version__", + "__version_tuple__", + "version", + "version_tuple", + "__commit_id__", + "commit_id", +] + +TYPE_CHECKING = False +if TYPE_CHECKING: + from typing import Tuple + from typing import Union + + VERSION_TUPLE = Tuple[Union[int, str], ...] + COMMIT_ID = Union[str, None] +else: + VERSION_TUPLE = object + COMMIT_ID = object + +version: str +__version__: str +__version_tuple__: VERSION_TUPLE +version_tuple: VERSION_TUPLE +commit_id: COMMIT_ID +__commit_id__: COMMIT_ID + +__version__ = version = "4.0.0.dev0" +__version_tuple__ = version_tuple = (4, 0, 0, "dev0", "") + +# __commit_id__ = commit_id = 'g5db831a3e' diff --git a/ddtrace/appsec/_ai_guard/__init__.py b/ddtrace/appsec/_ai_guard/__init__.py index 0cbda1713ef..925b7277378 100644 --- a/ddtrace/appsec/_ai_guard/__init__.py +++ b/ddtrace/appsec/_ai_guard/__init__.py @@ -1,5 +1,5 @@ import ddtrace.internal.logger as ddlogger -from ddtrace.settings.asm import ai_guard_config +from ddtrace.internal.settings.asm import ai_guard_config logger = ddlogger.get_logger(__name__) diff --git a/ddtrace/appsec/_api_security/api_manager.py b/ddtrace/appsec/_api_security/api_manager.py index de1888cc54c..12b01f1509d 100644 --- a/ddtrace/appsec/_api_security/api_manager.py +++ b/ddtrace/appsec/_api_security/api_manager.py @@ -16,7 +16,7 @@ from ddtrace.ext import http from ddtrace.internal import logger as ddlogger from ddtrace.internal.service import Service -from ddtrace.settings.asm import config as asm_config +from ddtrace.internal.settings.asm import config as asm_config log = ddlogger.get_logger(__name__) diff --git a/ddtrace/appsec/_asm_request_context.py b/ddtrace/appsec/_asm_request_context.py index 8360794eff7..51f0957658b 100644 --- a/ddtrace/appsec/_asm_request_context.py +++ b/ddtrace/appsec/_asm_request_context.py @@ -26,7 +26,7 @@ from ddtrace.internal._exceptions import BlockingException from ddtrace.internal.constants import REQUEST_PATH_PARAMS 
import ddtrace.internal.logger as ddlogger -from ddtrace.settings.asm import config as asm_config +from ddtrace.internal.settings.asm import config as asm_config if TYPE_CHECKING: diff --git a/ddtrace/appsec/_capabilities.py b/ddtrace/appsec/_capabilities.py index 116fbfe7345..f642d987965 100644 --- a/ddtrace/appsec/_capabilities.py +++ b/ddtrace/appsec/_capabilities.py @@ -1,8 +1,8 @@ import base64 import enum -from ddtrace.settings._config import config -from ddtrace.settings.asm import config as asm_config +from ddtrace.internal.settings._config import config +from ddtrace.internal.settings.asm import config as asm_config class Flags(enum.IntFlag): diff --git a/ddtrace/appsec/_common_module_patches.py b/ddtrace/appsec/_common_module_patches.py index 51f230d466e..a51db937ebc 100644 --- a/ddtrace/appsec/_common_module_patches.py +++ b/ddtrace/appsec/_common_module_patches.py @@ -23,7 +23,7 @@ from ddtrace.internal._unpatched import _gc as gc from ddtrace.internal.logger import get_logger from ddtrace.internal.module import ModuleWatchdog -from ddtrace.settings.asm import config as asm_config +from ddtrace.internal.settings.asm import config as asm_config log = get_logger(__name__) diff --git a/ddtrace/appsec/_ddwaf/__init__.py b/ddtrace/appsec/_ddwaf/__init__.py index 5ec5148a3ab..39a014f9e42 100644 --- a/ddtrace/appsec/_ddwaf/__init__.py +++ b/ddtrace/appsec/_ddwaf/__init__.py @@ -5,7 +5,7 @@ from ddtrace.appsec._utils import DDWaf_info from ddtrace.appsec._utils import DDWaf_result from ddtrace.internal.logger import get_logger -from ddtrace.settings.asm import config as asm_config +from ddtrace.internal.settings.asm import config as asm_config __all__ = ["DDWaf", "DDWaf_info", "DDWaf_result", "version", "DDWafRulesType"] diff --git a/ddtrace/appsec/_ddwaf/ddwaf_types.py b/ddtrace/appsec/_ddwaf/ddwaf_types.py index 30af95b1b1c..ba5bd95ce6c 100644 --- a/ddtrace/appsec/_ddwaf/ddwaf_types.py +++ b/ddtrace/appsec/_ddwaf/ddwaf_types.py @@ -17,7 +17,7 @@ from 
ddtrace.appsec._utils import _observator from ddtrace.appsec._utils import unpatching_popen from ddtrace.internal.logger import get_logger -from ddtrace.settings.asm import config as asm_config +from ddtrace.internal.settings.asm import config as asm_config DDWafRulesType = Union[None, int, str, List[Any], Dict[str, Any]] diff --git a/ddtrace/appsec/_deduplications.py b/ddtrace/appsec/_deduplications.py index 59a76b0670d..f61fedacdc2 100644 --- a/ddtrace/appsec/_deduplications.py +++ b/ddtrace/appsec/_deduplications.py @@ -1,7 +1,7 @@ from collections import OrderedDict from time import monotonic -from ddtrace.settings.asm import config as asm_config +from ddtrace.internal.settings.asm import config as asm_config M_INF = float("-inf") diff --git a/ddtrace/appsec/_exploit_prevention/stack_traces.py b/ddtrace/appsec/_exploit_prevention/stack_traces.py index e4711a16385..a2262ddc835 100644 --- a/ddtrace/appsec/_exploit_prevention/stack_traces.py +++ b/ddtrace/appsec/_exploit_prevention/stack_traces.py @@ -9,7 +9,7 @@ from ddtrace.appsec import _asm_request_context from ddtrace.appsec._constants import STACK_TRACE from ddtrace.internal import core -from ddtrace.settings.asm import config as asm_config +from ddtrace.internal.settings.asm import config as asm_config def report_stack( diff --git a/ddtrace/appsec/_handlers.py b/ddtrace/appsec/_handlers.py index 9c9e8ea1852..ac7e1cdc3f9 100644 --- a/ddtrace/appsec/_handlers.py +++ b/ddtrace/appsec/_handlers.py @@ -26,9 +26,9 @@ from ddtrace.internal import telemetry from ddtrace.internal.constants import RESPONSE_HEADERS from ddtrace.internal.logger import get_logger +from ddtrace.internal.settings.asm import config as asm_config from ddtrace.internal.utils import http as http_utils from ddtrace.internal.utils.http import parse_form_multipart -from ddtrace.settings.asm import config as asm_config import ddtrace.vendor.xmltodict as xmltodict diff --git a/ddtrace/appsec/_iast/__init__.py b/ddtrace/appsec/_iast/__init__.py 
index e105bf5f60f..eb3bec683d4 100644 --- a/ddtrace/appsec/_iast/__init__.py +++ b/ddtrace/appsec/_iast/__init__.py @@ -35,7 +35,7 @@ def wrapped_function(wrapped, instance, args, kwargs): from ddtrace.internal import forksafe from ddtrace.internal.logger import get_logger from ddtrace.internal.module import ModuleWatchdog -from ddtrace.settings.asm import config as asm_config +from ddtrace.internal.settings.asm import config as asm_config from ._listener import iast_listen from ._overhead_control_engine import oce diff --git a/ddtrace/appsec/_iast/_ast/ast_patching.py b/ddtrace/appsec/_iast/_ast/ast_patching.py index 284cdacd206..47651c08047 100644 --- a/ddtrace/appsec/_iast/_ast/ast_patching.py +++ b/ddtrace/appsec/_iast/_ast/ast_patching.py @@ -14,8 +14,8 @@ from ddtrace.appsec._iast._logs import iast_instrumentation_ast_patching_debug_log from ddtrace.internal.logger import get_logger from ddtrace.internal.module import origin +from ddtrace.internal.settings.asm import config as asm_config from ddtrace.internal.utils.formats import asbool -from ddtrace.settings.asm import config as asm_config from .visitor import AstVisitor diff --git a/ddtrace/appsec/_iast/_ast/visitor.py b/ddtrace/appsec/_iast/_ast/visitor.py index a7e1474f5f9..0df51cba7fe 100644 --- a/ddtrace/appsec/_iast/_ast/visitor.py +++ b/ddtrace/appsec/_iast/_ast/visitor.py @@ -392,7 +392,6 @@ def find_insert_position(module_node: ast.Module) -> int: @staticmethod def _none_constant(from_node: Any) -> Any: # noqa: B008 - # 3.8+ return ast.Constant( lineno=from_node.lineno, col_offset=from_node.col_offset, @@ -863,17 +862,6 @@ def visit_Subscript(self, subscr_node: ast.Subscript) -> Any: call_node.func.attr = aspect_split[1] call_node.func.value.id = aspect_split[0] call_node.args.extend([subscr_node.value, subscr_node.slice]) - # TODO: python 3.8 isn't working correctly with index_aspect, tests raise: - # corrupted size vs. 
prev_size in fastbins - # Test failed with exit code -6 - # https://app.circleci.com/pipelines/github/DataDog/dd-trace-py/46665/workflows/3cf1257c-feaf-4653-bb9c-fb840baa1776/jobs/3031799 - # elif isinstance(subscr_node.slice, ast.Index): - # if self._is_string_node(subscr_node.slice.value): # type: ignore[attr-defined] - # return subscr_node - # aspect_split = self._aspect_index.split(".") - # call_node.func.attr = aspect_split[1] - # call_node.func.value.id = aspect_split[0] - # call_node.args.extend([subscr_node.value, subscr_node.slice.value]) # type: ignore[attr-defined] else: return subscr_node diff --git a/ddtrace/appsec/_iast/_evidence_redaction/_sensitive_handler.py b/ddtrace/appsec/_iast/_evidence_redaction/_sensitive_handler.py index 3fa804c68a5..d1d52fea0d0 100644 --- a/ddtrace/appsec/_iast/_evidence_redaction/_sensitive_handler.py +++ b/ddtrace/appsec/_iast/_evidence_redaction/_sensitive_handler.py @@ -2,7 +2,7 @@ import string from ddtrace.internal.logger import get_logger -from ddtrace.settings.asm import config as asm_config +from ddtrace.internal.settings.asm import config as asm_config from .._utils import _get_source_index from ..constants import VULN_CMDI diff --git a/ddtrace/appsec/_iast/_handlers.py b/ddtrace/appsec/_iast/_handlers.py index 3ccc3c34c86..33631c388a3 100644 --- a/ddtrace/appsec/_iast/_handlers.py +++ b/ddtrace/appsec/_iast/_handlers.py @@ -22,7 +22,7 @@ from ddtrace.appsec._iast.secure_marks.sanitizers import cmdi_sanitizer from ddtrace.internal import core from ddtrace.internal.logger import get_logger -from ddtrace.settings.asm import config as asm_config +from ddtrace.internal.settings.asm import config as asm_config MessageMapContainer = None diff --git a/ddtrace/appsec/_iast/_iast_request_context.py b/ddtrace/appsec/_iast/_iast_request_context.py index 65d88b2fbe9..4d17aec48c1 100644 --- a/ddtrace/appsec/_iast/_iast_request_context.py +++ b/ddtrace/appsec/_iast/_iast_request_context.py @@ -17,7 +17,7 @@ from 
ddtrace.constants import _ORIGIN_KEY from ddtrace.internal import core from ddtrace.internal.logger import get_logger -from ddtrace.settings.asm import config as asm_config +from ddtrace.internal.settings.asm import config as asm_config log = get_logger(__name__) diff --git a/ddtrace/appsec/_iast/_iast_request_context_base.py b/ddtrace/appsec/_iast/_iast_request_context_base.py index 7110518945a..f36f947db0b 100644 --- a/ddtrace/appsec/_iast/_iast_request_context_base.py +++ b/ddtrace/appsec/_iast/_iast_request_context_base.py @@ -12,7 +12,7 @@ from ddtrace.appsec._iast.sampling.vulnerability_detection import update_global_vulnerability_limit from ddtrace.internal import core from ddtrace.internal.logger import get_logger -from ddtrace.settings.asm import config as asm_config +from ddtrace.internal.settings.asm import config as asm_config log = get_logger(__name__) diff --git a/ddtrace/appsec/_iast/_langchain.py b/ddtrace/appsec/_iast/_langchain.py index 49456bf370c..d169c6e49ed 100644 --- a/ddtrace/appsec/_iast/_langchain.py +++ b/ddtrace/appsec/_iast/_langchain.py @@ -4,8 +4,8 @@ from ddtrace.appsec._iast._taint_tracking._taint_objects_base import get_tainted_ranges from ddtrace.contrib.internal.trace_utils import unwrap from ddtrace.contrib.internal.trace_utils import wrap +from ddtrace.internal.settings.asm import config as asm_config from ddtrace.internal.utils import get_argument_value -from ddtrace.settings.asm import config as asm_config def langchain_listen(core): diff --git a/ddtrace/appsec/_iast/_loader.py b/ddtrace/appsec/_iast/_loader.py index cef1a02d499..a1f77ee16d3 100644 --- a/ddtrace/appsec/_iast/_loader.py +++ b/ddtrace/appsec/_iast/_loader.py @@ -1,6 +1,6 @@ from ddtrace.appsec._iast._logs import iast_compiling_debug_log from ddtrace.internal.logger import get_logger -from ddtrace.settings.asm import config as asm_config +from ddtrace.internal.settings.asm import config as asm_config from ._ast.ast_patching import astpatch_module diff --git 
a/ddtrace/appsec/_iast/_logs.py b/ddtrace/appsec/_iast/_logs.py index 5c07099d940..daf506bfd9e 100644 --- a/ddtrace/appsec/_iast/_logs.py +++ b/ddtrace/appsec/_iast/_logs.py @@ -2,7 +2,7 @@ from ddtrace.appsec._iast._metrics import _set_iast_error_metric from ddtrace.internal.logger import get_logger -from ddtrace.settings.asm import config as asm_config +from ddtrace.internal.settings.asm import config as asm_config log = get_logger(__name__) diff --git a/ddtrace/appsec/_iast/_metrics.py b/ddtrace/appsec/_iast/_metrics.py index a27a0355c95..a7a59e62432 100644 --- a/ddtrace/appsec/_iast/_metrics.py +++ b/ddtrace/appsec/_iast/_metrics.py @@ -12,8 +12,8 @@ from ddtrace.appsec._iast._utils import _is_iast_debug_enabled from ddtrace.internal import telemetry from ddtrace.internal.logger import get_logger +from ddtrace.internal.settings.asm import config as asm_config from ddtrace.internal.telemetry.constants import TELEMETRY_NAMESPACE -from ddtrace.settings.asm import config as asm_config log = get_logger(__name__) diff --git a/ddtrace/appsec/_iast/_overhead_control_engine.py b/ddtrace/appsec/_iast/_overhead_control_engine.py index e9e1c00927d..bd2a1ddca9b 100644 --- a/ddtrace/appsec/_iast/_overhead_control_engine.py +++ b/ddtrace/appsec/_iast/_overhead_control_engine.py @@ -8,7 +8,7 @@ from ddtrace.appsec._iast._utils import _is_iast_debug_enabled from ddtrace.internal._unpatched import _threading as threading from ddtrace.internal.logger import get_logger -from ddtrace.settings.asm import config as asm_config +from ddtrace.internal.settings.asm import config as asm_config log = get_logger(__name__) diff --git a/ddtrace/appsec/_iast/_patch_modules.py b/ddtrace/appsec/_iast/_patch_modules.py index 6a903a45a14..4b66bc3a27c 100644 --- a/ddtrace/appsec/_iast/_patch_modules.py +++ b/ddtrace/appsec/_iast/_patch_modules.py @@ -29,7 +29,7 @@ from ddtrace.appsec._iast.secure_marks.sanitizers import create_sanitizer from ddtrace.appsec._iast.secure_marks.validators import 
create_validator from ddtrace.internal.logger import get_logger -from ddtrace.settings.asm import config as asm_config +from ddtrace.internal.settings.asm import config as asm_config log = get_logger(__name__) diff --git a/ddtrace/appsec/_iast/_patches/json_tainting.py b/ddtrace/appsec/_iast/_patches/json_tainting.py index 2c3ca903645..6a53ed8b735 100644 --- a/ddtrace/appsec/_iast/_patches/json_tainting.py +++ b/ddtrace/appsec/_iast/_patches/json_tainting.py @@ -2,7 +2,7 @@ from ddtrace.appsec._iast._iast_request_context_base import is_iast_request_enabled from ddtrace.internal.logger import get_logger -from ddtrace.settings.asm import config as asm_config +from ddtrace.internal.settings.asm import config as asm_config from ..._constants import IAST from .._patch_modules import WrapFunctonsForIAST diff --git a/ddtrace/appsec/_iast/_pytest_plugin.py b/ddtrace/appsec/_iast/_pytest_plugin.py index 1ca1ad2dbc1..49f56a35322 100644 --- a/ddtrace/appsec/_iast/_pytest_plugin.py +++ b/ddtrace/appsec/_iast/_pytest_plugin.py @@ -6,7 +6,7 @@ from ddtrace.appsec._constants import IAST from ddtrace.appsec._iast.reporter import Vulnerability from ddtrace.internal.logger import get_logger -from ddtrace.settings.asm import config as asm_config +from ddtrace.internal.settings.asm import config as asm_config log = get_logger(__name__) @@ -31,7 +31,7 @@ def ddtrace_iast(request, ddspan): return # looking for IAST data in the span - dict_data = ddspan.get_struct_tag(IAST.STRUCT) + dict_data = ddspan._get_struct_tag(IAST.STRUCT) if dict_data is None: data = ddspan.get_tag(IAST.JSON) if data is None: diff --git a/ddtrace/appsec/_iast/_taint_tracking/__init__.py b/ddtrace/appsec/_iast/_taint_tracking/__init__.py index 527516dd4de..0c2d7902c61 100644 --- a/ddtrace/appsec/_iast/_taint_tracking/__init__.py +++ b/ddtrace/appsec/_iast/_taint_tracking/__init__.py @@ -1,8 +1,8 @@ from ddtrace.appsec._iast._taint_tracking._native import ops # noqa: F401 from 
ddtrace.appsec._iast._taint_tracking._native.aspect_format import _format_aspect # noqa: F401 -from ddtrace.appsec._iast._taint_tracking._native.aspect_helpers import ( - _convert_escaped_text_to_tainted_text, -) # noqa: F401 +from ddtrace.appsec._iast._taint_tracking._native.aspect_helpers import _convert_escaped_text_to_tainted_text + +# noqa: F401 from ddtrace.appsec._iast._taint_tracking._native.aspect_helpers import are_all_text_all_ranges # noqa: F401 from ddtrace.appsec._iast._taint_tracking._native.aspect_helpers import as_formatted_evidence # noqa: F401 from ddtrace.appsec._iast._taint_tracking._native.aspect_helpers import common_replace # noqa: F401 diff --git a/ddtrace/appsec/_iast/_taint_utils.py b/ddtrace/appsec/_iast/_taint_utils.py index f5e3622c60c..9077d209297 100644 --- a/ddtrace/appsec/_iast/_taint_utils.py +++ b/ddtrace/appsec/_iast/_taint_utils.py @@ -9,7 +9,7 @@ from ddtrace.appsec._iast._taint_tracking._taint_objects import taint_pyobject from ddtrace.appsec._iast._taint_tracking._taint_objects_base import is_pyobject_tainted from ddtrace.internal.logger import get_logger -from ddtrace.settings.asm import config as asm_config +from ddtrace.internal.settings.asm import config as asm_config DBAPI_PREFIXES = ("django-",) diff --git a/ddtrace/appsec/_iast/_utils.py b/ddtrace/appsec/_iast/_utils.py index e2c2dbec836..54c98110667 100644 --- a/ddtrace/appsec/_iast/_utils.py +++ b/ddtrace/appsec/_iast/_utils.py @@ -1,6 +1,6 @@ from typing import List -from ddtrace.settings.asm import config as asm_config +from ddtrace.internal.settings.asm import config as asm_config def _get_source_index(sources: List, source) -> int: diff --git a/ddtrace/appsec/_iast/main.py b/ddtrace/appsec/_iast/main.py index fd1c9140746..057e9dd7e4f 100644 --- a/ddtrace/appsec/_iast/main.py +++ b/ddtrace/appsec/_iast/main.py @@ -42,7 +42,7 @@ from ddtrace.appsec._iast.taint_sinks.weak_hash import patch as weak_hash_patch from ddtrace.appsec._iast.taint_sinks.xss import patch as 
xss_patch from ddtrace.internal.logger import get_logger -from ddtrace.settings.asm import config as asm_config +from ddtrace.internal.settings.asm import config as asm_config log = get_logger(__name__) diff --git a/ddtrace/appsec/_iast/sampling/vulnerability_detection.py b/ddtrace/appsec/_iast/sampling/vulnerability_detection.py index 174a972f14a..035239ec79b 100644 --- a/ddtrace/appsec/_iast/sampling/vulnerability_detection.py +++ b/ddtrace/appsec/_iast/sampling/vulnerability_detection.py @@ -3,7 +3,7 @@ from ddtrace.appsec._iast._iast_env import _get_iast_env from ddtrace.internal.logger import get_logger -from ddtrace.settings.asm import config as asm_config +from ddtrace.internal.settings.asm import config as asm_config log = get_logger(__name__) diff --git a/ddtrace/appsec/_iast/secure_marks/configuration.py b/ddtrace/appsec/_iast/secure_marks/configuration.py index 1b62b5ff6cc..1cd54137723 100644 --- a/ddtrace/appsec/_iast/secure_marks/configuration.py +++ b/ddtrace/appsec/_iast/secure_marks/configuration.py @@ -13,7 +13,7 @@ from ddtrace.appsec._iast._taint_tracking import VulnerabilityType from ddtrace.internal.logger import get_logger -from ddtrace.settings.asm import config as asm_config +from ddtrace.internal.settings.asm import config as asm_config log = get_logger(__name__) diff --git a/ddtrace/appsec/_iast/taint_sinks/_base.py b/ddtrace/appsec/_iast/taint_sinks/_base.py index 9fcf1235482..43eff627e03 100644 --- a/ddtrace/appsec/_iast/taint_sinks/_base.py +++ b/ddtrace/appsec/_iast/taint_sinks/_base.py @@ -13,7 +13,7 @@ from ddtrace.appsec._trace_utils import _asm_manual_keep from ddtrace.internal import core from ddtrace.internal.logger import get_logger -from ddtrace.settings.asm import config as asm_config +from ddtrace.internal.settings.asm import config as asm_config from ..._constants import IAST from ..._constants import IAST_SPAN_TAGS diff --git a/ddtrace/appsec/_iast/taint_sinks/code_injection.py 
b/ddtrace/appsec/_iast/taint_sinks/code_injection.py index e55311a7f8f..45e19f63545 100644 --- a/ddtrace/appsec/_iast/taint_sinks/code_injection.py +++ b/ddtrace/appsec/_iast/taint_sinks/code_injection.py @@ -13,7 +13,7 @@ from ddtrace.appsec._iast.constants import VULN_CODE_INJECTION from ddtrace.appsec._iast.taint_sinks._base import VulnerabilityBase from ddtrace.internal.logger import get_logger -from ddtrace.settings.asm import config as asm_config +from ddtrace.internal.settings.asm import config as asm_config log = get_logger(__name__) diff --git a/ddtrace/appsec/_iast/taint_sinks/header_injection.py b/ddtrace/appsec/_iast/taint_sinks/header_injection.py index 7d4ab9acc26..30987773555 100644 --- a/ddtrace/appsec/_iast/taint_sinks/header_injection.py +++ b/ddtrace/appsec/_iast/taint_sinks/header_injection.py @@ -71,7 +71,7 @@ from ddtrace.appsec._iast.taint_sinks._base import VulnerabilityBase from ddtrace.appsec._iast.taint_sinks.unvalidated_redirect import _iast_report_unvalidated_redirect from ddtrace.internal.logger import get_logger -from ddtrace.settings.asm import config as asm_config +from ddtrace.internal.settings.asm import config as asm_config log = get_logger(__name__) diff --git a/ddtrace/appsec/_iast/taint_sinks/insecure_cookie.py b/ddtrace/appsec/_iast/taint_sinks/insecure_cookie.py index 9dd0e5f5022..45a43b9dbba 100644 --- a/ddtrace/appsec/_iast/taint_sinks/insecure_cookie.py +++ b/ddtrace/appsec/_iast/taint_sinks/insecure_cookie.py @@ -13,7 +13,7 @@ from ddtrace.appsec._iast.constants import VULN_NO_SAMESITE_COOKIE from ddtrace.appsec._iast.sampling.vulnerability_detection import should_process_vulnerability from ddtrace.appsec._iast.taint_sinks._base import VulnerabilityBase -from ddtrace.settings.asm import config as asm_config +from ddtrace.internal.settings.asm import config as asm_config class InsecureCookie(VulnerabilityBase): diff --git a/ddtrace/appsec/_iast/taint_sinks/sql_injection.py 
b/ddtrace/appsec/_iast/taint_sinks/sql_injection.py index 762d580bb59..bba3847f764 100644 --- a/ddtrace/appsec/_iast/taint_sinks/sql_injection.py +++ b/ddtrace/appsec/_iast/taint_sinks/sql_injection.py @@ -9,7 +9,7 @@ from ddtrace.appsec._iast.constants import DBAPI_INTEGRATIONS from ddtrace.appsec._iast.constants import VULN_SQL_INJECTION from ddtrace.appsec._iast.taint_sinks._base import VulnerabilityBase -from ddtrace.settings.asm import config as asm_config +from ddtrace.internal.settings.asm import config as asm_config class SqlInjection(VulnerabilityBase): diff --git a/ddtrace/appsec/_iast/taint_sinks/untrusted_serialization.py b/ddtrace/appsec/_iast/taint_sinks/untrusted_serialization.py index b878663ecdf..6acee4b5647 100644 --- a/ddtrace/appsec/_iast/taint_sinks/untrusted_serialization.py +++ b/ddtrace/appsec/_iast/taint_sinks/untrusted_serialization.py @@ -12,7 +12,7 @@ from ddtrace.appsec._iast.constants import VULN_UNTRUSTED_SERIALIZATION from ddtrace.appsec._iast.taint_sinks._base import VulnerabilityBase from ddtrace.internal.logger import get_logger -from ddtrace.settings.asm import config as asm_config +from ddtrace.internal.settings.asm import config as asm_config log = get_logger(__name__) diff --git a/ddtrace/appsec/_iast/taint_sinks/unvalidated_redirect.py b/ddtrace/appsec/_iast/taint_sinks/unvalidated_redirect.py index 551e3123ab0..300545899cb 100644 --- a/ddtrace/appsec/_iast/taint_sinks/unvalidated_redirect.py +++ b/ddtrace/appsec/_iast/taint_sinks/unvalidated_redirect.py @@ -14,8 +14,8 @@ from ddtrace.appsec._iast.secure_marks.base import add_secure_mark from ddtrace.appsec._iast.taint_sinks._base import VulnerabilityBase from ddtrace.internal.logger import get_logger +from ddtrace.internal.settings.asm import config as asm_config from ddtrace.internal.utils import get_argument_value -from ddtrace.settings.asm import config as asm_config log = get_logger(__name__) diff --git a/ddtrace/appsec/_iast/taint_sinks/weak_cipher.py 
b/ddtrace/appsec/_iast/taint_sinks/weak_cipher.py index 5bab4769876..ee4a040c59c 100644 --- a/ddtrace/appsec/_iast/taint_sinks/weak_cipher.py +++ b/ddtrace/appsec/_iast/taint_sinks/weak_cipher.py @@ -13,7 +13,7 @@ from ddtrace.appsec._iast.constants import RC4_DEF from ddtrace.appsec._iast.constants import VULN_WEAK_CIPHER_TYPE from ddtrace.internal.logger import get_logger -from ddtrace.settings.asm import config as asm_config +from ddtrace.internal.settings.asm import config as asm_config from .._logs import iast_error from .._metrics import _set_metric_iast_executed_sink diff --git a/ddtrace/appsec/_iast/taint_sinks/weak_hash.py b/ddtrace/appsec/_iast/taint_sinks/weak_hash.py index 8f44a6a5f5f..dc9609cfbda 100644 --- a/ddtrace/appsec/_iast/taint_sinks/weak_hash.py +++ b/ddtrace/appsec/_iast/taint_sinks/weak_hash.py @@ -4,7 +4,7 @@ from typing import Set from ddtrace.internal.logger import get_logger -from ddtrace.settings.asm import config as asm_config +from ddtrace.internal.settings.asm import config as asm_config from ..._common_module_patches import try_unwrap from ..._constants import IAST_SPAN_TAGS diff --git a/ddtrace/appsec/_iast/taint_sinks/xss.py b/ddtrace/appsec/_iast/taint_sinks/xss.py index 29e486ccd0c..db1077fe08b 100644 --- a/ddtrace/appsec/_iast/taint_sinks/xss.py +++ b/ddtrace/appsec/_iast/taint_sinks/xss.py @@ -13,7 +13,7 @@ from ddtrace.appsec._iast.taint_sinks._base import VulnerabilityBase from ddtrace.internal.logger import get_logger from ddtrace.internal.module import ModuleWatchdog -from ddtrace.settings.asm import config as asm_config +from ddtrace.internal.settings.asm import config as asm_config log = get_logger(__name__) diff --git a/ddtrace/appsec/_listeners.py b/ddtrace/appsec/_listeners.py index c3d6f443f06..42ca8ba883a 100644 --- a/ddtrace/appsec/_listeners.py +++ b/ddtrace/appsec/_listeners.py @@ -1,7 +1,7 @@ import sys from ddtrace.internal import core -from ddtrace.settings.asm import config as asm_config +from 
ddtrace.internal.settings.asm import config as asm_config _APPSEC_TO_BE_LOADED = True @@ -43,7 +43,7 @@ def load_appsec() -> None: def load_common_appsec_modules(): """Lazily load the common module patches.""" - from ddtrace.settings.asm import config as asm_config + from ddtrace.internal.settings.asm import config as asm_config if asm_config._load_modules: from ddtrace.appsec._common_module_patches import patch_common_modules diff --git a/ddtrace/appsec/_processor.py b/ddtrace/appsec/_processor.py index 1e81cb15125..f09944d351d 100644 --- a/ddtrace/appsec/_processor.py +++ b/ddtrace/appsec/_processor.py @@ -43,7 +43,7 @@ from ddtrace.internal.logger import get_logger from ddtrace.internal.rate_limiter import RateLimiter from ddtrace.internal.remoteconfig import PayloadType -from ddtrace.settings.asm import config as asm_config +from ddtrace.internal.settings.asm import config as asm_config log = get_logger(__name__) diff --git a/ddtrace/appsec/_remoteconfiguration.py b/ddtrace/appsec/_remoteconfiguration.py index b169a8ec86e..a49c574a25e 100644 --- a/ddtrace/appsec/_remoteconfiguration.py +++ b/ddtrace/appsec/_remoteconfiguration.py @@ -19,9 +19,9 @@ from ddtrace.internal.remoteconfig._pubsub import PubSub from ddtrace.internal.remoteconfig._subscribers import RemoteConfigSubscriber from ddtrace.internal.remoteconfig.worker import remoteconfig_poller +from ddtrace.internal.settings.asm import config as asm_config from ddtrace.internal.telemetry import telemetry_writer from ddtrace.internal.telemetry.constants import TELEMETRY_APM_PRODUCT -from ddtrace.settings.asm import config as asm_config from ddtrace.trace import Tracer from ddtrace.trace import tracer diff --git a/ddtrace/appsec/_trace_utils.py b/ddtrace/appsec/_trace_utils.py index 83d47a0159d..9bda7c89cff 100644 --- a/ddtrace/appsec/_trace_utils.py +++ b/ddtrace/appsec/_trace_utils.py @@ -18,7 +18,7 @@ from ddtrace.internal import core from ddtrace.internal._exceptions import BlockingException from 
ddtrace.internal.logger import get_logger -from ddtrace.settings.asm import config as asm_config +from ddtrace.internal.settings.asm import config as asm_config log = get_logger(__name__) diff --git a/ddtrace/appsec/_utils.py b/ddtrace/appsec/_utils.py index b34b2f09c8a..ba44b628c1c 100644 --- a/ddtrace/appsec/_utils.py +++ b/ddtrace/appsec/_utils.py @@ -16,7 +16,7 @@ from ddtrace.contrib.internal.trace_utils_base import _get_header_value_case_insensitive from ddtrace.internal._unpatched import unpatched_json_loads from ddtrace.internal.logger import get_logger -from ddtrace.settings.asm import config as asm_config +from ddtrace.internal.settings.asm import config as asm_config log = get_logger(__name__) @@ -350,13 +350,13 @@ def get_user_info(self, login=False, email=False, name=False): def has_triggers(span) -> bool: if asm_config._use_metastruct_for_triggers: - return (span.get_struct_tag(APPSEC.STRUCT) or {}).get("triggers", None) is not None + return (span._get_struct_tag(APPSEC.STRUCT) or {}).get("triggers", None) is not None return span.get_tag(APPSEC.JSON) is not None def get_triggers(span) -> Any: if asm_config._use_metastruct_for_triggers: - return (span.get_struct_tag(APPSEC.STRUCT) or {}).get("triggers", None) + return (span._get_struct_tag(APPSEC.STRUCT) or {}).get("triggers", None) json_payload = span.get_tag(APPSEC.JSON) if json_payload: try: diff --git a/ddtrace/appsec/ai_guard/_api_client.py b/ddtrace/appsec/ai_guard/_api_client.py index 6002abf442a..a551a8288dd 100644 --- a/ddtrace/appsec/ai_guard/_api_client.py +++ b/ddtrace/appsec/ai_guard/_api_client.py @@ -1,4 +1,5 @@ """AI Guard client for security evaluation of agentic AI workflows.""" + import json from typing import Any from typing import List @@ -13,11 +14,11 @@ from ddtrace.appsec._constants import AI_GUARD from ddtrace.internal import telemetry import ddtrace.internal.logger as ddlogger +from ddtrace.internal.settings.asm import ai_guard_config from ddtrace.internal.telemetry import 
TELEMETRY_NAMESPACE from ddtrace.internal.telemetry.metrics_namespaces import MetricTagType from ddtrace.internal.utils.http import Response from ddtrace.internal.utils.http import get_connection -from ddtrace.settings.asm import ai_guard_config logger = ddlogger.get_logger(__name__) @@ -211,7 +212,7 @@ def evaluate(self, messages: List[Message], options: Optional[Options] = None) - span.set_tag(AI_GUARD.TOOL_NAME_TAG, tool_name) else: span.set_tag(AI_GUARD.TARGET_TAG, "prompt") - span.set_struct_tag(AI_GUARD.STRUCT, {"messages": self._messages_for_meta_struct(messages)}) + span._set_struct_tag(AI_GUARD.STRUCT, {"messages": self._messages_for_meta_struct(messages)}) try: response = self._execute_request(f"{self._endpoint}/evaluate", payload) diff --git a/ddtrace/bootstrap/preload.py b/ddtrace/bootstrap/preload.py index 4f20d47ee03..fa5beb3044a 100644 --- a/ddtrace/bootstrap/preload.py +++ b/ddtrace/bootstrap/preload.py @@ -10,8 +10,8 @@ from ddtrace.internal.module import ModuleWatchdog # noqa:F401 from ddtrace.internal.products import manager # noqa:F401 from ddtrace.internal.runtime.runtime_metrics import RuntimeWorker # noqa:F401 -from ddtrace.settings.crashtracker import config as crashtracker_config -from ddtrace.settings.profiling import config as profiling_config # noqa:F401 +from ddtrace.internal.settings.crashtracker import config as crashtracker_config +from ddtrace.internal.settings.profiling import config as profiling_config # noqa:F401 from ddtrace.trace import tracer diff --git a/ddtrace/contrib/dbapi.py b/ddtrace/contrib/dbapi.py index e364c542a22..595a2c1f7fb 100644 --- a/ddtrace/contrib/dbapi.py +++ b/ddtrace/contrib/dbapi.py @@ -10,13 +10,13 @@ from ddtrace.internal.utils import ArgumentError from ddtrace.internal.utils import get_argument_value +from .._trace.pin import Pin from ..constants import _SPAN_MEASURED_KEY from ..constants import SPAN_KIND from ..ext import SpanKind from ..ext import SpanTypes from ..ext import db from ..ext import sql 
-from ..trace import Pin from .internal.trace_utils import ext_service from .internal.trace_utils import iswrapped diff --git a/ddtrace/contrib/dbapi_async.py b/ddtrace/contrib/dbapi_async.py index 05c4ea9282e..d7bd3e520f0 100644 --- a/ddtrace/contrib/dbapi_async.py +++ b/ddtrace/contrib/dbapi_async.py @@ -5,11 +5,11 @@ from ddtrace.internal.utils import ArgumentError from ddtrace.internal.utils import get_argument_value +from .._trace.pin import Pin from ..constants import _SPAN_MEASURED_KEY from ..constants import SPAN_KIND from ..ext import SpanKind from ..ext import SpanTypes -from ..trace import Pin from .dbapi import TracedConnection from .dbapi import TracedCursor from .internal.trace_utils import ext_service diff --git a/ddtrace/contrib/integration_registry/registry.yaml b/ddtrace/contrib/integration_registry/registry.yaml index a2b9c616b2a..56ea8a82f51 100644 --- a/ddtrace/contrib/integration_registry/registry.yaml +++ b/ddtrace/contrib/integration_registry/registry.yaml @@ -66,7 +66,7 @@ integrations: - algoliasearch tested_versions_by_dependency: algoliasearch: - min: 2.5.0 + min: 2.6.3 max: 2.6.3 - integration_name: anthropic @@ -104,7 +104,7 @@ integrations: - asyncpg tested_versions_by_dependency: asyncpg: - min: 0.22.0 + min: 0.23.0 max: 0.30.0 - integration_name: avro @@ -195,16 +195,6 @@ integrations: min: 0.12.25 max: 0.13.4 -- integration_name: cassandra - is_external_package: true - is_tested: true - dependency_names: - - cassandra-driver - tested_versions_by_dependency: - cassandra-driver: - min: 3.24.0 - max: 3.28.0 - - integration_name: celery is_external_package: true is_tested: true @@ -379,16 +369,6 @@ integrations: min: 1.10.1 max: 2.3.0 -- integration_name: freezegun - is_external_package: true - is_tested: true - dependency_names: - - freezegun - tested_versions_by_dependency: - freezegun: - min: 1.3.1 - max: 1.5.2 - - integration_name: futures is_external_package: false is_tested: true @@ -400,7 +380,7 @@ integrations: - gevent 
tested_versions_by_dependency: gevent: - min: 20.12.1 + min: 21.1.2 max: 25.5.1 - integration_name: google_adk @@ -423,16 +403,6 @@ integrations: min: 1.21.1 max: 1.41.0 -- integration_name: google_generativeai - is_external_package: true - is_tested: true - dependency_names: - - google-generativeai - tested_versions_by_dependency: - google-generativeai: - min: 0.7.2 - max: 0.8.5 - - integration_name: graphql is_external_package: true is_tested: true @@ -609,16 +579,6 @@ integrations: min: 1.0.2 max: 1.0.2 -- integration_name: mongoengine - is_external_package: true - is_tested: true - dependency_names: - - mongoengine - tested_versions_by_dependency: - mongoengine: - min: 0.23.1 - max: 0.29.1 - - integration_name: mysql is_external_package: true is_tested: true @@ -666,7 +626,7 @@ integrations: - protobuf tested_versions_by_dependency: protobuf: - min: 5.29.3 + min: 6.30.1 max: 6.32.0 - integration_name: psycopg @@ -680,7 +640,7 @@ integrations: min: 3.0.18 max: 3.2.10 psycopg2-binary: - min: 2.8.6 + min: 2.9.10 max: 2.9.10 - integration_name: pydantic_ai @@ -740,7 +700,7 @@ integrations: - pynamodb tested_versions_by_dependency: pynamodb: - min: 5.0.3 + min: 5.5.1 max: 5.5.1 - integration_name: pyodbc @@ -826,7 +786,7 @@ integrations: - requests tested_versions_by_dependency: requests: - min: 2.20.1 + min: 2.25.1 max: 2.32.5 - integration_name: rq @@ -862,7 +822,7 @@ integrations: - snowflake-connector-python tested_versions_by_dependency: snowflake-connector-python: - min: 2.3.10 + min: 2.4.6 max: 3.17.2 - integration_name: sqlalchemy @@ -928,7 +888,7 @@ integrations: - urllib3 tested_versions_by_dependency: urllib3: - min: 1.25.0 + min: 1.25.8 max: 2.5.0 - integration_name: valkey diff --git a/ddtrace/contrib/internal/aiomysql/__init__.py b/ddtrace/contrib/internal/aiomysql/__init__.py index 5b060571309..4aca898853e 100644 --- a/ddtrace/contrib/internal/aiomysql/__init__.py +++ b/ddtrace/contrib/internal/aiomysql/__init__.py @@ -19,7 +19,7 @@ To configure the 
integration on an per-connection basis use the ``Pin`` API:: - from ddtrace.trace import Pin + from ddtrace._trace.pin import Pin import asyncio import aiomysql diff --git a/ddtrace/contrib/internal/aiopg/__init__.py b/ddtrace/contrib/internal/aiopg/__init__.py index a419df5dbbf..7c6bedf6d6d 100644 --- a/ddtrace/contrib/internal/aiopg/__init__.py +++ b/ddtrace/contrib/internal/aiopg/__init__.py @@ -2,7 +2,7 @@ Instrument aiopg to report a span for each executed Postgres queries:: from ddtrace import patch - from ddtrace.trace import Pin + from ddtrace._trace.pin import Pin import aiopg # If not patched yet, you can patch aiopg specifically diff --git a/ddtrace/contrib/internal/aioredis/__init__.py b/ddtrace/contrib/internal/aioredis/__init__.py index 7abbd826a3c..3fcc7750952 100644 --- a/ddtrace/contrib/internal/aioredis/__init__.py +++ b/ddtrace/contrib/internal/aioredis/__init__.py @@ -55,7 +55,7 @@ ``Pin`` API:: import aioredis - from ddtrace.trace import Pin + from ddtrace._trace.pin import Pin myaioredis = aioredis.Aioredis() Pin.override(myaioredis, service="myaioredis") diff --git a/ddtrace/contrib/internal/aioredis/patch.py b/ddtrace/contrib/internal/aioredis/patch.py index 3ce4629620f..96945d1d33a 100644 --- a/ddtrace/contrib/internal/aioredis/patch.py +++ b/ddtrace/contrib/internal/aioredis/patch.py @@ -177,8 +177,7 @@ def _finish_span(future): future.result() if redis_command in ROW_RETURNING_COMMANDS: span.set_metric(db.ROWCOUNT, determine_row_count(redis_command=redis_command, result=future.result())) - # CancelledError exceptions extend from BaseException as of Python 3.8, instead of usual Exception - except (Exception, aioredis.CancelledError): + except aioredis.CancelledError: span.set_exc_info(*sys.exc_info()) if redis_command in ROW_RETURNING_COMMANDS: span.set_metric(db.ROWCOUNT, 0) diff --git a/ddtrace/contrib/internal/algoliasearch/patch.py b/ddtrace/contrib/internal/algoliasearch/patch.py index 5b8571457be..93b0c3caa6d 100644 --- 
a/ddtrace/contrib/internal/algoliasearch/patch.py +++ b/ddtrace/contrib/internal/algoliasearch/patch.py @@ -37,13 +37,12 @@ algoliasearch_version = VERSION = V0 -def get_version(): - # type: () -> str +def get_version() -> str: return VERSION def _supported_versions() -> Dict[str, str]: - return {"algoliasearch": ">=2.5.0"} + return {"algoliasearch": ">=2.6.3"} def patch(): diff --git a/ddtrace/contrib/internal/anthropic/__init__.py b/ddtrace/contrib/internal/anthropic/__init__.py index 81e62a6083b..f066246b656 100644 --- a/ddtrace/contrib/internal/anthropic/__init__.py +++ b/ddtrace/contrib/internal/anthropic/__init__.py @@ -77,7 +77,7 @@ import anthropic from ddtrace import config - from ddtrace.trace import Pin + from ddtrace._trace.pin import Pin Pin.override(anthropic, service="my-anthropic-service") """ # noqa: E501 diff --git a/ddtrace/contrib/internal/aredis/__init__.py b/ddtrace/contrib/internal/aredis/__init__.py index 1ffac72fa36..03841d14c17 100644 --- a/ddtrace/contrib/internal/aredis/__init__.py +++ b/ddtrace/contrib/internal/aredis/__init__.py @@ -53,7 +53,7 @@ To configure particular aredis instances use the :class:`Pin ` API:: import aredis - from ddtrace.trace import Pin + from ddtrace._trace.pin import Pin client = aredis.StrictRedis(host="localhost", port=6379) diff --git a/ddtrace/contrib/internal/asgi/middleware.py b/ddtrace/contrib/internal/asgi/middleware.py index 0c2166f0526..2bb6179fd8c 100644 --- a/ddtrace/contrib/internal/asgi/middleware.py +++ b/ddtrace/contrib/internal/asgi/middleware.py @@ -24,10 +24,10 @@ from ddtrace.internal.logger import get_logger from ddtrace.internal.schema import schematize_url_operation from ddtrace.internal.schema.span_attribute_schema import SpanDirection +from ddtrace.internal.settings._config import _get_config from ddtrace.internal.utils import get_blocked from ddtrace.internal.utils import set_blocked from ddtrace.internal.utils.formats import asbool -from ddtrace.settings._config import _get_config 
from ddtrace.trace import Span diff --git a/ddtrace/contrib/internal/asyncpg/__init__.py b/ddtrace/contrib/internal/asyncpg/__init__.py index 233cde9f51c..90932dbe440 100644 --- a/ddtrace/contrib/internal/asyncpg/__init__.py +++ b/ddtrace/contrib/internal/asyncpg/__init__.py @@ -38,7 +38,7 @@ basis use the ``Pin`` API:: import asyncpg - from ddtrace.trace import Pin + from ddtrace._trace.pin import Pin conn = asyncpg.connect("postgres://localhost:5432") Pin.override(conn, service="custom-service") diff --git a/ddtrace/contrib/internal/asyncpg/patch.py b/ddtrace/contrib/internal/asyncpg/patch.py index 586b751a1a0..67b55c40d70 100644 --- a/ddtrace/contrib/internal/asyncpg/patch.py +++ b/ddtrace/contrib/internal/asyncpg/patch.py @@ -47,13 +47,12 @@ log = get_logger(__name__) -def get_version(): - # type: () -> str +def get_version() -> str: return getattr(asyncpg, "__version__", "") def _supported_versions() -> Dict[str, str]: - return {"asyncpg": ">=0.22.0"} + return {"asyncpg": ">=0.23.0"} def _get_connection_tags(conn): diff --git a/ddtrace/contrib/internal/aws_lambda/patch.py b/ddtrace/contrib/internal/aws_lambda/patch.py index 1f2840de1f4..77d1fe55651 100644 --- a/ddtrace/contrib/internal/aws_lambda/patch.py +++ b/ddtrace/contrib/internal/aws_lambda/patch.py @@ -8,10 +8,10 @@ from ddtrace.contrib.internal.aws_lambda._cold_start import set_cold_start from ddtrace.internal.logger import get_logger from ddtrace.internal.serverless import in_aws_lambda +from ddtrace.internal.settings._config import _get_config from ddtrace.internal.utils import get_argument_value from ddtrace.internal.wrapping import unwrap from ddtrace.internal.wrapping import wrap -from ddtrace.settings._config import _get_config from ddtrace.trace import tracer diff --git a/ddtrace/contrib/internal/azure_eventhubs/patch.py b/ddtrace/contrib/internal/azure_eventhubs/patch.py index 1273457983c..7daa99ed480 100644 --- a/ddtrace/contrib/internal/azure_eventhubs/patch.py +++ 
b/ddtrace/contrib/internal/azure_eventhubs/patch.py @@ -9,9 +9,9 @@ from ddtrace.contrib.internal.trace_utils import unwrap as _u from ddtrace.ext import azure_eventhubs as azure_eventhubsx from ddtrace.internal.schema import schematize_service_name +from ddtrace.internal.settings._config import _get_config from ddtrace.internal.utils import get_argument_value from ddtrace.internal.utils.formats import asbool -from ddtrace.settings._config import _get_config from .utils import create_context from .utils import dispatch_message_modifier diff --git a/ddtrace/contrib/internal/azure_functions/patch.py b/ddtrace/contrib/internal/azure_functions/patch.py index b3a2c420e1f..21f1b6e9e9f 100644 --- a/ddtrace/contrib/internal/azure_functions/patch.py +++ b/ddtrace/contrib/internal/azure_functions/patch.py @@ -10,9 +10,9 @@ from ddtrace.ext import azure_eventhubs as azure_eventhubsx from ddtrace.ext import azure_servicebus as azure_servicebusx from ddtrace.internal.schema import schematize_service_name +from ddtrace.internal.settings._config import _get_config from ddtrace.internal.utils.formats import asbool from ddtrace.propagation.http import HTTPPropagator -from ddtrace.settings._config import _get_config from .utils import create_context from .utils import wrap_function_with_tracing diff --git a/ddtrace/contrib/internal/azure_servicebus/patch.py b/ddtrace/contrib/internal/azure_servicebus/patch.py index 82aefa6ea0e..27f88943b44 100644 --- a/ddtrace/contrib/internal/azure_servicebus/patch.py +++ b/ddtrace/contrib/internal/azure_servicebus/patch.py @@ -9,8 +9,8 @@ from ddtrace.contrib.internal.trace_utils import unwrap as _u from ddtrace.ext import azure_servicebus as azure_servicebusx from ddtrace.internal.schema import schematize_service_name +from ddtrace.internal.settings._config import _get_config from ddtrace.internal.utils.formats import asbool -from ddtrace.settings._config import _get_config from .utils import create_context from .utils import 
dispatch_message_modifier diff --git a/ddtrace/contrib/internal/botocore/patch.py b/ddtrace/contrib/internal/botocore/patch.py index 323676595fe..a3528d726ea 100644 --- a/ddtrace/contrib/internal/botocore/patch.py +++ b/ddtrace/contrib/internal/botocore/patch.py @@ -30,11 +30,11 @@ from ddtrace.internal.schema import schematize_cloud_faas_operation from ddtrace.internal.schema import schematize_cloud_messaging_operation from ddtrace.internal.schema import schematize_service_name +from ddtrace.internal.settings._config import Config from ddtrace.internal.utils import get_argument_value from ddtrace.internal.utils.formats import asbool from ddtrace.internal.utils.formats import deep_getattr from ddtrace.llmobs._integrations import BedrockIntegration -from ddtrace.settings._config import Config from .services.bedrock import patched_bedrock_api_call from .services.bedrock_agents import patched_bedrock_agents_api_call diff --git a/ddtrace/contrib/internal/cassandra/__init__.py b/ddtrace/contrib/internal/cassandra/__init__.py deleted file mode 100644 index d0de07f8f16..00000000000 --- a/ddtrace/contrib/internal/cassandra/__init__.py +++ /dev/null @@ -1,24 +0,0 @@ -"""Instrument Cassandra to report Cassandra queries. - -``import ddtrace.auto`` will automatically patch your Cluster instance to make it work. 
-:: - - from ddtrace import patch - from ddtrace.trace import Pin - from cassandra.cluster import Cluster - - # If not patched yet, you can patch cassandra specifically - patch(cassandra=True) - - # This will report spans with the default instrumentation - cluster = Cluster(contact_points=["127.0.0.1"], port=9042) - session = cluster.connect("my_keyspace") - # Example of instrumented query - session.execute("select id from my_table limit 10;") - - # Use a pin to specify metadata related to this cluster - cluster = Cluster(contact_points=['10.1.1.3', '10.1.1.4', '10.1.1.5'], port=9042) - Pin.override(cluster, service='cassandra-backend') - session = cluster.connect("my_keyspace") - session.execute("select id from my_table limit 10;") -""" diff --git a/ddtrace/contrib/internal/cassandra/patch.py b/ddtrace/contrib/internal/cassandra/patch.py deleted file mode 100644 index bc82b8dbe0e..00000000000 --- a/ddtrace/contrib/internal/cassandra/patch.py +++ /dev/null @@ -1,9 +0,0 @@ -from typing import Dict - -from .session import get_version # noqa: F401 -from .session import patch # noqa: F401 -from .session import unpatch # noqa: F401 - - -def _supported_versions() -> Dict[str, str]: - return {"cassandra": ">=3.24.0"} diff --git a/ddtrace/contrib/internal/cassandra/session.py b/ddtrace/contrib/internal/cassandra/session.py deleted file mode 100644 index 92b0bf989bc..00000000000 --- a/ddtrace/contrib/internal/cassandra/session.py +++ /dev/null @@ -1,316 +0,0 @@ -""" -Trace queries along a session to a cassandra cluster -""" -import sys -from typing import Any -from typing import Dict -from typing import List -from typing import Optional - -from cassandra import __version__ - - -try: - import cassandra.cluster as cassandra_cluster -except AttributeError: - from cassandra import cluster as cassandra_cluster -from cassandra.query import BatchStatement -from cassandra.query import BoundStatement -from cassandra.query import PreparedStatement -from cassandra.query import 
SimpleStatement -import wrapt - -from ddtrace import config -from ddtrace._trace.pin import Pin -from ddtrace.constants import _SPAN_MEASURED_KEY -from ddtrace.constants import ERROR_MSG -from ddtrace.constants import ERROR_TYPE -from ddtrace.constants import SPAN_KIND -from ddtrace.ext import SpanKind -from ddtrace.ext import SpanTypes -from ddtrace.ext import cassandra as cassx -from ddtrace.ext import db -from ddtrace.ext import net -from ddtrace.internal.compat import maybe_stringify -from ddtrace.internal.constants import COMPONENT -from ddtrace.internal.logger import get_logger -from ddtrace.internal.schema import schematize_database_operation -from ddtrace.internal.schema import schematize_service_name -from ddtrace.internal.utils import get_argument_value -from ddtrace.internal.utils.formats import deep_getattr -from ddtrace.trace import Span - - -log = get_logger(__name__) - -RESOURCE_MAX_LENGTH = 5000 -SERVICE = schematize_service_name("cassandra") -CURRENT_SPAN = "_ddtrace_current_span" -PAGE_NUMBER = "_ddtrace_page_number" - - -# Original connect connect function -_connect = cassandra_cluster.Cluster.connect - - -def get_version(): - # type: () -> str - return __version__ - - -def patch(): - """patch will add tracing to the cassandra library.""" - cassandra_cluster.Cluster.connect = wrapt.FunctionWrapper(_connect, traced_connect) - Pin(service=SERVICE).onto(cassandra_cluster.Cluster) - cassandra_cluster._datadog_patch = True - - -def unpatch(): - cassandra_cluster.Cluster.connect = _connect - cassandra_cluster._datadog_patch = False - - -def traced_connect(func, instance, args, kwargs): - session = func(*args, **kwargs) - if not isinstance(session.execute, wrapt.FunctionWrapper): - # FIXME[matt] this should probably be private. 
- session.execute_async = wrapt.FunctionWrapper(session.execute_async, traced_execute_async) - return session - - -def _close_span_on_success(result, future): - span = getattr(future, CURRENT_SPAN, None) - if not span: - log.debug("traced_set_final_result was not able to get the current span from the ResponseFuture") - return - try: - span.set_tags(_extract_result_metas(cassandra_cluster.ResultSet(future, result))) - except Exception: - log.debug("an exception occurred while setting tags", exc_info=True) - finally: - span.finish() - delattr(future, CURRENT_SPAN) - - -def traced_set_final_result(func, instance, args, kwargs): - result = get_argument_value(args, kwargs, 0, "response") - _close_span_on_success(result, instance) - return func(*args, **kwargs) - - -def _close_span_on_error(exc, future): - span = getattr(future, CURRENT_SPAN, None) - if not span: - log.debug("traced_set_final_exception was not able to get the current span from the ResponseFuture") - return - try: - # handling the exception manually because we - # don't have an ongoing exception here - span.error = 1 - span._set_tag_str(ERROR_MSG, exc.args[0]) - span._set_tag_str(ERROR_TYPE, exc.__class__.__name__) - except Exception: - log.debug("traced_set_final_exception was not able to set the error, failed with error", exc_info=True) - finally: - span.finish() - delattr(future, CURRENT_SPAN) - - -def traced_set_final_exception(func, instance, args, kwargs): - exc = get_argument_value(args, kwargs, 0, "response") - _close_span_on_error(exc, instance) - return func(*args, **kwargs) - - -def traced_start_fetching_next_page(func, instance, args, kwargs): - has_more_pages = getattr(instance, "has_more_pages", True) - if not has_more_pages: - return func(*args, **kwargs) - session = getattr(instance, "session", None) - cluster = getattr(session, "cluster", None) - pin = Pin.get_from(cluster) - if not pin or not pin.enabled(): - return func(*args, **kwargs) - - # In case the current span is not finished we 
make sure to finish it - old_span = getattr(instance, CURRENT_SPAN, None) - if old_span: - log.debug("previous span was not finished before fetching next page") - old_span.finish() - - query = getattr(instance, "query", None) - - sanitized_query = _sanitize_query(query) if isinstance(query, BatchStatement) else None - statements_and_parameters = query._statements_and_parameters if isinstance(query, BatchStatement) else None - additional_tags = dict(**_extract_session_metas(session), **_extract_cluster_metas(cluster)) - span = _start_span_and_set_tags( - pin, _get_resource(query), additional_tags, sanitized_query, statements_and_parameters - ) - - page_number = getattr(instance, PAGE_NUMBER, 1) + 1 - setattr(instance, PAGE_NUMBER, page_number) - setattr(instance, CURRENT_SPAN, span) - try: - return func(*args, **kwargs) - except Exception: - with span: - span.set_exc_info(*sys.exc_info()) - raise - - -def traced_execute_async(func, instance, args, kwargs): - cluster = getattr(instance, "cluster", None) - pin = Pin.get_from(cluster) - if not pin or not pin.enabled(): - return func(*args, **kwargs) - - query = get_argument_value(args, kwargs, 0, "query") - - sanitized_query = _sanitize_query(query) if isinstance(query, BatchStatement) else None - statements_and_parameters = query._statements_and_parameters if isinstance(query, BatchStatement) else None - additional_tags = dict(**_extract_session_metas(instance), **_extract_cluster_metas(cluster)) - span = _start_span_and_set_tags( - pin, _get_resource(query), additional_tags, sanitized_query, statements_and_parameters - ) - - try: - result = func(*args, **kwargs) - setattr(result, CURRENT_SPAN, span) - setattr(result, PAGE_NUMBER, 1) - result._set_final_result = wrapt.FunctionWrapper(result._set_final_result, traced_set_final_result) - result._set_final_exception = wrapt.FunctionWrapper(result._set_final_exception, traced_set_final_exception) - result.start_fetching_next_page = wrapt.FunctionWrapper( - 
result.start_fetching_next_page, traced_start_fetching_next_page - ) - - # Since we cannot be sure that the previous methods were overwritten - # before the call ended, we add callbacks that will be run - # synchronously if the call already returned and we remove them right - # after. - result.add_callbacks( - _close_span_on_success, _close_span_on_error, callback_args=(result,), errback_args=(result,) - ) - result.clear_callbacks() - return result - except Exception: - with span: - span.set_exc_info(*sys.exc_info()) - raise - - -def _start_span_and_set_tags( - pin, - resource: str, - additional_tags: Dict, - query: Optional[str] = None, - statements_and_parameters: Optional[List] = None, -) -> Span: - span = pin.tracer.trace( - schematize_database_operation("cassandra.query", database_provider="cassandra"), - service=pin.service, - span_type=SpanTypes.CASSANDRA, - ) - span._set_tag_str(COMPONENT, config.cassandra.integration_name) - span._set_tag_str(db.SYSTEM, "cassandra") - span._set_tag_str(SPAN_KIND, SpanKind.CLIENT) - # PERF: avoid setting via Span.set_tag - span.set_metric(_SPAN_MEASURED_KEY, 1) - span.set_tags(additional_tags) - if query is not None: - span._set_tag_str("cassandra.query", query) - if statements_and_parameters is not None: - span.set_metric("cassandra.batch_size", len(statements_and_parameters)) - span.resource = resource[:RESOURCE_MAX_LENGTH] - return span - - -def _extract_session_metas(session): - metas = {} - - if getattr(session, "keyspace", None): - # FIXME the keyspace can be overridden explicitly in the query itself - # e.g. 
'select * from trace.hash_to_resource' - metas[cassx.KEYSPACE] = session.keyspace.lower() - - return metas - - -def _extract_cluster_metas(cluster): - metas = {} - if deep_getattr(cluster, "metadata.cluster_name"): - metas[cassx.CLUSTER] = cluster.metadata.cluster_name - if getattr(cluster, "port", None): - metas[net.TARGET_PORT] = cluster.port - - return metas - - -def _extract_result_metas(result): - metas = {} - if result is None: - return metas - - future = getattr(result, "response_future", None) - - if future: - # get the host - host = maybe_stringify(getattr(future, "coordinator_host", None)) - if host: - host, _, port = host.partition(":") - metas[net.TARGET_HOST] = host - metas[net.SERVER_ADDRESS] = host - if port: - metas[net.TARGET_PORT] = int(port) - elif hasattr(future, "_current_host"): - address = deep_getattr(future, "_current_host.address") - if address: - metas[net.TARGET_HOST] = address - metas[net.SERVER_ADDRESS] = address - - query = getattr(future, "query", None) - if getattr(query, "consistency_level", None): - metas[cassx.CONSISTENCY_LEVEL] = query.consistency_level - if getattr(query, "keyspace", None): - metas[cassx.KEYSPACE] = query.keyspace.lower() - - page_number = getattr(future, PAGE_NUMBER, 1) - has_more_pages = future.has_more_pages - is_paginated = has_more_pages or page_number > 1 - metas[cassx.PAGINATED] = is_paginated - if is_paginated: - metas[cassx.PAGE_NUMBER] = page_number - - if hasattr(result, "current_rows"): - result_rows = result.current_rows or [] - metas[db.ROWCOUNT] = len(result_rows) - - return metas - - -def _get_resource(query: Any) -> str: - if isinstance(query, SimpleStatement) or isinstance(query, PreparedStatement): - return getattr(query, "query_string", query) - elif isinstance(query, BatchStatement): - return "BatchStatement" - elif isinstance(query, BoundStatement): - ps = getattr(query, "prepared_statement", None) - if ps: - return getattr(ps, "query_string", None) - elif isinstance(query, str): - return 
query - else: - return "unknown-query-type" - - -def _sanitize_query(query: BatchStatement) -> str: - """ - Each element in `_statements_and_parameters` is: - (is_prepared, statement, parameters) - ref:https://github.com/datastax/python-driver/blob/13d6d72be74f40fcef5ec0f2b3e98538b3b87459/cassandra/query.py#L844 - - For prepared statements, the `statement` value is just the query_id - which is not a statement and when trying to join with other strings - raises an error in python3 around joining bytes to unicode, so this - just filters out prepared statements from this tag value - """ - return "; ".join(q[1] for q in query._statements_and_parameters[:2] if not q[0]) diff --git a/ddtrace/contrib/internal/consul/__init__.py b/ddtrace/contrib/internal/consul/__init__.py index fa159309411..f2b9bd536ee 100644 --- a/ddtrace/contrib/internal/consul/__init__.py +++ b/ddtrace/contrib/internal/consul/__init__.py @@ -6,7 +6,7 @@ :: from ddtrace import patch - from ddtrace.trace import Pin + from ddtrace._trace.pin import Pin import consul # If not patched yet, you can patch consul specifically diff --git a/ddtrace/contrib/internal/coverage/utils.py b/ddtrace/contrib/internal/coverage/utils.py index 26c6d0fc308..6f543c033fe 100644 --- a/ddtrace/contrib/internal/coverage/utils.py +++ b/ddtrace/contrib/internal/coverage/utils.py @@ -2,8 +2,8 @@ from typing import List from ddtrace.contrib.internal.coverage.data import _original_sys_argv_command +from ddtrace.internal.settings._config import _get_config from ddtrace.internal.utils.formats import asbool -from ddtrace.settings._config import _get_config def is_coverage_loaded() -> bool: diff --git a/ddtrace/contrib/internal/crewai/__init__.py b/ddtrace/contrib/internal/crewai/__init__.py index 4753fc4722d..8f6eb43829a 100644 --- a/ddtrace/contrib/internal/crewai/__init__.py +++ b/ddtrace/contrib/internal/crewai/__init__.py @@ -37,7 +37,7 @@ ``Pin`` API:: import crewai - from ddtrace.trace import Pin + from ddtrace._trace.pin import 
Pin Pin.override(crewai, service="my-crewai-service") """ # noqa: E501 diff --git a/ddtrace/contrib/internal/django/cache.py b/ddtrace/contrib/internal/django/cache.py index 10779ef907d..f8a313144dc 100644 --- a/ddtrace/contrib/internal/django/cache.py +++ b/ddtrace/contrib/internal/django/cache.py @@ -14,10 +14,10 @@ from ddtrace.internal.constants import COMPONENT from ddtrace.internal.logger import get_logger from ddtrace.internal.schema import schematize_service_name +from ddtrace.internal.settings.integration import IntegrationConfig from ddtrace.internal.utils.cache import cached from ddtrace.internal.wrapping import is_wrapped_with from ddtrace.internal.wrapping import wrap -from ddtrace.settings.integration import IntegrationConfig from . import utils diff --git a/ddtrace/contrib/internal/django/database.py b/ddtrace/contrib/internal/django/database.py index 1f9eec5a0fa..a56dc5e670d 100644 --- a/ddtrace/contrib/internal/django/database.py +++ b/ddtrace/contrib/internal/django/database.py @@ -19,11 +19,11 @@ from ddtrace.internal.compat import is_wrapted from ddtrace.internal.logger import get_logger from ddtrace.internal.schema import schematize_service_name +from ddtrace.internal.settings.integration import IntegrationConfig from ddtrace.internal.utils.cache import cached from ddtrace.internal.wrapping import is_wrapped_with from ddtrace.internal.wrapping import wrap from ddtrace.propagation._database_monitoring import _DBM_Propagator -from ddtrace.settings.integration import IntegrationConfig log = get_logger(__name__) diff --git a/ddtrace/contrib/internal/django/middleware.py b/ddtrace/contrib/internal/django/middleware.py index 1f893e1740c..d21ce410803 100644 --- a/ddtrace/contrib/internal/django/middleware.py +++ b/ddtrace/contrib/internal/django/middleware.py @@ -11,13 +11,13 @@ from ddtrace.internal import core from ddtrace.internal.constants import COMPONENT from ddtrace.internal.logger import get_logger +from ddtrace.internal.settings.asm import 
config as asm_config +from ddtrace.internal.settings.integration import IntegrationConfig from ddtrace.internal.utils import get_argument_value from ddtrace.internal.utils.importlib import func_name from ddtrace.internal.wrapping import is_wrapped from ddtrace.internal.wrapping import is_wrapped_with from ddtrace.internal.wrapping import wrap -from ddtrace.settings.asm import config as asm_config -from ddtrace.settings.integration import IntegrationConfig log = get_logger(__name__) diff --git a/ddtrace/contrib/internal/django/patch.py b/ddtrace/contrib/internal/django/patch.py index 17f70643079..8e0b56ac040 100644 --- a/ddtrace/contrib/internal/django/patch.py +++ b/ddtrace/contrib/internal/django/patch.py @@ -29,12 +29,12 @@ from ddtrace.internal.schema import schematize_service_name from ddtrace.internal.schema import schematize_url_operation from ddtrace.internal.schema.span_attribute_schema import SpanDirection +from ddtrace.internal.settings.asm import config as asm_config +from ddtrace.internal.settings.integration import IntegrationConfig from ddtrace.internal.telemetry import get_config as _get_config from ddtrace.internal.utils import get_argument_value from ddtrace.internal.utils.formats import asbool from ddtrace.internal.utils.importlib import func_name -from ddtrace.settings.asm import config as asm_config -from ddtrace.settings.integration import IntegrationConfig from ddtrace.vendor.packaging.version import parse as parse_version diff --git a/ddtrace/contrib/internal/django/response.py b/ddtrace/contrib/internal/django/response.py index 57128cbe551..ee02cef07a1 100644 --- a/ddtrace/contrib/internal/django/response.py +++ b/ddtrace/contrib/internal/django/response.py @@ -29,6 +29,7 @@ from ddtrace.internal.logger import get_logger from ddtrace.internal.schema import schematize_url_operation from ddtrace.internal.schema.span_attribute_schema import SpanDirection +from ddtrace.internal.settings.integration import IntegrationConfig from 
ddtrace.internal.utils import Block_config from ddtrace.internal.utils import get_argument_value from ddtrace.internal.utils import get_blocked @@ -37,7 +38,6 @@ from ddtrace.internal.wrapping import is_wrapped_with from ddtrace.internal.wrapping import unwrap from ddtrace.internal.wrapping import wrap -from ddtrace.settings.integration import IntegrationConfig from . import utils diff --git a/ddtrace/contrib/internal/django/templates.py b/ddtrace/contrib/internal/django/templates.py index 744550a1a34..7e977a5ad0d 100644 --- a/ddtrace/contrib/internal/django/templates.py +++ b/ddtrace/contrib/internal/django/templates.py @@ -13,11 +13,11 @@ from ddtrace.internal.compat import maybe_stringify from ddtrace.internal.constants import COMPONENT from ddtrace.internal.logger import get_logger +from ddtrace.internal.settings.integration import IntegrationConfig from ddtrace.internal.utils.importlib import func_name from ddtrace.internal.wrapping import is_wrapped_with from ddtrace.internal.wrapping import unwrap from ddtrace.internal.wrapping import wrap -from ddtrace.settings.integration import IntegrationConfig T = TypeVar("T") diff --git a/ddtrace/contrib/internal/django/user.py b/ddtrace/contrib/internal/django/user.py index f8ebeb0b5fc..b1b3631cd18 100644 --- a/ddtrace/contrib/internal/django/user.py +++ b/ddtrace/contrib/internal/django/user.py @@ -1,6 +1,6 @@ from ddtrace.appsec._utils import _UserInfoRetriever from ddtrace.internal.logger import get_logger -from ddtrace.settings.asm import config as asm_config +from ddtrace.internal.settings.asm import config as asm_config log = get_logger(__name__) diff --git a/ddtrace/contrib/internal/dramatiq/patch.py b/ddtrace/contrib/internal/dramatiq/patch.py index dbcb2c1384d..ffd8ce8c0f8 100644 --- a/ddtrace/contrib/internal/dramatiq/patch.py +++ b/ddtrace/contrib/internal/dramatiq/patch.py @@ -10,7 +10,7 @@ from ddtrace.contrib import trace_utils from ddtrace.ext import SpanKind from ddtrace.ext import SpanTypes -from 
ddtrace.settings._config import Config +from ddtrace.internal.settings._config import Config from ddtrace.trace import tracer diff --git a/ddtrace/contrib/internal/fastapi/patch.py b/ddtrace/contrib/internal/fastapi/patch.py index d10678f53a9..ad624febab5 100644 --- a/ddtrace/contrib/internal/fastapi/patch.py +++ b/ddtrace/contrib/internal/fastapi/patch.py @@ -14,10 +14,10 @@ from ddtrace.internal.compat import is_wrapted from ddtrace.internal.logger import get_logger from ddtrace.internal.schema import schematize_service_name +from ddtrace.internal.settings.asm import config as asm_config from ddtrace.internal.telemetry import get_config as _get_config from ddtrace.internal.utils.formats import asbool from ddtrace.internal.utils.wrappers import unwrap as _u -from ddtrace.settings.asm import config as asm_config log = get_logger(__name__) diff --git a/ddtrace/contrib/internal/freezegun/__init__.py b/ddtrace/contrib/internal/freezegun/__init__.py deleted file mode 100644 index 89086940c89..00000000000 --- a/ddtrace/contrib/internal/freezegun/__init__.py +++ /dev/null @@ -1,14 +0,0 @@ -""" -The freezegun integration updates freezegun's default ignore list to ignore ddtrace. - -Enabling -~~~~~~~~ -The freezegun integration is enabled by default. Use :func:`patch()` to enable the integration:: - from ddtrace import patch - patch(freezegun=True) - - -Configuration -~~~~~~~~~~~~~ -The freezegun integration is not configurable, but may be disabled using DD_PATCH_MODULES=freezegun:false . 
-""" diff --git a/ddtrace/contrib/internal/freezegun/patch.py b/ddtrace/contrib/internal/freezegun/patch.py deleted file mode 100644 index 676952eda45..00000000000 --- a/ddtrace/contrib/internal/freezegun/patch.py +++ /dev/null @@ -1,37 +0,0 @@ -from typing import Dict - -from ddtrace import DDTraceDeprecationWarning -from ddtrace.internal.logger import get_logger -from ddtrace.vendor.debtcollector import deprecate - - -log = get_logger(__name__) - -DDTRACE_MODULE_NAME = "ddtrace" - - -def get_version() -> str: - import freezegun - - try: - return freezegun.__version__ - except AttributeError: - log.debug("Could not get freezegun version") - return "" - - -def _supported_versions() -> Dict[str, str]: - return {"freezegun": "*"} - - -def patch() -> None: - deprecate( - "the freezegun integration is deprecated", - message="this integration is not needed anymore for the correct reporting of span durations.", - removal_version="4.0.0", - category=DDTraceDeprecationWarning, - ) - - -def unpatch() -> None: - pass diff --git a/ddtrace/contrib/internal/gevent/patch.py b/ddtrace/contrib/internal/gevent/patch.py index 1c1cad5ebea..6452370c338 100644 --- a/ddtrace/contrib/internal/gevent/patch.py +++ b/ddtrace/contrib/internal/gevent/patch.py @@ -19,7 +19,7 @@ def get_version(): def _supported_versions() -> Dict[str, str]: - return {"gevent": ">=20.12"} + return {"gevent": ">=21.1.2"} def patch(): diff --git a/ddtrace/contrib/internal/google_genai/__init__.py b/ddtrace/contrib/internal/google_genai/__init__.py index 237a4e43ca0..61bce41aa49 100644 --- a/ddtrace/contrib/internal/google_genai/__init__.py +++ b/ddtrace/contrib/internal/google_genai/__init__.py @@ -41,7 +41,7 @@ ``Pin`` API:: from google import genai - from ddtrace.trace import Pin + from ddtrace._trace.pin import Pin Pin.override(genai, service="my-google-genai-service") """ diff --git a/ddtrace/contrib/internal/google_generativeai/__init__.py b/ddtrace/contrib/internal/google_generativeai/__init__.py deleted 
file mode 100644 index 963b80e7494..00000000000 --- a/ddtrace/contrib/internal/google_generativeai/__init__.py +++ /dev/null @@ -1,80 +0,0 @@ -""" -The Gemini integration instruments the Google Gemini Python API to traces for requests made to Google models. - -All traces submitted from the Gemini integration are tagged by: - -- ``service``, ``env``, ``version``: see the `Unified Service Tagging docs `_. -- ``google_generativeai.request.model``: Google model used in the request. -- ``google_generativeai.request.api_key``: Google Gemini API key used to make the request (obfuscated to match the Google AI Studio UI representation ``...XXXX`` where ``XXXX`` is the last 4 digits of the key). - - -(beta) Prompt and Completion Sampling -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -Prompt texts and completion content for the ``generateContent`` endpoint are collected in span tags with a default sampling rate of ``1.0``. -These tags will have truncation applied if the text exceeds the configured character limit. - - -Enabling -~~~~~~~~ - -The Gemini integration is enabled automatically when you use -:ref:`ddtrace-run` or :ref:`import ddtrace.auto`. - -Alternatively, use :func:`patch() ` to manually enable the Gemini integration:: - - from ddtrace import config, patch - - patch(google_generativeai=True) - - -Global Configuration -~~~~~~~~~~~~~~~~~~~~ - -.. py:data:: ddtrace.config.google_generativeai["service"] - - The service name reported by default for Gemini requests. - - Alternatively, you can set this option with the ``DD_SERVICE`` or ``DD_GOOGLE_GENERATIVEAI_SERVICE`` environment - variables. - - Default: ``DD_SERVICE`` - - -.. py:data:: (beta) ddtrace.config.google_generativeai["span_char_limit"] - - Configure the maximum number of characters for the following data within span tags: - - - Text inputs and completions - - Text exceeding the maximum number of characters is truncated to the character limit - and has ``...`` appended to the end. 
- - Alternatively, you can set this option with the ``DD_GOOGLE_GENERATIVEAI_SPAN_CHAR_LIMIT`` environment - variable. - - Default: ``128`` - - -.. py:data:: (beta) ddtrace.config.google_generativeai["span_prompt_completion_sample_rate"] - - Configure the sample rate for the collection of prompts and completions as span tags. - - Alternatively, you can set this option with the ``DD_GOOGLE_GENERATIVEAI_SPAN_PROMPT_COMPLETION_SAMPLE_RATE`` environment - variable. - - Default: ``1.0`` - - -Instance Configuration -~~~~~~~~~~~~~~~~~~~~~~ - -To configure the Gemini integration on a per-instance basis use the -``Pin`` API:: - - import google.generativeai as genai - from ddtrace import config - from ddtrace.trace import Pin - - Pin.override(genai, service="my-gemini-service") -""" # noqa: E501 diff --git a/ddtrace/contrib/internal/google_generativeai/_utils.py b/ddtrace/contrib/internal/google_generativeai/_utils.py deleted file mode 100644 index 73c210118f8..00000000000 --- a/ddtrace/contrib/internal/google_generativeai/_utils.py +++ /dev/null @@ -1,24 +0,0 @@ -from ddtrace.llmobs._integrations.base_stream_handler import AsyncStreamHandler -from ddtrace.llmobs._integrations.base_stream_handler import StreamHandler - - -class BaseGoogleGenerativeAIStramHandler: - def finalize_stream(self, exception=None): - self.request_kwargs["instance"] = self.options.get("model_instance", None) - self.integration.llmobs_set_tags( - self.primary_span, - args=self.request_args, - kwargs=self.request_kwargs, - response=self.options.get("wrapped_stream", None), - ) - self.primary_span.finish() - - -class GoogleGenerativeAIStramHandler(BaseGoogleGenerativeAIStramHandler, StreamHandler): - def process_chunk(self, chunk, iterator=None): - pass - - -class GoogleGenerativeAIAsyncStreamHandler(BaseGoogleGenerativeAIStramHandler, AsyncStreamHandler): - async def process_chunk(self, chunk, iterator=None): - pass diff --git a/ddtrace/contrib/internal/google_generativeai/patch.py 
b/ddtrace/contrib/internal/google_generativeai/patch.py deleted file mode 100644 index 8aaf422f509..00000000000 --- a/ddtrace/contrib/internal/google_generativeai/patch.py +++ /dev/null @@ -1,130 +0,0 @@ -import os -import sys -from typing import Dict - -import google.generativeai as genai - -from ddtrace import config -from ddtrace._trace.pin import Pin -from ddtrace.contrib.internal.google_generativeai._utils import GoogleGenerativeAIAsyncStreamHandler -from ddtrace.contrib.internal.google_generativeai._utils import GoogleGenerativeAIStramHandler -from ddtrace.contrib.internal.trace_utils import unwrap -from ddtrace.contrib.internal.trace_utils import with_traced_module -from ddtrace.contrib.internal.trace_utils import wrap -from ddtrace.llmobs._integrations import GeminiIntegration -from ddtrace.llmobs._integrations.base_stream_handler import make_traced_stream -from ddtrace.llmobs._integrations.google_utils import extract_provider_and_model_name - - -config._add( - "genai", - { - "span_prompt_completion_sample_rate": float( - os.getenv("DD_GOOGLE_GENERATIVEAI_SPAN_PROMPT_COMPLETION_SAMPLE_RATE", 1.0) - ), - "span_char_limit": int(os.getenv("DD_GOOGLE_GENERATIVEAI_SPAN_CHAR_LIMIT", 128)), - }, -) - - -def get_version(): - # type: () -> str - return getattr(genai, "__version__", "") - - -def _supported_versions() -> Dict[str, str]: - return {"google.generativeai": ">=0.7.0"} - - -@with_traced_module -def traced_generate(genai, pin, func, instance, args, kwargs): - integration = genai._datadog_integration - stream = kwargs.get("stream", False) - generations = None - provider_name, model_name = extract_provider_and_model_name(instance=instance, model_name_attr="model_name") - span = integration.trace( - pin, - "%s.%s" % (instance.__class__.__name__, func.__name__), - provider=provider_name, - model=model_name, - submit_to_llmobs=True, - ) - try: - generations = func(*args, **kwargs) - if stream: - return make_traced_stream( - generations, - 
GoogleGenerativeAIStramHandler( - integration, span, args, kwargs, model_instance=instance, wrapped_stream=generations - ), - ) - except Exception: - span.set_exc_info(*sys.exc_info()) - raise - finally: - # streamed spans will be finished separately once the stream generator is exhausted - if span.error or not stream: - kwargs["instance"] = instance - integration.llmobs_set_tags(span, args=args, kwargs=kwargs, response=generations) - span.finish() - return generations - - -@with_traced_module -async def traced_agenerate(genai, pin, func, instance, args, kwargs): - integration = genai._datadog_integration - stream = kwargs.get("stream", False) - generations = None - provider_name, model_name = extract_provider_and_model_name(instance=instance, model_name_attr="model_name") - span = integration.trace( - pin, - "%s.%s" % (instance.__class__.__name__, func.__name__), - provider=provider_name, - model=model_name, - submit_to_llmobs=True, - ) - try: - generations = await func(*args, **kwargs) - if stream: - return make_traced_stream( - generations, - GoogleGenerativeAIAsyncStreamHandler( - integration, span, args, kwargs, model_instance=instance, wrapped_stream=generations - ), - ) - except Exception: - span.set_exc_info(*sys.exc_info()) - raise - finally: - # streamed spans will be finished separately once the stream generator is exhausted - if span.error or not stream: - kwargs["instance"] = instance - integration.llmobs_set_tags(span, args=args, kwargs=kwargs, response=generations) - span.finish() - return generations - - -def patch(): - if getattr(genai, "_datadog_patch", False): - return - - genai._datadog_patch = True - - Pin().onto(genai) - integration = GeminiIntegration(integration_config=config.genai) - genai._datadog_integration = integration - - wrap("google.generativeai", "GenerativeModel.generate_content", traced_generate(genai)) - wrap("google.generativeai", "GenerativeModel.generate_content_async", traced_agenerate(genai)) - - -def unpatch(): - if not 
getattr(genai, "_datadog_patch", False): - return - - genai._datadog_patch = False - - unwrap(genai.GenerativeModel, "generate_content") - unwrap(genai.GenerativeModel, "generate_content_async") - - delattr(genai, "_datadog_integration") diff --git a/ddtrace/contrib/internal/graphql/__init__.py b/ddtrace/contrib/internal/graphql/__init__.py index e22aef69407..42ac5eeafec 100644 --- a/ddtrace/contrib/internal/graphql/__init__.py +++ b/ddtrace/contrib/internal/graphql/__init__.py @@ -45,7 +45,7 @@ To configure the graphql integration using the ``Pin`` API:: - from ddtrace.trace import Pin + from ddtrace._trace.pin import Pin import graphql Pin.override(graphql, service="mygraphql") diff --git a/ddtrace/contrib/internal/grpc/__init__.py b/ddtrace/contrib/internal/grpc/__init__.py index 5713b6779ad..f29cab70eaf 100644 --- a/ddtrace/contrib/internal/grpc/__init__.py +++ b/ddtrace/contrib/internal/grpc/__init__.py @@ -46,7 +46,7 @@ import grpc from ddtrace import patch - from ddtrace.trace import Pin + from ddtrace._trace.pin import Pin patch(grpc=True) @@ -63,7 +63,7 @@ from grpc.framework.foundation import logging_pool from ddtrace import patch - from ddtrace.trace import Pin + from ddtrace._trace.pin import Pin patch(grpc=True) diff --git a/ddtrace/contrib/internal/grpc/client_interceptor.py b/ddtrace/contrib/internal/grpc/client_interceptor.py index 0d6ddf227a1..7c5ac0ef5e5 100644 --- a/ddtrace/contrib/internal/grpc/client_interceptor.py +++ b/ddtrace/contrib/internal/grpc/client_interceptor.py @@ -233,8 +233,7 @@ def _intercept_client_call(self, method_kind, client_call_details): # propagate distributed tracing headers if available headers = {} if config.grpc.distributed_tracing_enabled: - # NOTE: We need to pass the span to the HTTPPropagator since it isn't active at this point - HTTPPropagator.inject(span.context, headers, span) + HTTPPropagator.inject(span, headers) metadata.extend(headers.items()) diff --git a/ddtrace/contrib/internal/httplib/patch.py 
b/ddtrace/contrib/internal/httplib/patch.py index cc82045a76f..8fe29d593f6 100644 --- a/ddtrace/contrib/internal/httplib/patch.py +++ b/ddtrace/contrib/internal/httplib/patch.py @@ -19,9 +19,9 @@ from ddtrace.internal.logger import get_logger from ddtrace.internal.schema import schematize_url_operation from ddtrace.internal.schema.span_attribute_schema import SpanDirection +from ddtrace.internal.settings.asm import config as asm_config from ddtrace.internal.utils.formats import asbool from ddtrace.propagation.http import HTTPPropagator -from ddtrace.settings.asm import config as asm_config span_name = "http.client.request" diff --git a/ddtrace/contrib/internal/httpx/__init__.py b/ddtrace/contrib/internal/httpx/__init__.py index 3d8087fbba1..aedd8912c5d 100644 --- a/ddtrace/contrib/internal/httpx/__init__.py +++ b/ddtrace/contrib/internal/httpx/__init__.py @@ -60,7 +60,7 @@ To configure particular ``httpx`` client instances use the :class:`Pin ` API:: import httpx - from ddtrace.trace import Pin + from ddtrace._trace.pin import Pin client = httpx.Client() # Override service name for this instance diff --git a/ddtrace/contrib/internal/jinja2/__init__.py b/ddtrace/contrib/internal/jinja2/__init__.py index 94683ebe5c3..3cf7ee6767b 100644 --- a/ddtrace/contrib/internal/jinja2/__init__.py +++ b/ddtrace/contrib/internal/jinja2/__init__.py @@ -16,7 +16,7 @@ The library can be configured globally and per instance, using the Configuration API:: from ddtrace import config - from ddtrace.trace import Pin + from ddtrace._trace.pin import Pin # Change service name globally config.jinja2['service_name'] = 'jinja-templates' diff --git a/ddtrace/contrib/internal/kafka/__init__.py b/ddtrace/contrib/internal/kafka/__init__.py index 366dad9bb7d..1188e9f1999 100644 --- a/ddtrace/contrib/internal/kafka/__init__.py +++ b/ddtrace/contrib/internal/kafka/__init__.py @@ -40,7 +40,7 @@ To configure the kafka integration using the ``Pin`` API:: - from ddtrace.trace import Pin + from 
ddtrace._trace.pin import Pin from ddtrace import patch # Make sure to patch before importing confluent_kafka diff --git a/ddtrace/contrib/internal/langgraph/__init__.py b/ddtrace/contrib/internal/langgraph/__init__.py index ea9655193fc..eff34f7e2ae 100644 --- a/ddtrace/contrib/internal/langgraph/__init__.py +++ b/ddtrace/contrib/internal/langgraph/__init__.py @@ -31,6 +31,6 @@ ``Pin`` API:: import langgraph - from ddtrace.trace import Pin + from ddtrace._trace.pin import Pin Pin.override(langgraph, service="my-langgraph-service") """ diff --git a/ddtrace/contrib/internal/mariadb/__init__.py b/ddtrace/contrib/internal/mariadb/__init__.py index 1ef08422a00..59d8e306236 100644 --- a/ddtrace/contrib/internal/mariadb/__init__.py +++ b/ddtrace/contrib/internal/mariadb/__init__.py @@ -34,7 +34,7 @@ To configure the mariadb integration on an per-connection basis use the ``Pin`` API:: - from ddtrace.trace import Pin + from ddtrace._trace.pin import Pin from ddtrace import patch # Make sure to patch before importing mariadb diff --git a/ddtrace/contrib/internal/mcp/__init__.py b/ddtrace/contrib/internal/mcp/__init__.py index 825d65b7931..b737f1cfbdf 100644 --- a/ddtrace/contrib/internal/mcp/__init__.py +++ b/ddtrace/contrib/internal/mcp/__init__.py @@ -38,7 +38,7 @@ ``Pin`` API:: import mcp - from ddtrace.trace import Pin + from ddtrace._trace.pin import Pin Pin.override(mcp, service="my-mcp-service") """ # noqa: E501 diff --git a/ddtrace/contrib/internal/mongoengine/__init__.py b/ddtrace/contrib/internal/mongoengine/__init__.py deleted file mode 100644 index a72c861f4b7..00000000000 --- a/ddtrace/contrib/internal/mongoengine/__init__.py +++ /dev/null @@ -1,19 +0,0 @@ -"""Instrument mongoengine to report MongoDB queries. - -``import ddtrace.auto`` will automatically patch your mongoengine connect method to make it work. 
-:: - - from ddtrace import patch - from ddtrace.trace import Pin - import mongoengine - - # If not patched yet, you can patch mongoengine specifically - patch(mongoengine=True) - - # At that point, mongoengine is instrumented with the default settings - mongoengine.connect('db', alias='default') - - # Use a pin to specify metadata related to this client - client = mongoengine.connect('db', alias='master') - Pin.override(client, service="mongo-master") -""" diff --git a/ddtrace/contrib/internal/mongoengine/patch.py b/ddtrace/contrib/internal/mongoengine/patch.py deleted file mode 100644 index 550d1e83199..00000000000 --- a/ddtrace/contrib/internal/mongoengine/patch.py +++ /dev/null @@ -1,38 +0,0 @@ -# TODO(mabdinur): Remove the pymongoengine integration, this integration does nothing special -# it just uses the pymongo integration and creates unnecessary pin objects -from typing import Dict - -import mongoengine - -from ..pymongo.patch import patch as patch_pymongo_module -from ..pymongo.patch import unpatch as unpatch_pymongo_module -from .trace import WrappedConnect - - -# Original connect function -_connect = mongoengine.connect - - -def get_version(): - # type: () -> str - return getattr(mongoengine, "__version__", "") - - -def _supported_versions() -> Dict[str, str]: - return {"mongoengine": ">=0.23"} - - -def patch(): - if getattr(mongoengine, "_datadog_patch", False): - return - mongoengine.connect = WrappedConnect(_connect) - mongoengine._datadog_patch = True - patch_pymongo_module() - - -def unpatch(): - if not getattr(mongoengine, "_datadog_patch", False): - return - mongoengine.connect = _connect - mongoengine._datadog_patch = False - unpatch_pymongo_module() diff --git a/ddtrace/contrib/internal/mongoengine/trace.py b/ddtrace/contrib/internal/mongoengine/trace.py deleted file mode 100644 index e3deee0e4a4..00000000000 --- a/ddtrace/contrib/internal/mongoengine/trace.py +++ /dev/null @@ -1,38 +0,0 @@ -# 3p -# project -import wrapt - -from 
ddtrace._trace.pin import Pin - -# keep the TracedMongoClient import to avoid breaking the public api -from ddtrace.contrib.internal.pymongo.client import TracedMongoClient # noqa: F401 -from ddtrace.ext import mongo as mongox -from ddtrace.internal.schema import schematize_service_name - - -# TODO(Benjamin): we should instrument register_connection instead, because more generic -# We should also extract the "alias" attribute and set it as a meta -_SERVICE = schematize_service_name(mongox.SERVICE) - - -# TODO(mabdinur): Remove this class when ``ddtrace.contrib.mongoengine.trace`` is removed -class WrappedConnect(wrapt.ObjectProxy): - """WrappedConnect wraps mongoengines 'connect' function to ensure - that all returned connections are wrapped for tracing. - """ - - def __init__(self, connect): - super(WrappedConnect, self).__init__(connect) - Pin(_SERVICE).onto(self) - - def __call__(self, *args, **kwargs): - client = self.__wrapped__(*args, **kwargs) - pin = Pin.get_from(self) - if pin: - tracer = pin.tracer - pp = Pin(service=pin.service) - if tracer is not None: - pp._tracer = tracer - pp.onto(client) - - return client diff --git a/ddtrace/contrib/internal/mysql/__init__.py b/ddtrace/contrib/internal/mysql/__init__.py index 3336839bcf5..ba9086abb12 100644 --- a/ddtrace/contrib/internal/mysql/__init__.py +++ b/ddtrace/contrib/internal/mysql/__init__.py @@ -41,7 +41,7 @@ To configure the mysql integration on an per-connection basis use the ``Pin`` API:: - from ddtrace.trace import Pin + from ddtrace._trace.pin import Pin # Make sure to import mysql.connector and not the 'connect' function, # otherwise you won't have access to the patched version import mysql.connector diff --git a/ddtrace/contrib/internal/mysql/patch.py b/ddtrace/contrib/internal/mysql/patch.py index 557f3fb93c3..91178c0b7f3 100644 --- a/ddtrace/contrib/internal/mysql/patch.py +++ b/ddtrace/contrib/internal/mysql/patch.py @@ -13,9 +13,9 @@ from ddtrace.internal.compat import is_wrapted from 
ddtrace.internal.schema import schematize_database_operation from ddtrace.internal.schema import schematize_service_name +from ddtrace.internal.settings.asm import config as asm_config from ddtrace.internal.utils.formats import asbool from ddtrace.propagation._database_monitoring import _DBM_Propagator -from ddtrace.settings.asm import config as asm_config config._add( diff --git a/ddtrace/contrib/internal/mysqldb/__init__.py b/ddtrace/contrib/internal/mysqldb/__init__.py index 46a5e27de7b..0cfe8158071 100644 --- a/ddtrace/contrib/internal/mysqldb/__init__.py +++ b/ddtrace/contrib/internal/mysqldb/__init__.py @@ -55,7 +55,7 @@ # Make sure to import MySQLdb and not the 'connect' function, # otherwise you won't have access to the patched version - from ddtrace.trace import Pin + from ddtrace._trace.pin import Pin import MySQLdb # This will report a span with the default settings diff --git a/ddtrace/contrib/internal/mysqldb/patch.py b/ddtrace/contrib/internal/mysqldb/patch.py index fefccec7776..2e727ac415e 100644 --- a/ddtrace/contrib/internal/mysqldb/patch.py +++ b/ddtrace/contrib/internal/mysqldb/patch.py @@ -18,10 +18,10 @@ from ddtrace.internal.constants import COMPONENT from ddtrace.internal.schema import schematize_database_operation from ddtrace.internal.schema import schematize_service_name +from ddtrace.internal.settings.asm import config as asm_config from ddtrace.internal.utils.formats import asbool from ddtrace.internal.utils.wrappers import unwrap as _u from ddtrace.propagation._database_monitoring import _DBM_Propagator -from ddtrace.settings.asm import config as asm_config config._add( diff --git a/ddtrace/contrib/internal/openai/__init__.py b/ddtrace/contrib/internal/openai/__init__.py index 44495353997..f4bd7fa79cb 100644 --- a/ddtrace/contrib/internal/openai/__init__.py +++ b/ddtrace/contrib/internal/openai/__init__.py @@ -114,7 +114,7 @@ import openai from ddtrace import config - from ddtrace.trace import Pin + from ddtrace._trace.pin import Pin 
Pin.override(openai, service="my-openai-service") """ # noqa: E501 diff --git a/ddtrace/contrib/internal/openai_agents/__init__.py b/ddtrace/contrib/internal/openai_agents/__init__.py index ff3cdd340fc..53f331dabee 100644 --- a/ddtrace/contrib/internal/openai_agents/__init__.py +++ b/ddtrace/contrib/internal/openai_agents/__init__.py @@ -37,7 +37,7 @@ ``Pin`` API:: import agents - from ddtrace.trace import Pin + from ddtrace._trace.pin import Pin Pin.override(agents, service="my-agents-service") """ # noqa: E501 diff --git a/ddtrace/contrib/internal/psycopg/__init__.py b/ddtrace/contrib/internal/psycopg/__init__.py index 0c1e134bb15..3f6668961e2 100644 --- a/ddtrace/contrib/internal/psycopg/__init__.py +++ b/ddtrace/contrib/internal/psycopg/__init__.py @@ -50,7 +50,7 @@ To configure the psycopg integration on an per-connection basis use the ``Pin`` API:: - from ddtrace.trace import Pin + from ddtrace._trace.pin import Pin import psycopg db = psycopg.connect(connection_factory=factory) diff --git a/ddtrace/contrib/internal/psycopg/patch.py b/ddtrace/contrib/internal/psycopg/patch.py index f9ae4669a98..1891e803e8a 100644 --- a/ddtrace/contrib/internal/psycopg/patch.py +++ b/ddtrace/contrib/internal/psycopg/patch.py @@ -76,8 +76,7 @@ def _psycopg_sql_injector(dbm_comment, sql_statement): ) -def get_version(): - # type: () -> str +def get_version() -> str: return "" @@ -85,11 +84,10 @@ def get_version(): def _supported_versions() -> Dict[str, str]: - return {"psycopg": ">=3.0.0", "psycopg2": ">=2.8.0"} + return {"psycopg": ">=3.0.0", "psycopg2": ">=2.9.10"} -def get_versions(): - # type: () -> List[str] +def get_versions() -> List[str]: return PATCHED_VERSIONS diff --git a/ddtrace/contrib/internal/pymemcache/__init__.py b/ddtrace/contrib/internal/pymemcache/__init__.py index 066bb5653e6..cb874460919 100644 --- a/ddtrace/contrib/internal/pymemcache/__init__.py +++ b/ddtrace/contrib/internal/pymemcache/__init__.py @@ -3,7 +3,7 @@ ``import ddtrace.auto`` will 
automatically patch the pymemcache ``Client``:: from ddtrace import patch - from ddtrace.trace import Pin + from ddtrace._trace.pin import Pin # If not patched yet, patch pymemcache specifically patch(pymemcache=True) diff --git a/ddtrace/contrib/internal/pymongo/__init__.py b/ddtrace/contrib/internal/pymongo/__init__.py index f1210f0047d..d1b2f7d19ab 100644 --- a/ddtrace/contrib/internal/pymongo/__init__.py +++ b/ddtrace/contrib/internal/pymongo/__init__.py @@ -9,7 +9,7 @@ # Be sure to import pymongo and not pymongo.MongoClient directly, # otherwise you won't have access to the patched version from ddtrace import patch - from ddtrace.trace import Pin + from ddtrace._trace.pin import Pin import pymongo # If not patched yet, you can patch pymongo specifically diff --git a/ddtrace/contrib/internal/pymysql/__init__.py b/ddtrace/contrib/internal/pymysql/__init__.py index d219e46eccd..631e9594d66 100644 --- a/ddtrace/contrib/internal/pymysql/__init__.py +++ b/ddtrace/contrib/internal/pymysql/__init__.py @@ -41,7 +41,7 @@ To configure the integration on an per-connection basis use the ``Pin`` API:: - from ddtrace.trace import Pin + from ddtrace._trace.pin import Pin from pymysql import connect # This will report a span with the default settings diff --git a/ddtrace/contrib/internal/pynamodb/patch.py b/ddtrace/contrib/internal/pynamodb/patch.py index 1d2eb8176a3..71ff07367d6 100644 --- a/ddtrace/contrib/internal/pynamodb/patch.py +++ b/ddtrace/contrib/internal/pynamodb/patch.py @@ -35,13 +35,12 @@ ) -def get_version(): - # type: () -> str +def get_version() -> str: return getattr(pynamodb, "__version__", "") def _supported_versions() -> Dict[str, str]: - return {"pynamodb": ">=5.0"} + return {"pynamodb": ">=5.5.1"} def patch(): diff --git a/ddtrace/contrib/internal/pyodbc/__init__.py b/ddtrace/contrib/internal/pyodbc/__init__.py index 0a2d46d5e70..d074aaa2387 100644 --- a/ddtrace/contrib/internal/pyodbc/__init__.py +++ b/ddtrace/contrib/internal/pyodbc/__init__.py @@ 
-41,7 +41,7 @@ To configure the integration on an per-connection basis use the ``Pin`` API:: - from ddtrace.trace import Pin + from ddtrace._trace.pin import Pin import pyodbc # This will report a span with the default settings diff --git a/ddtrace/contrib/internal/pyramid/patch.py b/ddtrace/contrib/internal/pyramid/patch.py index ded17f54666..6f5ee9c6b5f 100644 --- a/ddtrace/contrib/internal/pyramid/patch.py +++ b/ddtrace/contrib/internal/pyramid/patch.py @@ -5,8 +5,8 @@ import wrapt from ddtrace import config +from ddtrace.internal.settings._config import _get_config from ddtrace.internal.utils.formats import asbool -from ddtrace.settings._config import _get_config from .constants import SETTINGS_DISTRIBUTED_TRACING from .constants import SETTINGS_SERVICE diff --git a/ddtrace/contrib/internal/pytest/_plugin_v2.py b/ddtrace/contrib/internal/pytest/_plugin_v2.py index 97d2a4fae73..24fc6e96513 100644 --- a/ddtrace/contrib/internal/pytest/_plugin_v2.py +++ b/ddtrace/contrib/internal/pytest/_plugin_v2.py @@ -64,6 +64,7 @@ from ddtrace.internal.coverage.code import ModuleCodeCollector from ddtrace.internal.coverage.installer import install as install_coverage from ddtrace.internal.logger import get_logger +from ddtrace.internal.settings.asm import config as asm_config from ddtrace.internal.test_visibility._library_capabilities import LibraryCapabilities from ddtrace.internal.test_visibility.api import InternalTest from ddtrace.internal.test_visibility.api import InternalTestModule @@ -71,7 +72,6 @@ from ddtrace.internal.test_visibility.api import InternalTestSuite from ddtrace.internal.test_visibility.coverage_lines import CoverageLines from ddtrace.internal.utils.formats import asbool -from ddtrace.settings.asm import config as asm_config from ddtrace.vendor.debtcollector import deprecate diff --git a/ddtrace/contrib/internal/pytest/_utils.py b/ddtrace/contrib/internal/pytest/_utils.py index b944671aa35..ca4dd5c8b75 100644 --- 
a/ddtrace/contrib/internal/pytest/_utils.py +++ b/ddtrace/contrib/internal/pytest/_utils.py @@ -20,11 +20,11 @@ from ddtrace.internal.ci_visibility.constants import ITR_UNSKIPPABLE_REASON from ddtrace.internal.ci_visibility.utils import get_source_lines_for_test_method from ddtrace.internal.logger import get_logger +from ddtrace.internal.settings._config import _get_config from ddtrace.internal.test_visibility.api import InternalTest from ddtrace.internal.utils.cache import cached from ddtrace.internal.utils.formats import asbool from ddtrace.internal.utils.inspection import undecorated -from ddtrace.settings._config import _get_config log = get_logger(__name__) diff --git a/ddtrace/contrib/internal/pytest/plugin.py b/ddtrace/contrib/internal/pytest/plugin.py index 94de436e430..fc64e9c3f45 100644 --- a/ddtrace/contrib/internal/pytest/plugin.py +++ b/ddtrace/contrib/internal/pytest/plugin.py @@ -31,8 +31,8 @@ from ddtrace.contrib.internal.pytest._plugin_v2 import pytest_sessionstart # noqa: F401 from ddtrace.contrib.internal.pytest._plugin_v2 import pytest_terminal_summary # noqa: F401 from ddtrace.contrib.internal.pytest._utils import _extract_span -from ddtrace.settings._telemetry import config as telemetry_config -from ddtrace.settings.asm import config as asm_config +from ddtrace.internal.settings._telemetry import config as telemetry_config +from ddtrace.internal.settings.asm import config as asm_config if asm_config._iast_enabled: diff --git a/ddtrace/contrib/internal/redis/__init__.py b/ddtrace/contrib/internal/redis/__init__.py index 3204fade8df..49a7fd52027 100644 --- a/ddtrace/contrib/internal/redis/__init__.py +++ b/ddtrace/contrib/internal/redis/__init__.py @@ -55,7 +55,7 @@ To configure particular redis instances use the :class:`Pin ` API:: import redis - from ddtrace.trace import Pin + from ddtrace._trace.pin import Pin client = redis.StrictRedis(host="localhost", port=6379) diff --git a/ddtrace/contrib/internal/rediscluster/__init__.py 
b/ddtrace/contrib/internal/rediscluster/__init__.py index 05975277291..1fc846fa8aa 100644 --- a/ddtrace/contrib/internal/rediscluster/__init__.py +++ b/ddtrace/contrib/internal/rediscluster/__init__.py @@ -4,7 +4,7 @@ :: from ddtrace import patch - from ddtrace.trace import Pin + from ddtrace._trace.pin import Pin import rediscluster # If not patched yet, you can patch redis specifically diff --git a/ddtrace/contrib/internal/requests/connection.py b/ddtrace/contrib/internal/requests/connection.py index f2a6926ff53..674bb782b17 100644 --- a/ddtrace/contrib/internal/requests/connection.py +++ b/ddtrace/contrib/internal/requests/connection.py @@ -20,9 +20,9 @@ from ddtrace.internal.opentelemetry.constants import OTLP_EXPORTER_HEADER_IDENTIFIER from ddtrace.internal.schema import schematize_url_operation from ddtrace.internal.schema.span_attribute_schema import SpanDirection +from ddtrace.internal.settings.asm import config as asm_config from ddtrace.internal.utils import get_argument_value from ddtrace.propagation.http import HTTPPropagator -from ddtrace.settings.asm import config as asm_config log = get_logger(__name__) diff --git a/ddtrace/contrib/internal/requests/patch.py b/ddtrace/contrib/internal/requests/patch.py index 8db2b85189d..1d8ba74e9a7 100644 --- a/ddtrace/contrib/internal/requests/patch.py +++ b/ddtrace/contrib/internal/requests/patch.py @@ -8,8 +8,8 @@ from ddtrace._trace.pin import Pin from ddtrace.contrib.internal.trace_utils import unwrap as _u from ddtrace.internal.schema import schematize_service_name +from ddtrace.internal.settings.asm import config as asm_config from ddtrace.internal.utils.formats import asbool -from ddtrace.settings.asm import config as asm_config from .connection import _wrap_send from .session import TracedSession @@ -31,13 +31,12 @@ Pin(_config=config.requests).onto(TracedSession) -def get_version(): - # type: () -> str +def get_version() -> str: return getattr(requests, "__version__", "") def _supported_versions() -> 
Dict[str, str]: - return {"requests": ">=2.20.0"} + return {"requests": ">=2.25.1"} def patch(): diff --git a/ddtrace/contrib/internal/rq/__init__.py b/ddtrace/contrib/internal/rq/__init__.py index 596c0c420f6..28606b7a9ef 100644 --- a/ddtrace/contrib/internal/rq/__init__.py +++ b/ddtrace/contrib/internal/rq/__init__.py @@ -28,7 +28,7 @@ To override the service name for a queue:: - from ddtrace.trace import Pin + from ddtrace._trace.pin import Pin connection = redis.Redis() queue = rq.Queue(connection=connection) diff --git a/ddtrace/contrib/internal/rq/patch.py b/ddtrace/contrib/internal/rq/patch.py index bcfa7dbdc36..d66876630d7 100644 --- a/ddtrace/contrib/internal/rq/patch.py +++ b/ddtrace/contrib/internal/rq/patch.py @@ -3,19 +3,18 @@ from ddtrace import config from ddtrace._trace.pin import Pin from ddtrace.constants import SPAN_KIND +from ddtrace.contrib import trace_utils +from ddtrace.ext import SpanKind +from ddtrace.ext import SpanTypes from ddtrace.internal import core from ddtrace.internal.constants import COMPONENT from ddtrace.internal.schema import schematize_messaging_operation from ddtrace.internal.schema import schematize_service_name from ddtrace.internal.schema.span_attribute_schema import SpanDirection +from ddtrace.internal.settings._config import _get_config from ddtrace.internal.utils import get_argument_value from ddtrace.internal.utils.formats import asbool -from ....ext import SpanKind -from ....ext import SpanTypes -from ....settings._config import _get_config -from ... 
import trace_utils - config._add( "rq", diff --git a/ddtrace/contrib/internal/snowflake/__init__.py b/ddtrace/contrib/internal/snowflake/__init__.py index 20ca3021cf3..6207b854aa3 100644 --- a/ddtrace/contrib/internal/snowflake/__init__.py +++ b/ddtrace/contrib/internal/snowflake/__init__.py @@ -40,7 +40,7 @@ To configure the integration on an per-connection basis use the ``Pin`` API:: - from ddtrace.trace import Pin + from ddtrace._trace.pin import Pin from snowflake.connector import connect # This will report a span with the default settings diff --git a/ddtrace/contrib/internal/snowflake/patch.py b/ddtrace/contrib/internal/snowflake/patch.py index bab4c6f06bc..4fc3f5b2973 100644 --- a/ddtrace/contrib/internal/snowflake/patch.py +++ b/ddtrace/contrib/internal/snowflake/patch.py @@ -29,8 +29,7 @@ ) -def get_version(): - # type: () -> str +def get_version() -> str: try: import snowflake.connector as c except AttributeError: @@ -41,7 +40,7 @@ def get_version(): def _supported_versions() -> Dict[str, str]: - return {"snowflake": ">=2.3.0"} + return {"snowflake": ">=2.4.6"} class _SFTracedCursor(TracedCursor): diff --git a/ddtrace/contrib/internal/sqlalchemy/patch.py b/ddtrace/contrib/internal/sqlalchemy/patch.py index a87eb855791..2180fa096e9 100644 --- a/ddtrace/contrib/internal/sqlalchemy/patch.py +++ b/ddtrace/contrib/internal/sqlalchemy/patch.py @@ -4,7 +4,7 @@ from wrapt import wrap_function_wrapper as _w from ddtrace.contrib.internal.trace_utils import unwrap -from ddtrace.settings.asm import config as asm_config +from ddtrace.internal.settings.asm import config as asm_config from .engine import _wrap_create_engine diff --git a/ddtrace/contrib/internal/sqlite3/__init__.py b/ddtrace/contrib/internal/sqlite3/__init__.py index 351d639b182..c085e69e96d 100644 --- a/ddtrace/contrib/internal/sqlite3/__init__.py +++ b/ddtrace/contrib/internal/sqlite3/__init__.py @@ -41,7 +41,7 @@ To configure the integration on an per-connection basis use the ``Pin`` API:: - from 
ddtrace.trace import Pin + from ddtrace._trace.pin import Pin import sqlite3 # This will report a span with the default settings diff --git a/ddtrace/contrib/internal/sqlite3/patch.py b/ddtrace/contrib/internal/sqlite3/patch.py index c9d5a370897..ec7188a1add 100644 --- a/ddtrace/contrib/internal/sqlite3/patch.py +++ b/ddtrace/contrib/internal/sqlite3/patch.py @@ -13,8 +13,8 @@ from ddtrace.ext import db from ddtrace.internal.schema import schematize_database_operation from ddtrace.internal.schema import schematize_service_name +from ddtrace.internal.settings.asm import config as asm_config from ddtrace.internal.utils.formats import asbool -from ddtrace.settings.asm import config as asm_config # Original connect method diff --git a/ddtrace/contrib/internal/starlette/patch.py b/ddtrace/contrib/internal/starlette/patch.py index 5734a837f8a..abb53e4998f 100644 --- a/ddtrace/contrib/internal/starlette/patch.py +++ b/ddtrace/contrib/internal/starlette/patch.py @@ -23,13 +23,13 @@ from ddtrace.internal.endpoints import endpoint_collection from ddtrace.internal.logger import get_logger from ddtrace.internal.schema import schematize_service_name +from ddtrace.internal.settings.asm import config as asm_config from ddtrace.internal.telemetry import get_config as _get_config from ddtrace.internal.utils import get_argument_value from ddtrace.internal.utils import get_blocked from ddtrace.internal.utils import set_argument_value from ddtrace.internal.utils.formats import asbool from ddtrace.internal.utils.wrappers import unwrap as _u -from ddtrace.settings.asm import config as asm_config from ddtrace.trace import Span # noqa:F401 from ddtrace.vendor.packaging.version import parse as parse_version diff --git a/ddtrace/contrib/internal/subprocess/patch.py b/ddtrace/contrib/internal/subprocess/patch.py index 66e0a8dac7b..9649a05329a 100644 --- a/ddtrace/contrib/internal/subprocess/patch.py +++ b/ddtrace/contrib/internal/subprocess/patch.py @@ -21,8 +21,8 @@ from ddtrace.internal 
import core from ddtrace.internal.forksafe import RLock from ddtrace.internal.logger import get_logger -from ddtrace.settings._config import config -from ddtrace.settings.asm import config as asm_config +from ddtrace.internal.settings._config import config +from ddtrace.internal.settings.asm import config as asm_config log = get_logger(__name__) diff --git a/ddtrace/contrib/internal/trace_utils.py b/ddtrace/contrib/internal/trace_utils.py index 9f3050cc2f9..70c8fd52b1e 100644 --- a/ddtrace/contrib/internal/trace_utils.py +++ b/ddtrace/contrib/internal/trace_utils.py @@ -38,14 +38,14 @@ from ddtrace.internal.constants import SAMPLING_DECISION_TRACE_TAG_KEY from ddtrace.internal.core.event_hub import dispatch from ddtrace.internal.logger import get_logger +from ddtrace.internal.settings._config import config +from ddtrace.internal.settings.asm import config as asm_config import ddtrace.internal.utils.wrappers from ddtrace.propagation.http import HTTPPropagator -from ddtrace.settings._config import config -from ddtrace.settings.asm import config as asm_config if TYPE_CHECKING: # pragma: no cover - from ddtrace.settings.integration import IntegrationConfig # noqa:F401 + from ddtrace.internal.settings.integration import IntegrationConfig # noqa:F401 from ddtrace.trace import Span # noqa:F401 from ddtrace.trace import Tracer # noqa:F401 diff --git a/ddtrace/contrib/internal/trace_utils_base.py b/ddtrace/contrib/internal/trace_utils_base.py index 3d784b37325..af222425693 100644 --- a/ddtrace/contrib/internal/trace_utils_base.py +++ b/ddtrace/contrib/internal/trace_utils_base.py @@ -8,13 +8,13 @@ from ddtrace.ext import user from ddtrace.internal import core from ddtrace.internal.logger import get_logger +from ddtrace.internal.settings._config import config +from ddtrace.internal.settings.asm import config as asm_config +from ddtrace.internal.settings.integration import IntegrationConfig from ddtrace.internal.utils.cache import cached from ddtrace.internal.utils.http 
import normalize_header_name from ddtrace.internal.utils.http import redact_url from ddtrace.internal.utils.http import strip_query_string -from ddtrace.settings._config import config -from ddtrace.settings.asm import config as asm_config -from ddtrace.settings.integration import IntegrationConfig log = get_logger(__name__) diff --git a/ddtrace/contrib/internal/urllib/patch.py b/ddtrace/contrib/internal/urllib/patch.py index a3a7a0d31f2..6a98e8dc3ad 100644 --- a/ddtrace/contrib/internal/urllib/patch.py +++ b/ddtrace/contrib/internal/urllib/patch.py @@ -4,7 +4,7 @@ from wrapt import wrap_function_wrapper as _w from ddtrace.contrib.internal.trace_utils import unwrap as _u -from ddtrace.settings.asm import config as asm_config +from ddtrace.internal.settings.asm import config as asm_config def get_version(): diff --git a/ddtrace/contrib/internal/urllib3/patch.py b/ddtrace/contrib/internal/urllib3/patch.py index fa2de26e876..7e467edc87e 100644 --- a/ddtrace/contrib/internal/urllib3/patch.py +++ b/ddtrace/contrib/internal/urllib3/patch.py @@ -16,12 +16,12 @@ from ddtrace.internal.schema import schematize_service_name from ddtrace.internal.schema import schematize_url_operation from ddtrace.internal.schema.span_attribute_schema import SpanDirection +from ddtrace.internal.settings.asm import config as asm_config from ddtrace.internal.utils import ArgumentError from ddtrace.internal.utils import get_argument_value from ddtrace.internal.utils.formats import asbool from ddtrace.internal.utils.wrappers import unwrap as _u from ddtrace.propagation.http import HTTPPropagator -from ddtrace.settings.asm import config as asm_config # Ports which, if set, will not be used in hostnames/service names diff --git a/ddtrace/contrib/internal/valkey/patch.py b/ddtrace/contrib/internal/valkey/patch.py index 68edd4deb20..ba8b794962b 100644 --- a/ddtrace/contrib/internal/valkey/patch.py +++ b/ddtrace/contrib/internal/valkey/patch.py @@ -55,7 +55,7 @@ To configure particular valkey instances 
use the :class:`Pin ` API:: import valkey - from ddtrace.trace import Pin + from ddtrace._trace.pin import Pin client = valkey.StrictValkey(host="localhost", port=6379) diff --git a/ddtrace/contrib/internal/vertexai/__init__.py b/ddtrace/contrib/internal/vertexai/__init__.py index 25e5fdc081b..e3fbdb24a69 100644 --- a/ddtrace/contrib/internal/vertexai/__init__.py +++ b/ddtrace/contrib/internal/vertexai/__init__.py @@ -78,7 +78,7 @@ import vertexai from ddtrace import config - from ddtrace.trace import Pin + from ddtrace._trace.pin import Pin Pin.override(vertexai, service="my-vertexai-service") """ # noqa: E501 diff --git a/ddtrace/contrib/internal/vertica/__init__.py b/ddtrace/contrib/internal/vertica/__init__.py index df997f5946b..1007efe1b68 100644 --- a/ddtrace/contrib/internal/vertica/__init__.py +++ b/ddtrace/contrib/internal/vertica/__init__.py @@ -28,7 +28,7 @@ ``Pin`` API:: from ddtrace import patch - from ddtrace.trace import Pin + from ddtrace._trace.pin import Pin patch(vertica=True) import vertica_python diff --git a/ddtrace/contrib/internal/webbrowser/patch.py b/ddtrace/contrib/internal/webbrowser/patch.py index 973e8934127..4c30af69b6d 100644 --- a/ddtrace/contrib/internal/webbrowser/patch.py +++ b/ddtrace/contrib/internal/webbrowser/patch.py @@ -4,7 +4,7 @@ from wrapt import wrap_function_wrapper as _w from ddtrace.contrib.internal.trace_utils import unwrap as _u -from ddtrace.settings.asm import config as asm_config +from ddtrace.internal.settings.asm import config as asm_config def get_version(): diff --git a/ddtrace/contrib/internal/wsgi/wsgi.py b/ddtrace/contrib/internal/wsgi/wsgi.py index 1db5d014594..e977789108a 100644 --- a/ddtrace/contrib/internal/wsgi/wsgi.py +++ b/ddtrace/contrib/internal/wsgi/wsgi.py @@ -12,7 +12,7 @@ from typing import Optional # noqa:F401 from ddtrace._trace.pin import Pin # noqa:F401 - from ddtrace.settings._config import Config # noqa:F401 + from ddtrace.internal.settings._config import Config # noqa:F401 from 
ddtrace.trace import Span # noqa:F401 from ddtrace.trace import Tracer # noqa:F401 diff --git a/ddtrace/contrib/internal/yaaredis/__init__.py b/ddtrace/contrib/internal/yaaredis/__init__.py index 65917b03c29..4fb0687ec6c 100644 --- a/ddtrace/contrib/internal/yaaredis/__init__.py +++ b/ddtrace/contrib/internal/yaaredis/__init__.py @@ -53,7 +53,7 @@ To configure particular yaaredis instances use the :class:`Pin ` API:: import yaaredis - from ddtrace.trace import Pin + from ddtrace._trace.pin import Pin client = yaaredis.StrictRedis(host="localhost", port=6379) diff --git a/ddtrace/contrib/pylibmc.py b/ddtrace/contrib/pylibmc.py index c894b1fa5e2..8a9dc193b5f 100644 --- a/ddtrace/contrib/pylibmc.py +++ b/ddtrace/contrib/pylibmc.py @@ -6,7 +6,7 @@ # Be sure to import pylibmc and not pylibmc.Client directly, # otherwise you won't have access to the patched version from ddtrace import patch - from ddtrace.trace import Pin + from ddtrace._trace.pin import Pin import pylibmc # If not patched yet, you can patch pylibmc specifically diff --git a/ddtrace/contrib/requests.py b/ddtrace/contrib/requests.py index 2f289a467e8..747facfe98e 100644 --- a/ddtrace/contrib/requests.py +++ b/ddtrace/contrib/requests.py @@ -65,7 +65,7 @@ use the config API:: from ddtrace import config - from ddtrace.trace import Pin + from ddtrace._trace.pin import Pin from requests import Session session = Session() diff --git a/ddtrace/contrib/sqlalchemy.py b/ddtrace/contrib/sqlalchemy.py index 04bffa87b85..e69e6b9c0a8 100644 --- a/ddtrace/contrib/sqlalchemy.py +++ b/ddtrace/contrib/sqlalchemy.py @@ -8,7 +8,7 @@ # patch before importing `create_engine` from ddtrace import patch - from ddtrace.trace import Pin + from ddtrace._trace.pin import Pin patch(sqlalchemy=True) # use SQLAlchemy as usual diff --git a/ddtrace/contrib/valkey.py b/ddtrace/contrib/valkey.py index c898aff012d..cf4d942c3ee 100644 --- a/ddtrace/contrib/valkey.py +++ b/ddtrace/contrib/valkey.py @@ -55,7 +55,7 @@ To configure particular 
valkey instances use the :class:`Pin ` API:: import valkey - from ddtrace.trace import Pin + from ddtrace._trace.pin import Pin client = valkey.StrictValkey(host="localhost", port=6379) diff --git a/ddtrace/debugging/_config.py b/ddtrace/debugging/_config.py index 02fce853b66..b58ff7d68ea 100644 --- a/ddtrace/debugging/_config.py +++ b/ddtrace/debugging/_config.py @@ -1,6 +1,6 @@ from ddtrace.internal.logger import get_logger -from ddtrace.settings.dynamic_instrumentation import config as di_config # noqa: F401 -from ddtrace.settings.exception_replay import config as er_config # noqa: F401 +from ddtrace.internal.settings.dynamic_instrumentation import config as di_config # noqa: F401 +from ddtrace.internal.settings.exception_replay import config as er_config # noqa: F401 log = get_logger(__name__) diff --git a/ddtrace/debugging/_exception/replay.py b/ddtrace/debugging/_exception/replay.py index 585fca6d3d8..414430bee59 100644 --- a/ddtrace/debugging/_exception/replay.py +++ b/ddtrace/debugging/_exception/replay.py @@ -19,9 +19,9 @@ from ddtrace.internal.packages import is_user_code from ddtrace.internal.rate_limiter import BudgetRateLimiterWithJitter as RateLimiter from ddtrace.internal.rate_limiter import RateLimitExceeded +from ddtrace.internal.settings._config import config as global_config +from ddtrace.internal.settings.exception_replay import config from ddtrace.internal.utils.time import HourGlass -from ddtrace.settings._config import config as global_config -from ddtrace.settings.exception_replay import config from ddtrace.trace import Span diff --git a/ddtrace/debugging/_origin/span.py b/ddtrace/debugging/_origin/span.py index e11d33a7a9e..ddbbe66842b 100644 --- a/ddtrace/debugging/_origin/span.py +++ b/ddtrace/debugging/_origin/span.py @@ -25,8 +25,8 @@ from ddtrace.internal.logger import get_logger from ddtrace.internal.packages import is_user_code from ddtrace.internal.safety import _isinstance +from ddtrace.internal.settings.code_origin import config 
as co_config from ddtrace.internal.wrapping.context import WrappingContext -from ddtrace.settings.code_origin import config as co_config from ddtrace.trace import Span diff --git a/ddtrace/debugging/_products/code_origin/span.py b/ddtrace/debugging/_products/code_origin/span.py index b05a6ac2e52..e4dad6ac720 100644 --- a/ddtrace/debugging/_products/code_origin/span.py +++ b/ddtrace/debugging/_products/code_origin/span.py @@ -5,8 +5,8 @@ import ddtrace.internal.core as core from ddtrace.internal.logger import get_logger from ddtrace.internal.products import manager as product_manager -from ddtrace.settings._core import ValueSource -from ddtrace.settings.code_origin import config +from ddtrace.internal.settings._core import ValueSource +from ddtrace.internal.settings.code_origin import config log = get_logger(__name__) diff --git a/ddtrace/debugging/_products/dynamic_instrumentation.py b/ddtrace/debugging/_products/dynamic_instrumentation.py index dceb04daa0a..6616eeb6f14 100644 --- a/ddtrace/debugging/_products/dynamic_instrumentation.py +++ b/ddtrace/debugging/_products/dynamic_instrumentation.py @@ -1,6 +1,6 @@ import enum -from ddtrace.settings.dynamic_instrumentation import config +from ddtrace.internal.settings.dynamic_instrumentation import config requires = ["remote-configuration"] diff --git a/ddtrace/debugging/_products/live_debugger.py b/ddtrace/debugging/_products/live_debugger.py index 1417d22d320..f7ab1621daf 100644 --- a/ddtrace/debugging/_products/live_debugger.py +++ b/ddtrace/debugging/_products/live_debugger.py @@ -1,4 +1,4 @@ -from ddtrace.settings.live_debugging import config +from ddtrace.internal.settings.live_debugging import config # TODO[gab]: Uncomment when the product is ready diff --git a/ddtrace/debugging/_redaction.py b/ddtrace/debugging/_redaction.py index d2eeecc160a..b5e6d9876ed 100644 --- a/ddtrace/debugging/_redaction.py +++ b/ddtrace/debugging/_redaction.py @@ -3,9 +3,9 @@ from ddtrace.debugging._expressions import DDCompiler from 
ddtrace.debugging._expressions import DDExpression from ddtrace.internal.logger import get_logger +from ddtrace.internal.settings.dynamic_instrumentation import config +from ddtrace.internal.settings.dynamic_instrumentation import normalize_ident from ddtrace.internal.utils.cache import cached -from ddtrace.settings.dynamic_instrumentation import config -from ddtrace.settings.dynamic_instrumentation import normalize_ident log = get_logger(__name__) diff --git a/ddtrace/debugging/_signal/tracing.py b/ddtrace/debugging/_signal/tracing.py index e1c85b27746..c3ea22d84b4 100644 --- a/ddtrace/debugging/_signal/tracing.py +++ b/ddtrace/debugging/_signal/tracing.py @@ -50,7 +50,7 @@ def enter(self, scope: t.Mapping[str, t.Any]) -> None: ) span = self._span_cm.__enter__() - span.set_tags(probe.tags) # type: ignore[arg-type] + span.set_tags(probe.tags) span._set_tag_str(PROBE_ID_TAG_NAME, probe.probe_id) span._set_tag_str(_ORIGIN_KEY, "di") diff --git a/ddtrace/errortracking/_handled_exceptions/bytecode_reporting.py b/ddtrace/errortracking/_handled_exceptions/bytecode_reporting.py index 58c9a3e6c20..fe8d0644de1 100644 --- a/ddtrace/errortracking/_handled_exceptions/bytecode_reporting.py +++ b/ddtrace/errortracking/_handled_exceptions/bytecode_reporting.py @@ -11,7 +11,7 @@ from ddtrace.internal.packages import is_stdlib from ddtrace.internal.packages import is_third_party from ddtrace.internal.packages import is_user_code -from ddtrace.settings.errortracking import config +from ddtrace.internal.settings.errortracking import config INSTRUMENTABLE_TYPES = (types.FunctionType, types.MethodType, staticmethod, type) diff --git a/ddtrace/errortracking/_handled_exceptions/collector.py b/ddtrace/errortracking/_handled_exceptions/collector.py index fe6241b1be7..9358b17646b 100644 --- a/ddtrace/errortracking/_handled_exceptions/collector.py +++ b/ddtrace/errortracking/_handled_exceptions/collector.py @@ -8,7 +8,7 @@ from ddtrace.internal.constants import SPAN_EVENTS_HAS_EXCEPTION from 
ddtrace.internal.logger import get_logger from ddtrace.internal.service import Service -from ddtrace.settings.errortracking import config +from ddtrace.internal.settings.errortracking import config log = get_logger(__name__) diff --git a/ddtrace/errortracking/_handled_exceptions/monitoring_reporting.py b/ddtrace/errortracking/_handled_exceptions/monitoring_reporting.py index 5ef94fdb0bb..f12a0c3aaee 100644 --- a/ddtrace/errortracking/_handled_exceptions/monitoring_reporting.py +++ b/ddtrace/errortracking/_handled_exceptions/monitoring_reporting.py @@ -12,7 +12,7 @@ from ddtrace.internal.packages import is_stdlib # noqa: F401 from ddtrace.internal.packages import is_third_party # noqa: F401 from ddtrace.internal.packages import is_user_code # noqa: F401 -from ddtrace.settings.errortracking import config +from ddtrace.internal.settings.errortracking import config INSTRUMENTED_FILE_PATHS = [] diff --git a/ddtrace/errortracking/product.py b/ddtrace/errortracking/product.py index 98b91063145..71f8372b5c5 100644 --- a/ddtrace/errortracking/product.py +++ b/ddtrace/errortracking/product.py @@ -1,7 +1,7 @@ """ This is the entry point for the Error Tracking automatic reporting of handled exception. 
""" -from ddtrace.settings.errortracking import config +from ddtrace.internal.settings.errortracking import config requires = ["tracer"] diff --git a/ddtrace/ext/cassandra.py b/ddtrace/ext/cassandra.py deleted file mode 100644 index d510897d12d..00000000000 --- a/ddtrace/ext/cassandra.py +++ /dev/null @@ -1,6 +0,0 @@ -# tags -CLUSTER = "cassandra.cluster" -KEYSPACE = "cassandra.keyspace" -CONSISTENCY_LEVEL = "cassandra.consistency_level" -PAGINATED = "cassandra.paginated" -PAGE_NUMBER = "cassandra.page_number" diff --git a/ddtrace/internal/_encoding.pyx b/ddtrace/internal/_encoding.pyx index 6b11aaf94d1..e8391ab6506 100644 --- a/ddtrace/internal/_encoding.pyx +++ b/ddtrace/internal/_encoding.pyx @@ -22,14 +22,17 @@ from ..constants import _ORIGIN_KEY as ORIGIN_KEY from .constants import SPAN_LINKS_KEY from .constants import SPAN_EVENTS_KEY from .constants import MAX_UINT_64BITS +from .logger import get_logger from .._trace._limits import MAX_SPAN_META_VALUE_LEN from .._trace._limits import TRUNCATED_SPAN_ATTRIBUTE_LEN -from ..settings._agent import config as agent_config +from .settings._agent import config as agent_config DEF MSGPACK_ARRAY_LENGTH_PREFIX_SIZE = 5 DEF MSGPACK_STRING_TABLE_LENGTH_PREFIX_SIZE = 6 +cdef object log = get_logger(__name__) + cdef extern from "Python.h": const char* PyUnicode_AsUTF8(object o) @@ -699,63 +702,85 @@ cdef class MsgpackEncoderV04(MsgpackEncoderBase): return ret return ret - cdef inline int _pack_meta(self, object meta, char *dd_origin, str span_events) except? -1: + cdef inline int _pack_meta( + self, object meta, char *dd_origin, str span_events, uint64_t span_id, + ) except? 
-1: cdef Py_ssize_t L cdef int ret cdef dict d + cdef list m - if PyDict_CheckExact(meta): - d = meta - L = len(d) + (dd_origin is not NULL) + (len(span_events) > 0) - if L > ITEM_LIMIT: - raise ValueError("dict is too large") + if not PyDict_CheckExact(meta): + raise TypeError("Unhandled meta type: %r" % type(meta)) - ret = msgpack_pack_map(&self.pk, L) - if ret == 0: - for k, v in d.items(): - ret = pack_text(&self.pk, k) - if ret != 0: - break - ret = pack_text(&self.pk, v) - if ret != 0: - break - if dd_origin is not NULL: - ret = pack_bytes(&self.pk, _ORIGIN_KEY, _ORIGIN_KEY_LEN) - if ret == 0: - ret = pack_bytes(&self.pk, dd_origin, strlen(dd_origin)) - if ret != 0: - return ret - if span_events: - ret = pack_text(&self.pk, SPAN_EVENTS_KEY) - if ret == 0: - ret = pack_text(&self.pk, span_events) - return ret + d = meta - raise TypeError("Unhandled meta type: %r" % type(meta)) + # Filter meta to only str/bytes values + m = [] + for k, v in d.items(): + if PyUnicode_Check(v) or PyBytesLike_Check(v): + m.append((k, v)) + else: + log.warning("[span ID %d] Meta key %r has non-string value %r, skipping", span_id, k, v) + + L = len(m) + (dd_origin is not NULL) + (len(span_events) > 0) + if L > ITEM_LIMIT: + raise ValueError("dict is too large") + + ret = msgpack_pack_map(&self.pk, L) + if ret == 0: + for k, v in m: + ret = pack_text(&self.pk, k) + if ret != 0: + break + ret = pack_text(&self.pk, v) + if ret != 0: + break + if dd_origin is not NULL: + ret = pack_bytes(&self.pk, _ORIGIN_KEY, _ORIGIN_KEY_LEN) + if ret == 0: + ret = pack_bytes(&self.pk, dd_origin, strlen(dd_origin)) + if ret != 0: + return ret + if span_events: + ret = pack_text(&self.pk, SPAN_EVENTS_KEY) + if ret == 0: + ret = pack_text(&self.pk, span_events) + return ret - cdef inline int _pack_metrics(self, object metrics) except? -1: + cdef inline int _pack_metrics(self, object metrics, uint64_t span_id) except? 
-1: cdef Py_ssize_t L cdef int ret cdef dict d + cdef list m - if PyDict_CheckExact(metrics): - d = metrics - L = len(d) - if L > ITEM_LIMIT: - raise ValueError("dict is too large") + if not PyDict_CheckExact(metrics): + raise TypeError("Unhandled metrics type: %r" % type(metrics)) - ret = msgpack_pack_map(&self.pk, L) - if ret == 0: - for k, v in d.items(): - ret = pack_text(&self.pk, k) - if ret != 0: - break - ret = pack_number(&self.pk, v) - if ret != 0: - break - return ret + d = metrics + m = [] + + # Filter metrics to only number values + for k, v in d.items(): + if PyLong_Check(v) or PyFloat_Check(v): + m.append((k, v)) + else: + log.warning("[span ID %d] Metric key %r has non-numeric value %r, skipping", span_id, k, v) - raise TypeError("Unhandled metrics type: %r" % type(metrics)) + L = len(m) + if L > ITEM_LIMIT: + raise ValueError("dict is too large") + + ret = msgpack_pack_map(&self.pk, L) + if ret == 0: + for k, v in m: + ret = pack_text(&self.pk, k) + if ret != 0: + break + ret = pack_number(&self.pk, v) + if ret != 0: + break + return ret cdef int pack_span(self, object span, unsigned long long trace_id_64bits, void *dd_origin) except? 
-1: cdef int ret @@ -763,6 +788,7 @@ cdef class MsgpackEncoderV04(MsgpackEncoderBase): cdef int has_span_type cdef int has_meta cdef int has_metrics + cdef uint64_t span_id = span.span_id has_error = (span.error != 0) has_span_type = (span.span_type is not None) @@ -803,7 +829,7 @@ cdef class MsgpackEncoderV04(MsgpackEncoderBase): ret = pack_bytes(&self.pk, b"span_id", 7) if ret != 0: return ret - ret = pack_number(&self.pk, span.span_id) + ret = pack_number(&self.pk, span_id) if ret != 0: return ret @@ -882,7 +908,7 @@ cdef class MsgpackEncoderV04(MsgpackEncoderBase): span_events = "" if has_span_events and not self.top_level_span_event_encoding: span_events = json_dumps([vars(event)() for event in span._events]) - ret = self._pack_meta(span._meta, dd_origin, span_events) + ret = self._pack_meta(span._meta, dd_origin, span_events, span_id) if ret != 0: return ret @@ -909,7 +935,8 @@ cdef class MsgpackEncoderV04(MsgpackEncoderBase): ret = pack_bytes(&self.pk, b"metrics", 7) if ret != 0: return ret - ret = self._pack_metrics(span._metrics) + + ret = self._pack_metrics(span._metrics, span_id) if ret != 0: return ret @@ -1035,6 +1062,8 @@ cdef class MsgpackEncoderV05(MsgpackEncoderBase): cdef int pack_span(self, object span, unsigned long long trace_id_64bits, void *dd_origin) except? 
-1: cdef int ret + cdef list meta, metrics + cdef uint64_t span_id = span.span_id ret = msgpack_pack_array(&self.pk, 12) if ret != 0: @@ -1054,8 +1083,7 @@ cdef class MsgpackEncoderV05(MsgpackEncoderBase): if ret != 0: return ret - _ = span.span_id - ret = msgpack_pack_uint64(&self.pk, _ if _ is not None else 0) + ret = msgpack_pack_uint64(&self.pk, span_id if span_id is not None else 0) if ret != 0: return ret @@ -1089,14 +1117,22 @@ cdef class MsgpackEncoderV05(MsgpackEncoderBase): if span._events: span_events = json_dumps([vars(event)() for event in span._events]) + # Filter meta to only str/bytes values + meta = [] + for k, v in span._meta.items(): + if PyUnicode_Check(v) or PyBytesLike_Check(v): + meta.append((k, v)) + else: + log.warning("[span ID %d] Meta key %r has non-string value %r, skipping", span_id, k, v) + ret = msgpack_pack_map( &self.pk, - len(span._meta) + (dd_origin is not NULL) + (len(span_links) > 0) + (len(span_events) > 0) + len(meta) + (dd_origin is not NULL) + (len(span_links) > 0) + (len(span_events) > 0) ) if ret != 0: return ret - if span._meta: - for k, v in span._meta.items(): + if meta: + for k, v in meta: ret = self._pack_string(k) if ret != 0: return ret @@ -1125,11 +1161,19 @@ cdef class MsgpackEncoderV05(MsgpackEncoderBase): if ret != 0: return ret - ret = msgpack_pack_map(&self.pk, len(span._metrics)) + # Filter metrics to only number values + metrics = [] + for k, v in span._metrics.items(): + if PyLong_Check(v) or PyFloat_Check(v): + metrics.append((k, v)) + else: + log.warning("[span ID %d] Metric key %r has non-numeric value %r, skipping", span_id, k, v) + + ret = msgpack_pack_map(&self.pk, len(metrics)) if ret != 0: return ret - if span._metrics: - for k, v in span._metrics.items(): + if metrics: + for k, v in metrics: ret = self._pack_string(k) if ret != 0: return ret diff --git a/ddtrace/internal/agent.py b/ddtrace/internal/agent.py index c420fedb611..d2b43078f9f 100644 --- a/ddtrace/internal/agent.py +++ 
b/ddtrace/internal/agent.py @@ -4,7 +4,7 @@ from ddtrace.internal.logger import get_logger from ddtrace.internal.periodic import ForksafeAwakeablePeriodicService -from ddtrace.settings._agent import config +from ddtrace.internal.settings._agent import config from .utils.http import get_connection diff --git a/ddtrace/internal/appsec/product.py b/ddtrace/internal/appsec/product.py index bffa5ccb121..d3ea0035345 100644 --- a/ddtrace/internal/appsec/product.py +++ b/ddtrace/internal/appsec/product.py @@ -1,5 +1,5 @@ -from ddtrace.settings.asm import ai_guard_config -from ddtrace.settings.asm import config +from ddtrace.internal.settings.asm import ai_guard_config +from ddtrace.internal.settings.asm import config requires = ["remote-configuration"] diff --git a/ddtrace/internal/ci_visibility/encoder.py b/ddtrace/internal/ci_visibility/encoder.py index 48ead25d960..65119d4df82 100644 --- a/ddtrace/internal/ci_visibility/encoder.py +++ b/ddtrace/internal/ci_visibility/encoder.py @@ -262,7 +262,7 @@ def put(self, item): spans_with_coverage = [ span for span in item - if COVERAGE_TAG_NAME in span.get_tags() or span.get_struct_tag(COVERAGE_TAG_NAME) is not None + if COVERAGE_TAG_NAME in span.get_tags() or span._get_struct_tag(COVERAGE_TAG_NAME) is not None ] # Also include session span for parent session ID lookup, even if it doesn't have coverage data session_span = next((span for span in item if span.get_tag(EVENT_TYPE) == SESSION_TYPE), None) diff --git a/ddtrace/internal/ci_visibility/filters.py b/ddtrace/internal/ci_visibility/filters.py index 83297787a18..f4b96fbe88e 100644 --- a/ddtrace/internal/ci_visibility/filters.py +++ b/ddtrace/internal/ci_visibility/filters.py @@ -18,7 +18,7 @@ class TraceCiVisibilityFilter(TraceFilter): def __init__(self, tags, service): - # type: (Dict[Union[str, bytes], str], str) -> None + # type: (Dict[str, str], str) -> None self._tags = tags self._service = service diff --git a/ddtrace/internal/ci_visibility/git_client.py 
b/ddtrace/internal/ci_visibility/git_client.py index 50e36879ba1..dfd8d83f6e3 100644 --- a/ddtrace/internal/ci_visibility/git_client.py +++ b/ddtrace/internal/ci_visibility/git_client.py @@ -25,9 +25,9 @@ from ddtrace.ext.git import extract_remote_url from ddtrace.ext.git import extract_workspace_path from ddtrace.internal.logger import get_logger +from ddtrace.internal.settings._agent import config as agent_config +from ddtrace.internal.settings._telemetry import config as telemetry_config from ddtrace.internal.utils.retry import fibonacci_backoff_with_jitter -from ddtrace.settings._agent import config as agent_config -from ddtrace.settings._telemetry import config as telemetry_config from ddtrace.trace import Tracer # noqa: F401 from .. import telemetry diff --git a/ddtrace/internal/ci_visibility/recorder.py b/ddtrace/internal/ci_visibility/recorder.py index cfe213adf53..9a22ceec26d 100644 --- a/ddtrace/internal/ci_visibility/recorder.py +++ b/ddtrace/internal/ci_visibility/recorder.py @@ -67,12 +67,12 @@ from ddtrace.internal.evp_proxy.constants import EVP_SUBDOMAIN_HEADER_NAME from ddtrace.internal.logger import get_logger from ddtrace.internal.service import Service +from ddtrace.internal.settings._agent import config as agent_config +from ddtrace.internal.settings.integration import IntegrationConfig from ddtrace.internal.test_visibility._atr_mixins import AutoTestRetriesSettings from ddtrace.internal.test_visibility._library_capabilities import LibraryCapabilities from ddtrace.internal.utils.formats import asbool from ddtrace.internal.utils.formats import parse_tags_str -from ddtrace.settings._agent import config as agent_config -from ddtrace.settings.integration import IntegrationConfig from ddtrace.trace import Span from ddtrace.trace import TraceFilter from ddtrace.trace import Tracer @@ -156,7 +156,10 @@ class CIVisibility(Service): enabled = False def __init__( - self, tracer: Optional[Tracer] = None, config: Optional[IntegrationConfig] = None, service: 
Optional[str] = None + self, + tracer: Optional[Tracer] = None, + config: Optional[IntegrationConfig] = None, + service: Optional[str] = None, ) -> None: super().__init__() diff --git a/ddtrace/internal/ci_visibility/utils.py b/ddtrace/internal/ci_visibility/utils.py index 68aae145a34..25319d65406 100644 --- a/ddtrace/internal/ci_visibility/utils.py +++ b/ddtrace/internal/ci_visibility/utils.py @@ -73,9 +73,9 @@ def _add_start_end_source_file_path_data_to_span( log.debug("Tried to collect source start/end lines for test method %s but an exception was raised", test_name) span._set_tag_str(test.SOURCE_FILE, source_file_path) if start_line: - span.set_tag(test.SOURCE_START, start_line) + span.set_metric(test.SOURCE_START, start_line) if end_line: - span.set_tag(test.SOURCE_END, end_line) + span.set_metric(test.SOURCE_END, end_line) def _add_pct_covered_to_span(coverage_data: dict, span: ddtrace.trace.Span): @@ -86,7 +86,7 @@ def _add_pct_covered_to_span(coverage_data: dict, span: ddtrace.trace.Span): if not isinstance(lines_pct_value, float): log.warning("Tried to add total covered percentage to session span but the format was unexpected") return - span.set_tag(test.TEST_LINES_PCT, lines_pct_value) + span.set_metric(test.TEST_LINES_PCT, lines_pct_value) def _generate_fully_qualified_test_name(test_module_path: str, test_suite_name: str, test_name: str) -> str: diff --git a/ddtrace/internal/ci_visibility/writer.py b/ddtrace/internal/ci_visibility/writer.py index fded94b47a2..a3a0ba9d217 100644 --- a/ddtrace/internal/ci_visibility/writer.py +++ b/ddtrace/internal/ci_visibility/writer.py @@ -12,8 +12,8 @@ from ddtrace.internal.ci_visibility.constants import MODULE_TYPE from ddtrace.internal.ci_visibility.constants import SESSION_TYPE from ddtrace.internal.ci_visibility.constants import SUITE_TYPE +from ddtrace.internal.settings._agent import config as agent_config from ddtrace.internal.utils.time import StopWatch -from ddtrace.settings._agent import config as 
agent_config from ddtrace.vendor.dogstatsd import DogStatsd # noqa:F401 from .. import service diff --git a/ddtrace/internal/compat.py b/ddtrace/internal/compat.py index aaa4b8fd358..a9067b36170 100644 --- a/ddtrace/internal/compat.py +++ b/ddtrace/internal/compat.py @@ -74,11 +74,11 @@ def ip_is_global(ip: str) -> bool: return parsed_ip.is_global +# This fix was implemented in 3.9.8 +# https://github.com/python/cpython/issues/83860 if PYTHON_VERSION_INFO >= (3, 9, 8): from functools import singledispatchmethod else: - # This fix was not backported to 3.8 - # https://github.com/python/cpython/issues/83860 from functools import singledispatchmethod def _register(self, cls, method=None): diff --git a/ddtrace/internal/core/crashtracking.py b/ddtrace/internal/core/crashtracking.py index 7804d9fc739..180b97274f5 100644 --- a/ddtrace/internal/core/crashtracking.py +++ b/ddtrace/internal/core/crashtracking.py @@ -10,10 +10,10 @@ from ddtrace.internal import forksafe from ddtrace.internal.compat import ensure_text from ddtrace.internal.runtime import get_runtime_id -from ddtrace.settings._agent import config as agent_config -from ddtrace.settings.crashtracker import config as crashtracker_config -from ddtrace.settings.profiling import config as profiling_config -from ddtrace.settings.profiling import config_str +from ddtrace.internal.settings._agent import config as agent_config +from ddtrace.internal.settings.crashtracker import config as crashtracker_config +from ddtrace.internal.settings.profiling import config as profiling_config +from ddtrace.internal.settings.profiling import config_str is_available = True diff --git a/ddtrace/internal/core/event_hub.py b/ddtrace/internal/core/event_hub.py index 8860f2d6793..5d0176113ea 100644 --- a/ddtrace/internal/core/event_hub.py +++ b/ddtrace/internal/core/event_hub.py @@ -6,7 +6,7 @@ from typing import Optional from typing import Tuple -from ddtrace.settings._config import config +from ddtrace.internal.settings._config import 
config _listeners: Dict[str, Dict[Any, Callable[..., Any]]] = {} diff --git a/ddtrace/internal/coverage/instrumentation.py b/ddtrace/internal/coverage/instrumentation.py index 503f902ed9d..be58152e961 100644 --- a/ddtrace/internal/coverage/instrumentation.py +++ b/ddtrace/internal/coverage/instrumentation.py @@ -11,5 +11,4 @@ elif sys.version_info >= (3, 10): from ddtrace.internal.coverage.instrumentation_py3_10 import instrument_all_lines # noqa else: - # Python 3.8 and 3.9 use the same instrumentation - from ddtrace.internal.coverage.instrumentation_py3_8 import instrument_all_lines # noqa + from ddtrace.internal.coverage.instrumentation_py3_9 import instrument_all_lines # noqa diff --git a/ddtrace/internal/coverage/instrumentation_py3_8.py b/ddtrace/internal/coverage/instrumentation_py3_8.py deleted file mode 100644 index 59cc2841137..00000000000 --- a/ddtrace/internal/coverage/instrumentation_py3_8.py +++ /dev/null @@ -1,390 +0,0 @@ -from abc import ABC -import dis -from enum import Enum -import sys -from types import CodeType -import typing as t - -from ddtrace.internal.bytecode_injection import HookType -from ddtrace.internal.test_visibility.coverage_lines import CoverageLines - - -# This is primarily to make mypy happy without having to nest the rest of this module behind a version check -# NOTE: the "prettier" one-liner version (eg: assert (3,11) <= sys.version_info < (3,12)) does not work for mypy -# NOTE: Python 3.8 and 3.9 use the same instrumentation -assert sys.version_info < (3, 10) # nosec - - -class JumpDirection(int, Enum): - FORWARD = 1 - BACKWARD = -1 - - @classmethod - def from_opcode(cls, opcode: int) -> "JumpDirection": - return cls.BACKWARD if "BACKWARD" in dis.opname[opcode] else cls.FORWARD - - -class Jump(ABC): - # NOTE: in Python 3.9, jump arguments are offsets, vs instruction numbers (ie offsets/2) in Python 3.10 - def __init__(self, start: int, arg: int) -> None: - self.start = start - self.end: int - self.arg = arg - - -class 
AJump(Jump): - __opcodes__ = set(dis.hasjabs) - - def __init__(self, start: int, arg: int) -> None: - super().__init__(start, arg) - self.end = self.arg - - -class RJump(Jump): - __opcodes__ = set(dis.hasjrel) - - def __init__(self, start: int, arg: int, direction: JumpDirection) -> None: - super().__init__(start, arg) - self.direction = direction - self.end = start + (self.arg) * self.direction + 2 - - -class Instruction: - __slots__ = ("offset", "opcode", "arg", "targets") - - def __init__(self, offset: int, opcode: int, arg: int) -> None: - self.offset = offset - self.opcode = opcode - self.arg = arg - self.targets: t.List["Branch"] = [] - - -class Branch(ABC): - def __init__(self, start: Instruction, end: Instruction) -> None: - self.start = start - self.end = end - - @property - def arg(self) -> int: - raise NotImplementedError - - -class RBranch(Branch): - @property - def arg(self) -> int: - return abs(self.end.offset - self.start.offset - 2) >> 1 - - -class ABranch(Branch): - @property - def arg(self) -> int: - return self.end.offset >> 1 - - -EXTENDED_ARG = dis.EXTENDED_ARG -NO_OFFSET = -1 - - -def instr_with_arg(opcode: int, arg: int) -> t.List[Instruction]: - instructions = [Instruction(-1, opcode, arg & 0xFF)] - arg >>= 8 - while arg: - instructions.insert(0, Instruction(NO_OFFSET, EXTENDED_ARG, arg & 0xFF)) - arg >>= 8 - return instructions - - -def update_location_data( - code: CodeType, trap_map: t.Dict[int, int], ext_arg_offsets: t.List[t.Tuple[int, int]] -) -> bytes: - # Some code objects do not have co_lnotab data (eg: certain lambdas) - if code.co_lnotab == b"": - return code.co_lnotab - - # DEV: We expect the original offsets in the trap_map - new_data = bytearray() - data = code.co_lnotab - - ext_arg_offset_iter = iter(sorted(ext_arg_offsets)) - ext_arg_offset, ext_arg_size = next(ext_arg_offset_iter, (None, None)) - - current_orig_offset = 0 # Cumulative offset used to compare against trap offsets - - # All instructions have to have line 
numbers, so the first instructions of the trap call must mark the beginning of - # the line. The subsequent offsets need to be incremented by the size of the trap call instructions plus any - # extended args. - - # Set the first trap size: - current_new_offset = accumulated_new_offset = trap_map[0] << 1 - - for i in range(0, len(data), 2): - orig_offset_delta = data[i] - line_delta = data[i + 1] - - # For each original offset, we compute how many offsets have been added in the new code, this includes: - # - the size of the trap at the previous offset - # - the amount of extended args added since the previous offset - - current_new_offset += orig_offset_delta - current_orig_offset += orig_offset_delta - accumulated_new_offset += orig_offset_delta - - # If the current offset is 255, just increment: - if orig_offset_delta == 255: - continue - - # If the current offset is 0, it means we are only incrementing the amount of lines jumped by the previous - # non-zero offset - if orig_offset_delta == 0: - new_data.append(0) - new_data.append(line_delta) - continue - - while ext_arg_offset is not None and ext_arg_size is not None and current_new_offset > ext_arg_offset: - accumulated_new_offset += ext_arg_size << 1 - current_new_offset += ext_arg_size << 1 - ext_arg_offset, ext_arg_size = next(ext_arg_offset_iter, (None, None)) - - # If the current line delta changes, flush accumulated data: - if line_delta != 0: - while accumulated_new_offset > 255: - new_data.append(255) - new_data.append(0) - accumulated_new_offset -= 255 - - new_data.append(accumulated_new_offset) - new_data.append(line_delta) - - # Also add the current trap size to the accumulated offset - accumulated_new_offset = trap_map[current_orig_offset] << 1 - current_new_offset += accumulated_new_offset - - return bytes(new_data) - - -LOAD_CONST = dis.opmap["LOAD_CONST"] -CALL = dis.opmap["CALL_FUNCTION"] -POP_TOP = dis.opmap["POP_TOP"] -IMPORT_NAME = dis.opmap["IMPORT_NAME"] -IMPORT_FROM = 
dis.opmap["IMPORT_FROM"] - - -def trap_call(trap_index: int, arg_index: int) -> t.Tuple[Instruction, ...]: - return ( - *instr_with_arg(LOAD_CONST, trap_index), - *instr_with_arg(LOAD_CONST, arg_index), - Instruction(NO_OFFSET, CALL, 1), - Instruction(NO_OFFSET, POP_TOP, 0), - ) - - -def instrument_all_lines(code: CodeType, hook: HookType, path: str, package: str) -> t.Tuple[CodeType, CoverageLines]: - # TODO[perf]: Check if we really need to << and >> everywhere - trap_func, trap_arg = hook, path - - instructions: t.List[Instruction] = [] - - new_consts = list(code.co_consts) - trap_index = len(new_consts) - new_consts.append(trap_func) - - seen_lines = CoverageLines() - - offset_map = {} - - # Collect all the original jumps - jumps: t.Dict[int, Jump] = {} - traps: t.Dict[int, int] = {} # DEV: This uses the original offsets - line_map = {} - line_starts = dict(dis.findlinestarts(code)) - - # The previous two arguments are kept in order to track the depth of the IMPORT_NAME - # For example, from ...package import module - current_arg: int = 0 - previous_arg: int = 0 - previous_previous_arg: int = 0 - current_import_name: t.Optional[str] = None - current_import_package: t.Optional[str] = None - - try: - code_iter = iter(enumerate(code.co_code)) - ext: list[int] = [] - while True: - original_offset, opcode = next(code_iter) - - if original_offset in line_starts: - # Inject trap call at the beginning of the line. Keep track - # of location and size of the trap call instructions. We - # need this to adjust the location table. 
- line = line_starts[original_offset] - trap_instructions = trap_call(trap_index, len(new_consts)) - traps[original_offset] = len(trap_instructions) - instructions.extend(trap_instructions) - - # Make sure that the current module is marked as depending on its own package by instrumenting the - # first executable line - package_dep = None - if code.co_name == "" and len(new_consts) == len(code.co_consts) + 1: - package_dep = (package, ("",)) - - new_consts.append((line, trap_arg, package_dep)) - - line_map[original_offset] = trap_instructions[0] - - seen_lines.add(line) - - _, arg = next(code_iter) - - offset = len(instructions) << 1 - - # Propagate code - instructions.append(Instruction(original_offset, opcode, arg)) - - if opcode is EXTENDED_ARG: - ext.append(arg) - continue - else: - previous_previous_arg = previous_arg - previous_arg = current_arg - current_arg = int.from_bytes([*ext, arg], "big", signed=False) - ext.clear() - - # Track imports names - if opcode == IMPORT_NAME: - import_depth = code.co_consts[previous_previous_arg] - current_import_name = code.co_names[current_arg] - # Adjust package name if the import is relative and a parent (ie: if depth is more than 1) - current_import_package = ( - ".".join(package.split(".")[: -import_depth + 1]) if import_depth > 1 else package - ) - new_consts[-1] = ( - new_consts[-1][0], - new_consts[-1][1], - (current_import_package, (current_import_name,)), - ) - - # Also track import from statements since it's possible that the "from" target is a module, eg: - # from my_package import my_module - # Since the package has not changed, we simply extend the previous import names with the new value - if opcode == IMPORT_FROM: - import_from_name = f"{current_import_name}.{code.co_names[current_arg]}" - new_consts[-1] = ( - new_consts[-1][0], - new_consts[-1][1], - (new_consts[-1][2][0], tuple(list(new_consts[-1][2][1]) + [import_from_name])), - ) - - # Collect branching instructions for processing - if opcode in 
AJump.__opcodes__: - jumps[offset] = AJump(original_offset, current_arg) - elif opcode in RJump.__opcodes__: - jumps[offset] = RJump(original_offset, current_arg, JumpDirection.from_opcode(opcode)) - - if opcode is EXTENDED_ARG: - ext.append(arg) - else: - ext.clear() - except StopIteration: - pass - - # Collect all the old jump start and end offsets - jump_targets = {_ for j in jumps.values() for _ in (j.start, j.end)} - - # Adjust all the offsets and map the old offsets to the new ones for the - # jumps - for index, instr in enumerate(instructions): - new_offset = index << 1 - if instr.offset in jump_targets: - offset_map[instr.offset] = new_offset - instr.offset = new_offset - - # Adjust all the jumps, neglecting any EXTENDED_ARGs for now - branches: t.List[Branch] = [] - for jump in jumps.values(): - new_start = offset_map[jump.start] - new_end = offset_map[jump.end] - - # If we are jumping at the beginning of a line, jump to the - # beginning of the trap call instead - target_instr = line_map.get(jump.end, instructions[new_end >> 1]) - branch: Branch = ( - RBranch(instructions[new_start >> 1], target_instr) - if isinstance(jump, RJump) - else ABranch(instructions[new_start >> 1], target_instr) - ) - target_instr.targets.append(branch) - - branches.append(branch) - - # Process all the branching instructions to adjust the arguments. We - # need to add EXTENDED_ARGs if the argument is too large. 
- process_branches = True - exts: t.List[t.Tuple[Instruction, int]] = [] - while process_branches: - process_branches = False - for branch in branches: - jump_instr = branch.start - new_arg = branch.arg << 1 # 3.9 uses offsets, not instruction numbers - jump_instr.arg = new_arg & 0xFF - new_arg >>= 8 - c = 0 - index = jump_instr.offset >> 1 - - # Update the argument of the branching instruction, adding - # EXTENDED_ARGs if needed - while new_arg: - if index and instructions[index - 1].opcode is EXTENDED_ARG: - index -= 1 - instructions[index].arg = new_arg & 0xFF - else: - ext_instr = Instruction(index << 1, EXTENDED_ARG, new_arg & 0xFF) - instructions.insert(index, ext_instr) - c += 1 - # If the jump instruction was a target of another jump, - # make the latest EXTENDED_ARG instruction the target - # of that jump. - if jump_instr.targets: - for target in jump_instr.targets: - if target.end is not jump_instr: - raise ValueError("Invalid target") - target.end = ext_instr - ext_instr.targets.extend(jump_instr.targets) - jump_instr.targets.clear() - new_arg >>= 8 - - # Check if we added any EXTENDED_ARGs because we would have to - # reprocess the branches. - # TODO[perf]: only reprocess the branches that are affected. - # However, this branch is not expected to be taken often. 
- if c: - exts.append((ext_instr, c)) - # Update the instruction offset from the point of insertion - # of the EXTENDED_ARGs - for instr_index, instr in enumerate(instructions[index + 1 :], index + 1): - instr.offset = instr_index << 1 - - process_branches = True - - # Create the new code object - new_code = bytearray() - for instr in instructions: - new_code.append(instr.opcode) - new_code.append(instr.arg) - - # Instrument nested code objects recursively - for original_offset, nested_code in enumerate(code.co_consts): - if isinstance(nested_code, CodeType): - new_consts[original_offset], nested_lines = instrument_all_lines(nested_code, trap_func, trap_arg, package) - seen_lines.update(nested_lines) - - ext_arg_offsets = [(instr.offset, s) for instr, s in exts] - - return ( - code.replace( - co_code=bytes(new_code), - co_consts=tuple(new_consts), - co_stacksize=code.co_stacksize + 4, # TODO: Compute the value! - co_lnotab=update_location_data(code, traps, ext_arg_offsets), - ), - seen_lines, - ) diff --git a/ddtrace/internal/coverage/instrumentation_py3_9.py b/ddtrace/internal/coverage/instrumentation_py3_9.py new file mode 100644 index 00000000000..05544187618 --- /dev/null +++ b/ddtrace/internal/coverage/instrumentation_py3_9.py @@ -0,0 +1,380 @@ +from abc import ABC +import dis +from enum import Enum +import sys + +# This is primarily to make mypy happy without having to nest the rest of this module behind a version check +# NOTE: the "prettier" one-liner version (eg: assert (3,11) <= sys.version_info < (3,12)) does not work for mypy +from types import CodeType +import typing as t + +from ddtrace.internal.bytecode_injection import HookType +from ddtrace.internal.test_visibility.coverage_lines import CoverageLines + + +if sys.version_info < (3, 10): + + class JumpDirection(int, Enum): + FORWARD = 1 + BACKWARD = -1 + + @classmethod + def from_opcode(cls, opcode: int) -> "JumpDirection": + return cls.BACKWARD if "BACKWARD" in dis.opname[opcode] else cls.FORWARD + 
+ class Jump(ABC): + # NOTE: in Python 3.9, jump arguments are offsets, vs instruction numbers (ie offsets/2) in Python 3.10 + def __init__(self, start: int, arg: int) -> None: + self.start = start + self.end: int + self.arg = arg + + class AJump(Jump): + __opcodes__ = set(dis.hasjabs) + + def __init__(self, start: int, arg: int) -> None: + super().__init__(start, arg) + self.end = self.arg + + class RJump(Jump): + __opcodes__ = set(dis.hasjrel) + + def __init__(self, start: int, arg: int, direction: JumpDirection) -> None: + super().__init__(start, arg) + self.direction = direction + self.end = start + (self.arg) * self.direction + 2 + + class Instruction: + __slots__ = ("offset", "opcode", "arg", "targets") + + def __init__(self, offset: int, opcode: int, arg: int) -> None: + self.offset = offset + self.opcode = opcode + self.arg = arg + self.targets: t.List["Branch"] = [] + + class Branch(ABC): + def __init__(self, start: Instruction, end: Instruction) -> None: + self.start = start + self.end = end + + @property + def arg(self) -> int: + raise NotImplementedError + + class RBranch(Branch): + @property + def arg(self) -> int: + return abs(self.end.offset - self.start.offset - 2) >> 1 + + class ABranch(Branch): + @property + def arg(self) -> int: + return self.end.offset >> 1 + + EXTENDED_ARG = dis.EXTENDED_ARG + NO_OFFSET = -1 + + def instr_with_arg(opcode: int, arg: int) -> t.List[Instruction]: + instructions = [Instruction(-1, opcode, arg & 0xFF)] + arg >>= 8 + while arg: + instructions.insert(0, Instruction(NO_OFFSET, EXTENDED_ARG, arg & 0xFF)) + arg >>= 8 + return instructions + + def update_location_data( + code: CodeType, trap_map: t.Dict[int, int], ext_arg_offsets: t.List[t.Tuple[int, int]] + ) -> bytes: + # Some code objects do not have co_lnotab data (eg: certain lambdas) + if code.co_lnotab == b"": + return code.co_lnotab + + # DEV: We expect the original offsets in the trap_map + new_data = bytearray() + data = code.co_lnotab + + ext_arg_offset_iter = 
iter(sorted(ext_arg_offsets)) + ext_arg_offset, ext_arg_size = next(ext_arg_offset_iter, (None, None)) + + current_orig_offset = 0 # Cumulative offset used to compare against trap offsets + + # All instructions have to have line numbers, so the first instructions of the trap call must mark the + # beginning of the line. The subsequent offsets need to be incremented by the size of the trap call + # instructions plus any extended args. + + # Set the first trap size: + current_new_offset = accumulated_new_offset = trap_map[0] << 1 + + for i in range(0, len(data), 2): + orig_offset_delta = data[i] + line_delta = data[i + 1] + + # For each original offset, we compute how many offsets have been added in the new code, this includes: + # - the size of the trap at the previous offset + # - the amount of extended args added since the previous offset + + current_new_offset += orig_offset_delta + current_orig_offset += orig_offset_delta + accumulated_new_offset += orig_offset_delta + + # If the current offset is 255, just increment: + if orig_offset_delta == 255: + continue + + # If the current offset is 0, it means we are only incrementing the amount of lines jumped by the previous + # non-zero offset + if orig_offset_delta == 0: + new_data.append(0) + new_data.append(line_delta) + continue + + while ext_arg_offset is not None and ext_arg_size is not None and current_new_offset > ext_arg_offset: + accumulated_new_offset += ext_arg_size << 1 + current_new_offset += ext_arg_size << 1 + ext_arg_offset, ext_arg_size = next(ext_arg_offset_iter, (None, None)) + + # If the current line delta changes, flush accumulated data: + if line_delta != 0: + while accumulated_new_offset > 255: + new_data.append(255) + new_data.append(0) + accumulated_new_offset -= 255 + + new_data.append(accumulated_new_offset) + new_data.append(line_delta) + + # Also add the current trap size to the accumulated offset + accumulated_new_offset = trap_map[current_orig_offset] << 1 + current_new_offset += 
accumulated_new_offset + + return bytes(new_data) + + LOAD_CONST = dis.opmap["LOAD_CONST"] + CALL = dis.opmap["CALL_FUNCTION"] + POP_TOP = dis.opmap["POP_TOP"] + IMPORT_NAME = dis.opmap["IMPORT_NAME"] + IMPORT_FROM = dis.opmap["IMPORT_FROM"] + + def trap_call(trap_index: int, arg_index: int) -> t.Tuple[Instruction, ...]: + return ( + *instr_with_arg(LOAD_CONST, trap_index), + *instr_with_arg(LOAD_CONST, arg_index), + Instruction(NO_OFFSET, CALL, 1), + Instruction(NO_OFFSET, POP_TOP, 0), + ) + + def instrument_all_lines( + code: CodeType, hook: HookType, path: str, package: str + ) -> t.Tuple[CodeType, CoverageLines]: + # TODO[perf]: Check if we really need to << and >> everywhere + trap_func, trap_arg = hook, path + + instructions: t.List[Instruction] = [] + + new_consts = list(code.co_consts) + trap_index = len(new_consts) + new_consts.append(trap_func) + + seen_lines = CoverageLines() + + offset_map = {} + + # Collect all the original jumps + jumps: t.Dict[int, Jump] = {} + traps: t.Dict[int, int] = {} # DEV: This uses the original offsets + line_map = {} + line_starts = dict(dis.findlinestarts(code)) + + # The previous two arguments are kept in order to track the depth of the IMPORT_NAME + # For example, from ...package import module + current_arg: int = 0 + previous_arg: int = 0 + previous_previous_arg: int = 0 + current_import_name: t.Optional[str] = None + current_import_package: t.Optional[str] = None + + try: + code_iter = iter(enumerate(code.co_code)) + ext: list[int] = [] + while True: + original_offset, opcode = next(code_iter) + + if original_offset in line_starts: + # Inject trap call at the beginning of the line. Keep track + # of location and size of the trap call instructions. We + # need this to adjust the location table. 
+ line = line_starts[original_offset] + trap_instructions = trap_call(trap_index, len(new_consts)) + traps[original_offset] = len(trap_instructions) + instructions.extend(trap_instructions) + + # Make sure that the current module is marked as depending on its own package by instrumenting the + # first executable line + package_dep = None + if code.co_name == "" and len(new_consts) == len(code.co_consts) + 1: + package_dep = (package, ("",)) + + new_consts.append((line, trap_arg, package_dep)) + + line_map[original_offset] = trap_instructions[0] + + seen_lines.add(line) + + _, arg = next(code_iter) + + offset = len(instructions) << 1 + + # Propagate code + instructions.append(Instruction(original_offset, opcode, arg)) + + if opcode is EXTENDED_ARG: + ext.append(arg) + continue + else: + previous_previous_arg = previous_arg + previous_arg = current_arg + current_arg = int.from_bytes([*ext, arg], "big", signed=False) + ext.clear() + + # Track imports names + if opcode == IMPORT_NAME: + import_depth = code.co_consts[previous_previous_arg] + current_import_name = code.co_names[current_arg] + # Adjust package name if the import is relative and a parent (ie: if depth is more than 1) + current_import_package = ( + ".".join(package.split(".")[: -import_depth + 1]) if import_depth > 1 else package + ) + new_consts[-1] = ( + new_consts[-1][0], + new_consts[-1][1], + (current_import_package, (current_import_name,)), + ) + + # Also track import from statements since it's possible that the "from" target is a module, eg: + # from my_package import my_module + # Since the package has not changed, we simply extend the previous import names with the new value + if opcode == IMPORT_FROM: + import_from_name = f"{current_import_name}.{code.co_names[current_arg]}" + new_consts[-1] = ( + new_consts[-1][0], + new_consts[-1][1], + (new_consts[-1][2][0], tuple(list(new_consts[-1][2][1]) + [import_from_name])), + ) + + # Collect branching instructions for processing + if opcode in 
AJump.__opcodes__: + jumps[offset] = AJump(original_offset, current_arg) + elif opcode in RJump.__opcodes__: + jumps[offset] = RJump(original_offset, current_arg, JumpDirection.from_opcode(opcode)) + + if opcode is EXTENDED_ARG: + ext.append(arg) + else: + ext.clear() + except StopIteration: + pass + + # Collect all the old jump start and end offsets + jump_targets = {_ for j in jumps.values() for _ in (j.start, j.end)} + + # Adjust all the offsets and map the old offsets to the new ones for the + # jumps + for index, instr in enumerate(instructions): + new_offset = index << 1 + if instr.offset in jump_targets: + offset_map[instr.offset] = new_offset + instr.offset = new_offset + + # Adjust all the jumps, neglecting any EXTENDED_ARGs for now + branches: t.List[Branch] = [] + for jump in jumps.values(): + new_start = offset_map[jump.start] + new_end = offset_map[jump.end] + + # If we are jumping at the beginning of a line, jump to the + # beginning of the trap call instead + target_instr = line_map.get(jump.end, instructions[new_end >> 1]) + branch: Branch = ( + RBranch(instructions[new_start >> 1], target_instr) + if isinstance(jump, RJump) + else ABranch(instructions[new_start >> 1], target_instr) + ) + target_instr.targets.append(branch) + + branches.append(branch) + + # Process all the branching instructions to adjust the arguments. We + # need to add EXTENDED_ARGs if the argument is too large. 
+ process_branches = True + exts: t.List[t.Tuple[Instruction, int]] = [] + while process_branches: + process_branches = False + for branch in branches: + jump_instr = branch.start + new_arg = branch.arg << 1 # 3.9 uses offsets, not instruction numbers + jump_instr.arg = new_arg & 0xFF + new_arg >>= 8 + c = 0 + index = jump_instr.offset >> 1 + + # Update the argument of the branching instruction, adding + # EXTENDED_ARGs if needed + while new_arg: + if index and instructions[index - 1].opcode is EXTENDED_ARG: + index -= 1 + instructions[index].arg = new_arg & 0xFF + else: + ext_instr = Instruction(index << 1, EXTENDED_ARG, new_arg & 0xFF) + instructions.insert(index, ext_instr) + c += 1 + # If the jump instruction was a target of another jump, + # make the latest EXTENDED_ARG instruction the target + # of that jump. + if jump_instr.targets: + for target in jump_instr.targets: + if target.end is not jump_instr: + raise ValueError("Invalid target") + target.end = ext_instr + ext_instr.targets.extend(jump_instr.targets) + jump_instr.targets.clear() + new_arg >>= 8 + + # Check if we added any EXTENDED_ARGs because we would have to + # reprocess the branches. + # TODO[perf]: only reprocess the branches that are affected. + # However, this branch is not expected to be taken often. 
+ if c: + exts.append((ext_instr, c)) + # Update the instruction offset from the point of insertion + # of the EXTENDED_ARGs + for instr_index, instr in enumerate(instructions[index + 1 :], index + 1): + instr.offset = instr_index << 1 + + process_branches = True + + # Create the new code object + new_code = bytearray() + for instr in instructions: + new_code.append(instr.opcode) + new_code.append(instr.arg) + + # Instrument nested code objects recursively + for original_offset, nested_code in enumerate(code.co_consts): + if isinstance(nested_code, CodeType): + new_consts[original_offset], nested_lines = instrument_all_lines( + nested_code, trap_func, trap_arg, package + ) + seen_lines.update(nested_lines) + + ext_arg_offsets = [(instr.offset, s) for instr, s in exts] + + return ( + code.replace( + co_code=bytes(new_code), + co_consts=tuple(new_consts), + co_stacksize=code.co_stacksize + 4, # TODO: Compute the value! + co_lnotab=update_location_data(code, traps, ext_arg_offsets), + ), + seen_lines, + ) diff --git a/ddtrace/internal/datadog/profiling/ddup/CMakeLists.txt b/ddtrace/internal/datadog/profiling/ddup/CMakeLists.txt index dbe4395cc16..1b111633968 100644 --- a/ddtrace/internal/datadog/profiling/ddup/CMakeLists.txt +++ b/ddtrace/internal/datadog/profiling/ddup/CMakeLists.txt @@ -57,10 +57,6 @@ add_library(${EXTENSION_NAME} SHARED ${DDUP_CPP_SRC}) add_ddup_config(${EXTENSION_NAME}) # Cython generates code that produces errors for the following, so relax compile options target_compile_options(${EXTENSION_NAME} PRIVATE -Wno-old-style-cast -Wno-shadow -Wno-address) -# tp_print is marked deprecated in Python 3.8, but cython still generates code using it -if("${Python3_VERSION_MINOR}" STREQUAL "8") - target_compile_options(${EXTENSION_NAME} PRIVATE -Wno-deprecated-declarations) -endif() # cmake may mutate the name of the library (e.g., lib- and -.so for dynamic libraries). 
This suppresses that behavior, # which is required to ensure all paths can be inferred correctly by setup.py. diff --git a/ddtrace/internal/datadog/profiling/ddup/_ddup.pyx b/ddtrace/internal/datadog/profiling/ddup/_ddup.pyx index efd3a4ab8ce..39944040fb1 100644 --- a/ddtrace/internal/datadog/profiling/ddup/_ddup.pyx +++ b/ddtrace/internal/datadog/profiling/ddup/_ddup.pyx @@ -18,7 +18,7 @@ from ddtrace.internal.datadog.profiling._types import StringType from ddtrace.internal.datadog.profiling.code_provenance import json_str_to_export from ddtrace.internal.datadog.profiling.util import sanitize_string from ddtrace.internal.runtime import get_runtime_id -from ddtrace.settings._agent import config as agent_config +from ddtrace.internal.settings._agent import config as agent_config ctypedef void (*func_ptr_t)(string_view) diff --git a/ddtrace/internal/datadog/profiling/stack_v2/__init__.pyi b/ddtrace/internal/datadog/profiling/stack_v2/__init__.pyi index 16ab608f206..e34cacceecf 100644 --- a/ddtrace/internal/datadog/profiling/stack_v2/__init__.pyi +++ b/ddtrace/internal/datadog/profiling/stack_v2/__init__.pyi @@ -2,6 +2,10 @@ import asyncio from types import FrameType from typing import Optional, Sequence, Union +def start(min_interval: float = ...) -> bool: ... +def stop() -> None: ... +def link_span(span_id: int, local_root_span_id: int, span_type: Optional[str] = None) -> None: ... +def set_adaptive_sampling(do_adaptive_sampling: bool = ...) -> None: ... def register_thread(id: int, native_id: int, name: str) -> None: ... # noqa: A002 def unregister_thread(name: str) -> None: ... def track_asyncio_loop(thread_id: int, loop: Optional[asyncio.AbstractEventLoop]) -> None: ... 
diff --git a/ddtrace/internal/datastreams/processor.py b/ddtrace/internal/datastreams/processor.py index 009f68aa5b8..e9b8a874259 100644 --- a/ddtrace/internal/datastreams/processor.py +++ b/ddtrace/internal/datastreams/processor.py @@ -19,9 +19,9 @@ from ddtrace.internal.atexit import register_on_exit_signal from ddtrace.internal.constants import DEFAULT_SERVICE_NAME from ddtrace.internal.native import DDSketch +from ddtrace.internal.settings._agent import config as agent_config +from ddtrace.internal.settings._config import config from ddtrace.internal.utils.retry import fibonacci_backoff_with_jitter -from ddtrace.settings._agent import config as agent_config -from ddtrace.settings._config import config from ddtrace.version import get_version from .._encoding import packb diff --git a/ddtrace/internal/debug.py b/ddtrace/internal/debug.py index 1b331cfe7bd..4d174d278a1 100644 --- a/ddtrace/internal/debug.py +++ b/ddtrace/internal/debug.py @@ -10,11 +10,11 @@ import ddtrace from ddtrace.internal.packages import get_distributions +from ddtrace.internal.settings._agent import config as agent_config +from ddtrace.internal.settings.asm import config as asm_config from ddtrace.internal.utils.cache import callonce from ddtrace.internal.writer import AgentWriterInterface from ddtrace.internal.writer import LogWriter -from ddtrace.settings._agent import config as agent_config -from ddtrace.settings.asm import config as asm_config from .logger import get_logger @@ -54,7 +54,7 @@ def collect(tracer): # Inline expensive imports to avoid unnecessary overhead on startup. 
from ddtrace.internal import gitmetadata from ddtrace.internal.runtime.runtime_metrics import RuntimeWorker - from ddtrace.settings.crashtracker import config as crashtracker_config + from ddtrace.internal.settings.crashtracker import config as crashtracker_config if isinstance(tracer._span_aggregator.writer, LogWriter): agent_url = "AGENTLESS" diff --git a/ddtrace/internal/encoding.py b/ddtrace/internal/encoding.py index 24578cf5d1b..a5c7f3a100e 100644 --- a/ddtrace/internal/encoding.py +++ b/ddtrace/internal/encoding.py @@ -6,7 +6,8 @@ from typing import Optional # noqa:F401 from typing import Tuple # noqa:F401 -from ..settings._agent import config as agent_config # noqa:F401 +from ddtrace.internal.settings._agent import config as agent_config # noqa:F401 + from ._encoding import ListStringTable from ._encoding import MsgpackEncoderV04 from ._encoding import MsgpackEncoderV05 diff --git a/ddtrace/internal/gitmetadata.py b/ddtrace/internal/gitmetadata.py index 58d5fc1d0f9..2bfa21e2ac2 100644 --- a/ddtrace/internal/gitmetadata.py +++ b/ddtrace/internal/gitmetadata.py @@ -5,8 +5,8 @@ from ddtrace.ext.git import MAIN_PACKAGE from ddtrace.ext.git import REPOSITORY_URL from ddtrace.internal.logger import get_logger +from ddtrace.internal.settings._core import DDConfig from ddtrace.internal.utils import formats -from ddtrace.settings._core import DDConfig _GITMETADATA_TAGS = None # type: typing.Optional[typing.Tuple[str, str, str]] diff --git a/ddtrace/internal/iast/product.py b/ddtrace/internal/iast/product.py index c52892f1804..eb2d4b4cd8f 100644 --- a/ddtrace/internal/iast/product.py +++ b/ddtrace/internal/iast/product.py @@ -27,7 +27,7 @@ import sys from ddtrace.internal.logger import get_logger -from ddtrace.settings.asm import config as asm_config +from ddtrace.internal.settings.asm import config as asm_config log = get_logger(__name__) diff --git a/ddtrace/internal/logger.py b/ddtrace/internal/logger.py index b5a5057a0b2..a37a84b5744 100644 --- 
a/ddtrace/internal/logger.py +++ b/ddtrace/internal/logger.py @@ -221,3 +221,26 @@ def format_stack(stack_info, limit) -> str: return stack_info stack_str = "\n".join(stack[-2 * limit :]) return f"{stack[0]}\n{stack_str}" + + +class LogInjectionState(object): + # Log injection is disabled + DISABLED = "false" + # Log injection is enabled, but not yet configured + ENABLED = "true" + # Log injection is enabled and configured for structured logging + # This value is deprecated, but kept for backwards compatibility + STRUCTURED = "structured" + + +def get_log_injection_state(raw_config: Optional[str]) -> bool: + if raw_config: + normalized = raw_config.lower().strip() + if normalized == LogInjectionState.STRUCTURED or normalized in ("true", "1"): + return True + elif normalized not in ("false", "0"): + logging.warning( + "Invalid log injection state '%s'. Expected 'true', 'false', or 'structured'. Defaulting to 'false'.", + normalized, + ) + return False diff --git a/ddtrace/internal/metrics.py b/ddtrace/internal/metrics.py index abae8ddabd9..34b9381c486 100644 --- a/ddtrace/internal/metrics.py +++ b/ddtrace/internal/metrics.py @@ -2,7 +2,7 @@ from typing import Optional # noqa:F401 from ddtrace.internal.dogstatsd import get_dogstatsd_client -from ddtrace.settings._agent import config as agent_config +from ddtrace.internal.settings._agent import config as agent_config class Metrics(object): diff --git a/ddtrace/internal/openfeature/_provider.py b/ddtrace/internal/openfeature/_provider.py index d1aceb07f6f..4eeb4b9c9c9 100644 --- a/ddtrace/internal/openfeature/_provider.py +++ b/ddtrace/internal/openfeature/_provider.py @@ -29,7 +29,7 @@ from ddtrace.internal.openfeature.writer import start_exposure_writer from ddtrace.internal.openfeature.writer import stop_exposure_writer from ddtrace.internal.service import ServiceStatusError -from ddtrace.settings.openfeature import config as ffe_config +from ddtrace.internal.settings.openfeature import config as ffe_config # Handle 
different import paths between openfeature-sdk versions diff --git a/ddtrace/internal/openfeature/writer.py b/ddtrace/internal/openfeature/writer.py index f44109e47ab..aca4cef4c8f 100644 --- a/ddtrace/internal/openfeature/writer.py +++ b/ddtrace/internal/openfeature/writer.py @@ -14,11 +14,11 @@ from ddtrace.internal import forksafe from ddtrace.internal.logger import get_logger from ddtrace.internal.periodic import PeriodicService +from ddtrace.internal.settings._agent import config as agent_config +from ddtrace.internal.settings.openfeature import config as ffe_config from ddtrace.internal.utils.http import Response from ddtrace.internal.utils.http import get_connection from ddtrace.internal.utils.retry import fibonacci_backoff_with_jitter -from ddtrace.settings._agent import config as agent_config -from ddtrace.settings.openfeature import config as ffe_config logger = get_logger(__name__) diff --git a/ddtrace/internal/opentelemetry/logs.py b/ddtrace/internal/opentelemetry/logs.py index d25df952f2e..a3175abeaaa 100644 --- a/ddtrace/internal/opentelemetry/logs.py +++ b/ddtrace/internal/opentelemetry/logs.py @@ -8,9 +8,9 @@ from ddtrace import config from ddtrace.internal.hostname import get_hostname from ddtrace.internal.logger import get_logger +from ddtrace.internal.settings._opentelemetry import otel_config from ddtrace.internal.telemetry import telemetry_writer from ddtrace.internal.telemetry.constants import TELEMETRY_NAMESPACE -from ddtrace.settings._opentelemetry import otel_config log = get_logger(__name__) diff --git a/ddtrace/internal/opentelemetry/metrics.py b/ddtrace/internal/opentelemetry/metrics.py index 39708db4e50..db59d13ca91 100644 --- a/ddtrace/internal/opentelemetry/metrics.py +++ b/ddtrace/internal/opentelemetry/metrics.py @@ -8,9 +8,9 @@ from ddtrace import config from ddtrace.internal.hostname import get_hostname from ddtrace.internal.logger import get_logger +from ddtrace.internal.settings._opentelemetry import otel_config from 
ddtrace.internal.telemetry import telemetry_writer from ddtrace.internal.telemetry.constants import TELEMETRY_NAMESPACE -from ddtrace.settings._opentelemetry import otel_config log = get_logger(__name__) diff --git a/ddtrace/internal/opentelemetry/span.py b/ddtrace/internal/opentelemetry/span.py index 79dd6d9267f..f5a626fb49d 100644 --- a/ddtrace/internal/opentelemetry/span.py +++ b/ddtrace/internal/opentelemetry/span.py @@ -15,6 +15,7 @@ from ddtrace.constants import ERROR_STACK from ddtrace.constants import ERROR_TYPE from ddtrace.constants import SPAN_KIND +from ddtrace.internal.compat import ensure_text from ddtrace.internal.logger import get_logger from ddtrace.internal.utils.formats import flatten_key_value from ddtrace.internal.utils.formats import is_sequence @@ -38,11 +39,18 @@ def _ddmap(span, attribute, value): - # type: (DDSpan, str, Union[bytes, NumericType]) -> DDSpan + # type: (DDSpan, str, Union[str, bytes, NumericType]) -> DDSpan if attribute.startswith("meta") or attribute.startswith("metrics"): meta_key = attribute.split("'")[1] if len(attribute.split("'")) == 3 else None if meta_key: - span.set_tag(meta_key, value) + if meta_key == "http.status_code": + if isinstance(value, (int, float)): + value = str(value) + + if isinstance(value, (str, bytes)): + span.set_tag(meta_key, ensure_text(value)) + if isinstance(value, (int, float)): + span.set_metric(meta_key, value) else: setattr(span, attribute, value) return span @@ -182,7 +190,17 @@ def set_attribute(self, key, value): for k, v in flatten_key_value(key, value).items(): self._ddspan.set_tag(k, v) return - self._ddspan.set_tag(key, value) + if key == "http.status_code": + if isinstance(value, (int, float)): + value = str(value) + if isinstance(value, (str, bytes)): + value = ensure_text(value) + self._ddspan.set_tag(key, value) + elif isinstance(value, (int, float)): + self._ddspan.set_metric(key, value) + else: + # TODO: get rid of this usage, `set_tag` only takes str values + 
self._ddspan.set_tag(key, value) def add_event(self, name, attributes=None, timestamp=None): # type: (str, Optional[Attributes], Optional[int]) -> None diff --git a/ddtrace/internal/opentelemetry/trace.py b/ddtrace/internal/opentelemetry/trace.py index 20a9e86f6e0..28559726ea3 100644 --- a/ddtrace/internal/opentelemetry/trace.py +++ b/ddtrace/internal/opentelemetry/trace.py @@ -30,7 +30,6 @@ from opentelemetry.trace import Link as OtelLink # noqa:F401 from opentelemetry.util.types import AttributeValue as OtelAttributeValue # noqa:F401 - from ddtrace._trace.span import _MetaDictType # noqa:F401 from ddtrace.trace import Tracer as DDTracer # noqa:F401 diff --git a/ddtrace/internal/packages.py b/ddtrace/internal/packages.py index cd602a821a2..d402e4d962e 100644 --- a/ddtrace/internal/packages.py +++ b/ddtrace/internal/packages.py @@ -10,8 +10,8 @@ from ddtrace.internal.compat import Path from ddtrace.internal.module import origin +from ddtrace.internal.settings.third_party import config as tp_config from ddtrace.internal.utils.cache import callonce -from ddtrace.settings.third_party import config as tp_config LOG = logging.getLogger(__name__) diff --git a/ddtrace/internal/processor/stats.py b/ddtrace/internal/processor/stats.py index efd8492769b..ea2227aee1b 100644 --- a/ddtrace/internal/processor/stats.py +++ b/ddtrace/internal/processor/stats.py @@ -12,8 +12,8 @@ from ddtrace._trace.span import Span from ddtrace.internal import compat from ddtrace.internal.native import DDSketch +from ddtrace.internal.settings._config import config from ddtrace.internal.utils.retry import fibonacci_backoff_with_jitter -from ddtrace.settings._config import config from ddtrace.version import get_version from ...constants import _SPAN_MEASURED_KEY diff --git a/ddtrace/internal/products.py b/ddtrace/internal/products.py index 629e746c46d..e9992410cba 100644 --- a/ddtrace/internal/products.py +++ b/ddtrace/internal/products.py @@ -9,13 +9,13 @@ from ddtrace.internal import forksafe from 
ddtrace.internal.logger import get_logger +from ddtrace.internal.settings._core import DDConfig from ddtrace.internal.telemetry import report_configuration from ddtrace.internal.telemetry import telemetry_writer from ddtrace.internal.uwsgi import check_uwsgi from ddtrace.internal.uwsgi import uWSGIConfigDeprecationWarning from ddtrace.internal.uwsgi import uWSGIConfigError from ddtrace.internal.uwsgi import uWSGIMasterProcess -from ddtrace.settings._core import DDConfig log = get_logger(__name__) diff --git a/ddtrace/internal/remoteconfig/client.py b/ddtrace/internal/remoteconfig/client.py index 608261f5cf0..efa73f65d9f 100644 --- a/ddtrace/internal/remoteconfig/client.py +++ b/ddtrace/internal/remoteconfig/client.py @@ -30,10 +30,10 @@ from ddtrace.internal.remoteconfig._pubsub import PubSub from ddtrace.internal.remoteconfig.constants import REMOTE_CONFIG_AGENT_ENDPOINT from ddtrace.internal.service import ServiceStatus +from ddtrace.internal.settings._agent import config as agent_config +from ddtrace.internal.settings._core import DDConfig from ddtrace.internal.utils.formats import parse_tags_str from ddtrace.internal.utils.version import _pep440_to_semver -from ddtrace.settings._agent import config as agent_config -from ddtrace.settings._core import DDConfig log = get_logger(__name__) diff --git a/ddtrace/internal/remoteconfig/products/client.py b/ddtrace/internal/remoteconfig/products/client.py index ad2bffc0e6e..6309e12814f 100644 --- a/ddtrace/internal/remoteconfig/products/client.py +++ b/ddtrace/internal/remoteconfig/products/client.py @@ -1,6 +1,6 @@ from ddtrace import config from ddtrace.internal.remoteconfig.client import config as rc_config -from ddtrace.settings._agent import config as agent_config +from ddtrace.internal.settings._agent import config as agent_config # TODO: Modularize better into their own respective components diff --git a/ddtrace/internal/runtime/constants.py b/ddtrace/internal/runtime/constants.py index 78b9c5e032f..41b7edb8cd2 
100644 --- a/ddtrace/internal/runtime/constants.py +++ b/ddtrace/internal/runtime/constants.py @@ -18,6 +18,7 @@ ) DEFAULT_RUNTIME_METRICS = GC_RUNTIME_METRICS | PSUTIL_RUNTIME_METRICS +DEFAULT_RUNTIME_METRICS_INTERVAL = 10 SERVICE = "service" ENV = "env" diff --git a/ddtrace/internal/runtime/runtime_metrics.py b/ddtrace/internal/runtime/runtime_metrics.py index 124b97ae262..1059937363f 100644 --- a/ddtrace/internal/runtime/runtime_metrics.py +++ b/ddtrace/internal/runtime/runtime_metrics.py @@ -1,5 +1,4 @@ import itertools -import os from typing import ClassVar # noqa:F401 from typing import List # noqa:F401 from typing import Optional # noqa:F401 @@ -8,13 +7,13 @@ from ddtrace.internal import atexit from ddtrace.internal import forksafe from ddtrace.internal.constants import EXPERIMENTAL_FEATURES -from ddtrace.vendor.debtcollector import deprecate from ddtrace.vendor.dogstatsd import DogStatsd from .. import periodic from ..dogstatsd import get_dogstatsd_client from ..logger import get_logger from .constants import DEFAULT_RUNTIME_METRICS +from .constants import DEFAULT_RUNTIME_METRICS_INTERVAL from .metric_collectors import GCRuntimeMetricCollector from .metric_collectors import PSUtilRuntimeMetricCollector from .tag_collectors import PlatformTagCollector @@ -68,29 +67,18 @@ class RuntimeMetrics(RuntimeCollectorsIterable): ] -def _get_interval_or_default(): - if "DD_RUNTIME_METRICS_INTERVAL" in os.environ: - deprecate( - "`DD_RUNTIME_METRICS_INTERVAL` is deprecated and will be removed in a future version.", - removal_version="4.0.0", - ) - return float(os.getenv("DD_RUNTIME_METRICS_INTERVAL", default=10)) - - class RuntimeWorker(periodic.PeriodicService): - """Worker thread for collecting and writing runtime metrics to a DogStatsd - client. 
- """ + """Worker thread for collecting and writing runtime metrics to a DogStatsd client.""" enabled = False _instance = None # type: ClassVar[Optional[RuntimeWorker]] _lock = forksafe.Lock() - def __init__(self, interval=_get_interval_or_default(), tracer=None, dogstatsd_url=None) -> None: + def __init__(self, interval=DEFAULT_RUNTIME_METRICS_INTERVAL, tracer=None, dogstatsd_url=None) -> None: super().__init__(interval=interval) self.dogstatsd_url: Optional[str] = dogstatsd_url self._dogstatsd_client: DogStatsd = get_dogstatsd_client( - self.dogstatsd_url or ddtrace.settings._agent.config.dogstatsd_url + self.dogstatsd_url or ddtrace.internal.settings._agent.config.dogstatsd_url ) self.tracer: ddtrace.trace.Tracer = tracer or ddtrace.tracer self._runtime_metrics: RuntimeMetrics = RuntimeMetrics() @@ -107,8 +95,7 @@ def __init__(self, interval=_get_interval_or_default(), tracer=None, dogstatsd_u self._platform_tags = self._format_tags(PlatformTags()) @classmethod - def disable(cls): - # type: () -> None + def disable(cls) -> None: with cls._lock: if cls._instance is None: return @@ -134,14 +121,15 @@ def _restart(cls): cls.enable() @classmethod - def enable(cls, flush_interval=None, tracer=None, dogstatsd_url=None): - # type: (Optional[float], Optional[ddtrace.trace.Tracer], Optional[str]) -> None + def enable( + cls, + tracer: Optional[ddtrace.trace.Tracer] = None, + dogstatsd_url: Optional[str] = None, + ) -> None: with cls._lock: if cls._instance is not None: return - if flush_interval is None: - flush_interval = _get_interval_or_default() - runtime_worker = cls(flush_interval, tracer, dogstatsd_url) + runtime_worker = cls(DEFAULT_RUNTIME_METRICS_INTERVAL, tracer, dogstatsd_url) runtime_worker.start() forksafe.register(cls._restart) @@ -150,8 +138,7 @@ def enable(cls, flush_interval=None, tracer=None, dogstatsd_url=None): cls._instance = runtime_worker cls.enabled = True - def flush(self): - # type: () -> None + def flush(self) -> None: # Ensure runtime metrics 
have up-to-date tags (ex: service, env, version) rumtime_tags = self._format_tags(TracerTags()) + self._platform_tags log.debug("Sending runtime metrics with the following tags: %s", rumtime_tags) @@ -162,11 +149,6 @@ def flush(self): log.debug("Sending ddtrace runtime metric %s:%s", key, value) self.send_metric(key, value) - def _stop_service(self): - # type: (...) -> None - # De-register span hook - super(RuntimeWorker, self)._stop_service() - def _format_tags(self, tags: RuntimeCollectorsIterable) -> List[str]: # DEV: ddstatsd expects tags in the form ['key1:value1', 'key2:value2', ...] return ["{}:{}".format(k, v) for k, v in tags] diff --git a/ddtrace/internal/sampling.py b/ddtrace/internal/sampling.py index eb1129ae26e..1c5624a7385 100644 --- a/ddtrace/internal/sampling.py +++ b/ddtrace/internal/sampling.py @@ -26,7 +26,7 @@ from ddtrace.internal.constants import SamplingMechanism from ddtrace.internal.glob_matching import GlobMatcher from ddtrace.internal.logger import get_logger -from ddtrace.settings._config import config +from ddtrace.internal.settings._config import config from .rate_limiter import RateLimiter diff --git a/ddtrace/internal/schema/processor.py b/ddtrace/internal/schema/processor.py index 9c6ea16b7df..2061f10d271 100644 --- a/ddtrace/internal/schema/processor.py +++ b/ddtrace/internal/schema/processor.py @@ -1,7 +1,7 @@ from ddtrace._trace.processor import TraceProcessor from ddtrace.constants import _BASE_SERVICE_KEY from ddtrace.internal.serverless import in_aws_lambda -from ddtrace.settings._config import config +from ddtrace.internal.settings._config import config from . 
import schematize_service_name diff --git a/ddtrace/internal/schema/span_attribute_schema.py b/ddtrace/internal/schema/span_attribute_schema.py index 1ebd95c0527..33b8e4eb19d 100644 --- a/ddtrace/internal/schema/span_attribute_schema.py +++ b/ddtrace/internal/schema/span_attribute_schema.py @@ -4,7 +4,7 @@ from typing import Optional from ddtrace.internal.constants import DEFAULT_SERVICE_NAME -from ddtrace.settings._inferred_base_service import detect_service +from ddtrace.internal.settings._inferred_base_service import detect_service class SpanDirection(Enum): diff --git a/tests/contrib/cassandra/__init__.py b/ddtrace/internal/settings/__init__.py similarity index 100% rename from tests/contrib/cassandra/__init__.py rename to ddtrace/internal/settings/__init__.py diff --git a/ddtrace/settings/_agent.py b/ddtrace/internal/settings/_agent.py similarity index 98% rename from ddtrace/settings/_agent.py rename to ddtrace/internal/settings/_agent.py index f2c44b5d678..6ea154aeef5 100644 --- a/ddtrace/settings/_agent.py +++ b/ddtrace/internal/settings/_agent.py @@ -6,7 +6,7 @@ from urllib.parse import urlparse from ddtrace.internal.constants import DEFAULT_TIMEOUT -from ddtrace.settings._core import DDConfig +from ddtrace.internal.settings._core import DDConfig DEFAULT_HOSTNAME = "localhost" diff --git a/ddtrace/settings/_config.py b/ddtrace/internal/settings/_config.py similarity index 96% rename from ddtrace/settings/_config.py rename to ddtrace/internal/settings/_config.py index 6e714cdf8f8..86c7aa289c0 100644 --- a/ddtrace/settings/_config.py +++ b/ddtrace/internal/settings/_config.py @@ -11,32 +11,32 @@ from typing import Tuple # noqa:F401 from typing import Union # noqa:F401 +from ddtrace.internal import gitmetadata +from ddtrace.internal.constants import _PROPAGATION_BEHAVIOR_DEFAULT +from ddtrace.internal.constants import _PROPAGATION_BEHAVIOR_IGNORE +from ddtrace.internal.constants import _PROPAGATION_STYLE_DEFAULT +from ddtrace.internal.constants import 
_PROPAGATION_STYLE_NONE +from ddtrace.internal.constants import DEFAULT_BUFFER_SIZE +from ddtrace.internal.constants import DEFAULT_MAX_PAYLOAD_SIZE +from ddtrace.internal.constants import DEFAULT_PROCESSING_INTERVAL +from ddtrace.internal.constants import DEFAULT_REUSE_CONNECTIONS +from ddtrace.internal.constants import DEFAULT_SAMPLING_RATE_LIMIT +from ddtrace.internal.constants import DEFAULT_TIMEOUT +from ddtrace.internal.constants import PROPAGATION_STYLE_ALL +from ddtrace.internal.logger import get_log_injection_state +from ddtrace.internal.logger import get_logger +from ddtrace.internal.schema import DEFAULT_SPAN_SERVICE_NAME +from ddtrace.internal.serverless import in_aws_lambda from ddtrace.internal.serverless import in_azure_function from ddtrace.internal.serverless import in_gcp_function +from ddtrace.internal.telemetry import get_config as _get_config from ddtrace.internal.telemetry import telemetry_writer from ddtrace.internal.telemetry import validate_and_report_otel_metrics_exporter_enabled from ddtrace.internal.telemetry import validate_otel_envs from ddtrace.internal.utils.cache import cachedmethod +from ddtrace.internal.utils.formats import asbool +from ddtrace.internal.utils.formats import parse_tags_str -from .._logger import get_log_injection_state -from ..internal import gitmetadata -from ..internal.constants import _PROPAGATION_BEHAVIOR_DEFAULT -from ..internal.constants import _PROPAGATION_BEHAVIOR_IGNORE -from ..internal.constants import _PROPAGATION_STYLE_DEFAULT -from ..internal.constants import _PROPAGATION_STYLE_NONE -from ..internal.constants import DEFAULT_BUFFER_SIZE -from ..internal.constants import DEFAULT_MAX_PAYLOAD_SIZE -from ..internal.constants import DEFAULT_PROCESSING_INTERVAL -from ..internal.constants import DEFAULT_REUSE_CONNECTIONS -from ..internal.constants import DEFAULT_SAMPLING_RATE_LIMIT -from ..internal.constants import DEFAULT_TIMEOUT -from ..internal.constants import PROPAGATION_STYLE_ALL -from ..internal.logger 
import get_logger -from ..internal.schema import DEFAULT_SPAN_SERVICE_NAME -from ..internal.serverless import in_aws_lambda -from ..internal.telemetry import get_config as _get_config -from ..internal.utils.formats import asbool -from ..internal.utils.formats import parse_tags_str from ._inferred_base_service import detect_service from .endpoint_config import fetch_config_from_endpoint from .http import HttpConfig @@ -99,7 +99,6 @@ "pyodbc", "dramatiq", "flask", - "google_generativeai", "google_genai", "google_adk", "urllib3", @@ -126,7 +125,6 @@ "protobuf", "aiohttp_jinja2", "pymongo", - "freezegun", "vertica", "rq_worker", "elasticsearch", @@ -158,7 +156,6 @@ "aiopg", "dogpile_cache", "pylibmc", - "mongoengine", "httpx", "httplib", "rq", @@ -178,7 +175,6 @@ "crewai", "pydantic_ai", "logging", - "cassandra", "boto", "mariadb", "aiohttp", diff --git a/ddtrace/settings/_core.py b/ddtrace/internal/settings/_core.py similarity index 100% rename from ddtrace/settings/_core.py rename to ddtrace/internal/settings/_core.py diff --git a/ddtrace/settings/_database_monitoring.py b/ddtrace/internal/settings/_database_monitoring.py similarity index 88% rename from ddtrace/settings/_database_monitoring.py rename to ddtrace/internal/settings/_database_monitoring.py index 424d4f21028..67e47eda76c 100644 --- a/ddtrace/settings/_database_monitoring.py +++ b/ddtrace/internal/settings/_database_monitoring.py @@ -1,6 +1,6 @@ from envier import validators -from ddtrace.settings._core import DDConfig +from ddtrace.internal.settings._core import DDConfig class DatabaseMonitoringConfig(DDConfig): diff --git a/ddtrace/settings/_inferred_base_service.py b/ddtrace/internal/settings/_inferred_base_service.py similarity index 99% rename from ddtrace/settings/_inferred_base_service.py rename to ddtrace/internal/settings/_inferred_base_service.py index e0a89e8a3bb..cf592521779 100644 --- a/ddtrace/settings/_inferred_base_service.py +++ b/ddtrace/internal/settings/_inferred_base_service.py @@ 
-9,7 +9,7 @@ from typing import Optional from typing import Tuple -from ..internal.logger import get_logger +from ddtrace.internal.logger import get_logger log = get_logger(__name__) diff --git a/ddtrace/settings/_opentelemetry.py b/ddtrace/internal/settings/_opentelemetry.py similarity index 97% rename from ddtrace/settings/_opentelemetry.py rename to ddtrace/internal/settings/_opentelemetry.py index 2c2f0be453e..ebe75553669 100644 --- a/ddtrace/settings/_opentelemetry.py +++ b/ddtrace/internal/settings/_opentelemetry.py @@ -1,9 +1,9 @@ import typing as t +from ddtrace.internal.settings._agent import get_agent_hostname +from ddtrace.internal.settings._core import DDConfig from ddtrace.internal.telemetry import get_config from ddtrace.internal.telemetry import report_configuration -from ddtrace.settings._agent import get_agent_hostname -from ddtrace.settings._core import DDConfig def _derive_endpoint(config: "ExporterConfig"): diff --git a/ddtrace/settings/_otel_remapper.py b/ddtrace/internal/settings/_otel_remapper.py similarity index 97% rename from ddtrace/settings/_otel_remapper.py rename to ddtrace/internal/settings/_otel_remapper.py index b0841f31d2f..d8558f38220 100644 --- a/ddtrace/settings/_otel_remapper.py +++ b/ddtrace/internal/settings/_otel_remapper.py @@ -5,9 +5,9 @@ from typing import Optional from typing import Tuple -from ..constants import ENV_KEY -from ..constants import VERSION_KEY -from ..internal.logger import get_logger +from ddtrace.constants import ENV_KEY +from ddtrace.constants import VERSION_KEY +from ddtrace.internal.logger import get_logger log = get_logger(__name__) diff --git a/ddtrace/settings/_telemetry.py b/ddtrace/internal/settings/_telemetry.py similarity index 91% rename from ddtrace/settings/_telemetry.py rename to ddtrace/internal/settings/_telemetry.py index 59314854288..42f8411305d 100644 --- a/ddtrace/settings/_telemetry.py +++ b/ddtrace/internal/settings/_telemetry.py @@ -1,8 +1,8 @@ import sys import typing as t -from 
ddtrace.settings._core import DDConfig -from ddtrace.settings._inferred_base_service import detect_service +from ddtrace.internal.settings._core import DDConfig +from ddtrace.internal.settings._inferred_base_service import detect_service class TelemetryConfig(DDConfig): diff --git a/ddtrace/settings/asm.py b/ddtrace/internal/settings/asm.py similarity index 98% rename from ddtrace/settings/asm.py rename to ddtrace/internal/settings/asm.py index 8e649452ee1..3e20ed632a3 100644 --- a/ddtrace/settings/asm.py +++ b/ddtrace/internal/settings/asm.py @@ -21,8 +21,8 @@ from ddtrace.internal.constants import AI_GUARD_MAX_MESSAGES_LENGTH from ddtrace.internal.constants import AI_GUARD_TIMEOUT from ddtrace.internal.serverless import in_aws_lambda -from ddtrace.settings._config import config as tracer_config -from ddtrace.settings._core import DDConfig +from ddtrace.internal.settings._config import config as tracer_config +from ddtrace.internal.settings._core import DDConfig def _validate_non_negative_int(r: int) -> None: @@ -60,7 +60,9 @@ def build_libddwaf_filename() -> str: ARCHI = "x86" TRANSLATE_ARCH = {"amd64": "x64", "i686": "x86_64", "x86": "win32"} ARCHITECTURE = TRANSLATE_ARCH.get(ARCHI, ARCHI) - return os.path.join(_DIRNAME, "appsec", "_ddwaf", "libddwaf", ARCHITECTURE, "lib", "libddwaf." + FILE_EXTENSION) + return os.path.join( + _DIRNAME, "..", "appsec", "_ddwaf", "libddwaf", ARCHITECTURE, "lib", "libddwaf." 
+ FILE_EXTENSION + ) class ASMConfig(DDConfig): diff --git a/ddtrace/settings/code_origin.py b/ddtrace/internal/settings/code_origin.py similarity index 92% rename from ddtrace/settings/code_origin.py rename to ddtrace/internal/settings/code_origin.py index 8ab313945a3..ada1cd7ff93 100644 --- a/ddtrace/settings/code_origin.py +++ b/ddtrace/internal/settings/code_origin.py @@ -1,4 +1,4 @@ -from ddtrace.settings._core import DDConfig +from ddtrace.internal.settings._core import DDConfig class CodeOriginConfig(DDConfig): diff --git a/ddtrace/settings/crashtracker.py b/ddtrace/internal/settings/crashtracker.py similarity index 98% rename from ddtrace/settings/crashtracker.py rename to ddtrace/internal/settings/crashtracker.py index d102fd7a54c..f58c230daa4 100644 --- a/ddtrace/settings/crashtracker.py +++ b/ddtrace/internal/settings/crashtracker.py @@ -1,8 +1,8 @@ import typing as t +from ddtrace.internal.settings._core import DDConfig from ddtrace.internal.telemetry import report_configuration from ddtrace.internal.utils.formats import parse_tags_str -from ddtrace.settings._core import DDConfig resolver_default = "full" diff --git a/ddtrace/settings/dynamic_instrumentation.py b/ddtrace/internal/settings/dynamic_instrumentation.py similarity index 96% rename from ddtrace/settings/dynamic_instrumentation.py rename to ddtrace/internal/settings/dynamic_instrumentation.py index d08781e1ef5..99431165d5a 100644 --- a/ddtrace/settings/dynamic_instrumentation.py +++ b/ddtrace/internal/settings/dynamic_instrumentation.py @@ -5,9 +5,9 @@ from ddtrace.internal import gitmetadata from ddtrace.internal.compat import Path from ddtrace.internal.constants import DEFAULT_SERVICE_NAME +from ddtrace.internal.settings._agent import config as agent_config +from ddtrace.internal.settings._core import DDConfig from ddtrace.internal.utils.config import get_application_name -from ddtrace.settings._agent import config as agent_config -from ddtrace.settings._core import DDConfig from 
ddtrace.version import get_version @@ -92,7 +92,6 @@ class DynamicInstrumentationConfig(DDConfig): default=1.0, # seconds help_type="Float", help="Interval in seconds for flushing the dynamic logs upload queue", - deprecations=[("upload.flush_interval", None, "4.0")], ) diagnostics_interval = DDConfig.v( diff --git a/ddtrace/settings/endpoint_config.py b/ddtrace/internal/settings/endpoint_config.py similarity index 100% rename from ddtrace/settings/endpoint_config.py rename to ddtrace/internal/settings/endpoint_config.py diff --git a/ddtrace/settings/errortracking.py b/ddtrace/internal/settings/errortracking.py similarity index 97% rename from ddtrace/settings/errortracking.py rename to ddtrace/internal/settings/errortracking.py index c3b758bdb27..f590770c8e6 100644 --- a/ddtrace/settings/errortracking.py +++ b/ddtrace/internal/settings/errortracking.py @@ -1,7 +1,7 @@ import sys import typing as t -from ddtrace.settings._core import DDConfig +from ddtrace.internal.settings._core import DDConfig def parse_modules(value: t.Union[str, None]) -> t.List[str]: diff --git a/ddtrace/settings/exception_replay.py b/ddtrace/internal/settings/exception_replay.py similarity index 83% rename from ddtrace/settings/exception_replay.py rename to ddtrace/internal/settings/exception_replay.py index 84089124959..bc5d0f0dd81 100644 --- a/ddtrace/settings/exception_replay.py +++ b/ddtrace/internal/settings/exception_replay.py @@ -1,4 +1,4 @@ -from ddtrace.settings._core import DDConfig +from ddtrace.internal.settings._core import DDConfig class ExceptionReplayConfig(DDConfig): @@ -10,7 +10,6 @@ class ExceptionReplayConfig(DDConfig): default=False, help_type="Boolean", help="Enable automatic capturing of exception debugging information", - deprecations=[("debugging.enabled", None, "3.0")], ) max_frames = DDConfig.v( int, diff --git a/ddtrace/settings/http.py b/ddtrace/internal/settings/http.py similarity index 94% rename from ddtrace/settings/http.py rename to 
ddtrace/internal/settings/http.py index 4e408faddae..dec9ceb3671 100644 --- a/ddtrace/settings/http.py +++ b/ddtrace/internal/settings/http.py @@ -3,9 +3,9 @@ from typing import Optional # noqa:F401 from typing import Union # noqa:F401 -from ..internal.logger import get_logger -from ..internal.utils.cache import cachedmethod -from ..internal.utils.http import normalize_header_name +from ddtrace.internal.logger import get_logger +from ddtrace.internal.utils.cache import cachedmethod +from ddtrace.internal.utils.http import normalize_header_name log = get_logger(__name__) diff --git a/ddtrace/settings/integration.py b/ddtrace/internal/settings/integration.py similarity index 74% rename from ddtrace/settings/integration.py rename to ddtrace/internal/settings/integration.py index e06241bfc47..6cea1c33c75 100644 --- a/ddtrace/settings/integration.py +++ b/ddtrace/internal/settings/integration.py @@ -1,11 +1,9 @@ import os from typing import Optional # noqa:F401 -from ddtrace.internal.utils.deprecations import DDTraceDeprecationWarning -from ddtrace.vendor.debtcollector import deprecate +from ddtrace._hooks import Hooks +from ddtrace.internal.utils.attrdict import AttrDict -from .._hooks import Hooks -from ..internal.utils.attrdict import AttrDict from .http import HttpConfig @@ -81,8 +79,7 @@ def trace_query_string(self): return self.global_config._http.trace_query_string @property - def is_header_tracing_configured(self): - # type: (...) -> bool + def is_header_tracing_configured(self) -> bool: """Returns whether header tracing is enabled for this integration. Will return true if traced headers are configured for this integration @@ -90,45 +87,23 @@ def is_header_tracing_configured(self): """ return self.http.is_header_tracing_configured or self.global_config._http.is_header_tracing_configured - def header_is_traced(self, header_name): - # type: (str) -> bool - """ - Returns whether or not the current header should be traced. 
- :param header_name: the header name - :type header_name: str - :rtype: bool - """ + def header_is_traced(self, header_name: str) -> bool: + """Returns whether or not the current header should be traced.""" return self._header_tag_name(header_name) is not None - def _header_tag_name(self, header_name): - # type: (str) -> Optional[str] + def _header_tag_name(self, header_name: str) -> Optional[str]: tag_name = self.http._header_tag_name(header_name) if tag_name is None: return self.global_config._header_tag_name(header_name) return tag_name def __getattr__(self, key): - if key in self.APP_ANALYTICS_CONFIG_NAMES: - self.app_analytics_deprecated_warning(key) return super().__getattr__(key) def __setattr__(self, key, value): - if key in self.APP_ANALYTICS_CONFIG_NAMES: - self.app_analytics_deprecated_warning(key) return super().__setattr__(key, value) - def app_analytics_deprecated_warning(self, key): - deprecate( - f"{key} is deprecated", - message="Controlling ingestion via analytics is no longer supported. 
" - "See https://docs.datadoghq.com/tracing/legacy_app_analytics/" - "?code-lang=python#migrate-to-the-new-configuration-options", - category=DDTraceDeprecationWarning, - removal_version="4.0.0", - ) - def get_analytics_sample_rate(self, use_global_config=False): - self.app_analytics_deprecated_warning("get_analytics_sample_rate") return 1 def __repr__(self): diff --git a/ddtrace/settings/live_debugging.py b/ddtrace/internal/settings/live_debugging.py similarity index 83% rename from ddtrace/settings/live_debugging.py rename to ddtrace/internal/settings/live_debugging.py index 41a638ace8b..e316519d5f9 100644 --- a/ddtrace/settings/live_debugging.py +++ b/ddtrace/internal/settings/live_debugging.py @@ -1,4 +1,4 @@ -from ddtrace.settings._core import DDConfig +from ddtrace.internal.settings._core import DDConfig class LiveDebuggerConfig(DDConfig): diff --git a/ddtrace/settings/openfeature.py b/ddtrace/internal/settings/openfeature.py similarity index 94% rename from ddtrace/settings/openfeature.py rename to ddtrace/internal/settings/openfeature.py index 8a05efe10a6..5149bcee322 100644 --- a/ddtrace/settings/openfeature.py +++ b/ddtrace/internal/settings/openfeature.py @@ -2,7 +2,7 @@ OpenFeature configuration settings. 
""" -from ddtrace.settings._core import DDConfig +from ddtrace.internal.settings._core import DDConfig class OpenFeatureConfig(DDConfig): diff --git a/ddtrace/settings/peer_service.py b/ddtrace/internal/settings/peer_service.py similarity index 100% rename from ddtrace/settings/peer_service.py rename to ddtrace/internal/settings/peer_service.py diff --git a/ddtrace/settings/profiling.py b/ddtrace/internal/settings/profiling.py similarity index 81% rename from ddtrace/settings/profiling.py rename to ddtrace/internal/settings/profiling.py index fd3ccc9b41e..82653d97642 100644 --- a/ddtrace/settings/profiling.py +++ b/ddtrace/internal/settings/profiling.py @@ -10,11 +10,11 @@ from ddtrace.internal import compat from ddtrace.internal import gitmetadata from ddtrace.internal.logger import get_logger +from ddtrace.internal.settings._core import DDConfig from ddtrace.internal.telemetry import report_configuration from ddtrace.internal.telemetry import telemetry_writer from ddtrace.internal.telemetry.constants import TELEMETRY_LOG_LEVEL from ddtrace.internal.utils.formats import parse_tags_str -from ddtrace.settings._core import DDConfig logger = get_logger(__name__) @@ -85,52 +85,6 @@ def _parse_profiling_enabled(raw: str) -> bool: return False -def _parse_v2_enabled(raw: str) -> bool: - if sys.version_info >= (3, 14): - return False - - # Parse the boolean value - raw_lc = raw.lower() - enabled = raw_lc in ("1", "true", "yes", "on") - - # Warn if user explicitly disabled v2 profiler (v1 is deprecated) - if raw_lc in ("false", "0", "no", "off"): - from ddtrace.internal.utils.deprecations import DDTraceDeprecationWarning - from ddtrace.vendor.debtcollector import deprecate - - deprecate( - "Setting DD_PROFILING_STACK_V2_ENABLED=false is deprecated", - message="The v1 stack profiler is deprecated and will be removed in a future version. 
" - "Please migrate to the v2 stack profiler by removing DD_PROFILING_STACK_V2_ENABLED=false " - "or setting it to true.", - category=DDTraceDeprecationWarning, - removal_version="4.0.0", - ) - - return enabled - - -def _parse_api_timeout_ms(raw: str) -> int: - # Check if the deprecated DD_PROFILING_API_TIMEOUT is set (in seconds) - deprecated_timeout = os.environ.get("DD_PROFILING_API_TIMEOUT") - if deprecated_timeout is not None: - from ddtrace.internal.utils.deprecations import DDTraceDeprecationWarning - from ddtrace.vendor.debtcollector import deprecate - - deprecate( - "DD_PROFILING_API_TIMEOUT is deprecated", - message="DD_PROFILING_API_TIMEOUT (in seconds) is deprecated and will be removed in version 4.0.0. " - "Please use DD_PROFILING_API_TIMEOUT_MS (in milliseconds) instead.", - category=DDTraceDeprecationWarning, - removal_version="4.0.0", - ) - # Convert seconds to milliseconds - return int(float(deprecated_timeout) * 1000) - - # Otherwise, use the raw value (in milliseconds) - return int(raw) - - def _update_git_metadata_tags(tags): """ Update profiler tags with git metadata @@ -251,7 +205,6 @@ class ProfilingConfig(DDConfig): api_timeout_ms = DDConfig.v( int, "api_timeout_ms", - parser=_parse_api_timeout_ms, default=10000, help_type="Integer", help="The timeout in milliseconds before dropping events if the HTTP API does not reply", @@ -300,24 +253,11 @@ class ProfilingConfigStack(DDConfig): enabled = DDConfig.v( bool, "enabled", - default=True, - help_type="Boolean", - help="Whether to enable the stack profiler", - ) - - _v2_enabled = DDConfig.v( - bool, - "v2_enabled", - parser=_parse_v2_enabled, - # Not yet supported on 3.14 default=sys.version_info < (3, 14), help_type="Boolean", - help="Whether to enable the v2 stack profiler. 
Also enables the libdatadog collector.", + help="Whether to enable the stack profiler", ) - # V2 can't be enabled if stack collection is disabled or if pre-requisites are not met - v2_enabled = DDConfig.d(bool, lambda c: c._v2_enabled and c.enabled) - v2_adaptive_sampling = DDConfig.v( bool, "v2.adaptive_sampling.enabled", @@ -436,14 +376,14 @@ class ProfilingConfigPytorch(DDConfig): # We also need to check if stack_v2 module is available, and turn if off # if it s not. stack_v2_failure_msg, stack_v2_is_available = _check_for_stack_v2_available() -if config.stack.v2_enabled and not stack_v2_is_available: +if config.stack.enabled and not stack_v2_is_available: msg = stack_v2_failure_msg or "stack_v2 not available" logger.warning("Failed to load stack_v2 module (%s), falling back to v1 stack sampler", msg) telemetry_writer.add_log( TELEMETRY_LOG_LEVEL.ERROR, - "Failed to load stack_v2 module (%s), falling back to v1 stack sampler" % msg, + "Failed to load stack_v2 module (%s), disabling profiling" % msg, ) - config.stack.v2_enabled = False + config.stack.enabled = False # Enrich tags with git metadata and DD_TAGS config.tags = _enrich_tags(config.tags) @@ -452,10 +392,7 @@ class ProfilingConfigPytorch(DDConfig): def config_str(config): configured_features = [] if config.stack.enabled: - if config.stack.v2_enabled: - configured_features.append("stack_v2") - else: - configured_features.append("stack") + configured_features.append("stack_v2") if config.lock.enabled: configured_features.append("lock") if config.memory.enabled: diff --git a/ddtrace/settings/symbol_db.py b/ddtrace/internal/settings/symbol_db.py similarity index 94% rename from ddtrace/settings/symbol_db.py rename to ddtrace/internal/settings/symbol_db.py index a5e21b77262..0f1019f4421 100644 --- a/ddtrace/settings/symbol_db.py +++ b/ddtrace/internal/settings/symbol_db.py @@ -1,6 +1,6 @@ import re -from ddtrace.settings._core import DDConfig +from ddtrace.internal.settings._core import DDConfig class 
SymbolDatabaseConfig(DDConfig): diff --git a/ddtrace/settings/third_party.py b/ddtrace/internal/settings/third_party.py similarity index 90% rename from ddtrace/settings/third_party.py rename to ddtrace/internal/settings/third_party.py index fca1a4621e4..8e55da69d19 100644 --- a/ddtrace/settings/third_party.py +++ b/ddtrace/internal/settings/third_party.py @@ -1,4 +1,4 @@ -from ddtrace.settings._core import DDConfig +from ddtrace.internal.settings._core import DDConfig class ThirdPartyDetectionConfig(DDConfig): diff --git a/ddtrace/internal/symbol_db/__init__.py b/ddtrace/internal/symbol_db/__init__.py index 80013860369..1ef8d175770 100644 --- a/ddtrace/internal/symbol_db/__init__.py +++ b/ddtrace/internal/symbol_db/__init__.py @@ -2,8 +2,8 @@ from ddtrace.internal import core from ddtrace.internal.remoteconfig.worker import remoteconfig_poller +from ddtrace.internal.settings.symbol_db import config as symdb_config from ddtrace.internal.symbol_db.remoteconfig import SymbolDatabaseAdapter -from ddtrace.settings.symbol_db import config as symdb_config def bootstrap(): diff --git a/ddtrace/internal/symbol_db/product.py b/ddtrace/internal/symbol_db/product.py index c6c165e9577..bbc493d63e6 100644 --- a/ddtrace/internal/symbol_db/product.py +++ b/ddtrace/internal/symbol_db/product.py @@ -1,4 +1,4 @@ -from ddtrace.settings.symbol_db import config +from ddtrace.internal.settings.symbol_db import config requires = ["remote-configuration"] diff --git a/ddtrace/internal/symbol_db/symbols.py b/ddtrace/internal/symbol_db/symbols.py index 9842d57eac9..f3d146b3871 100644 --- a/ddtrace/internal/symbol_db/symbols.py +++ b/ddtrace/internal/symbol_db/symbols.py @@ -33,6 +33,8 @@ from ddtrace.internal.module import origin from ddtrace.internal.runtime import get_runtime_id from ddtrace.internal.safety import _isinstance +from ddtrace.internal.settings._agent import config as agent_config +from ddtrace.internal.settings.symbol_db import config as symdb_config from 
ddtrace.internal.utils.cache import cached from ddtrace.internal.utils.http import FormData from ddtrace.internal.utils.http import connector @@ -40,8 +42,6 @@ from ddtrace.internal.utils.inspection import linenos from ddtrace.internal.utils.inspection import resolved_code_origin from ddtrace.internal.utils.inspection import undecorated -from ddtrace.settings._agent import config as agent_config -from ddtrace.settings.symbol_db import config as symdb_config log = get_logger(__name__) diff --git a/ddtrace/internal/telemetry/__init__.py b/ddtrace/internal/telemetry/__init__.py index 8af349fe0fd..0a523c8752d 100644 --- a/ddtrace/internal/telemetry/__init__.py +++ b/ddtrace/internal/telemetry/__init__.py @@ -8,16 +8,16 @@ import typing as t from ddtrace.internal.logger import get_logger +from ddtrace.internal.settings._agent import config as agent_config +from ddtrace.internal.settings._core import FLEET_CONFIG +from ddtrace.internal.settings._core import FLEET_CONFIG_IDS +from ddtrace.internal.settings._core import LOCAL_CONFIG +from ddtrace.internal.settings._core import DDConfig +from ddtrace.internal.settings._otel_remapper import ENV_VAR_MAPPINGS +from ddtrace.internal.settings._otel_remapper import SUPPORTED_OTEL_ENV_VARS +from ddtrace.internal.settings._otel_remapper import parse_otel_env from ddtrace.internal.telemetry.constants import TELEMETRY_NAMESPACE from ddtrace.internal.utils.formats import asbool -from ddtrace.settings._agent import config as agent_config -from ddtrace.settings._core import FLEET_CONFIG -from ddtrace.settings._core import FLEET_CONFIG_IDS -from ddtrace.settings._core import LOCAL_CONFIG -from ddtrace.settings._core import DDConfig -from ddtrace.settings._otel_remapper import ENV_VAR_MAPPINGS -from ddtrace.settings._otel_remapper import SUPPORTED_OTEL_ENV_VARS -from ddtrace.settings._otel_remapper import parse_otel_env log = get_logger(__name__) diff --git a/ddtrace/internal/telemetry/writer.py b/ddtrace/internal/telemetry/writer.py 
index 1b89e13716a..30def852f28 100644 --- a/ddtrace/internal/telemetry/writer.py +++ b/ddtrace/internal/telemetry/writer.py @@ -17,9 +17,9 @@ from ddtrace.internal.endpoints import endpoint_collection from ddtrace.internal.logger import get_logger from ddtrace.internal.packages import is_user_code +from ddtrace.internal.settings._agent import config as agent_config +from ddtrace.internal.settings._telemetry import config from ddtrace.internal.utils.http import get_connection -from ddtrace.settings._agent import config as agent_config -from ddtrace.settings._telemetry import config from ...internal import atexit from ...internal import forksafe @@ -338,7 +338,7 @@ def _report_dependencies(self) -> Optional[List[Dict[str, Any]]]: def _report_endpoints(self) -> Optional[Dict[str, Any]]: """Adds a Telemetry event which sends the list of HTTP endpoints found at startup to the agent""" - import ddtrace.settings.asm as asm_config_module + import ddtrace.internal.settings.asm as asm_config_module if not asm_config_module.config._api_security_endpoint_collection or not self._enabled: return None diff --git a/ddtrace/internal/writer/writer.py b/ddtrace/internal/writer/writer.py index 698d056bde5..15e60e7ba62 100644 --- a/ddtrace/internal/writer/writer.py +++ b/ddtrace/internal/writer/writer.py @@ -19,11 +19,11 @@ from ddtrace.internal.hostname import get_hostname import ddtrace.internal.native as native from ddtrace.internal.runtime import get_runtime_id +from ddtrace.internal.settings._agent import config as agent_config +from ddtrace.internal.settings.asm import ai_guard_config +from ddtrace.internal.settings.asm import config as asm_config import ddtrace.internal.utils.http from ddtrace.internal.utils.retry import fibonacci_backoff_with_jitter -from ddtrace.settings._agent import config as agent_config -from ddtrace.settings.asm import ai_guard_config -from ddtrace.settings.asm import config as asm_config from ...constants import _KEEP_SPANS_RATE_KEY from .. 
import compat diff --git a/ddtrace/llmobs/_integrations/__init__.py b/ddtrace/llmobs/_integrations/__init__.py index c79c6033ddb..5827a62ffcb 100644 --- a/ddtrace/llmobs/_integrations/__init__.py +++ b/ddtrace/llmobs/_integrations/__init__.py @@ -1,7 +1,6 @@ from .anthropic import AnthropicIntegration from .base import BaseLLMIntegration from .bedrock import BedrockIntegration -from .gemini import GeminiIntegration from .google_adk import GoogleAdkIntegration from .google_genai import GoogleGenAIIntegration from .langchain import LangChainIntegration @@ -15,7 +14,6 @@ "AnthropicIntegration", "BaseLLMIntegration", "BedrockIntegration", - "GeminiIntegration", "GoogleAdkIntegration", "GoogleGenAIIntegration", "LangChainIntegration", diff --git a/ddtrace/llmobs/_integrations/base.py b/ddtrace/llmobs/_integrations/base.py index 0b0fc312afc..6e129cd38a3 100644 --- a/ddtrace/llmobs/_integrations/base.py +++ b/ddtrace/llmobs/_integrations/base.py @@ -11,10 +11,10 @@ from ddtrace.contrib.internal.trace_utils import int_service from ddtrace.ext import SpanTypes from ddtrace.internal.logger import get_logger +from ddtrace.internal.settings.integration import IntegrationConfig from ddtrace.llmobs._constants import INTEGRATION from ddtrace.llmobs._constants import PROXY_REQUEST from ddtrace.llmobs._llmobs import LLMObs -from ddtrace.settings.integration import IntegrationConfig from ddtrace.trace import Span diff --git a/ddtrace/llmobs/_integrations/gemini.py b/ddtrace/llmobs/_integrations/gemini.py deleted file mode 100644 index eb87f95df46..00000000000 --- a/ddtrace/llmobs/_integrations/gemini.py +++ /dev/null @@ -1,131 +0,0 @@ -from typing import Any -from typing import Dict -from typing import Iterable -from typing import List -from typing import Optional - -from ddtrace.internal.utils import get_argument_value -from ddtrace.llmobs._constants import INPUT_MESSAGES -from ddtrace.llmobs._constants import INPUT_TOKENS_METRIC_KEY -from ddtrace.llmobs._constants import METADATA 
-from ddtrace.llmobs._constants import METRICS -from ddtrace.llmobs._constants import MODEL_NAME -from ddtrace.llmobs._constants import MODEL_PROVIDER -from ddtrace.llmobs._constants import OUTPUT_MESSAGES -from ddtrace.llmobs._constants import OUTPUT_TOKENS_METRIC_KEY -from ddtrace.llmobs._constants import SPAN_KIND -from ddtrace.llmobs._constants import TOTAL_TOKENS_METRIC_KEY -from ddtrace.llmobs._integrations.base import BaseLLMIntegration -from ddtrace.llmobs._integrations.google_utils import extract_message_from_part_gemini_vertexai -from ddtrace.llmobs._integrations.google_utils import get_system_instructions_gemini_vertexai -from ddtrace.llmobs._integrations.google_utils import llmobs_get_metadata_gemini_vertexai -from ddtrace.llmobs._utils import _get_attr -from ddtrace.llmobs.types import Message -from ddtrace.trace import Span - - -class GeminiIntegration(BaseLLMIntegration): - _integration_name = "gemini" - - def _set_base_span_tags( - self, span: Span, provider: Optional[str] = None, model: Optional[str] = None, **kwargs: Dict[str, Any] - ) -> None: - if provider is not None: - span._set_tag_str("google_generativeai.request.provider", str(provider)) - if model is not None: - span._set_tag_str("google_generativeai.request.model", str(model)) - - def _llmobs_set_tags( - self, - span: Span, - args: List[Any], - kwargs: Dict[str, Any], - response: Optional[Any] = None, - operation: str = "", - ) -> None: - instance = kwargs.get("instance", None) - metadata = llmobs_get_metadata_gemini_vertexai(kwargs, instance) - - system_instruction = get_system_instructions_gemini_vertexai(instance) - input_contents = get_argument_value(args, kwargs, 0, "contents") - input_messages: List[Message] = self._extract_input_message(input_contents, system_instruction) - - output_messages: List[Message] = [Message(content="")] - if response is not None: - output_messages = self._extract_output_message(response) - - span._set_ctx_items( - { - SPAN_KIND: "llm", - MODEL_NAME: 
span.get_tag("google_generativeai.request.model") or "", - MODEL_PROVIDER: span.get_tag("google_generativeai.request.provider") or "", - METADATA: metadata, - INPUT_MESSAGES: input_messages, - OUTPUT_MESSAGES: output_messages, - METRICS: self._extract_metrics(response), - } - ) - - def _extract_input_message(self, contents, system_instruction=None): - messages: List[Message] = [] - if system_instruction: - for instruction in system_instruction: - messages.append(Message(content=instruction or "", role="system")) - if isinstance(contents, str): - messages.append(Message(content=contents)) - return messages - if isinstance(contents, dict): - message = Message(content=contents.get("text", "")) - if contents.get("role", None): - message["role"] = contents["role"] - messages.append(message) - return messages - if not isinstance(contents, list): - messages.append(Message(content="[Non-text content object: {}]".format(repr(contents)))) - return messages - for content in contents: - if isinstance(content, str): - messages.append(Message(content=content)) - continue - role = _get_attr(content, "role", None) - parts = _get_attr(content, "parts", []) - if not parts or not isinstance(parts, Iterable): - message = Message(content="[Non-text content object: {}]".format(repr(content))) - if role: - message["role"] = role - messages.append(message) - continue - for part in parts: - message = extract_message_from_part_gemini_vertexai(part, role) - messages.append(message) - return messages - - def _extract_output_message(self, generations): - output_messages = [] - generations_dict = generations.to_dict() - for candidate in generations_dict.get("candidates", []): - content = candidate.get("content", {}) - role = content.get("role", "model") - parts = content.get("parts", []) - for part in parts: - message = extract_message_from_part_gemini_vertexai(part, role) - output_messages.append(message) - return output_messages - - def _extract_metrics(self, generations): - if not 
generations: - return {} - generations_dict = generations.to_dict() - - token_counts = generations_dict.get("usage_metadata", None) - if not token_counts: - return - input_tokens = token_counts.get("prompt_token_count", 0) - output_tokens = token_counts.get("candidates_token_count", 0) - total_tokens = input_tokens + output_tokens - - usage = {} - usage[INPUT_TOKENS_METRIC_KEY] = input_tokens - usage[OUTPUT_TOKENS_METRIC_KEY] = output_tokens - usage[TOTAL_TOKENS_METRIC_KEY] = total_tokens - return usage diff --git a/ddtrace/llmobs/_integrations/google_utils.py b/ddtrace/llmobs/_integrations/google_utils.py index 76c12daeac8..29cb06a5857 100644 --- a/ddtrace/llmobs/_integrations/google_utils.py +++ b/ddtrace/llmobs/_integrations/google_utils.py @@ -53,9 +53,8 @@ def extract_provider_and_model_name( Function to extract provider and model name from either kwargs or instance attributes. Args: kwargs: Dictionary containing model information (used for google_genai) - instance: Model instance with attributes (used for vertexai and google_generativeai) - model_name_attr: Attribute name to extract from instance (e.g., "_model_name", "model_name", used for vertexai - and google_generativeai) + instance: Model instance with attributes (used for vertexai) + model_name_attr: Attribute name to extract from instance (e.g., "_model_name", "model_name", used for vertexai) Returns: Tuple of (provider_name, model_name) @@ -237,7 +236,7 @@ def extract_message_from_part_google_genai(part, role: str) -> Message: return Message(content="Unsupported file type: {}".format(type(part)), role=role) -def llmobs_get_metadata_gemini_vertexai(kwargs, instance): +def llmobs_get_metadata_vertexai(kwargs, instance): metadata = {} model_config = getattr(instance, "_generation_config", {}) or {} model_config = model_config.to_dict() if hasattr(model_config, "to_dict") else model_config @@ -253,7 +252,7 @@ def llmobs_get_metadata_gemini_vertexai(kwargs, instance): return metadata -def 
extract_message_from_part_gemini_vertexai(part, role=None) -> Message: +def extract_message_from_part_vertexai(part, role=None) -> Message: text = _get_attr(part, "text", "") function_call = _get_attr(part, "function_call", None) function_response = _get_attr(part, "function_response", None) @@ -289,7 +288,7 @@ def extract_message_from_part_gemini_vertexai(part, role=None) -> Message: return message -def get_system_instructions_gemini_vertexai(model_instance): +def get_system_instructions_vertexai(model_instance): """ Extract system instructions from model and convert to []str for tagging. """ diff --git a/ddtrace/llmobs/_integrations/langchain.py b/ddtrace/llmobs/_integrations/langchain.py index 7c3c34813d6..7cd214a4bef 100644 --- a/ddtrace/llmobs/_integrations/langchain.py +++ b/ddtrace/llmobs/_integrations/langchain.py @@ -64,7 +64,6 @@ OPENAI_PROVIDER_NAME = "openai" AZURE_OAI_PROVIDER_NAME = "azure" VERTEXAI_PROVIDER_NAME = "vertexai" -GEMINI_PROVIDER_NAME = "google_palm" ROLE_MAPPING = { "human": "user", @@ -187,9 +186,6 @@ def _llmobs_set_tags( # only the llm interface for Vertex AI will get instrumented elif model_provider.startswith(VERTEXAI_PROVIDER_NAME) and operation == "llm": llmobs_integration = "vertexai" - # only the llm interface for Gemini will get instrumented - elif model_provider.startswith(GEMINI_PROVIDER_NAME) and operation == "llm": - llmobs_integration = "google_generativeai" elif any(provider in model_provider for provider in (OPENAI_PROVIDER_NAME, AZURE_OAI_PROVIDER_NAME)): llmobs_integration = "openai" elif operation == "chat" and model_provider.startswith(ANTHROPIC_PROVIDER_NAME): diff --git a/ddtrace/llmobs/_integrations/vertexai.py b/ddtrace/llmobs/_integrations/vertexai.py index 05c30e46c8e..330130c96e2 100644 --- a/ddtrace/llmobs/_integrations/vertexai.py +++ b/ddtrace/llmobs/_integrations/vertexai.py @@ -18,9 +18,9 @@ from ddtrace.llmobs._constants import TOOL_DEFINITIONS from ddtrace.llmobs._constants import 
TOTAL_TOKENS_METRIC_KEY from ddtrace.llmobs._integrations.base import BaseLLMIntegration -from ddtrace.llmobs._integrations.google_utils import extract_message_from_part_gemini_vertexai -from ddtrace.llmobs._integrations.google_utils import get_system_instructions_gemini_vertexai -from ddtrace.llmobs._integrations.google_utils import llmobs_get_metadata_gemini_vertexai +from ddtrace.llmobs._integrations.google_utils import extract_message_from_part_vertexai +from ddtrace.llmobs._integrations.google_utils import get_system_instructions_vertexai +from ddtrace.llmobs._integrations.google_utils import llmobs_get_metadata_vertexai from ddtrace.llmobs._utils import _get_attr from ddtrace.llmobs.types import Message from ddtrace.trace import Span @@ -48,9 +48,9 @@ def _llmobs_set_tags( instance = kwargs.get("instance", None) history = kwargs.get("history", []) metrics = kwargs.get("metrics", {}) - metadata = llmobs_get_metadata_gemini_vertexai(kwargs, instance) + metadata = llmobs_get_metadata_vertexai(kwargs, instance) - system_instruction = get_system_instructions_gemini_vertexai(instance) + system_instruction = get_system_instructions_vertexai(instance) input_contents = None try: input_contents = get_argument_value(args, kwargs, 0, "content") @@ -123,7 +123,7 @@ def _extract_input_message(self, contents, history, system_instruction=None) -> messages.append(Message(content=contents)) return messages if isinstance(contents, Part): - message = extract_message_from_part_gemini_vertexai(contents) + message = extract_message_from_part_vertexai(contents) messages.append(message) return messages if not isinstance(contents, list): @@ -134,7 +134,7 @@ def _extract_input_message(self, contents, history, system_instruction=None) -> messages.append(Message(content=content)) continue if isinstance(content, Part): - message = extract_message_from_part_gemini_vertexai(content) + message = extract_message_from_part_vertexai(content) messages.append(message) continue 
messages.extend(self._extract_messages_from_content(content)) @@ -176,7 +176,7 @@ def _extract_messages_from_content(content) -> List[Message]: messages.append(message) return messages for part in parts: - message = extract_message_from_part_gemini_vertexai(part, role) + message = extract_message_from_part_vertexai(part, role) messages.append(message) return messages diff --git a/ddtrace/llmobs/_llmobs.py b/ddtrace/llmobs/_llmobs.py index f9451b0ce3b..8c957cc0ea5 100644 --- a/ddtrace/llmobs/_llmobs.py +++ b/ddtrace/llmobs/_llmobs.py @@ -131,7 +131,6 @@ "openai": "openai", "langchain": "langchain", "google_adk": "google_adk", - "google_generativeai": "google_generativeai", "google_genai": "google_genai", "vertexai": "vertexai", "langgraph": "langgraph", @@ -1561,50 +1560,12 @@ def _set_dict_attribute(span: Span, key, value: Dict[str, Any]) -> None: existing_value.update(value) span._set_ctx_item(key, existing_value) - @classmethod - def submit_evaluation_for( - cls, - label: str, - metric_type: str, - value: Union[str, int, float, bool], - span: Optional[dict] = None, - span_with_tag_value: Optional[Dict[str, str]] = None, - tags: Optional[Dict[str, str]] = None, - ml_app: Optional[str] = None, - timestamp_ms: Optional[int] = None, - metadata: Optional[Dict[str, object]] = None, - assessment: Optional[str] = None, - reasoning: Optional[str] = None, - ) -> None: - """ - Submits a custom evaluation metric for a given span. This method is deprecated and will be - removed in the next major version of ddtrace (4.0). Please use `LLMObs.submit_evaluation()` instead. - """ - log.warning( - "LLMObs.submit_evaluation_for() is deprecated and will be removed in the next major " - "version of ddtrace (4.0). Please use LLMObs.submit_evaluation() instead." 
- ) - return cls.submit_evaluation( - label=label, - metric_type=metric_type, - value=value, - span=span, - span_with_tag_value=span_with_tag_value, - tags=tags, - ml_app=ml_app, - timestamp_ms=timestamp_ms, - metadata=metadata, - assessment=assessment, - reasoning=reasoning, - ) - @classmethod def submit_evaluation( cls, label: str, metric_type: str, value: Union[str, int, float, bool], - span_context: Optional[Dict[str, str]] = None, span: Optional[dict] = None, span_with_tag_value: Optional[Dict[str, str]] = None, tags: Optional[Dict[str, str]] = None, @@ -1621,9 +1582,6 @@ def submit_evaluation( :param str metric_type: The type of the evaluation metric. One of "categorical", "score", "boolean". :param value: The value of the evaluation metric. Must be a string (categorical), integer (score), float (score), or boolean (boolean). - :param dict span_context: A dictionary containing the span_id and trace_id of interest. This is a - deprecated parameter and will be removed in the next major version of - ddtrace (4.0). Please use `span` or `span_with_tag_value` instead. :param dict span: A dictionary of shape {'span_id': str, 'trace_id': str} uniquely identifying the span associated with this evaluation. :param dict span_with_tag_value: A dictionary with the format {'tag_key': str, 'tag_value': str} @@ -1637,13 +1595,6 @@ def submit_evaluation( :param str assessment: An assessment of this evaluation. Must be either "pass" or "fail". :param str reasoning: An explanation of the evaluation result. """ - if span_context is not None: - log.warning( - "The `span_context` parameter is deprecated and will be removed in the next major version of " - "ddtrace (4.0). Please use `span` or `span_with_tag_value` instead." - ) - span = span or span_context - if cls.enabled is False: log.debug( "LLMObs.submit_evaluation() called when LLMObs is not enabled. 
", diff --git a/ddtrace/llmobs/_writer.py b/ddtrace/llmobs/_writer.py index 2e762adf27a..1b4d0f2462b 100644 --- a/ddtrace/llmobs/_writer.py +++ b/ddtrace/llmobs/_writer.py @@ -25,6 +25,7 @@ from ddtrace.internal.evp_proxy.constants import EVP_SUBDOMAIN_HEADER_NAME from ddtrace.internal.logger import get_logger from ddtrace.internal.periodic import PeriodicService +from ddtrace.internal.settings._agent import config as agent_config from ddtrace.internal.utils.http import Response from ddtrace.internal.utils.retry import fibonacci_backoff_with_jitter from ddtrace.llmobs import _telemetry as telemetry @@ -48,7 +49,6 @@ from ddtrace.llmobs._utils import safe_json from ddtrace.llmobs.types import _Meta from ddtrace.llmobs.types import _SpanLink -from ddtrace.settings._agent import config as agent_config logger = get_logger(__name__) diff --git a/ddtrace/opentracer/__init__.py b/ddtrace/opentracer/__init__.py deleted file mode 100644 index 815cdae0022..00000000000 --- a/ddtrace/opentracer/__init__.py +++ /dev/null @@ -1,18 +0,0 @@ -from ddtrace.vendor.debtcollector import deprecate - -from .helpers import set_global_tracer -from .tracer import Tracer - - -deprecate( - "The `ddtrace.opentracer` package is deprecated", - message="The ddtrace library no longer supports the OpenTracing API. " - "Use the OpenTelemetry API instead (`ddtrace.opentelemetry`).", - removal_version="4.0.0", -) - - -__all__ = [ - "Tracer", - "set_global_tracer", -] diff --git a/ddtrace/opentracer/helpers.py b/ddtrace/opentracer/helpers.py deleted file mode 100644 index e8e6c4896a4..00000000000 --- a/ddtrace/opentracer/helpers.py +++ /dev/null @@ -1,25 +0,0 @@ -from typing import TYPE_CHECKING - -import opentracing - -import ddtrace - - -if TYPE_CHECKING: # pragma: no cover - from ddtrace.opentracer import Tracer # noqa:F401 - - -""" -Helper routines for Datadog OpenTracing. 
-""" - - -def set_global_tracer(tracer): - # type: (Tracer) -> None - """Sets the global tracers to the given tracer.""" - - # overwrite the opentracer reference - opentracing.tracer = tracer - - # overwrite the Datadog tracer reference - ddtrace.tracer = tracer._dd_tracer diff --git a/ddtrace/opentracer/propagation/__init__.py b/ddtrace/opentracer/propagation/__init__.py deleted file mode 100644 index 04ddde7014d..00000000000 --- a/ddtrace/opentracer/propagation/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -from .http import HTTPPropagator - - -__all__ = [ - "HTTPPropagator", -] diff --git a/ddtrace/opentracer/propagation/binary.py b/ddtrace/opentracer/propagation/binary.py deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/ddtrace/opentracer/propagation/http.py b/ddtrace/opentracer/propagation/http.py deleted file mode 100644 index 539f8dc2ebd..00000000000 --- a/ddtrace/opentracer/propagation/http.py +++ /dev/null @@ -1,74 +0,0 @@ -from typing import Dict # noqa:F401 - -from opentracing import InvalidCarrierException - -from ddtrace.propagation.http import HTTPPropagator as DDHTTPPropagator - -from ...internal.logger import get_logger -from ..span_context import SpanContext -from .propagator import Propagator - - -log = get_logger(__name__) - -_HTTP_BAGGAGE_PREFIX = "ot-baggage-" -_HTTP_BAGGAGE_PREFIX_LEN = len(_HTTP_BAGGAGE_PREFIX) - - -class HTTPPropagator(Propagator): - """OpenTracing compatible HTTP_HEADER and TEXT_MAP format propagator. - - `HTTPPropagator` provides compatibility by using existing OpenTracing - compatible methods from the ddtracer along with new logic supporting the - outstanding OpenTracing-defined functionality. - """ - - @staticmethod - def inject(span_context, carrier): - # type: (SpanContext, Dict[str, str]) -> None - """Inject a span context into a carrier. - - *span_context* is injected into the carrier by first using an - :class:`ddtrace.propagation.http.HTTPPropagator` to inject the ddtracer - specific fields. 
- - Then the baggage is injected into *carrier*. - - :param span_context: span context to inject. - - :param carrier: carrier to inject into. - """ - if not isinstance(carrier, dict): - raise InvalidCarrierException("propagator expects carrier to be a dict") - - DDHTTPPropagator.inject(span_context._dd_context, carrier) - - # Add the baggage - if span_context.baggage is not None: - for key in span_context.baggage: - carrier[_HTTP_BAGGAGE_PREFIX + key] = span_context.baggage[key] - - @staticmethod - def extract(carrier): - # type: (Dict[str, str]) -> SpanContext - """Extract a span context from a carrier. - - :class:`ddtrace.propagation.http.HTTPPropagator` is used to extract - ddtracer supported fields into a `ddtrace.Context` context which is - combined with new logic to extract the baggage which is returned in an - OpenTracing compatible span context. - - :param carrier: carrier to extract from. - - :return: extracted span context. - """ - if not isinstance(carrier, dict): - raise InvalidCarrierException("propagator expects carrier to be a dict") - - ddspan_ctx = DDHTTPPropagator.extract(carrier) - baggage = {} - for key in carrier: - if key.startswith(_HTTP_BAGGAGE_PREFIX): - baggage[key[_HTTP_BAGGAGE_PREFIX_LEN:]] = carrier[key] - - return SpanContext(ddcontext=ddspan_ctx, baggage=baggage) diff --git a/ddtrace/opentracer/propagation/propagator.py b/ddtrace/opentracer/propagation/propagator.py deleted file mode 100644 index 77eadf3912b..00000000000 --- a/ddtrace/opentracer/propagation/propagator.py +++ /dev/null @@ -1,13 +0,0 @@ -import abc - - -class Propagator(metaclass=abc.ABCMeta): - @staticmethod - @abc.abstractmethod - def inject(span_context, carrier): - pass - - @staticmethod - @abc.abstractmethod - def extract(carrier): - pass diff --git a/ddtrace/opentracer/propagation/text.py b/ddtrace/opentracer/propagation/text.py deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/ddtrace/opentracer/settings.py b/ddtrace/opentracer/settings.py 
deleted file mode 100644 index 944df88233b..00000000000 --- a/ddtrace/opentracer/settings.py +++ /dev/null @@ -1,41 +0,0 @@ -from collections import namedtuple -from typing import Any # noqa:F401 -from typing import Dict # noqa:F401 -from typing import List # noqa:F401 - - -# Keys used for the configuration dict -ConfigKeyNames = namedtuple( - "ConfigKeyNames", - [ - "AGENT_HOSTNAME", - "AGENT_HTTPS", - "AGENT_PORT", - "DEBUG", - "ENABLED", - "GLOBAL_TAGS", - "SAMPLER", - "PRIORITY_SAMPLING", - "UDS_PATH", - "SETTINGS", - ], -) - -ConfigKeys = ConfigKeyNames( - AGENT_HOSTNAME="agent_hostname", - AGENT_HTTPS="agent_https", - AGENT_PORT="agent_port", - DEBUG="debug", - ENABLED="enabled", - GLOBAL_TAGS="global_tags", - SAMPLER="sampler", - PRIORITY_SAMPLING="priority_sampling", - UDS_PATH="uds_path", - SETTINGS="settings", -) - - -def config_invalid_keys(config): - # type: (Dict[str, Any]) -> List[str] - """Returns a list of keys that exist in *config* and not in KEYS.""" - return [key for key in config.keys() if key not in ConfigKeys] diff --git a/ddtrace/opentracer/span.py b/ddtrace/opentracer/span.py deleted file mode 100644 index 3aea2eda580..00000000000 --- a/ddtrace/opentracer/span.py +++ /dev/null @@ -1,197 +0,0 @@ -import threading -from typing import TYPE_CHECKING # noqa:F401 -from typing import Any # noqa:F401 -from typing import Dict # noqa:F401 -from typing import Optional # noqa:F401 -from typing import Text # noqa:F401 -from typing import Union # noqa:F401 - -from opentracing import Span as OpenTracingSpan -from opentracing.ext import tags as OTTags - -from ddtrace.constants import ERROR_MSG -from ddtrace.constants import ERROR_STACK -from ddtrace.constants import ERROR_TYPE -from ddtrace.internal.compat import NumericType # noqa:F401 -from ddtrace.internal.constants import SPAN_API_OPENTRACING -from ddtrace.trace import Context as DatadogContext # noqa:F401 -from ddtrace.trace import Span as DatadogSpan - -from .span_context import SpanContext -from 
.tags import Tags - - -if TYPE_CHECKING: # pragma: no cover - from ddtrace.trace import Tracer # noqa:F401 - - -_TagNameType = Union[Text, bytes] - - -class Span(OpenTracingSpan): - """Datadog implementation of :class:`opentracing.Span`""" - - def __init__(self, tracer, context, operation_name): - # type: (Tracer, Optional[SpanContext], str) -> None - if context is not None: - context = SpanContext(ddcontext=context._dd_context, baggage=context.baggage) - else: - context = SpanContext() - - super(Span, self).__init__(tracer, context) - - self.finished = False - self._lock = threading.Lock() - # use a datadog span - self._dd_span = DatadogSpan(operation_name, context=context._dd_context, span_api=SPAN_API_OPENTRACING) - - def finish(self, finish_time=None): - # type: (Optional[float]) -> None - """Finish the span. - - This calls finish on the ddspan. - - :param finish_time: specify a custom finish time with a unix timestamp - per time.time() - :type timestamp: float - """ - if self.finished: - return - - # finish the datadog span - self._dd_span.finish(finish_time) - self.finished = True - - def set_baggage_item(self, key, value): - # type: (str, Any) -> Span - """Sets a baggage item in the span context of this span. - - Baggage is used to propagate state between spans. - - :param key: baggage item key - :type key: str - - :param value: baggage item value - :type value: a type that can be str'd - - :rtype: Span - :return: itself for chaining calls - """ - new_ctx = self.context.with_baggage_item(key, value) - with self._lock: - self._context = new_ctx - return self - - def get_baggage_item(self, key): - # type: (str) -> Optional[str] - """Gets a baggage item from the span context of this span. - - :param key: baggage item key - :type key: str - - :rtype: str - :return: the baggage value for the given key or ``None``. 
- """ - return self.context.get_baggage_item(key) - - def set_operation_name(self, operation_name): - # type: (str) -> Span - """Set the operation name.""" - self._dd_span.name = operation_name - return self - - def log_kv(self, key_values, timestamp=None): - # type: (Dict[_TagNameType, Any], Optional[float]) -> Span - """Add a log record to this span. - - Passes on relevant opentracing key values onto the datadog span. - - :param key_values: a dict of string keys and values of any type - :type key_values: dict - - :param timestamp: a unix timestamp per time.time() - :type timestamp: float - - :return: the span itself, for call chaining - :rtype: Span - """ - - # match opentracing defined keys to datadog functionality - # opentracing/specification/blob/1be630515dafd4d2a468d083300900f89f28e24d/semantic_conventions.md#log-fields-table # noqa: E501 - for key, val in key_values.items(): - if key == "event" and val == "error": - # TODO: not sure if it's actually necessary to set the error manually - self._dd_span.error = 1 - self.set_tag("error", 1) - elif key == "error" or key == "error.object": - self.set_tag(ERROR_TYPE, val) - elif key == "message": - self.set_tag(ERROR_MSG, val) - elif key == "stack": - self.set_tag(ERROR_STACK, val) - else: - pass - - return self - - def set_tag(self, key, value): - # type: (_TagNameType, Any) -> Span - """Set a tag on the span. - - This sets the tag on the underlying datadog span. 
- """ - if key == Tags.SPAN_TYPE: - self._dd_span.span_type = value - elif key == Tags.SERVICE_NAME: - self._dd_span.service = value - elif key == Tags.RESOURCE_NAME or key == OTTags.DATABASE_STATEMENT: - self._dd_span.resource = value - elif key == OTTags.PEER_HOSTNAME: - self._dd_span._set_tag_str(Tags.TARGET_HOST, value) - elif key == OTTags.PEER_PORT: - self._dd_span.set_tag(Tags.TARGET_PORT, value) - elif key == Tags.SAMPLING_PRIORITY: - self._dd_span.context.sampling_priority = value - else: - self._dd_span.set_tag(key, value) - return self - - def _get_tag(self, key): - # type: (_TagNameType) -> Optional[Text] - """Gets a tag from the span. - - This method retrieves the tag from the underlying datadog span. - """ - return self._dd_span.get_tag(key) - - def _get_metric(self, key): - # type: (_TagNameType) -> Optional[NumericType] - """Gets a metric from the span. - - This method retrieves the metric from the underlying datadog span. - """ - return self._dd_span.get_metric(key) - - def __enter__(self): - return self - - def __exit__(self, exc_type, exc_val, exc_tb): - if exc_type: - self._dd_span.set_exc_info(exc_type, exc_val, exc_tb) - - # note: self.finish() AND _dd_span.__exit__ will call _span.finish() but - # it is idempotent - self._dd_span.__exit__(exc_type, exc_val, exc_tb) - self.finish() - - def _associate_dd_span(self, ddspan): - # type: (DatadogSpan) -> None - """Associates a DD span with this span.""" - # get the datadog span context - self._dd_span = ddspan - self.context._dd_context = ddspan.context - - @property - def _dd_context(self): - # type: () -> DatadogContext - return self._dd_span.context diff --git a/ddtrace/opentracer/span_context.py b/ddtrace/opentracer/span_context.py deleted file mode 100644 index 171142d18a8..00000000000 --- a/ddtrace/opentracer/span_context.py +++ /dev/null @@ -1,66 +0,0 @@ -from typing import Any # noqa:F401 -from typing import Dict # noqa:F401 -from typing import Optional # noqa:F401 - -from opentracing 
import SpanContext as OpenTracingSpanContext - -from ddtrace.internal.compat import NumericType # noqa:F401 -from ddtrace.trace import Context as DatadogContext - - -class SpanContext(OpenTracingSpanContext): - """Implementation of the OpenTracing span context.""" - - def __init__( - self, - trace_id=None, # type: Optional[int] - span_id=None, # type: Optional[int] - sampling_priority=None, # type: Optional[NumericType] - baggage=None, # type: Optional[Dict[str, Any]] - ddcontext=None, # type: Optional[DatadogContext] - ): - # type: (...) -> None - # create a new dict for the baggage if it is not provided - # NOTE: it would be preferable to use opentracing.SpanContext.EMPTY_BAGGAGE - # but it is mutable. - # see: opentracing-python/blob/8775c7bfc57fd66e1c8bcf9a54d3e434d37544f9/opentracing/span.py#L30 - baggage = baggage or {} - - if ddcontext is not None: - self._dd_context = ddcontext - else: - self._dd_context = DatadogContext( - trace_id=trace_id, - span_id=span_id, - sampling_priority=sampling_priority, - ) - - self._baggage = dict(baggage) - - @property - def baggage(self): - # type: () -> Dict[str, Any] - return self._baggage - - def set_baggage_item(self, key, value): - # type: (str, Any) -> None - """Sets a baggage item in this span context. - - Note that this operation mutates the baggage of this span context - """ - self.baggage[key] = value - - def with_baggage_item(self, key, value): - # type: (str, Any) -> SpanContext - """Returns a copy of this span with a new baggage item. - - Useful for instantiating new child span contexts. 
- """ - baggage = dict(self._baggage) - baggage[key] = value - return SpanContext(ddcontext=self._dd_context, baggage=baggage) - - def get_baggage_item(self, key): - # type: (str) -> Optional[Any] - """Gets a baggage item in this span context.""" - return self.baggage.get(key, None) diff --git a/ddtrace/opentracer/tags.py b/ddtrace/opentracer/tags.py deleted file mode 100644 index ebc2d86d146..00000000000 --- a/ddtrace/opentracer/tags.py +++ /dev/null @@ -1,23 +0,0 @@ -from collections import namedtuple - - -TagNames = namedtuple( - "TagNames", - [ - "RESOURCE_NAME", - "SAMPLING_PRIORITY", - "SERVICE_NAME", - "SPAN_TYPE", - "TARGET_HOST", - "TARGET_PORT", - ], -) - -Tags = TagNames( - RESOURCE_NAME="resource.name", - SAMPLING_PRIORITY="sampling.priority", - SERVICE_NAME="service.name", - TARGET_HOST="out.host", - TARGET_PORT="network.destination.port", - SPAN_TYPE="span.type", -) diff --git a/ddtrace/opentracer/tracer.py b/ddtrace/opentracer/tracer.py deleted file mode 100644 index a783d3263dc..00000000000 --- a/ddtrace/opentracer/tracer.py +++ /dev/null @@ -1,399 +0,0 @@ -from typing import Any # noqa:F401 -from typing import Dict # noqa:F401 -from typing import List # noqa:F401 -from typing import Optional # noqa:F401 -from typing import Union # noqa:F401 -from urllib.parse import urlparse - -import opentracing -from opentracing import Format -from opentracing import Scope # noqa:F401 -from opentracing import ScopeManager # noqa:F401 -from opentracing.scope_managers import ThreadLocalScopeManager - -import ddtrace -from ddtrace import config as ddconfig -from ddtrace.internal.constants import SPAN_API_OPENTRACING -from ddtrace.internal.utils.config import get_application_name -from ddtrace.internal.writer import AgentWriterInterface -from ddtrace.settings.exceptions import ConfigException -from ddtrace.trace import Context as DatadogContext # noqa:F401 -from ddtrace.trace import Span as DatadogSpan -from ddtrace.trace import Tracer as DatadogTracer - -from 
..internal.logger import get_logger -from .propagation import HTTPPropagator -from .settings import ConfigKeys as keys -from .settings import config_invalid_keys -from .span import Span -from .span_context import SpanContext -from .utils import get_context_provider_for_scope_manager - - -log = get_logger(__name__) - -DEFAULT_CONFIG: Dict[str, Optional[Any]] = { - keys.AGENT_HOSTNAME: None, - keys.AGENT_HTTPS: None, - keys.AGENT_PORT: None, - keys.DEBUG: False, - keys.ENABLED: None, - keys.GLOBAL_TAGS: {}, - keys.SAMPLER: None, - # Not used, priority sampling can not be disabled in +v3.0 - keys.PRIORITY_SAMPLING: None, - keys.UDS_PATH: None, - keys.SETTINGS: { - "FILTERS": [], - }, -} - - -class Tracer(opentracing.Tracer): - """A wrapper providing an OpenTracing API for the Datadog tracer.""" - - def __init__( - self, - service_name: Optional[str] = None, - config: Optional[Dict[str, Any]] = None, - scope_manager: Optional[ScopeManager] = None, - _dd_tracer: Optional[DatadogTracer] = None, - ) -> None: - """Initialize a new Datadog opentracer. - - :param service_name: (optional) the name of the service that this - tracer will be used with. Note if not provided, a service name will - try to be determined based off of ``sys.argv``. If this fails a - :class:`ddtrace.settings.ConfigException` will be raised. - :param config: (optional) a configuration object to specify additional - options. See the documentation for further information. - :param scope_manager: (optional) the scope manager for this tracer to - use. The available managers are listed in the Python OpenTracing repo - here: https://github.com/opentracing/opentracing-python#scope-managers. - If ``None`` is provided, defaults to - :class:`opentracing.scope_managers.ThreadLocalScopeManager`. 
- """ - # Merge the given config with the default into a new dict - self._config = DEFAULT_CONFIG.copy() - if config is not None: - self._config.update(config) - # Pull out commonly used properties for performance - self._service_name = service_name or get_application_name() - self._debug = self._config.get(keys.DEBUG) - - if self._debug and ddconfig._raise: - # Ensure there are no typos in any of the keys - invalid_keys = config_invalid_keys(self._config) - if invalid_keys: - str_invalid_keys = ",".join(invalid_keys) - raise ConfigException("invalid key(s) given ({})".format(str_invalid_keys)) - - if not self._service_name and ddconfig._raise: - raise ConfigException( - """ Cannot detect the \'service_name\'. - Please set the \'service_name=\' - keyword argument. - """ - ) - - self._scope_manager = scope_manager or ThreadLocalScopeManager() - self._dd_tracer = _dd_tracer or ddtrace.tracer - self._dd_tracer.context_provider = get_context_provider_for_scope_manager(self._scope_manager) - - self._dd_tracer.set_tags(self._config.get(keys.GLOBAL_TAGS)) # type: ignore[arg-type] - trace_processors = None - if isinstance(self._config.get(keys.SETTINGS), dict) and self._config[keys.SETTINGS].get("FILTERS"): # type: ignore[union-attr] - trace_processors = self._config[keys.SETTINGS]["FILTERS"] # type: ignore[index] - self._dd_tracer._span_aggregator.user_processors = trace_processors - - if self._config[keys.ENABLED]: - self._dd_tracer.enabled = self._config[keys.ENABLED] - - if ( - self._config[keys.AGENT_HOSTNAME] - or self._config[keys.AGENT_HTTPS] - or self._config[keys.AGENT_PORT] - or self._config[keys.UDS_PATH] - ): - scheme = "https" if self._config[keys.AGENT_HTTPS] else "http" - hostname = self._config[keys.AGENT_HOSTNAME] - port = self._config[keys.AGENT_PORT] - if self._dd_tracer._agent_url: - curr_agent_url = urlparse(self._dd_tracer._agent_url) - scheme = "https" if self._config[keys.AGENT_HTTPS] else curr_agent_url.scheme - hostname = hostname or 
curr_agent_url.hostname - port = port or curr_agent_url.port - uds_path = self._config[keys.UDS_PATH] - - if uds_path: - new_url = f"unix://{uds_path}" - else: - new_url = f"{scheme}://{hostname}:{port}" - if isinstance(self._dd_tracer._span_aggregator.writer, AgentWriterInterface): - self._dd_tracer._span_aggregator.writer.intake_url = new_url - self._dd_tracer._recreate() - - if self._config[keys.SAMPLER]: - self._dd_tracer._sampler = self._config[keys.SAMPLER] - - self._propagators = { - Format.HTTP_HEADERS: HTTPPropagator, - Format.TEXT_MAP: HTTPPropagator, - } - - @property - def scope_manager(self): - # type: () -> ScopeManager - """Returns the scope manager being used by this tracer.""" - return self._scope_manager - - def start_active_span( - self, - operation_name, # type: str - child_of=None, # type: Optional[Union[Span, SpanContext]] - references=None, # type: Optional[List[Any]] - tags=None, # type: Optional[Dict[str, str]] - start_time=None, # type: Optional[int] - ignore_active_span=False, # type: bool - finish_on_close=True, # type: bool - ): - # type: (...) -> Scope - """Returns a newly started and activated `Scope`. - The returned `Scope` supports with-statement contexts. For example:: - - with tracer.start_active_span('...') as scope: - scope.span.set_tag('http.method', 'GET') - do_some_work() - # Span.finish() is called as part of Scope deactivation through - # the with statement. - - It's also possible to not finish the `Span` when the `Scope` context - expires:: - - with tracer.start_active_span('...', - finish_on_close=False) as scope: - scope.span.set_tag('http.method', 'GET') - do_some_work() - # Span.finish() is not called as part of Scope deactivation as - # `finish_on_close` is `False`. - - :param operation_name: name of the operation represented by the new - span from the perspective of the current service. - :param child_of: (optional) a Span or SpanContext instance representing - the parent in a REFERENCE_CHILD_OF Reference. 
If specified, the - `references` parameter must be omitted. - :param references: (optional) a list of Reference objects that identify - one or more parent SpanContexts. (See the Reference documentation - for detail). - :param tags: an optional dictionary of Span Tags. The caller gives up - ownership of that dictionary, because the Tracer may use it as-is - to avoid extra data copying. - :param start_time: an explicit Span start time as a unix timestamp per - time.time(). - :param ignore_active_span: (optional) an explicit flag that ignores - the current active `Scope` and creates a root `Span`. - :param finish_on_close: whether span should automatically be finished - when `Scope.close()` is called. - :return: a `Scope`, already registered via the `ScopeManager`. - """ - otspan = self.start_span( - operation_name=operation_name, - child_of=child_of, - references=references, - tags=tags, - start_time=start_time, - ignore_active_span=ignore_active_span, - ) - - # activate this new span - scope = self._scope_manager.activate(otspan, finish_on_close) - self._dd_tracer.context_provider.activate(otspan._dd_span) - return scope - - def start_span( - self, - operation_name: Optional[str] = None, - child_of: Optional[Union[Span, SpanContext]] = None, - references: Optional[List[Any]] = None, - tags: Optional[Dict[str, str]] = None, - start_time: Optional[int] = None, - ignore_active_span: bool = False, - ) -> Span: - """Starts and returns a new Span representing a unit of work. - - Starting a root Span (a Span with no causal references):: - - tracer.start_span('...') - - Starting a child Span (see also start_child_span()):: - - tracer.start_span( - '...', - child_of=parent_span) - - Starting a child Span in a more verbose way:: - - tracer.start_span( - '...', - references=[opentracing.child_of(parent_span)]) - - Note: the precedence when defining a relationship is the following, from highest to lowest: - 1. *child_of* - 2. *references* - 3. 
`scope_manager.active` (unless *ignore_active_span* is True) - 4. None - - Currently Datadog only supports `child_of` references. - - :param operation_name: name of the operation represented by the new - span from the perspective of the current service. - :param child_of: (optional) a Span or SpanContext instance representing - the parent in a REFERENCE_CHILD_OF Reference. If specified, the - `references` parameter must be omitted. - :param references: (optional) a list of Reference objects that identify - one or more parent SpanContexts. (See the Reference documentation - for detail) - :param tags: an optional dictionary of Span Tags. The caller gives up - ownership of that dictionary, because the Tracer may use it as-is - to avoid extra data copying. - :param start_time: an explicit Span start time as a unix timestamp per - time.time() - :param ignore_active_span: an explicit flag that ignores the current - active `Scope` and creates a root `Span`. - :return: an already-started Span instance. 
- """ - ot_parent = None # 'ot_parent' is more readable than 'child_of' - ot_parent_context = None # the parent span's context - # dd_parent: the child_of to pass to the ddtracer - dd_parent = None # type: Optional[Union[DatadogSpan, DatadogContext]] - - if child_of is not None: - ot_parent = child_of # 'ot_parent' is more readable than 'child_of' - elif references and isinstance(references, list): - # we currently only support child_of relations to one span - ot_parent = references[0].referenced_context - - # - whenever child_of is not None ddspans with parent-child - # relationships will share a ddcontext which maintains a hierarchy of - # ddspans for the execution flow - # - when child_of is a ddspan then the ddtracer uses this ddspan to - # create the child ddspan - # - when child_of is a ddcontext then the ddtracer uses the ddcontext to - # get_current_span() for the parent - if ot_parent is None and not ignore_active_span: - # attempt to get the parent span from the scope manager - scope = self._scope_manager.active - parent_span = getattr(scope, "span", None) - ot_parent_context = getattr(parent_span, "context", None) - - # Compare the active ot and dd spans. Using the one which - # was created later as the parent. 
- active_dd_parent = self._dd_tracer.context_provider.active() - if parent_span and isinstance(active_dd_parent, DatadogSpan): - dd_parent_span = parent_span._dd_span - if active_dd_parent.start_ns >= dd_parent_span.start_ns: - dd_parent = active_dd_parent - else: - dd_parent = dd_parent_span - else: - dd_parent = active_dd_parent - elif ot_parent is not None and isinstance(ot_parent, Span): - # a span is given to use as a parent - ot_parent_context = ot_parent.context - dd_parent = ot_parent._dd_span - elif ot_parent is not None and isinstance(ot_parent, SpanContext): - # a span context is given to use to find the parent ddspan - dd_parent = ot_parent._dd_context - elif ot_parent is None: - # user wants to create a new parent span we don't have to do - # anything - pass - elif ddconfig._raise: - raise TypeError("invalid span configuration given") - - # create a new otspan and ddspan using the ddtracer and associate it - # with the new otspan - ddspan = self._dd_tracer.start_span( - name=operation_name, # type: ignore[arg-type] - child_of=dd_parent, - service=self._service_name, - activate=False, - span_api=SPAN_API_OPENTRACING, - ) - - # set the start time if one is specified - ddspan.start = start_time or ddspan.start - - otspan = Span(self, ot_parent_context, operation_name) # type: ignore[arg-type] - # sync up the OT span with the DD span - otspan._associate_dd_span(ddspan) - - if tags is not None: - for k in tags: - # Make sure we set the tags on the otspan to ensure that the special compatibility tags - # are handled correctly (resource name, span type, sampling priority, etc). - otspan.set_tag(k, tags[k]) - - return otspan - - @property - def active_span(self): - # type: () -> Optional[Span] - """Retrieves the active span from the opentracing scope manager - - Falls back to using the datadog active span if one is not found. This - allows opentracing users to use datadog instrumentation. 
- """ - scope = self._scope_manager.active - if scope: - return scope.span - else: - dd_span = self._dd_tracer.current_span() - ot_span = None # type: Optional[Span] - if dd_span: - ot_span = Span(self, None, dd_span.name) - ot_span._associate_dd_span(dd_span) - return ot_span - - def inject(self, span_context, format, carrier): # noqa: A002 - # type: (SpanContext, str, Dict[str, str]) -> None - """Injects a span context into a carrier. - - :param span_context: span context to inject. - :param format: format to encode the span context with. - :param carrier: the carrier of the encoded span context. - """ - propagator = self._propagators.get(format, None) - - if propagator is None: - raise opentracing.UnsupportedFormatException - - propagator.inject(span_context, carrier) - - def extract(self, format, carrier): # noqa: A002 - # type: (str, Dict[str, str]) -> SpanContext - """Extracts a span context from a carrier. - - :param format: format that the carrier is encoded with. - :param carrier: the carrier to extract from. - """ - propagator = self._propagators.get(format, None) - - if propagator is None: - raise opentracing.UnsupportedFormatException - - # we have to manually activate the returned context from a distributed - # trace - ot_span_ctx = propagator.extract(carrier) - dd_span_ctx = ot_span_ctx._dd_context - self._dd_tracer.context_provider.activate(dd_span_ctx) - return ot_span_ctx - - def get_log_correlation_context(self): - # type: () -> Dict[str, str] - """Retrieves the data used to correlate a log with the current active trace. - Generates a dictionary for custom logging instrumentation including the trace id and - span id of the current active span, as well as the configured service, version, and environment names. - If there is no active span, a dictionary with an empty string for each value will be returned. 
- """ - return self._dd_tracer.get_log_correlation_context() diff --git a/ddtrace/opentracer/utils.py b/ddtrace/opentracer/utils.py deleted file mode 100644 index 886e998d8a3..00000000000 --- a/ddtrace/opentracer/utils.py +++ /dev/null @@ -1,43 +0,0 @@ -from opentracing import ScopeManager # noqa:F401 - -from ddtrace._trace.provider import BaseContextProvider -from ddtrace._trace.provider import DefaultContextProvider - - -# DEV: If `asyncio` or `gevent` are unavailable we do not throw an error, -# `context_provider` will just not be set and we'll get an `AttributeError` instead - - -def get_context_provider_for_scope_manager(scope_manager: ScopeManager) -> BaseContextProvider: - """Returns the context_provider to use with a given scope_manager.""" - - dd_context_provider = DefaultContextProvider() - _patch_scope_manager(scope_manager, dd_context_provider) - - return dd_context_provider - - -def _patch_scope_manager(scope_manager: ScopeManager, context_provider: BaseContextProvider) -> None: - """ - Patches a scope manager so that any time a span is activated - it'll also activate the underlying ddcontext with the underlying - datadog context provider. - - This allows opentracing users to rely on ddtrace.contrib patches and - have them parent correctly. 
- - :param scope_manager: Something that implements `opentracing.ScopeManager` - :param context_provider: Something that implements `datadog.provider.BaseContextProvider` - """ - if getattr(scope_manager, "_datadog_patch", False): - return - scope_manager._datadog_patch = True - - old_method = scope_manager.activate - - def _patched_activate(*args, **kwargs): - otspan = kwargs.get("span", args[0]) - context_provider.activate(otspan._dd_context) - return old_method(*args, **kwargs) - - scope_manager.activate = _patched_activate diff --git a/ddtrace/profiling/_asyncio.py b/ddtrace/profiling/_asyncio.py index 6dcd96b96cb..967c3081d3f 100644 --- a/ddtrace/profiling/_asyncio.py +++ b/ddtrace/profiling/_asyncio.py @@ -11,9 +11,9 @@ from ddtrace.internal._unpatched import _threading as ddtrace_threading from ddtrace.internal.datadog.profiling import stack_v2 from ddtrace.internal.module import ModuleWatchdog +from ddtrace.internal.settings.profiling import config from ddtrace.internal.utils import get_argument_value from ddtrace.internal.wrapping import wrap -from ddtrace.settings.profiling import config from . 
import _threading @@ -93,14 +93,12 @@ def _(asyncio: ModuleType) -> None: elif hasattr(asyncio.Task, "all_tasks"): globals()["all_tasks"] = asyncio.Task.all_tasks - if hasattr(asyncio.Task, "get_name"): - # `get_name` is only available in PythonΒ β‰₯Β 3.8 - globals()["_task_get_name"] = lambda task: task.get_name() + globals()["_task_get_name"] = lambda task: task.get_name() if THREAD_LINK is None: THREAD_LINK = _threading._ThreadLink() - init_stack_v2: bool = config.stack.v2_enabled and stack_v2.is_available + init_stack_v2: bool = config.stack.enabled and stack_v2.is_available @partial(wrap, sys.modules["asyncio.events"].BaseDefaultEventLoopPolicy.set_event_loop) def _(f, args, kwargs): diff --git a/ddtrace/profiling/_threading.pyx b/ddtrace/profiling/_threading.pyx index 2a20b29b678..70896332424 100644 --- a/ddtrace/profiling/_threading.pyx +++ b/ddtrace/profiling/_threading.pyx @@ -55,14 +55,7 @@ cpdef get_thread_native_id(thread_id): if thread is None: return thread_id - try: - # We prioritize using native ids since we expect them to be surely unique for a program. This is less true - # for hashes since they are relative to the memory address which can easily be the same across different - # objects. 
- return thread.native_id - except AttributeError: - # PythonΒ <Β 3.8 - return hash(thread) + return thread.native_id # cython does not play well with mypy diff --git a/ddtrace/profiling/collector/__init__.py b/ddtrace/profiling/collector/__init__.py index 4b066483460..26a842a112f 100644 --- a/ddtrace/profiling/collector/__init__.py +++ b/ddtrace/profiling/collector/__init__.py @@ -1,9 +1,8 @@ # -*- encoding: utf-8 -*- import typing -from ddtrace.internal import periodic from ddtrace.internal import service -from ddtrace.settings.profiling import config +from ddtrace.internal.settings.profiling import config class CollectorError(Exception): @@ -25,20 +24,6 @@ def snapshot() -> None: """Take a snapshot of collected data, to be exported.""" -class PeriodicCollector(Collector, periodic.PeriodicService): - """A collector that needs to run periodically.""" - - __slots__ = () - - def periodic(self) -> None: - # This is to simply override periodic.PeriodicService.periodic() - self.collect() - - def collect(self) -> None: - """Collect the actual data.""" - raise NotImplementedError - - class CaptureSampler(object): """Determine the events that should be captured based on a sampling percentage.""" diff --git a/ddtrace/profiling/collector/_lock.py b/ddtrace/profiling/collector/_lock.py index 863e871ff56..dba744fa858 100644 --- a/ddtrace/profiling/collector/_lock.py +++ b/ddtrace/profiling/collector/_lock.py @@ -20,12 +20,12 @@ import wrapt from ddtrace.internal.datadog.profiling import ddup +from ddtrace.internal.settings.profiling import config from ddtrace.profiling import _threading from ddtrace.profiling import collector from ddtrace.profiling.collector import _task from ddtrace.profiling.collector import _traceback from ddtrace.profiling.event import DDFrame -from ddtrace.settings.profiling import config from ddtrace.trace import Tracer diff --git a/ddtrace/profiling/collector/_task.pyi b/ddtrace/profiling/collector/_task.pyi index f26c5d69b7d..6b1a923724c 100644 --- 
a/ddtrace/profiling/collector/_task.pyi +++ b/ddtrace/profiling/collector/_task.pyi @@ -4,4 +4,3 @@ import typing def get_task( thread_id: int, ) -> typing.Tuple[typing.Optional[int], typing.Optional[str], typing.Optional[types.FrameType]]: ... -def list_tasks() -> typing.List[typing.Tuple[int, str, types.FrameType]]: ... diff --git a/ddtrace/profiling/collector/_task.pyx b/ddtrace/profiling/collector/_task.pyx index b7939d908d8..f91f43a3019 100644 --- a/ddtrace/profiling/collector/_task.pyx +++ b/ddtrace/profiling/collector/_task.pyx @@ -1,70 +1,14 @@ -import sys -from types import ModuleType -import weakref - from wrapt.importer import when_imported from .. import _asyncio -from .. import _threading -from ddtrace.settings.profiling import config - - -if (is_stack_v2 := config.stack.v2_enabled): - - @when_imported("gevent") - def _(gevent): - from .. import _gevent - - _gevent.patch() - -else: - _gevent_tracer = None - - - @when_imported("gevent") - def install_greenlet_tracer(gevent): - global _gevent_tracer - - try: - import gevent.hub - import gevent.thread - from greenlet import getcurrent - from greenlet import greenlet - from greenlet import settrace - except ImportError: - # We don't seem to have the required dependencies. - return - - class DDGreenletTracer(object): - def __init__(self, gevent): - # type: (ModuleType) -> None - self.gevent = gevent - - self.previous_trace_function = settrace(self) - self.greenlets = weakref.WeakValueDictionary() - self.active_greenlet = getcurrent() - self._store_greenlet(self.active_greenlet) - def _store_greenlet( - self, - greenlet, # type: greenlet.greenlet - ): - # type: (...) -> None - self.greenlets[gevent.thread.get_ident(greenlet)] = greenlet - def __call__(self, event, args): - if event in ('switch', 'throw'): - # Do not trace gevent Hub: the Hub is a greenlet but we want to know the latest active greenlet *before* - # the application yielded back to the Hub. 
There's no point showing the Hub most of the time to the - # users as that does not give any information about user code. - if not isinstance(args[1], gevent.hub.Hub): - self.active_greenlet = args[1] - self._store_greenlet(args[1]) +@when_imported("gevent") +def _(gevent): + from .. import _gevent - if self.previous_trace_function is not None: - self.previous_trace_function(event, args) + _gevent.patch() - _gevent_tracer = DDGreenletTracer(gevent) cdef _asyncio_task_get_frame(task): @@ -96,55 +40,4 @@ cpdef get_task(thread_id): task_name = _asyncio._task_get_name(task) frame = _asyncio_task_get_frame(task) - if not is_stack_v2: - # legacy gevent greenlet support: - # - we only support tracing tasks in the greenlets run in the MainThread. - # - if both gevent and asyncio are in use (!) we only return asyncio - if task_id is None and _gevent_tracer is not None: - gevent_thread = _gevent_tracer.gevent.thread - task_id = gevent_thread.get_ident(_gevent_tracer.active_greenlet) - # Greenlets might be started as Thread in gevent - task_name = _threading.get_thread_name(task_id) - frame = _gevent_tracer.active_greenlet.gr_frame - return task_id, task_name, frame - - -cpdef list_tasks(thread_id): - # type: (...) -> typing.List[typing.Tuple[int, str, types.FrameType]] - """Return the list of running tasks. - - This is computed for gevent by taking the list of existing threading.Thread object and removing if any real OS - thread that might be running. - - :return: [(task_id, task_name, task_frame), ...]""" - - tasks = [] - - if not is_stack_v2 and _gevent_tracer is not None: - if type(_threading.get_thread_by_id(thread_id)).__name__.endswith("_MainThread"): - # Under normal circumstances, the Hub is running in the main thread. - # Python will only ever have a single instance of a _MainThread - # class, so if we find it we attribute all the greenlets to it. 
- tasks.extend( - [ - ( - greenlet_id, - _threading.get_thread_name(greenlet_id), - greenlet.gr_frame - ) - for greenlet_id, greenlet in dict(_gevent_tracer.greenlets).items() - if not greenlet.dead - ] - ) - - loop = _asyncio.get_event_loop_for_thread(thread_id) - if loop is not None: - tasks.extend([ - (id(task), - _asyncio._task_get_name(task), - _asyncio_task_get_frame(task)) - for task in _asyncio.all_tasks(loop) - ]) - - return tasks diff --git a/ddtrace/profiling/collector/_traceback.pyi b/ddtrace/profiling/collector/_traceback.pyi index d962ed5c795..7a25dc2d985 100644 --- a/ddtrace/profiling/collector/_traceback.pyi +++ b/ddtrace/profiling/collector/_traceback.pyi @@ -3,7 +3,4 @@ import typing from .. import event -def traceback_to_frames( - traceback: types.TracebackType, max_nframes: int -) -> typing.Tuple[typing.List[event.DDFrame], int]: ... def pyframe_to_frames(frame: types.FrameType, max_nframes: int) -> typing.Tuple[typing.List[event.DDFrame], int]: ... diff --git a/ddtrace/profiling/collector/_traceback.pyx b/ddtrace/profiling/collector/_traceback.pyx index a562b6e7629..1c9d1cb4321 100644 --- a/ddtrace/profiling/collector/_traceback.pyx +++ b/ddtrace/profiling/collector/_traceback.pyx @@ -33,27 +33,6 @@ cpdef _extract_class_name(frame): return "" -cpdef traceback_to_frames(traceback, max_nframes): - """Serialize a Python traceback object into a list of tuple of (filename, lineno, function_name). - - :param traceback: The traceback object to serialize. - :param max_nframes: The maximum number of frames to return. - :return: The serialized frames and the number of frames present in the original traceback. 
- """ - tb = traceback - frames = [] - nframes = 0 - while tb is not None: - if nframes < max_nframes: - frame = tb.tb_frame - code = frame.f_code - lineno = 0 if frame.f_lineno is None else frame.f_lineno - frames.insert(0, DDFrame(code.co_filename, lineno, code.co_name, _extract_class_name(frame))) - nframes += 1 - tb = tb.tb_next - return frames, nframes - - cpdef pyframe_to_frames(frame, max_nframes): """Convert a Python frame to a list of frames. diff --git a/ddtrace/profiling/collector/memalloc.py b/ddtrace/profiling/collector/memalloc.py index 7cef93806d7..04b53fc7cf2 100644 --- a/ddtrace/profiling/collector/memalloc.py +++ b/ddtrace/profiling/collector/memalloc.py @@ -21,9 +21,9 @@ _memalloc = None # type: ignore[assignment] from ddtrace.internal.datadog.profiling import ddup +from ddtrace.internal.settings.profiling import config from ddtrace.profiling import _threading from ddtrace.profiling import collector -from ddtrace.settings.profiling import config LOG = logging.getLogger(__name__) diff --git a/ddtrace/profiling/collector/pytorch.py b/ddtrace/profiling/collector/pytorch.py index 731c92ebb24..34d8736882e 100644 --- a/ddtrace/profiling/collector/pytorch.py +++ b/ddtrace/profiling/collector/pytorch.py @@ -8,9 +8,9 @@ import wrapt from ddtrace.internal.datadog.profiling import ddup +from ddtrace.internal.settings.profiling import config from ddtrace.profiling import _threading from ddtrace.profiling import collector -from ddtrace.settings.profiling import config from ddtrace.trace import Tracer diff --git a/ddtrace/profiling/collector/stack.py b/ddtrace/profiling/collector/stack.py new file mode 100644 index 00000000000..b6f4f77e82b --- /dev/null +++ b/ddtrace/profiling/collector/stack.py @@ -0,0 +1,72 @@ +"""CPU profiling collector.""" + +from __future__ import absolute_import + +import logging +import typing + +from ddtrace.internal import core +from ddtrace.internal.datadog.profiling import stack_v2 +from ddtrace.internal.settings.profiling import 
config +from ddtrace.profiling import collector +from ddtrace.profiling.collector import threading +from ddtrace.trace import Tracer + + +LOG = logging.getLogger(__name__) + + +class StackCollector(collector.Collector): + """Execution stacks collector.""" + + __slots__ = ( + "nframes", + "tracer", + ) + + def __init__(self, nframes: int = config.max_frames, tracer: typing.Optional[Tracer] = None): + super().__init__() + + self.nframes: int = nframes + self.tracer: typing.Optional[Tracer] = tracer + + def __repr__(self): + class_name = self.__class__.__name__ + attrs = {k: v for k, v in self.__dict__.items() if not k.startswith("_")} + attrs_str = ", ".join(f"{k}={v!r}" for k, v in attrs.items()) + + slot_attrs = {slot: getattr(self, slot) for slot in self.__slots__ if not slot.startswith("_")} + slot_attrs_str = ", ".join(f"{k}={v!r}" for k, v in slot_attrs.items()) + + return f"{class_name}({attrs_str}, {slot_attrs_str})" + + def _init(self): + # type: (...) -> None + if self.tracer is not None: + link_span = stack_v2.link_span + core.on("ddtrace.context_provider.activate", link_span) + + # stack v2 requires us to patch the Threading module. It's possible to do this from the stack v2 code + # itself, but it's a little bit fiddly and it's easier to make it correct here. + # TODO take the `threading` import out of here and just handle it in v2 startup + threading.init_stack_v2() + stack_v2.set_adaptive_sampling(config.stack.v2_adaptive_sampling) + stack_v2.start() + + def _start_service(self): + # type: (...) -> None + # This is split in its own function to ease testing + LOG.debug("Profiling StackCollector starting") + self._init() + LOG.debug("Profiling StackCollector started") + + def _stop_service(self): + # type: (...) 
-> None + LOG.debug("Profiling StackCollector stopping") + if self.tracer is not None: + link_span = stack_v2.link_span + core.reset_listeners("ddtrace.context_provider.activate", link_span) + LOG.debug("Profiling StackCollector stopped") + + # Also tell the native thread running the v2 sampler to stop, if needed + stack_v2.stop() diff --git a/ddtrace/profiling/collector/stack.pyi b/ddtrace/profiling/collector/stack.pyi deleted file mode 100644 index f99d134d52c..00000000000 --- a/ddtrace/profiling/collector/stack.pyi +++ /dev/null @@ -1,7 +0,0 @@ -import typing - -from ddtrace.trace import Tracer -from ddtrace.profiling import collector - -class StackCollector(collector.PeriodicCollector): - tracer: typing.Optional[Tracer] diff --git a/ddtrace/profiling/collector/stack.pyx b/ddtrace/profiling/collector/stack.pyx deleted file mode 100644 index 78fb0efd26a..00000000000 --- a/ddtrace/profiling/collector/stack.pyx +++ /dev/null @@ -1,521 +0,0 @@ -"""CPU profiling collector.""" -from __future__ import absolute_import - -from itertools import chain -import logging -import sys -import time -import typing - -from ddtrace.internal._unpatched import _threading as ddtrace_threading -from ddtrace._trace import context -from ddtrace._trace import span as ddspan -from ddtrace.trace import Tracer -from ddtrace.internal import core -from ddtrace.internal._threads import periodic_threads -from ddtrace.internal.datadog.profiling import ddup -from ddtrace.internal.datadog.profiling import stack_v2 -from ddtrace.profiling import _threading -from ddtrace.profiling import collector -from ddtrace.profiling.collector import _task -from ddtrace.profiling.collector import _traceback -from ddtrace.profiling.collector import threading -from ddtrace.settings.profiling import config - - -LOG = logging.getLogger(__name__) - - -# These are special features that might not be available depending on your Python version and platform -FEATURES = { - "cpu-time": False, - "stack-exceptions": True, - 
"transparent_events": False, -} - - -IF UNAME_SYSNAME == "Linux": - FEATURES['cpu-time'] = True - - from posix.time cimport clock_gettime - from posix.time cimport timespec - from posix.types cimport clockid_t - - from cpython.exc cimport PyErr_SetFromErrno - - cdef extern from "": - # POSIX says this might be a struct, but CPython relies on it being an unsigned long. - # We should be defining pthread_t here like this: - # ctypedef unsigned long pthread_t - # but e.g. musl libc defines pthread_t as a struct __pthread * which breaks the arithmetic Cython - # wants to do. - # We pay this with a warning at compilation time, but it works anyhow. - int pthread_getcpuclockid(unsigned long thread, clockid_t *clock_id) - - cdef p_pthread_getcpuclockid(tid): - cdef clockid_t clock_id - if pthread_getcpuclockid(tid, &clock_id) == 0: - return clock_id - PyErr_SetFromErrno(OSError) - - # Python < 3.3 does not have `time.clock_gettime` - cdef p_clock_gettime_ns(clk_id): - cdef timespec tp - if clock_gettime(clk_id, &tp) == 0: - return int(tp.tv_nsec + tp.tv_sec * 10e8) - PyErr_SetFromErrno(OSError) - - cdef class _ThreadTime(object): - cdef dict _last_thread_time - - def __init__(self): - # This uses a tuple of (pthread_id, thread_native_id) as the key to identify the thread: you'd think using - # the pthread_t id would be enough, but the glibc reuses the id. - self._last_thread_time = {} - - # Only used in tests - def _get_last_thread_time(self): - return dict(self._last_thread_time) - - def __call__(self, pthread_ids): - cdef list cpu_times = [] - for pthread_id in pthread_ids: - # TODO: Use QueryThreadCycleTime on Windows? - # ⚠ WARNING ⚠ - # `pthread_getcpuclockid` can make Python segfault if the thread is does not exist anymore. - # In order avoid this, this function must be called with the GIL being held the entire time. 
- # This is why this whole file is compiled down to C: we make sure we never release the GIL between - # calling sys._current_frames() and pthread_getcpuclockid, making sure no thread disappeared. - try: - cpu_time = p_clock_gettime_ns(p_pthread_getcpuclockid(pthread_id)) - except OSError: - # Just in case it fails, set it to 0 - # (Note that glibc never fails, it segfaults instead) - cpu_time = 0 - cpu_times.append(cpu_time) - - cdef dict pthread_cpu_time = {} - - # We should now be safe doing more Pythonic stuff and maybe releasing the GIL - for pthread_id, cpu_time in zip(pthread_ids, cpu_times): - thread_native_id = _threading.get_thread_native_id(pthread_id) - key = pthread_id, thread_native_id - # Do a max(0, …) here just in case the result is < 0: - # This should never happen, but it can happen if the one chance in a billion happens: - # - A new thread has been created and has the same native id and the same pthread_id. - # - We got an OSError with clock_gettime_ns - pthread_cpu_time[key] = max(0, cpu_time - self._last_thread_time.get(key, cpu_time)) - self._last_thread_time[key] = cpu_time - - # Clear cache - keys = list(pthread_cpu_time.keys()) - for key in list(self._last_thread_time.keys()): - if key not in keys: - del self._last_thread_time[key] - - return pthread_cpu_time -ELSE: - from libc cimport stdint - - cdef class _ThreadTime(object): - cdef stdint.int64_t _last_process_time - - def __init__(self): - self._last_process_time = time.process_time_ns() - - def __call__(self, pthread_ids): - current_process_time = time.process_time_ns() - cpu_time = current_process_time - self._last_process_time - self._last_process_time = current_process_time - # Spread the consumed CPU time on all threads. 
- # It's not fair, but we have no clue which CPU used more unless we can use `pthread_getcpuclockid` - # Check that we don't have zero thread β€” _might_ very rarely happen at shutdown - nb_threads = len(pthread_ids) - if nb_threads == 0: - cpu_time = 0 - else: - cpu_time //= nb_threads - return { - (pthread_id, _threading.get_thread_native_id(pthread_id)): cpu_time - for pthread_id in pthread_ids - } - - -from cpython.object cimport PyObject -from cpython.ref cimport Py_DECREF - -cdef extern from "": - PyObject* _PyThread_CurrentFrames() - -IF 0x030b0000 <= PY_VERSION_HEX < 0x30d0000: - cdef extern from "": - PyObject* _PyThread_CurrentExceptions() - -ELIF UNAME_SYSNAME != "Windows": - from cpython cimport PyInterpreterState - from cpython cimport PyInterpreterState_Head - from cpython cimport PyInterpreterState_Next - from cpython cimport PyInterpreterState_ThreadHead - from cpython cimport PyThreadState_Next - from cpython.pythread cimport PY_LOCK_ACQUIRED - from cpython.pythread cimport PyThread_acquire_lock - from cpython.pythread cimport PyThread_release_lock - from cpython.pythread cimport PyThread_type_lock - from cpython.pythread cimport WAIT_LOCK - - cdef extern from "": - # This one is provided as an opaque struct from Cython's cpython/pystate.pxd, - # but we need to access some of its fields so we redefine it here. 
- ctypedef struct PyThreadState: - unsigned long thread_id - PyObject* frame - - _PyErr_StackItem * _PyErr_GetTopmostException(PyThreadState *tstate) - - ctypedef struct _PyErr_StackItem: - PyObject* exc_type - PyObject* exc_value - PyObject* exc_traceback - - PyObject* PyException_GetTraceback(PyObject* exc) - PyObject* Py_TYPE(PyObject* ob) - - IF PY_VERSION_HEX >= 0x03080000: - # Python 3.8 - cdef extern from "": - - cdef struct pyinterpreters: - PyThread_type_lock mutex - - ctypedef struct _PyRuntimeState: - pyinterpreters interpreters - - cdef extern _PyRuntimeState _PyRuntime - - IF PY_VERSION_HEX >= 0x03090000: - # Needed for accessing _PyGC_FINALIZED when we build with -DPy_BUILD_CORE - cdef extern from "": - pass - cdef extern from "": - PyObject* PyThreadState_GetFrame(PyThreadState* tstate) -ELSE: - FEATURES['stack-exceptions'] = False - - -cdef collect_threads(thread_id_ignore_list, thread_time, thread_span_links) with gil: - cdef dict running_threads = _PyThread_CurrentFrames() - Py_DECREF(running_threads) - - IF PY_VERSION_HEX >= 0x030b0000: - IF PY_VERSION_HEX >= 0x030d0000: - current_exceptions = sys._current_exceptions() - ELSE: - cdef dict current_exceptions = _PyThread_CurrentExceptions() - Py_DECREF(current_exceptions) - - for thread_id, exc_info in current_exceptions.items(): - if exc_info is None: - continue - IF PY_VERSION_HEX >= 0x030c0000: - exc_type = type(exc_info) - exc_traceback = getattr(exc_info, "__traceback__", None) - ELSE: - exc_type, exc_value, exc_traceback = exc_info - current_exceptions[thread_id] = exc_type, exc_traceback - - ELIF UNAME_SYSNAME != "Windows": - cdef PyInterpreterState* interp - cdef PyThreadState* tstate - cdef _PyErr_StackItem* exc_info - cdef PyThread_type_lock lmutex = _PyRuntime.interpreters.mutex - cdef PyObject* exc_type - cdef PyObject* exc_tb - cdef dict current_exceptions = {} - - # This is an internal lock but we do need it. 
- # See https://bugs.python.org/issue1021318 - if PyThread_acquire_lock(lmutex, WAIT_LOCK) == PY_LOCK_ACQUIRED: - # Do not try to do anything fancy here: - # Even calling print() will deadlock the program has it will try - # to lock the GIL and somehow touching this mutex. - try: - interp = PyInterpreterState_Head() - - while interp: - tstate = PyInterpreterState_ThreadHead(interp) - while tstate: - exc_info = _PyErr_GetTopmostException(tstate) - if exc_info and exc_info.exc_type and exc_info.exc_traceback: - current_exceptions[tstate.thread_id] = (exc_info.exc_type, exc_info.exc_traceback) - tstate = PyThreadState_Next(tstate) - - interp = PyInterpreterState_Next(interp) - finally: - PyThread_release_lock(lmutex) - ELSE: - cdef dict current_exceptions = {} - - cdef dict cpu_times = thread_time(running_threads.keys()) - - return tuple( - ( - pthread_id, - native_thread_id, - _threading.get_thread_name(pthread_id), - running_threads[pthread_id], - current_exceptions.get(pthread_id), - thread_span_links.get_active_span_from_thread_id(pthread_id) if thread_span_links else None, - cpu_time, - ) - for (pthread_id, native_thread_id), cpu_time in cpu_times.items() - if pthread_id not in thread_id_ignore_list - ) - - -cdef stack_collect(ignore_profiler, thread_time, max_nframes, interval, wall_time, thread_span_links, collect_endpoint, now_ns = 0): - # Do not use `threading.enumerate` to not mess with locking (gevent!) - # Also collect the native threads, that are not registered with the built-in - # threading module, to keep backward compatibility with the previous - # pure-Python implementation of periodic threads. 
- thread_id_ignore_list = { - thread_id - for thread_id, thread in chain(periodic_threads.items(), ddtrace_threading._active.items()) - if getattr(thread, "_ddtrace_profiling_ignore", False) - } if ignore_profiler else set() - - running_threads = collect_threads(thread_id_ignore_list, thread_time, thread_span_links) - - if thread_span_links: - # FIXME also use native thread id - thread_span_links.clear_threads(set(thread[0] for thread in running_threads)) - - stack_events = [] - exc_events = [] - - for thread_id, thread_native_id, thread_name, thread_pyframes, exception, span, cpu_time in running_threads: - if thread_name is None: - # A Python thread with no name is likely still initialising so we - # ignore it to avoid reporting potentially misleading data. - # Effectively we would be discarding a negligible number of samples. - continue - - tasks = _task.list_tasks(thread_id) - - # Inject wall time for all running tasks - for task_id, task_name, task_pyframes in tasks: - - # Ignore tasks with no frames; nothing to show. 
- if task_pyframes is None: - continue - - frames, nframes = _traceback.pyframe_to_frames(task_pyframes, max_nframes) - - if nframes: - handle = ddup.SampleHandle() - handle.push_monotonic_ns(now_ns) - handle.push_walltime(wall_time, 1) - handle.push_threadinfo(thread_id, thread_native_id, thread_name) - handle.push_task_id(task_id) - handle.push_task_name(task_name) - handle.push_class_name(frames[0].class_name) - for frame in frames: - handle.push_frame(frame.function_name, frame.file_name, 0, frame.lineno) - handle.flush_sample() - - frames, nframes = _traceback.pyframe_to_frames(thread_pyframes, max_nframes) - - if nframes: - handle = ddup.SampleHandle() - handle.push_monotonic_ns(now_ns) - handle.push_cputime( cpu_time, 1) - handle.push_walltime( wall_time, 1) - handle.push_threadinfo(thread_id, thread_native_id, thread_name) - handle.push_class_name(frames[0].class_name) - for frame in frames: - handle.push_frame(frame.function_name, frame.file_name, 0, frame.lineno) - handle.push_span(span) - handle.flush_sample() - - if exception is not None: - exc_type, exc_traceback = exception - - frames, nframes = _traceback.traceback_to_frames(exc_traceback, max_nframes) - - if nframes: - handle = ddup.SampleHandle() - handle.push_monotonic_ns(now_ns) - handle.push_threadinfo(thread_id, thread_native_id, thread_name) - handle.push_exceptioninfo(exc_type, 1) - handle.push_class_name(frames[0].class_name) - for frame in frames: - handle.push_frame(frame.function_name, frame.file_name, 0, frame.lineno) - handle.push_span(span) - handle.flush_sample() - - return stack_events, exc_events - - -if typing.TYPE_CHECKING: - _thread_span_links_base = _threading._ThreadLink[ddspan.Span] -else: - _thread_span_links_base = _threading._ThreadLink - - -class _ThreadSpanLinks(_thread_span_links_base): - - __slots__ = () - - def link_span( - self, - span # type: typing.Optional[typing.Union[context.Context, ddspan.Span]] - ): - # type: (...) 
-> None - """Link a span to its running environment. - - Track threads, tasks, etc. - """ - # Since we're going to iterate over the set, make sure it's locked - if isinstance(span, ddspan.Span): - self.link_object(span) - - def get_active_span_from_thread_id( - self, - thread_id # type: int - ): - # type: (...) -> typing.Optional[ddspan.Span] - """Return the latest active span for a thread. - - :param thread_id: The thread id. - :return: A set with the active spans. - """ - active_span = self.get_object(thread_id) - if active_span is not None and not active_span.finished: - return active_span - return None - - -def _default_min_interval_time(): - return sys.getswitchinterval() * 2 - - -class StackCollector(collector.PeriodicCollector): - """Execution stacks collector.""" - - __slots__ = ( - "_real_thread", - "min_interval_time", - "max_time_usage_pct", - "nframes", - "ignore_profiler", - "endpoint_collection_enabled", - "tracer", - "_thread_time", - "_last_wall_time", - "_thread_span_links", - "_stack_collector_v2_enabled", - ) - - def __init__(self, - max_time_usage_pct: float = config.max_time_usage_pct, - nframes: int = config.max_frames, - ignore_profiler: bool = config.ignore_profiler, - endpoint_collection_enabled: typing.Optional[bool] = None, - tracer: typing.Optional[Tracer] = None, - _stack_collector_v2_enabled: bool = config.stack.v2_enabled): - super().__init__(interval= _default_min_interval_time()) - if max_time_usage_pct <= 0 or max_time_usage_pct > 100: - raise ValueError("Max time usage percent must be greater than 0 and smaller or equal to 100") - - # This need to be a real OS thread in order to catch - self._real_thread: bool = True - self.min_interval_time: float = _default_min_interval_time() - - self.max_time_usage_pct: float = max_time_usage_pct - self.nframes: int = nframes - self.ignore_profiler: bool = ignore_profiler - self.endpoint_collection_enabled: typing.Optional[bool] = endpoint_collection_enabled - self.tracer: 
typing.Optional[Tracer] = tracer - self._thread_time: typing.Optional[_ThreadTime] = None - self._last_wall_time: int = 0 # Placeholder for initial value - self._thread_span_links: typing.Optional[_ThreadSpanLinks] = None - self._stack_collector_v2_enabled: bool = _stack_collector_v2_enabled - - - def __repr__(self): - class_name = self.__class__.__name__ - attrs = {k: v for k, v in self.__dict__.items() if not k.startswith("_")} - attrs_str = ", ".join(f"{k}={v!r}" for k, v in attrs.items()) - - slot_attrs = {slot: getattr(self, slot) for slot in self.__slots__ if not slot.startswith("_")} - slot_attrs_str = ", ".join(f"{k}={v!r}" for k, v in slot_attrs.items()) - - return f"{class_name}({attrs_str}, {slot_attrs_str})" - - - def _init(self): - # type: (...) -> None - self._thread_time = _ThreadTime() - self._last_wall_time = time.monotonic_ns() - if self.tracer is not None: - self._thread_span_links = _ThreadSpanLinks() - link_span = stack_v2.link_span if self._stack_collector_v2_enabled else self._thread_span_links.link_span - core.on("ddtrace.context_provider.activate", link_span) - - # If stack v2 is enabled, then use the v2 sampler - if self._stack_collector_v2_enabled: - # stack v2 requires us to patch the Threading module. It's possible to do this from the stack v2 code - # itself, but it's a little bit fiddly and it's easier to make it correct here. - # TODO take the `threading` import out of here and just handle it in v2 startup - threading.init_stack_v2() - stack_v2.set_adaptive_sampling(config.stack.v2_adaptive_sampling) - stack_v2.start() - - def _start_service(self): - # type: (...) -> None - # This is split in its own function to ease testing - LOG.debug("Profiling StackCollector starting") - self._init() - super(StackCollector, self)._start_service() - LOG.debug("Profiling StackCollector started") - - def _stop_service(self): - # type: (...) 
-> None - LOG.debug("Profiling StackCollector stopping") - super(StackCollector, self)._stop_service() - if self.tracer is not None: - link_span = stack_v2.link_span if self._stack_collector_v2_enabled else self._thread_span_links.link_span - core.reset_listeners("ddtrace.context_provider.activate", link_span) - LOG.debug("Profiling StackCollector stopped") - - # Also tell the native thread running the v2 sampler to stop, if needed - if self._stack_collector_v2_enabled: - stack_v2.stop() - - def _compute_new_interval(self, used_wall_time_ns): - interval = (used_wall_time_ns / (self.max_time_usage_pct / 100.0)) - used_wall_time_ns - return max(interval / 1e9, self.min_interval_time) - - def collect(self): - # Compute wall time - now = time.monotonic_ns() - wall_time = now - self._last_wall_time - self._last_wall_time = now - all_events = [] - - # If the stack v2 collector is enabled, then do not collect the stack samples here. - if not self._stack_collector_v2_enabled: - all_events = stack_collect( - self.ignore_profiler, - self._thread_time, - self.nframes, - self.interval, - wall_time, - self._thread_span_links, - self.endpoint_collection_enabled, - now_ns=now, - ) - - used_wall_time_ns = time.monotonic_ns() - now - self.interval = self._compute_new_interval(used_wall_time_ns) - - return all_events diff --git a/ddtrace/profiling/collector/threading.py b/ddtrace/profiling/collector/threading.py index 3d346cdcf87..dc1f1404546 100644 --- a/ddtrace/profiling/collector/threading.py +++ b/ddtrace/profiling/collector/threading.py @@ -5,7 +5,7 @@ from ddtrace.internal._unpatched import _threading as ddtrace_threading from ddtrace.internal.datadog.profiling import stack_v2 -from ddtrace.settings.profiling import config +from ddtrace.internal.settings.profiling import config from . 
import _lock @@ -50,7 +50,7 @@ def _set_patch_target( # Also patch threading.Thread so echion can track thread lifetimes def init_stack_v2() -> None: - if config.stack.v2_enabled and stack_v2.is_available: + if config.stack.enabled and stack_v2.is_available: _thread_set_native_id = ddtrace_threading.Thread._set_native_id # type: ignore[attr-defined] _thread_bootstrap_inner = ddtrace_threading.Thread._bootstrap_inner # type: ignore[attr-defined] diff --git a/ddtrace/profiling/profiler.py b/ddtrace/profiling/profiler.py index b4ac6c3e79c..4fe470f54d9 100644 --- a/ddtrace/profiling/profiler.py +++ b/ddtrace/profiling/profiler.py @@ -16,6 +16,8 @@ from ddtrace.internal import uwsgi from ddtrace.internal.datadog.profiling import ddup from ddtrace.internal.module import ModuleWatchdog +from ddtrace.internal.settings.profiling import config as profiling_config +from ddtrace.internal.settings.profiling import config_str from ddtrace.internal.telemetry import telemetry_writer from ddtrace.internal.telemetry.constants import TELEMETRY_APM_PRODUCT from ddtrace.profiling import collector @@ -25,8 +27,6 @@ from ddtrace.profiling.collector import pytorch from ddtrace.profiling.collector import stack from ddtrace.profiling.collector import threading -from ddtrace.settings.profiling import config as profiling_config -from ddtrace.settings.profiling import config_str # TODO(vlad): add type annotations @@ -124,7 +124,6 @@ def __init__( api_key: Optional[str] = None, _memory_collector_enabled: bool = profiling_config.memory.enabled, _stack_collector_enabled: bool = profiling_config.stack.enabled, - _stack_v2_enabled: bool = profiling_config.stack.v2_enabled, _lock_collector_enabled: bool = profiling_config.lock.enabled, _pytorch_collector_enabled: bool = profiling_config.pytorch.enabled, enable_code_provenance: bool = profiling_config.code_provenance, @@ -140,7 +139,6 @@ def __init__( self.api_key: Optional[str] = api_key if api_key is not None else config._dd_api_key 
self._memory_collector_enabled: bool = _memory_collector_enabled self._stack_collector_enabled: bool = _stack_collector_enabled - self._stack_v2_enabled: bool = _stack_v2_enabled self._lock_collector_enabled: bool = _lock_collector_enabled self._pytorch_collector_enabled: bool = _pytorch_collector_enabled self.enable_code_provenance: bool = enable_code_provenance @@ -193,12 +191,7 @@ def __post_init__(self): if self._stack_collector_enabled: LOG.debug("Profiling collector (stack) enabled") try: - self._collectors.append( - stack.StackCollector( - tracer=self.tracer, - endpoint_collection_enabled=self.endpoint_collection_enabled, - ) - ) + self._collectors.append(stack.StackCollector(tracer=self.tracer)) LOG.debug("Profiling collector (stack) initialized") except Exception: LOG.error("Failed to start stack collector, disabling.", exc_info=True) diff --git a/ddtrace/profiling/scheduler.py b/ddtrace/profiling/scheduler.py index 228b6cc7675..35af121e487 100644 --- a/ddtrace/profiling/scheduler.py +++ b/ddtrace/profiling/scheduler.py @@ -8,7 +8,7 @@ import ddtrace from ddtrace.internal import periodic from ddtrace.internal.datadog.profiling import ddup -from ddtrace.settings.profiling import config +from ddtrace.internal.settings.profiling import config from ddtrace.trace import Tracer diff --git a/ddtrace/propagation/_database_monitoring.py b/ddtrace/propagation/_database_monitoring.py index 9d3c6cf594c..12cc5a3335d 100644 --- a/ddtrace/propagation/_database_monitoring.py +++ b/ddtrace/propagation/_database_monitoring.py @@ -6,13 +6,13 @@ from ddtrace import config as dd_config from ddtrace.internal import core from ddtrace.internal.logger import get_logger -from ddtrace.settings.peer_service import PeerServiceConfig +from ddtrace.internal.settings.peer_service import PeerServiceConfig from ddtrace.vendor.sqlcommenter import generate_sql_comment as _generate_sql_comment from ..internal import compat +from ..internal.settings._database_monitoring import dbm_config from 
..internal.utils import get_argument_value from ..internal.utils import set_argument_value -from ..settings._database_monitoring import dbm_config if TYPE_CHECKING: diff --git a/ddtrace/propagation/http.py b/ddtrace/propagation/http.py index 5a0101d351e..08bacd5451d 100644 --- a/ddtrace/propagation/http.py +++ b/ddtrace/propagation/http.py @@ -1,15 +1,12 @@ import itertools import re -from typing import Any # noqa:F401 from typing import Dict # noqa:F401 from typing import FrozenSet # noqa:F401 from typing import List # noqa:F401 from typing import Literal # noqa:F401 from typing import Optional # noqa:F401 -from typing import Text # noqa:F401 from typing import Tuple # noqa:F401 from typing import Union -from typing import cast # noqa:F401 import urllib.parse from ddtrace._trace._span_link import SpanLink @@ -17,15 +14,12 @@ from ddtrace._trace.span import Span # noqa:F401 from ddtrace._trace.span import _get_64_highest_order_bits_as_hex from ddtrace._trace.span import _get_64_lowest_order_bits_as_int -from ddtrace._trace.types import _MetaDictType from ddtrace.appsec._constants import APPSEC from ddtrace.internal import core +from ddtrace.internal.settings._config import config +from ddtrace.internal.settings.asm import config as asm_config from ddtrace.internal.telemetry import telemetry_writer from ddtrace.internal.telemetry.constants import TELEMETRY_NAMESPACE -from ddtrace.internal.utils.deprecations import DDTraceDeprecationWarning -from ddtrace.settings._config import config -from ddtrace.settings.asm import config as asm_config -from ddtrace.vendor.debtcollector import deprecate from ..constants import AUTO_KEEP from ..constants import AUTO_REJECT @@ -284,11 +278,8 @@ def _inject(span_context, headers): # Only propagate trace tags which means ignoring the _dd.origin tags_to_encode = { - # DEV: Context._meta is a _MetaDictType but we need Dict[str, str] - ensure_text(k): ensure_text(v) - for k, v in span_context._meta.items() - if 
_DatadogMultiHeader._is_valid_datadog_trace_tag_key(k) - } # type: Dict[Text, Text] + k: v for k, v in span_context._meta.items() if _DatadogMultiHeader._is_valid_datadog_trace_tag_key(k) + } if tags_to_encode: try: @@ -384,10 +375,7 @@ def _extract(headers): span_id=int(parent_span_id) or None, # type: ignore[arg-type] sampling_priority=sampling_priority, # type: ignore[arg-type] dd_origin=origin, - # DEV: This cast is needed because of the type requirements of - # span tags and trace tags which are currently implemented using - # the same type internally (_MetaDictType). - meta=cast(_MetaDictType, meta), + meta=meta, ) except (TypeError, ValueError): log.debug( @@ -829,14 +817,14 @@ def _extract(headers): log.exception("received invalid w3c traceparent: %s ", tp) return None - meta = {W3C_TRACEPARENT_KEY: tp} # type: _MetaDictType + meta = {W3C_TRACEPARENT_KEY: tp} ts = _extract_header_value(_POSSIBLE_HTTP_HEADER_TRACESTATE, headers) return _TraceContext._get_context(trace_id, span_id, trace_flag, ts, meta) @staticmethod def _get_context(trace_id, span_id, trace_flag, ts, meta=None): - # type: (int, int, Literal[0,1], Optional[str], Optional[_MetaDictType]) -> Context + # type: (int, int, Literal[0,1], Optional[str], Optional[Dict[str, str]]) -> Context if meta is None: meta = {} origin = None @@ -1121,7 +1109,7 @@ def _resolve_contexts(contexts, styles_w_ctx, normalized_headers): return primary_context @staticmethod - def inject(context: Union[Context, Span], headers: Dict[str, str], non_active_span: Optional[Span] = None) -> None: + def inject(context: Union[Context, Span], headers: Dict[str, str]) -> None: """Inject Context attributes that have to be propagated as HTTP headers. Here is an example using `requests`:: @@ -1150,26 +1138,16 @@ def parent_call(): Span objects automatically trigger sampling decisions. Context objects should have sampling_priority set to avoid inconsistent downstream sampling. 
:param dict headers: HTTP headers to extend with tracing attributes. - :param Span non_active_span: **DEPRECATED** - Pass Span objects to the context parameter instead. """ - if non_active_span is not None: - # non_active_span is only used for sampling decisions, not to inject headers. - deprecate( - "The non_active_span parameter is deprecated", - message="Use the context parameter instead.", - category=DDTraceDeprecationWarning, - removal_version="4.0.0", - ) # Cannot rename context parameter due to backwards compatibility # Handle sampling and get context for header injection - span_context = HTTPPropagator._get_sampled_injection_context(context, non_active_span) + span_context = HTTPPropagator._get_sampled_injection_context(context, None) # Log a warning if we cannot determine a sampling decision before injecting headers. if span_context.span_id and span_context.trace_id and span_context.sampling_priority is None: log.debug( "Sampling decision not available. Downstream spans will not inherit a sampling priority: " - "args=(context=%s, ..., non_active_span=%s) detected span context=%s", + "args=(context=%s, ...) detected span context=%s", context, - non_active_span, span_context, ) diff --git a/ddtrace/runtime/__init__.py b/ddtrace/runtime/__init__.py index 79745217f11..2963023fc29 100644 --- a/ddtrace/runtime/__init__.py +++ b/ddtrace/runtime/__init__.py @@ -1,5 +1,6 @@ from typing import Optional # noqa:F401 +import ddtrace import ddtrace.internal.runtime.runtime_metrics from ddtrace.internal.telemetry import telemetry_writer @@ -29,27 +30,22 @@ class RuntimeMetrics(metaclass=_RuntimeMetricsStatus): """ @staticmethod - def enable(tracer=None, dogstatsd_url=None, flush_interval=None): - # type: (Optional[ddtrace.trace.Tracer], Optional[str], Optional[float]) -> None + def enable( + tracer: Optional[ddtrace.trace.Tracer] = None, + dogstatsd_url: Optional[str] = None, + ) -> None: """ - Enable the runtime metrics collection service. 
- If the service has already been activated before, this method does nothing. Use ``disable`` to turn off the runtime metric collection service. :param tracer: The tracer instance to correlate with. - :param dogstatsd_url: The DogStatsD URL. - :param flush_interval: The flush interval. """ telemetry_writer.add_configuration(TELEMETRY_RUNTIMEMETRICS_ENABLED, True, origin="code") - ddtrace.internal.runtime.runtime_metrics.RuntimeWorker.enable( - tracer=tracer, dogstatsd_url=dogstatsd_url, flush_interval=flush_interval - ) + ddtrace.internal.runtime.runtime_metrics.RuntimeWorker.enable(tracer=tracer, dogstatsd_url=dogstatsd_url) @staticmethod - def disable(): - # type: () -> None + def disable() -> None: """ Disable the runtime metrics collection service. diff --git a/ddtrace/settings/__init__.py b/ddtrace/settings/__init__.py deleted file mode 100644 index 01e10b33296..00000000000 --- a/ddtrace/settings/__init__.py +++ /dev/null @@ -1,36 +0,0 @@ -from ddtrace.internal.utils.deprecations import DDTraceDeprecationWarning - -from ..vendor.debtcollector import deprecate - - -def __getattr__(name): - if name in set( - [ - "ConfigException", - "HttpConfig", - "Hooks", - "IntegrationConfig", - ] - ): - deprecate( - ("%s.%s is deprecated" % (__name__, name)), - removal_version="4.0.0", # TODO: update this to the correct version - category=DDTraceDeprecationWarning, - ) - if name == "ConfigException": - from ddtrace.settings.exceptions import ConfigException - - return ConfigException - elif name == "HttpConfig": - from .http import HttpConfig - - return HttpConfig - elif name == "Hooks": - from .._hooks import Hooks - - return Hooks - elif name == "IntegrationConfig": - from .integration import IntegrationConfig - - return IntegrationConfig - raise AttributeError("'%s' has no attribute '%s'" % (__name__, name)) diff --git a/ddtrace/settings/exceptions.py b/ddtrace/settings/exceptions.py deleted file mode 100644 index c11b83be316..00000000000 --- 
a/ddtrace/settings/exceptions.py +++ /dev/null @@ -1,6 +0,0 @@ -class ConfigException(Exception): - """Configuration exception when an integration that is not available - is called in the `Config` object. - """ - - pass diff --git a/ddtrace/trace/__init__.py b/ddtrace/trace/__init__.py index d89d8e09944..6eacedba8eb 100644 --- a/ddtrace/trace/__init__.py +++ b/ddtrace/trace/__init__.py @@ -1,14 +1,9 @@ -from typing import Any - from ddtrace._trace.context import Context from ddtrace._trace.filters import TraceFilter -from ddtrace._trace.pin import Pin as _Pin from ddtrace._trace.provider import BaseContextProvider from ddtrace._trace.span import Span from ddtrace._trace.tracer import Tracer from ddtrace.internal import core -from ddtrace.internal.utils.deprecations import DDTraceDeprecationWarning -from ddtrace.vendor.debtcollector import deprecate # a global tracer instance with integration settings @@ -16,22 +11,9 @@ core.tracer = tracer # type: ignore -def __getattr__(name: str) -> Any: - if name == "Pin": - deprecate( - prefix="ddtrace.trace.Pin is deprecated", - message="Please use environment variables for configuration instead", - category=DDTraceDeprecationWarning, - removal_version="4.0.0", - ) - return _Pin - raise AttributeError(f"module '{__name__}' has no attribute '{name}'") - - __all__ = [ "BaseContextProvider", "Context", - "Pin", "TraceFilter", "Tracer", "Span", diff --git a/docs/api.rst b/docs/api.rst index 1483d449a95..f2db84ca1fa 100644 --- a/docs/api.rst +++ b/docs/api.rst @@ -17,9 +17,6 @@ Tracing .. autoclass:: ddtrace.trace.Span :members: -.. autoclass:: ddtrace.trace.Pin - :members: - .. 
autoclass:: ddtrace.trace.Context :members: :undoc-members: diff --git a/docs/configuration.rst b/docs/configuration.rst index a729c222c54..0179cc8c0a0 100644 --- a/docs/configuration.rst +++ b/docs/configuration.rst @@ -27,7 +27,7 @@ Unified Service Tagging DD_SERVICE: default: (autodetected) - + description: | Set the service name to be used for this application. A default is provided for these integrations: :ref:`bottle`, :ref:`flask`, :ref:`grpc`, @@ -40,7 +40,7 @@ Unified Service Tagging ``6c44da20``, ``2020.02.13``. Generally set along with ``DD_SERVICE``. See `Unified Service Tagging`_ for more information. - + version_added: v0.36.0: @@ -50,7 +50,7 @@ Traces .. ddtrace-configuration-options:: DD__DISTRIBUTED_TRACING: default: True - + description: | Enables distributed tracing for the specified . @@ -60,18 +60,18 @@ Traces DD__SERVICE: type: String default: - + description: | Set the service name, allowing default service name overrides for traces for the specific . - + version_added: v2.11.0: DD_ASGI_TRACE_WEBSOCKET: default: False - + description: | - Enables tracing ASGI websockets. Please note that the websocket span duration will last until the + Enables tracing ASGI websockets. Please note that the websocket span duration will last until the connection is closed, which can result in long running spans. version_added: @@ -80,21 +80,21 @@ Traces DD_BOTOCORE_EMPTY_POLL_ENABLED: type: Boolean default: True - + description: | Enables creation of consumer span when AWS SQS and AWS Kinesis ``poll()`` operations return no records. When disabled, no consumer span is created if no records are returned. - + version_added: v2.6.0: DD_BOTOCORE_PROPAGATION_ENABLED: type: Boolean default: False - + description: | Enables trace context propagation connecting producer and consumer spans within a single trace for AWS SQS, SNS, and Kinesis messaging services. 
- + version_added: v2.6.0: @@ -130,33 +130,33 @@ Traces DD_TRACE__ENABLED: type: Boolean default: True - + description: | Enables to be patched. For example, ``DD_TRACE_DJANGO_ENABLED=false`` will disable the Django integration from being installed. - + version_added: v0.41.0: DD_TRACE_128_BIT_TRACEID_GENERATION_ENABLED: type: Boolean default: True - + description: | This configuration enables the generation of 128 bit trace ids. - + version_added: v1.12.0: DD_TRACE_API_VERSION: default: | ``v0.5`` - + description: | The trace API version to use when sending traces to the Datadog agent. Currently, the supported versions are: ``v0.4`` and ``v0.5``. - + version_added: v0.56.0: v1.7.0: default changed to ``v0.5``. @@ -219,7 +219,7 @@ Traces DD_TRACE_HTTP_SERVER_ERROR_STATUSES: type: String default: "500-599" - + description: | Comma-separated list of HTTP status codes that should be considered errors when returned by an HTTP request. Multiple comma separated error ranges can be set (ex: ``200,400-404,500-599``). @@ -228,21 +228,21 @@ Traces DD_TRACE_METHODS: type: String default: "" - + description: | Specify methods to trace. For example: ``mod.submod:method1,method2;mod.submod:Class.method1``. Note that this setting is only compatible with ``ddtrace-run``, and that it doesn't work for methods implemented by libraries for which there's an integration in ``ddtrace/contrib``. 
- + version_added: v2.1.0: DD_TRACE_OBFUSCATION_QUERY_STRING_REGEXP: default: | ``'(?ix)(?:(?:"|%22)?)(?:(?:old[-_]?|new[-_]?)?p(?:ass)?w(?:or)?d(?:1|2)?|pass(?:[-_]?phrase)?|secret|(?:api[-_]?|private[-_]?|public[-_]?|access[-_]?|secret[-_]?)key(?:[-_]?id)?|token|consumer[-_]?(?:id|key|secret)|sign(?:ed|ature)?|auth(?:entication|orization)?)(?:(?:\\s|%20)*(?:=|%3D)[^&]+|(?:"|%22)(?:\\s|%20)*(?::|%3A)(?:\\s|%20)*(?:"|%22)(?:%2[^2]|%[^2]|[^"%])+(?:"|%22))|(?: bearer(?:\\s|%20)+[a-z0-9._\\-]+|token(?::|%3A)[a-z0-9]{13}|gh[opsu]_[0-9a-zA-Z]{36}|ey[I-L](?:[\\w=-]|%3D)+\\.ey[I-L](?:[\\w=-]|%3D)+(?:\\.(?:[\\w.+/=-]|%3D|%2F|%2B)+)?|-{5}BEGIN(?:[a-z\\s]|%20)+PRIVATE(?:\\s|%20)KEY-{5}[^\\-]+-{5}END(?:[a-z\\s]|%20)+PRIVATE(?:\\s|%20)KEY(?:-{5})?(?:\\n|%0A)?|(?:ssh-(?:rsa|dss)|ecdsa-[a-z0-9]+-[a-z0-9]+)(?:\\s|%20|%09)+(?:[a-z0-9/.+]|%2F|%5C|%2B){100,}(?:=|%3D)*(?:(?:\\s|%20|%09)+[a-z0-9._-]+)?)'`` - + description: A regexp to redact sensitive query strings. Obfuscation disabled if set to empty string - + version_added: v1.19.0: | ``DD_TRACE_OBFUSCATION_QUERY_STRING_REGEXP`` replaces ``DD_TRACE_OBFUSCATION_QUERY_STRING_PATTERN`` which is deprecated @@ -251,11 +251,11 @@ Traces DD_TRACE_OTEL_ENABLED: type: Boolean default: False - + description: | When used with ``ddtrace-run`` this configuration enables OpenTelemetry support. To enable OpenTelemetry without `ddtrace-run` refer to the following :mod:`docs `. - + version_added: v1.12.0: @@ -273,24 +273,24 @@ Traces type: Boolean default: False description: Whether the propagator stops after extracting the first header. - + version_added: v2.3.0: DD_TRACE_PROPAGATION_HTTP_BAGGAGE_ENABLED: type: Boolean default: False - + description: | Enables propagation of baggage items through http headers with prefix ``ot-baggage-``. 
- + version_added: v2.4.0: DD_TRACE_PROPAGATION_STYLE: default: | ``datadog,tracecontext,baggage`` - + description: | Comma separated list of propagation styles used for extracting trace context from inbound request headers and injecting trace context into outbound request headers. @@ -318,10 +318,10 @@ Traces DD_TRACE_SPAN_TRACEBACK_MAX_SIZE: type: Integer default: 30 - + description: | The maximum length of a traceback included in a span. - + version_added: v2.3.0: @@ -338,7 +338,7 @@ Traces DD_TRACE_WRITER_MAX_PAYLOAD_SIZE_BYTES: type: Int default: 8388608 - + description: | The max size in bytes of each payload item sent to the trace agent. If the max payload size is greater than buffer size, then max size of each payload item will be the buffer size. @@ -346,7 +346,7 @@ Traces DD_TRACE_X_DATADOG_TAGS_MAX_LENGTH: type: Integer default: 512 - + description: | The maximum length of ``x-datadog-tags`` header allowed in the Datadog propagation style. Must be a value between 0 to 512. If 0, propagation of ``x-datadog-tags`` is disabled. @@ -354,29 +354,29 @@ Traces DD_UNLOAD_MODULES_FROM_SITECUSTOMIZE: type: String default: "auto" - + description: | Controls whether module cloning logic is executed by ``ddtrace-run``. Module cloning involves saving copies of dependency modules for internal use by ``ddtrace`` that will be unaffected by future imports of and changes to those modules by application code. Valid values for this variable are ``1``, ``0``, and ``auto``. ``1`` tells ``ddtrace`` to run its module cloning logic unconditionally, ``0`` tells it not to run that logic, and ``auto`` tells it to run module cloning logic only if ``gevent`` is accessible from the application's runtime. - + version_added: v1.9.0: DD_TRACE_SAFE_INSTRUMENTATION_ENABLED: type: Boolean default: False - + description: | Whether to enable safe instrumentation. 
When enabled, ``ddtrace`` will check if the version of an installed package is compatible with the respective ``ddtrace`` integration patching the package. If the version is not compatible, ``ddtrace`` will not patch the respective package. - This is useful to avoid application crashes from patching packages that are incompatible with the ``ddtrace`` supported integration + This is useful to avoid application crashes from patching packages that are incompatible with the ``ddtrace`` supported integration version ranges. - + version_added: v3.11.0: @@ -388,7 +388,7 @@ Trace Context propagation DD_TRACE_PROPAGATION_STYLE_EXTRACT: default: | ``datadog,tracecontext`` - + description: | Comma separated list of propagation styles used for extracting trace context from inbound request headers. @@ -408,7 +408,7 @@ Trace Context propagation DD_TRACE_PROPAGATION_BEHAVIOR_EXTRACT: default: | ``continue`` - + description: | String for how to handle incoming request headers that are extracted for propagation of trace info. @@ -417,7 +417,7 @@ Trace Context propagation After extracting the headers for propagation, this configuration determines what is done with them. The default value is ``continue`` which always propagates valid headers. - ``ignore`` ignores all incoming headers and ``restart`` turns the first extracted valid propagation header + ``ignore`` ignores all incoming headers and ``restart`` turns the first extracted valid propagation header into a span link and propagates baggage if present. Example: ``DD_TRACE_PROPAGATION_STYLE_EXTRACT="ignore"`` to ignore all incoming headers and to start a root span without a parent. @@ -428,7 +428,7 @@ Trace Context propagation DD_TRACE_PROPAGATION_STYLE_INJECT: default: | ``tracecontext,datadog`` - + description: | Comma separated list of propagation styles used for injecting trace context into outbound request headers. 
@@ -453,10 +453,10 @@ Metrics DD_RUNTIME_METRICS_ENABLED: type: Boolean default: False - + description: | When used with ``ddtrace-run`` this configuration enables sending runtime metrics to Datadog. - These metrics track the memory management and concurrency of the python runtime. + These metrics track the memory management and concurrency of the python runtime. Refer to the following `docs ` _ for more information. DD_RUNTIME_METRICS_RUNTIME_ID_ENABLED: @@ -473,11 +473,11 @@ Metrics DD_METRICS_OTEL_ENABLED: type: Boolean default: False - + description: | When used with ``ddtrace-run`` this configuration enables support for exporting OTLP metrics generated by the OpenTelemetry Metrics API. The application must also include its own OTLP metrics exporter. - + version_added: v3.11.0: @@ -489,13 +489,13 @@ Application & API Security DD_APPSEC_AUTOMATED_USER_EVENTS_TRACKING: type: String default: "safe" - + description: | Sets the mode for the automated user login events tracking feature which sets some traces on each user login event. The supported modes are ``safe`` which will only store the user id or primary key, ``extended`` which will also store the username, email and full name and ``disabled``. Note that this feature requires ``DD_APPSEC_ENABLED`` to be set to ``true`` to work. - + version_added: v1.17.0: Added support to the Django integration. No other integrations support this configuration. @@ -507,13 +507,13 @@ Application & API Security DD_APPSEC_OBFUSCATION_PARAMETER_KEY_REGEXP: default: | ``(?i)(?:p(?:ass)?w(?:or)?d|pass(?:_?phrase)?|secret|(?:api_?|private_?|public_?)key)|token|consumer_?(?:id|key|secret)|sign(?:ed|ature)|bearer|authorization`` - + description: Sensitive parameter key regexp for obfuscation. 
DD_APPSEC_OBFUSCATION_PARAMETER_VALUE_REGEXP: default: | ``(?i)(?:p(?:ass)?w(?:or)?d|pass(?:_?phrase)?|secret|(?:api_?|private_?|public_?|access_?|secret_?)key(?:_?id)?|token|consumer_?(?:id|key|secret)|sign(?:ed|ature)?|auth(?:entication|orization)?)(?:\s*=[^;]|"\s*:\s*"[^"]+")|bearer\s+[a-z0-9\._\-]+|token:[a-z0-9]{13}|gh[opsu]_[0-9a-zA-Z]{36}|ey[I-L][\w=-]+\.ey[I-L][\w=-]+(?:\.[\w.+\/=-]+)?|[\-]{5}BEGIN[a-z\s]+PRIVATE\sKEY[\-]{5}[^\-]+[\-]{5}END[a-z\s]+PRIVATE\sKEY|ssh-rsa\s*[a-z0-9\/\.+]{100,}`` - + description: Sensitive parameter value regexp for obfuscation. DD_APPSEC_RULES: @@ -589,37 +589,37 @@ Code Security DD_IAST_REDACTION_ENABLED: type: Boolean default: True - + description: | Replace potentially sensitive information in the vulnerability report, like passwords with ``*`` for non tainted strings and ``abcde...`` for tainted ones. This will use the regular expressions of the two next settings to decide what to scrub. - + version_added: v1.17.0: DD_IAST_REDACTION_NAME_PATTERN: type: String - + default: | ``(?i)^.*(?:p(?:ass)?w(?:or)?d|pass(?:_?phrase)?|secret|(?:api_?|private_?|public_?|access_?|secret_?)key(?:_?id)?|token|consumer_?(?:id|key|secret)|sign(?:ed|ature)?|auth(?:entication|orization)?)`` - + description: | Regular expression containing key or name style strings matched against vulnerability origin and evidence texts. If it matches, the scrubbing of the report will be enabled. - + version_added: v1.17.0: DD_IAST_REDACTION_VALUE_PATTERN: type: String - + default: | ``(?i)bearer\s+[a-z0-9\._\-]+|token:[a-z0-9]{13}|gh[opsu]_[0-9a-zA-Z]{36}|ey[I-L][\w=-]+\.ey[I-L][\w=-]+(\.[\w.+\/=-]+)?|[\-]{5}BEGIN[a-z\s]+PRIVATE\sKEY[\-]{5}[^\-]+[\-]{5}END[a-z\s]+PRIVATE\sKEY|ssh-rsa\s*[a-z0-9\/\.+]{100,}`` - + description: | Regular expression containing value style strings matched against vulnerability origin and evidence texts. If it matches, the scrubbing of the report will be enabled. 
- + version_added: v1.17.0: @@ -660,63 +660,63 @@ Test Visibility DD_CIVISIBILITY_AGENTLESS_ENABLED: type: Boolean default: False - + description: | Configures the ``CIVisibility`` service to use a test-reporting ``CIVisibilityWriter``. This writer sends payloads for traces on which it's used to the intake endpoint for Datadog CI Visibility. If there is a reachable Datadog agent that supports proxying these requests, the writer will send its payloads to that agent instead. - + version_added: v1.12.0: DD_CIVISIBILITY_AGENTLESS_URL: type: String default: "" - + description: | Configures the ``CIVisibility`` service to send event payloads to the specified host. If unspecified, the host "https://citestcycle-intake." is used, where ```` is replaced by that environment variable's value, or "datadoghq.com" if unspecified. - + version_added: v1.13.0: DD_CIVISIBILITY_ITR_ENABLED: type: Boolean default: True - + description: | Configures the ``CIVisibility`` service to query the Datadog API to decide whether to enable the Datadog `Test Impact Analysis `_ (formerly Intelligent Test Runner). Setting the variable to ``false`` will skip querying the API and disable code coverage collection and test skipping. - + version_added: v1.13.0: DD_CIVISIBILITY_LOG_LEVEL: type: String default: "info" - + description: | Configures the ``CIVisibility`` service to replace the default Datadog logger's stream handler with one that only displays messages related to the ``CIVisibility`` service, at a level of or higher than the given log level. The Datadog logger's file handler is unaffected. Valid, case-insensitive, values are ``critical``, ``error``, ``warning``, ``info``, or ``debug``. A value of ``none`` silently disables the logger. Note: enabling debug logging with the ``DD_TRACE_DEBUG`` environment variable overrides this behavior. 
- + version_added: v2.5.0: DD_TEST_SESSION_NAME: type: String default: (autodetected) - + description: | Configures the ``CIVisibility`` service to use the given string as the value of the ``test_session.name`` tag in test events. If not specified, this string will be constructed from the CI job id (if available) and the test command used to start the test session. - + version_added: v2.16.0: @@ -761,10 +761,10 @@ Agent DD_AGENT_HOST: type: String - + default: | ``localhost`` - + description: | The host name to use to connect the Datadog agent for traces. The host name can be IPv4, IPv6, or a domain name. If ``DD_TRACE_AGENT_URL`` is specified, the @@ -775,18 +775,18 @@ Agent Example for IPv6: ``DD_AGENT_HOST=2001:db8:3333:4444:CCCC:DDDD:EEEE:FFFF`` Example for domain name: ``DD_AGENT_HOST=host`` - + version_added: v0.17.0: v1.7.0: DD_DOGSTATSD_URL: type: URL - + default: | ``unix:///var/run/datadog/dsd.socket`` if available otherwise ``udp://localhost:8125`` - + description: | The URL to use to connect the Datadog agent for Dogstatsd metrics. The url can start with ``udp://`` to connect using UDP or with ``unix://`` to use a Unix @@ -801,14 +801,14 @@ Agent Override the modules patched for this execution of the program. Must be a list in the ``module1:boolean,module2:boolean`` format. For example, ``boto:true,redis:false``. - + version_added: v0.55.0: | Formerly named ``DATADOG_PATCH_MODULES`` DD_SITE: default: datadoghq.com - + description: | Specify which site to use for uploading profiles and logs. Set to ``datadoghq.eu`` to use EU site. @@ -818,7 +818,7 @@ Agent Set global tags to be attached to every span. Value must be either comma and/or space separated. e.g. ``key1:value1,key2:value2,key3``, ``key1:value key2:value2 key3`` or ``key1:value1, key2:value2, key3``. If a tag value is not supplied the value will be an empty string. 
- + version_added: v0.38.0: Comma separated support added v0.48.0: Space separated support added @@ -830,11 +830,11 @@ Agent DD_TRACE_AGENT_URL: type: URL - + default: | ``unix:///var/run/datadog/apm.socket`` if available otherwise ``http://localhost:8126`` - + description: | The URL to use to connect the Datadog agent for traces. The url can start with ``http://`` to connect using HTTP or with ``unix://`` to use a Unix @@ -867,19 +867,19 @@ Logs description: | When used with ``ddtrace-run`` this configuration enables support for exporting OTLP logs generated by the OpenTelemetry Logging API. The application must also include its own OTLP logs exporter. - + version_added: v3.12.0: Adds support for submitting logs via an OTLP Exporter. DD_TRACE_DEBUG: type: Boolean default: False - + description: | Enables debug logging in the tracer. Can be used with `DD_TRACE_LOG_FILE` to route logs to a file. - + version_added: v0.41.0: | Formerly named ``DATADOG_TRACE_DEBUG`` @@ -891,7 +891,7 @@ Logs DD_TRACE_LOG_FILE_LEVEL: default: DEBUG - + description: | Configures the ``RotatingFileHandler`` used by the `ddtrace` logger to write logs to a file based on the level specified. Defaults to `DEBUG`, but will accept the values found in the standard **logging** library, such as WARNING, ERROR, and INFO, @@ -900,7 +900,7 @@ Logs DD_TRACE_LOG_FILE_SIZE_BYTES: type: Int default: 15728640 - + description: | Max size for a file when used with `DD_TRACE_LOG_FILE`. When a log has exceeded this size, there will be one backup log file created. In total, the files will store ``2 * DD_TRACE_LOG_FILE_SIZE_BYTES`` worth of logs. @@ -917,7 +917,7 @@ Sampling DD_SPAN_SAMPLING_RULES: type: string - + description: | A JSON array of objects. Each object must have a "name" and/or "service" field, while the "max_per_second" and "sample_rate" fields are optional. The "sample_rate" value must be between 0.0 and 1.0 (inclusive), and will default to 1.0 (100% sampled). 
@@ -933,7 +933,7 @@ Sampling DD_SPAN_SAMPLING_RULES_FILE: type: string - + description: | A path to a JSON file containing span sampling rules organized as JSON array of objects. For the rules each object must have a "name" and/or "service" field, and the "sample_rate" field is optional. @@ -952,11 +952,11 @@ Sampling DD_TRACE_RATE_LIMIT: type: int default: 100 - + description: | Maximum number of traces per second to sample. Set a rate limit to avoid the ingestion volume overages in the case of traffic spikes. This configuration is only applied when client based sampling is configured, otherwise agent based rate limits are used. - + version_added: v0.33.0: v2.15.0: Only applied when DD_TRACE_SAMPLE_RATE, DD_TRACE_SAMPLING_RULES, or DD_SPAN_SAMPLING_RULE are set. @@ -964,14 +964,14 @@ Sampling DD_TRACE_SAMPLING_RULES: type: JSON array - + description: | A JSON array of objects. Each object must have a β€œsample_rate”, and the β€œname”, β€œservice”, "resource", and "tags" fields are optional. The β€œsample_rate” value must be between 0.0 and 1.0 (inclusive). **Example:** ``DD_TRACE_SAMPLING_RULES='[{"sample_rate":0.5,"service":"my-service","resource":"my-url","tags":{"my-tag":"example"}}]'`` **Note** that the JSON object must be included in single quotes (') to avoid problems with escaping of the double quote (") character.' - + version_added: v1.19.0: added support for "resource" v1.20.0: added support for "tags" @@ -985,14 +985,14 @@ Other DD_INSTRUMENTATION_TELEMETRY_ENABLED: type: Boolean default: True - + description: | Enables sending :ref:`telemetry ` events to the agent. DD_TRACE_EXPERIMENTAL_FEATURES_ENABLED: type: string version_added: - v3.2.0: Adds initial support and support for enabling experimental runtime metrics. + v3.2.0: Adds initial support and support for enabling experimental runtime metrics. 
default: "" description: | @@ -1000,7 +1000,7 @@ Other DD_SUBPROCESS_SENSITIVE_WILDCARDS: type: String - + description: | Add more possible matches to the internal list of subprocess execution argument scrubbing. Must be a comma-separated list and each item can take `fnmatch` style wildcards, for example: ``*ssn*,*personalid*,*idcard*,*creditcard*``. @@ -1008,32 +1008,32 @@ Other DD_USER_MODEL_EMAIL_FIELD: type: String default: "" - + description: | Field to be used to read the user email when using a custom ``User`` model for the automatic login events. This field will take precedence over automatic inference. - + version_added: v1.15.0: DD_USER_MODEL_LOGIN_FIELD: type: String default: "" - + description: | Field to be used to read the user login when using a custom ``User`` model for the automatic login events. This field will take precedence over automatic inference. Please note that, if set, this field will be used to retrieve the user login even if ``DD_APPSEC_AUTOMATED_USER_EVENTS_TRACKING`` is set to ``safe`` and, in some cases, the selected field could hold potentially private information. - + version_added: v1.15.0: DD_USER_MODEL_NAME_FIELD: type: String default: "" - + description: | Field to be used to read the user name when using a custom ``User`` model for the automatic login events. This field will take precedence over automatic inference. - + version_added: v1.15.0: @@ -1043,10 +1043,10 @@ Other description: | A comma-separated list of baggage keys, sent via HTTP headers, to automatically tag as baggage. on the local root span. - Only baggage extracted from incoming headers is supported. Baggage set via ``Context.set_baggage_item(..., ...)`` is not included. Keys must have non-empty values. + Only baggage extracted from incoming headers is supported. Baggage set via ``Context.set_baggage_item(..., ...)`` is not included. Keys must have non-empty values. Set to * to tag all baggage keys (use with caution to avoid exposing sensitive data). 
Set to an empty string to disable the feature. - version_added: + version_added: v3.6.0: .. _Unified Service Tagging: https://docs.datadoghq.com/getting_started/tagging/unified_service_tagging/ @@ -1057,33 +1057,33 @@ Other Profiling --------- -.. ddtrace-envier-configuration:: ddtrace.settings.profiling:ProfilingConfig +.. ddtrace-envier-configuration:: ddtrace.internal.settings.profiling:ProfilingConfig :recursive: true Dynamic Instrumentation ----------------------- -.. ddtrace-envier-configuration:: ddtrace.settings.dynamic_instrumentation:DynamicInstrumentationConfig +.. ddtrace-envier-configuration:: ddtrace.internal.settings.dynamic_instrumentation:DynamicInstrumentationConfig Exception Replay ---------------- -.. ddtrace-envier-configuration:: ddtrace.settings.exception_replay:ExceptionReplayConfig +.. ddtrace-envier-configuration:: ddtrace.internal.settings.exception_replay:ExceptionReplayConfig Code Origin ----------- -.. ddtrace-envier-configuration:: ddtrace.settings.code_origin:CodeOriginConfig +.. ddtrace-envier-configuration:: ddtrace.internal.settings.code_origin:CodeOriginConfig :recursive: true Live Debugging -------------- -.. ddtrace-envier-configuration:: ddtrace.settings.live_debugging:LiveDebuggerConfig +.. ddtrace-envier-configuration:: ddtrace.internal.settings.live_debugging:LiveDebuggerConfig Error Tracking -------------- diff --git a/docs/contributing-integrations.rst b/docs/contributing-integrations.rst index 8db0e205a2f..9aa848b51df 100644 --- a/docs/contributing-integrations.rst +++ b/docs/contributing-integrations.rst @@ -22,7 +22,7 @@ include Tracing Spans and the AppSec WAF. Integrations should avoid exposing a public API unless it is absolutely necessary. Users should be able to configure the integration by setting environment variables or using the Pin API. For cases where a public API is necessary, integrations -should expose the API in ``ddtrace.contrib..py``. +should expose the API in ``ddtrace.contrib..py``. 
Integrations should define a ``ddtrace.contrib.internal..__init__.py`` module that contains a doc string describing the integration and it's supported configurations. This module should be referenced in the ``docs/integrations.rst`` file. @@ -37,7 +37,7 @@ into the runtime execution of third-party libraries. The essential task of writi the functions in the third-party library that would serve as useful entrypoints and wrapping them with ``wrap_function_wrapper``. There are exceptions, but this is generally a useful starting point. -The Pin API in ``ddtrace.trace.Pin`` is used to configure the instrumentation at runtime. It provides a ``Pin`` class +The Pin API in ``ddtrace._trace.pin.Pin`` is used to configure the instrumentation at runtime. It provides a ``Pin`` class that can store configuration data in memory in a manner that is accessible from within functions wrapped by Wrapt. ``Pin`` objects are most often used for storing configuration data scoped to a given integration, such as enable/disable flags and service name overrides. @@ -204,7 +204,6 @@ are not yet any expected spans stored for it, so we need to create some. mongo: - ddtrace/contrib/internal/pymongo/* - - ddtrace/contrib/internal/mongoengine/* - ddtrace/ext/mongo.py 15. Add a `suite` for your integration in `tests/contrib/suitespec.yml`. This defines test configuration diff --git a/docs/index.rst b/docs/index.rst index caf65d873e2..65e88bb1b95 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -52,7 +52,7 @@ contacting support. 
+--------------------------------------------------+------------+----------+------+ | :ref:`aiopg` | >= 0.16.0 | Yes | | +--------------------------------------------------+------------+----------+------+ -| :ref:`algoliasearch` | >= 2.5.0 | Yes | | +| :ref:`algoliasearch` | >= 2.6.3 | Yes | | +--------------------------------------------------+------------+----------+------+ | :ref:`anthropic` | >= 0.28.0 | Yes | | +--------------------------------------------------+------------+----------+------+ @@ -62,7 +62,7 @@ contacting support. +--------------------------------------------------+------------+----------+------+ | :ref:`asyncio` | \* | Yes | | +--------------------------------------------------+------------+----------+------+ -| :ref:`asyncpg` | >= 0.22.0 | Yes | | +| :ref:`asyncpg` | >= 0.23.0 | Yes | | +--------------------------------------------------+------------+----------+------+ | :ref:`avro` | \* | Yes | | +--------------------------------------------------+------------+----------+------+ @@ -72,8 +72,6 @@ contacting support. +--------------------------------------------------+------------+----------+------+ | :ref:`bottle` | >= 0.12 | No | | +--------------------------------------------------+------------+----------+------+ -| :ref:`cassandra` | >= 3.24 | Yes | | -+--------------------------------------------------+------------+----------+------+ | :ref:`celery` | >= 4.4 | Yes | | +--------------------------------------------------+------------+----------+------+ | :ref:`cherrypy` | >= 17.0 | No | | @@ -106,18 +104,14 @@ contacting support. 
+--------------------------------------------------+------------+----------+------+ | :ref:`flask_cache` | >= 0.13 | No | | +--------------------------------------------------+------------+----------+------+ -| :ref:`freezegun` | \* | Yes | | -+--------------------------------------------------+------------+----------+------+ | :ref:`futures` | \* | Yes | | +--------------------------------------------------+------------+----------+------+ -| :ref:`gevent` (greenlet>=1.0) | >= 20.12 | Yes | | +| :ref:`gevent` (greenlet>=1.0) | >= 21.1.2 | Yes | | +--------------------------------------------------+------------+----------+------+ | :ref:`google_adk` | >= 1.0.0 | Yes | | +--------------------------------------------------+------------+----------+------+ | :ref:`google_genai` | >= 1.21.1 | Yes | | +--------------------------------------------------+------------+----------+------+ -| :ref:`google_generativeai` | >= 0.7.0 | Yes | | -+--------------------------------------------------+------------+----------+------+ | :ref:`grpc` | >= 1.34 | Yes [4]_ | | +--------------------------------------------------+------------+----------+------+ | :ref:`graphene ` | >= 3.0.0 | Yes | | @@ -154,8 +148,6 @@ contacting support. +--------------------------------------------------+------------+----------+------+ | :ref:`molten` | >= 1.0 | Yes | | +--------------------------------------------------+------------+----------+------+ -| :ref:`mongoengine` | >= 0.23 | Yes | | -+--------------------------------------------------+------------+----------+------+ | :ref:`mysql-connector` | >= 8.0.5 | Yes | | +--------------------------------------------------+------------+----------+------+ | :ref:`mysqldb` | \* | Yes | | @@ -168,7 +160,7 @@ contacting support. 
+--------------------------------------------------+------------+----------+------+ | :ref:`protobuf` | \* | Yes [6]_ | | +--------------------------------------------------+------------+----------+------+ -| :ref:`psycopg` | >= 2.8 | Yes | | +| :ref:`psycopg` | >= 2.9.10 | Yes | | +--------------------------------------------------+------------+----------+------+ | :ref:`pylibmc` | >= 1.6.2 | Yes | | +--------------------------------------------------+------------+----------+------+ @@ -178,7 +170,7 @@ contacting support. +--------------------------------------------------+------------+----------+------+ | :ref:`pymysql` | >= 0.10 | Yes | | +--------------------------------------------------+------------+----------+------+ -| :ref:`pynamodb` | >= 5.0 | Yes | | +| :ref:`pynamodb` | >= 5.5.1 | Yes | | +--------------------------------------------------+------------+----------+------+ | :ref:`pyodbc` | >= 4.0.31 | Yes | | +--------------------------------------------------+------------+----------+------+ @@ -194,7 +186,7 @@ contacting support. +--------------------------------------------------+------------+----------+------+ | :ref:`rediscluster` | >= 2.0 | Yes | | +--------------------------------------------------+------------+----------+------+ -| :ref:`requests` | >= 2.20 | Yes | | +| :ref:`requests` | >= 2.25.1 | Yes | | +--------------------------------------------------+------------+----------+------+ | :ref:`rq` | >= 1.8 | Yes | | +--------------------------------------------------+------------+----------+------+ @@ -202,7 +194,7 @@ contacting support. 
+--------------------------------------------------+------------+----------+------+ | :ref:`selenium` | \* | Yes | | +--------------------------------------------------+------------+----------+------+ -| :ref:`snowflake` | >= 2.3.0 | No | | +| :ref:`snowflake` | >= 2.4.6 | No | | +--------------------------------------------------+------------+----------+------+ | :ref:`sqlalchemy` | >= 1.3 | No | | +--------------------------------------------------+------------+----------+------+ diff --git a/docs/integrations.rst b/docs/integrations.rst index c33138a508e..87907d141ab 100644 --- a/docs/integrations.rst +++ b/docs/integrations.rst @@ -110,13 +110,6 @@ Bottle .. automodule:: ddtrace.contrib.bottle -.. _cassandra: - -Cassandra -^^^^^^^^^ -.. automodule:: ddtrace.contrib.internal.cassandra - - .. _celery: Celery @@ -216,13 +209,6 @@ Flask Cache .. automodule:: ddtrace.contrib.flask_cache -.. _freezegun: - -FreezeGun -^^^^^^^^^ -.. automodule:: ddtrace.contrib.internal.freezegun - - .. _futures: futures @@ -250,12 +236,6 @@ google-genai ^^^^^^^^^^^^ .. automodule:: ddtrace.contrib.internal.google_genai -.. _google_generativeai: - -google-generativeai -^^^^^^^^^^^^^^^^^^^ -.. automodule:: ddtrace.contrib.internal.google_generativeai - .. _graphql: @@ -375,13 +355,6 @@ Molten .. automodule:: ddtrace.contrib.internal.molten -.. _mongoengine: - -Mongoengine -^^^^^^^^^^^ -.. automodule:: ddtrace.contrib.internal.mongoengine - - .. 
_mysql-connector: mysql-connector diff --git a/hatch.toml b/hatch.toml index e46a08acacc..687cad54949 100644 --- a/hatch.toml +++ b/hatch.toml @@ -159,7 +159,7 @@ DD_PROFILING_PYTORCH_ENABLED = "true" test = [ "uname -a", "pip freeze", - "python -m pytest tests/profiling_v2/test_pytorch.py -vvv --capture=tee-sys", + "python -m pytest tests/profiling/test_pytorch.py -vvv --capture=tee-sys", ] [[envs.profiling_pytorch.matrix]] @@ -194,7 +194,7 @@ test = [ ] [[envs.multiple_os_tests.matrix]] -python = ["3.14", "3.12", "3.10", "3.8"] +python = ["3.14", "3.12", "3.10"] [envs.snapshot_viewer] dev-mode = false diff --git a/pyproject.toml b/pyproject.toml index f1ce0ccefb2..fddfc36c81f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -11,13 +11,13 @@ build-backend = "setuptools.build_meta" [project] name = "ddtrace" # DEV: to directly override the version specifier, comment this... -dynamic = ["version"] +#dynamic = ["version"] # ...and uncomment this -# version = "4.0.0.dev0" +version = "4.0.0.dev0" description = "Datadog APM client library" readme = "README.md" license = { text = "LICENSE.BSD3" } -requires-python = ">=3.8" +requires-python = ">=3.9" authors = [ { name = "Datadog, Inc.", email = "dev@datadoghq.com" }, ] @@ -62,8 +62,6 @@ ddcontextvars_context = "ddtrace.internal.opentelemetry.context:DDRuntimeContext [project.entry-points.pytest11] ddtrace = "ddtrace.contrib.internal.pytest.plugin" -"ddtrace.pytest_bdd" = "ddtrace.contrib.internal.pytest_bdd.plugin" -"ddtrace.pytest_benchmark" = "ddtrace.contrib.internal.pytest_benchmark.plugin" [project.entry-points.'ddtrace.products'] "apm-tracing-rc" = "ddtrace.internal.remoteconfig.products.apm_tracing" @@ -104,7 +102,7 @@ exclude = ''' [tool.black] line-length = 120 -target_version = ['py37', 'py38', 'py39', 'py310', 'py311', 'py312'] +target_version = ['py39', 'py310', 'py311', 'py312'] include = '''\.py[ix]?$''' exclude = ''' ( @@ -120,7 +118,6 @@ exclude = ''' | ddtrace/profiling/collector/_traceback.pyx$ | 
ddtrace/profiling/collector/_task.pyx$ | ddtrace/profiling/_threading.pyx$ - | ddtrace/profiling/collector/stack.pyx$ | ddtrace/profiling/exporter/pprof.pyx$ | ddtrace/internal/datadog/profiling/ddup/_ddup.pyx$ | ddtrace/vendor/ @@ -234,7 +231,6 @@ exclude = [ "ddtrace/vendor/*", "ddtrace/appsec/_iast/_taint_tracking/_vendor/*", "tests/profiling/collector/pprof_*pb2.py", - "tests/profiling/simple_program_gevent.py", "tests/contrib/grpc/hello_pb2.py", "tests/contrib/django_celery/app/*", "tests/contrib/protobuf/schemas/**/*.py", diff --git a/releasenotes/notes/cassandra-d3c8aaf478bddc56.yaml b/releasenotes/notes/cassandra-d3c8aaf478bddc56.yaml new file mode 100644 index 00000000000..07df72ad56a --- /dev/null +++ b/releasenotes/notes/cassandra-d3c8aaf478bddc56.yaml @@ -0,0 +1,5 @@ +--- +other: + - | + cassandra: The Cassandra integration is removed because it is only compatible with Python 3.8, + which is a year past its end-of-life. diff --git a/releasenotes/notes/ci_visibility-update-remove-deprecated-pytest-entrypoints-5cb519a8a0858c9b.yaml b/releasenotes/notes/ci_visibility-update-remove-deprecated-pytest-entrypoints-5cb519a8a0858c9b.yaml new file mode 100644 index 00000000000..16b3fe1f872 --- /dev/null +++ b/releasenotes/notes/ci_visibility-update-remove-deprecated-pytest-entrypoints-5cb519a8a0858c9b.yaml @@ -0,0 +1,5 @@ +--- +upgrade: + - | + CI Visibility: Removed deprecated entry points for the ``pytest_benchmark`` and ``pytest_bdd`` integrations. These + plugins are now supported by the regular ``pytest`` integration. diff --git a/releasenotes/notes/di-remove-deprecated-var-d61cf16b8608c7bd.yaml b/releasenotes/notes/di-remove-deprecated-var-d61cf16b8608c7bd.yaml new file mode 100644 index 00000000000..a6ff93015d5 --- /dev/null +++ b/releasenotes/notes/di-remove-deprecated-var-d61cf16b8608c7bd.yaml @@ -0,0 +1,5 @@ +--- +upgrade: + - | + dynamic instrumentation: removed the deprecated + ``DD_DYNAMIC_INSTRUMENTATION_UPLOAD_FLUSH_INTERVAL`` variable. 
diff --git a/releasenotes/notes/er-deprecate-env-var-58386e5884e0de10.yaml b/releasenotes/notes/er-deprecate-env-var-58386e5884e0de10.yaml new file mode 100644 index 00000000000..e93aa2a5187 --- /dev/null +++ b/releasenotes/notes/er-deprecate-env-var-58386e5884e0de10.yaml @@ -0,0 +1,5 @@ +--- +upgrade: + - | + exception replay: removed the deprecated ``DD_EXCEPTION_DEBUGGING_ENABLED`` + variable. diff --git a/releasenotes/notes/explicit-span-tag-typing-99abb4d3ec065a55.yaml b/releasenotes/notes/explicit-span-tag-typing-99abb4d3ec065a55.yaml new file mode 100644 index 00000000000..9c4787f6677 --- /dev/null +++ b/releasenotes/notes/explicit-span-tag-typing-99abb4d3ec065a55.yaml @@ -0,0 +1,18 @@ +--- +upgrade: + - | + tracing: ``Span.set_tag`` typing is now ``set_tag(key: str, value: Optional[str] = None) -> None`` + - | + tracing: ``Span.get_tag`` typing is now ``get_tag(key: str) -> Optional[str]`` + - | + tracing: ``Span.set_tags`` typing is now ``set_tags(tags: dict[str, str]) -> None`` + - | + tracing: ``Span.get_tags`` typing is now ``get_tags() -> dict[str, str]`` + - | + tracing: ``Span.set_metric`` typing is now ``set_metric(key: str, value: int | float) -> None`` + - | + tracing: ``Span.get_metric`` typing is now ``get_metric(key: str) -> Optional[int | float]`` + - | + tracing: ``Span.set_metrics`` typing is now ``set_metrics(metrics: Dict[str, int | float]) -> None`` + - | + tracing: ``Span.get_metrics`` typing is now ``get_metrics() -> dict[str, int | float]`` diff --git a/releasenotes/notes/freezegun-remove-44312810d30f9e0b.yaml b/releasenotes/notes/freezegun-remove-44312810d30f9e0b.yaml new file mode 100644 index 00000000000..75620f165d7 --- /dev/null +++ b/releasenotes/notes/freezegun-remove-44312810d30f9e0b.yaml @@ -0,0 +1,4 @@ +--- +upgrade: + - | + freezegun: The deprecated `freezegun` integration is now removed. 
diff --git a/releasenotes/notes/internal-settings-3b45c1e8a96edc99.yaml b/releasenotes/notes/internal-settings-3b45c1e8a96edc99.yaml new file mode 100644 index 00000000000..6dafe750d36 --- /dev/null +++ b/releasenotes/notes/internal-settings-3b45c1e8a96edc99.yaml @@ -0,0 +1,5 @@ +--- +other: + - | + This change removes the ``ddtrace.settings`` package and replaces it with ``ddtrace.internal.settings``. + Environment variables can be used to adjust settings. diff --git a/releasenotes/notes/non-active-span-3398e88b19eb94c3.yaml b/releasenotes/notes/non-active-span-3398e88b19eb94c3.yaml new file mode 100644 index 00000000000..32eef0cfafe --- /dev/null +++ b/releasenotes/notes/non-active-span-3398e88b19eb94c3.yaml @@ -0,0 +1,4 @@ +--- +other: + - | + This change removes the deprecated non_active_span parameter to ``HttpPropagator.inject`` diff --git a/releasenotes/notes/opentracer-remove-b1883d26ea035c50.yaml b/releasenotes/notes/opentracer-remove-b1883d26ea035c50.yaml new file mode 100644 index 00000000000..6d248930d9c --- /dev/null +++ b/releasenotes/notes/opentracer-remove-b1883d26ea035c50.yaml @@ -0,0 +1,4 @@ +--- +upgrade: + - | + opentracer: This change removes the deprecated ``opentracer`` package diff --git a/releasenotes/notes/pin-remove-46288db02ed90799.yaml b/releasenotes/notes/pin-remove-46288db02ed90799.yaml new file mode 100644 index 00000000000..ebb670c5633 --- /dev/null +++ b/releasenotes/notes/pin-remove-46288db02ed90799.yaml @@ -0,0 +1,8 @@ +--- +upgrade: + - | + tracing: The deprecated ``Tracer.on_start_span`` method has been removed. + - | + tracing: The deprecated ``Tracer.deregister_on_start_span`` method has been removed. + - | + tracing: The deprecated ``ddtrace.trace.Pin`` has been removed.
diff --git a/releasenotes/notes/py38-remove-52943a5d318b4736.yaml b/releasenotes/notes/py38-remove-52943a5d318b4736.yaml new file mode 100644 index 00000000000..f71a666e383 --- /dev/null +++ b/releasenotes/notes/py38-remove-52943a5d318b4736.yaml @@ -0,0 +1,8 @@ +--- +upgrade: + - | + Support for ddtrace with Python 3.8 is removed after being deprecated in the 3.0 release line. Use ddtrace 4.x with + Python 3.9 or newer. +deprecations: + - | + Support for ddtrace with Python 3.9 is deprecated after Python 3.9 reached its end-of-life. diff --git a/releasenotes/notes/remove-app-analytics-52ac993f27e2607f.yaml b/releasenotes/notes/remove-app-analytics-52ac993f27e2607f.yaml new file mode 100644 index 00000000000..4cc2631d603 --- /dev/null +++ b/releasenotes/notes/remove-app-analytics-52ac993f27e2607f.yaml @@ -0,0 +1,4 @@ +--- +other: + - | + tracing: This change removes the deprecated functionality that controls ingestion via analytics. diff --git a/releasenotes/notes/remove-deprecated-span-methods-0e7bfc757ba64595.yaml b/releasenotes/notes/remove-deprecated-span-methods-0e7bfc757ba64595.yaml new file mode 100644 index 00000000000..141b3a8c0d6 --- /dev/null +++ b/releasenotes/notes/remove-deprecated-span-methods-0e7bfc757ba64595.yaml @@ -0,0 +1,8 @@ +--- +upgrade: + - | + tracing: ``Span.set_tag_str`` has been removed, use ``Span.set_tag`` instead. + - | + tracing: ``Span.set_struct_tag`` has been removed. + - | + tracing: ``Span.get_struct_tag`` has been removed. diff --git a/releasenotes/notes/remove-interval-envvar-88c126a791a448a0.yaml b/releasenotes/notes/remove-interval-envvar-88c126a791a448a0.yaml new file mode 100644 index 00000000000..2dcd05f9c50 --- /dev/null +++ b/releasenotes/notes/remove-interval-envvar-88c126a791a448a0.yaml @@ -0,0 +1,4 @@ +--- +other: + - | + This change removes the deprecated environment variable `DEFAULT_RUNTIME_METRICS_INTERVAL`. 
diff --git a/releasenotes/notes/remove-pymongo-engine-0584c2055377f718.yaml b/releasenotes/notes/remove-pymongo-engine-0584c2055377f718.yaml new file mode 100644 index 00000000000..772aade8185 --- /dev/null +++ b/releasenotes/notes/remove-pymongo-engine-0584c2055377f718.yaml @@ -0,0 +1,5 @@ +--- +other: + - | + mongoengine: Drops support for the ``ddtrace.Pin`` object with mongoengine. With this change, the ddtrace library no longer directly supports mongoengine. + Mongoengine will be supported through the ``pymongo`` integration. diff --git a/releasenotes/notes/remove-span-finished-finish-with-ancestors-fb2d11b874206f59.yaml b/releasenotes/notes/remove-span-finished-finish-with-ancestors-fb2d11b874206f59.yaml new file mode 100644 index 00000000000..f305babaebc --- /dev/null +++ b/releasenotes/notes/remove-span-finished-finish-with-ancestors-fb2d11b874206f59.yaml @@ -0,0 +1,6 @@ +--- +upgrade: + - | + tracing: ``Span.finished`` setter was removed, please use ``Span.finish()`` method instead. + - | + tracing: ``Span.finish_with_ancestors`` was removed with no replacement. diff --git a/releasenotes/notes/remove-submit-evaluation-for-ef0c5a217eb18a46.yaml b/releasenotes/notes/remove-submit-evaluation-for-ef0c5a217eb18a46.yaml new file mode 100644 index 00000000000..dc8ef083b82 --- /dev/null +++ b/releasenotes/notes/remove-submit-evaluation-for-ef0c5a217eb18a46.yaml @@ -0,0 +1,7 @@ +upgrade: + - | + LLM Observability: ``LLMObs.submit_evaluation_for()`` has been removed. Please use ``LLMObs.submit_evaluation()`` instead for submitting evaluations. + To migrate: + - ``LLMObs.submit_evaluation_for(...)`` users: rename to ``LLMObs.submit_evaluation(...)`` + - ``LLMObs.submit_evaluation_for(...)`` users: rename the ``span_context`` argument to ``span``, i.e. 
+ ``LLMObs.submit_evaluation(span_context={"span_id": ..., "trace_id": ...}, ...)`` to ``LLMObs.submit_evaluation(span={"span_id": ..., "trace_id": ...}, ...)`` diff --git a/releasenotes/notes/span-args-remove-5feecae6cf00537f.yaml b/releasenotes/notes/span-args-remove-5feecae6cf00537f.yaml new file mode 100644 index 00000000000..03d65342e97 --- /dev/null +++ b/releasenotes/notes/span-args-remove-5feecae6cf00537f.yaml @@ -0,0 +1,4 @@ +--- +other: + - | + This change removes deprecated methods and method parameters from the `Span` class. diff --git a/releasenotes/notes/upgrade-google-generativeai-removed-23cedc4c9dc95408.yaml b/releasenotes/notes/upgrade-google-generativeai-removed-23cedc4c9dc95408.yaml new file mode 100644 index 00000000000..d6e7507978e --- /dev/null +++ b/releasenotes/notes/upgrade-google-generativeai-removed-23cedc4c9dc95408.yaml @@ -0,0 +1,5 @@ +--- +upgrade: + - | + google_generativeai: The ``google_generativeai`` integration has been removed as the ``google_generativeai`` library has reached end-of-life. + As an alternative, you can use the recommended ``google_genai`` library and corresponding integration instead. 
diff --git a/riotfile.py b/riotfile.py index ce7b8081c3a..550b0f01166 100644 --- a/riotfile.py +++ b/riotfile.py @@ -10,21 +10,18 @@ latest = "" SUPPORTED_PYTHON_VERSIONS: List[Tuple[int, int]] = [ - (3, 8), (3, 9), (3, 10), (3, 11), (3, 12), (3, 13), (3, 14), -] # type: List[Tuple[int, int]] +] def version_to_str(version: Tuple[int, int]) -> str: """Convert a Python version tuple to a string - >>> version_to_str((3, 8)) - '3.8' >>> version_to_str((3, 9)) '3.9' >>> version_to_str((3, 10)) @@ -46,8 +43,6 @@ def version_to_str(version: Tuple[int, int]) -> str: def str_to_version(version: str) -> Tuple[int, int]: """Convert a Python version string to a tuple - >>> str_to_version("3.8") - (3, 8) >>> str_to_version("3.9") (3, 9) >>> str_to_version("3.10") @@ -74,13 +69,13 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT """Helper to select python versions from the list of versions we support >>> select_pys() - ['3.8', '3.9', '3.10', '3.11', '3.12', '3.13', '3.14'] + ['3.9', '3.10', '3.11', '3.12', '3.13', '3.14'] >>> select_pys(min_version='3') - ['3.8', '3.9', '3.10', '3.11', '3.12', '3.13', '3.14'] + ['3.9', '3.10', '3.11', '3.12', '3.13', '3.14'] >>> select_pys(max_version='3') [] - >>> select_pys(min_version='3.8', max_version='3.9') - ['3.8', '3.9'] + >>> select_pys(min_version='3.9', max_version='3.10') + ['3.9', '3.10'] """ min_version = str_to_version(min_version) max_version = str_to_version(max_version) @@ -211,7 +206,7 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT Venv( name="appsec_iast_packages", # FIXME: GrpcIO is hanging with 3.13 on CI + hatch for some reason - pys=["3.8", "3.9", "3.10", "3.11", "3.12"], + pys=["3.9", "3.10", "3.11", "3.12"], command="pytest {cmdargs} tests/appsec/iast_packages/", pkgs={ "requests": latest, @@ -267,23 +262,23 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT }, venvs=[ Venv( - pys=["3.8", "3.9"], + pys=["3.9"], 
pkgs={"django": "~=2.2"}, ), Venv( - pys=["3.8", "3.9", "3.10", "3.11", "3.12", "3.13"], + pys=["3.9", "3.10", "3.11", "3.12", "3.13"], pkgs={"django": "~=3.2", "legacy-cgi": latest}, ), Venv( - pys=["3.8", "3.9", "3.10", "3.11", "3.12"], - pkgs={"django": "==4.0.10"}, + pys=["3.9", "3.10", "3.11", "3.12", "3.13"], + pkgs={"django": "==4.0.10", "legacy-cgi": latest}, ), Venv( pys=["3.13"], pkgs={"django": "==4.0.10", "legacy-cgi": latest}, ), Venv( - pys=["3.8", "3.9", "3.10", "3.11", "3.12"], + pys=["3.9", "3.10", "3.11", "3.12", "3.13"], pkgs={"django": "~=4.2"}, ), Venv( @@ -320,18 +315,6 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT "DD_IAST_DEDUPLICATION_ENABLED": "false", }, venvs=[ - Venv( - pys=["3.8"], - pkgs={"fastapi": "==0.86.0", "anyio": "==3.7.1"}, - ), - Venv( - pys=["3.8"], - pkgs={"fastapi": "==0.94.1"}, - ), - Venv( - pys=["3.8"], - pkgs={"fastapi": "~=0.114.2"}, - ), Venv( pys=select_pys(min_version="3.9", max_version="3.13"), pkgs={"fastapi": "==0.86.0", "anyio": "==3.7.1"}, @@ -346,15 +329,6 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT ), ], ), - Venv( - name="profile-diff", - command="python scripts/diff.py {cmdargs}", - pys="3", - pkgs={ - "austin-python": "~=1.0", - "rich": latest, - }, - ), Venv( name="appsec_iast_default", command="pytest -v {cmdargs} tests/appsec/iast/", @@ -558,7 +532,7 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT }, venvs=[ Venv( - pys=select_pys(min_version="3.8", max_version="3.11"), + pys=select_pys(min_version="3.9", max_version="3.11"), pkgs={ "pytest-asyncio": "~=0.23.7", }, @@ -605,14 +579,6 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT "opensearch-py": latest, }, venvs=[ - Venv( - pys="3.8", - pkgs={ - "gevent": "~=20.12.0", - # greenlet v1.0.0 adds support for contextvars - "greenlet": "~=1.0.0", - }, - ), Venv( pys="3.9", pkgs={ @@ -720,7 +686,7 @@ def 
select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT # FIXME: tests fail on vertica 1.x # Venv( # # vertica-python added support for Python 3.9/3.10 in 1.0 - # pys=select_pys(min_version="3.8", max_version="3.10"), + # pys=select_pys(min_version="3.9", max_version="3.10"), # pkgs={"vertica-python": ["~=1.0", latest]}, # ), # Venv( @@ -779,7 +745,7 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT }, venvs=[ Venv( - pys=select_pys(min_version="3.8", max_version="3.12"), + pys=select_pys(min_version="3.9", max_version="3.12"), pkgs={ "falcon": [ "~=3.0.0", @@ -835,23 +801,8 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT "pytest-randomly": latest, }, venvs=[ - # Celery 4.3 wants Kombu >= 4.4 and Redis >= 3.2 - # Split into <3.8 and >=3.8 to pin importlib_metadata dependency for kombu - # # celery added support for Python 3.9 in 4.x - # pys=select_pys(min_version="3.8", max_version="3.9"), - # pkgs={ - # "pytest": "~=4.0", - # "celery": [ - # "latest", # most recent 4.x - # ], - # "redis": "~=3.5", - # "kombu": "~=4.4", - # }, - # ), - # Celery 5.x wants Python 3.6+ - # Split into <3.8 and >=3.8 to pin importlib_metadata dependency for kombu - Venv( - pys=select_pys(min_version="3.8", max_version="3.9"), + Venv( + pys=["3.9"], env={ # https://docs.celeryproject.org/en/v5.0.5/userguide/testing.html#enabling "PYTEST_PLUGINS": "celery.contrib.pytest", @@ -898,7 +849,7 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT ), Venv( # cherrypy added support for Python 3.11 in 18.7 - pys=select_pys(min_version="3.8"), + pys=select_pys(), pkgs={ "cherrypy": [">=18.0,<19", latest], "more_itertools": "<8.11.0", @@ -918,7 +869,7 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT # pymmongo<3.9, 3.9<=pymongo<3.12, 3.12<=pymongo<4.5, pymongo>=4.5 # To get full test coverage we must test all these version ranges Venv( - 
pys=select_pys(min_version="3.8", max_version="3.9"), + pys=["3.9"], pkgs={"pymongo": ["~=3.8.0", "~=3.9.0", "~=3.11", "~=4.0", latest]}, ), Venv( @@ -933,14 +884,14 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT name="ddtrace_api", command="pytest {cmdargs} tests/contrib/ddtrace_api", pkgs={"ddtrace-api": "==0.0.1", "requests": latest}, - pys=select_pys(min_version="3.8"), + pys=select_pys(), ), # Django Python version support - # 2.2 3.5, 3.6, 3.7, 3.8 3.9 - # 3.2 3.6, 3.7, 3.8, 3.9, 3.10 - # 4.0 3.8, 3.9, 3.10 - # 4.1 3.8, 3.9, 3.10, 3.11 - # 4.2 3.8, 3.9, 3.10, 3.11, 3.12 + # 2.2 3.9 + # 3.2 3.9, 3.10 + # 4.0 3.9, 3.10 + # 4.1 3.9, 3.10, 3.11 + # 4.2 3.9, 3.10, 3.11, 3.12 # 5.0 3.10, 3.11, 3.12 # 5.1 3.10, 3.11, 3.12, 3.13 # 5.2 3.10, 3.11, 3.12, 3.13 @@ -972,10 +923,10 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT }, venvs=[ Venv( - # django dropped support for Python 3.8/3.9 in 5.0 + # django dropped support for Python 3.9 in 5.0 # limit tests to only the main django test files to avoid import errors due to some tests # targeting newer django versions - pys=select_pys(min_version="3.8", max_version="3.9"), + pys=["3.9"], command="pytest {cmdargs} --ignore=tests/contrib/django/test_django_snapshots.py \ --ignore=tests/contrib/django/test_django_wsgi.py tests/contrib/django", pkgs={ @@ -985,7 +936,7 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT ), Venv( # django started supporting psycopg3 in 4.2 for versions >3.1.8 - pys=select_pys(min_version="3.8", max_version="3.13"), + pys=select_pys(min_version="3.9", max_version="3.13"), pkgs={ "django": ["~=4.2"], "psycopg": latest, @@ -1006,14 +957,14 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT }, venvs=[ Venv( - pys=select_pys(min_version="3.8", max_version="3.10"), + pys=select_pys(min_version="3.9", max_version="3.10"), pkgs={ "django_hosts": "~=4.0", "django": 
"~=3.2", }, ), Venv( - pys=select_pys(min_version="3.8", max_version="3.13"), + pys=select_pys(min_version="3.9", max_version="3.13"), pkgs={ "django_hosts": ["~=5.0", latest], "django": "~=4.0", @@ -1032,21 +983,21 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT venvs=[ Venv( # djangorestframework dropped support for Django 2.x in 3.14 - pys=select_pys(min_version="3.8", max_version="3.9"), + pys=["3.9"], pkgs={ "django": ">=2.2,<2.3", "djangorestframework": ["==3.12.4", "==3.13.1"], }, ), Venv( - pys=select_pys(min_version="3.8", max_version="3.10"), + pys=select_pys(min_version="3.9", max_version="3.10"), pkgs={ "django": "~=3.2", "djangorestframework": ">=3.11,<3.12", }, ), Venv( - pys=select_pys(min_version="3.8", max_version="3.13"), + pys=select_pys(min_version="3.9", max_version="3.13"), pkgs={ "django": ["~=4.0"], "djangorestframework": ["~=3.13", latest], @@ -1070,7 +1021,7 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT }, venvs=[ Venv( - pys=select_pys(min_version="3.8", max_version="3.9"), + pys=["3.9"], pkgs={ "sqlalchemy": "~=1.2.18", "django": "~=2.2.0", @@ -1090,7 +1041,7 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT command="pytest {cmdargs} tests/contrib/dramatiq", venvs=[ Venv( - pys=select_pys(min_version="3.8", max_version="3.9"), + pys=["3.9"], pkgs={"dramatiq": "~=1.10.0", "pytest": latest, "redis": latest, "pika": latest}, ), Venv( @@ -1213,7 +1164,7 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT }, ), Venv( - pys=select_pys(min_version="3.8"), + pys=select_pys(), pkgs={ "flask": [ "~=2.0", @@ -1225,7 +1176,7 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT }, ), Venv( - pys=select_pys(min_version="3.8"), + pys=select_pys(), command="python tests/ddtrace_run.py pytest {cmdargs} tests/contrib/flask_autopatch", env={ "DD_SERVICE": "test.flask.service", @@ -1271,7 
+1222,7 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT "markupsafe": "<2.0", }, venvs=[ - Venv(pys=select_pys(min_version="3.8", max_version="3.9"), pkgs={"exceptiongroup": latest}), + Venv(pys=["3.9"], pkgs={"exceptiongroup": latest}), ], ), Venv( @@ -1287,7 +1238,7 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT }, venvs=[ Venv( - pys=select_pys(min_version="3.8", max_version="3.11"), + pys=select_pys(min_version="3.9", max_version="3.11"), ), Venv( pys=select_pys(min_version="3.12", max_version="3.13"), @@ -1304,7 +1255,7 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT }, venvs=[ Venv( - pys=select_pys(min_version="3.8", max_version="3.11"), + pys=select_pys(min_version="3.9", max_version="3.11"), ), Venv(pys=select_pys(min_version="3.12", max_version="3.13"), pkgs={"redis": latest}), ], @@ -1328,7 +1279,7 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT }, venvs=[ Venv( - pys=select_pys(min_version="3.8", max_version="3.9"), + pys=["3.9"], pkgs={"mysql-connector-python": ["==8.0.5", latest]}, ), Venv( @@ -1355,13 +1306,7 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT }, venvs=[ Venv( - pys="3.8", - pkgs={"psycopg2-binary": "~=2.8.0"}, - ), - Venv( - pys=select_pys(min_version="3.8"), - # psycopg2-binary added support for Python 3.9/3.10 in 2.9.1 - # psycopg2-binary added support for Python 3.11 in 2.9.2 + pys=select_pys(), pkgs={"psycopg2-binary": ["~=2.9.2", latest]}, ), ], @@ -1376,14 +1321,14 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT Venv( venvs=[ Venv( - pys=select_pys(min_version="3.8", max_version="3.9"), + pys=["3.9"], pkgs={ "psycopg": "~=3.0.0", "pytest-asyncio": "==0.21.1", }, ), Venv( - pys=select_pys(min_version="3.8", max_version="3.11"), + pys=select_pys(min_version="3.9", max_version="3.11"), pkgs={ "psycopg": latest, 
"pytest-asyncio": "==0.21.1", @@ -1470,19 +1415,7 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT # TODO: Py312 requires changes to test code venvs=[ Venv( - pys=select_pys(min_version="3.8", max_version="3.8"), - pkgs={ - "pynamodb": ["~=5.0.0"], - "botocore": ["<=1.25.0"], - "moto": ">=1.0,<2.0", - "cfn-lint": "~=0.53.1", - "Jinja2": "~=2.10.0", - "pytest-randomly": latest, - "pytest-xdist": latest, - }, - ), - Venv( - pys=select_pys(min_version="3.8", max_version="3.11"), + pys=select_pys(min_version="3.9", max_version="3.11"), pkgs={ "pynamodb": ["~=5.3", "<6.0"], "moto": ">=1.0,<2.0", @@ -1513,7 +1446,7 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT # starlette added new root_path/path definitions after v0.33 Venv( # starlette added support for Python 3.9 in 0.14 - pys=select_pys(min_version="3.8", max_version="3.9"), + pys="3.9", pkgs={"starlette": ["~=0.14.0", "~=0.20.0", "~=0.33.0"], "httpx": "~=0.22.0"}, ), Venv( @@ -1531,7 +1464,7 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT pkgs={"starlette": latest, "httpx": "~=0.27.0"}, ), Venv( - pys=select_pys(min_version="3.8", max_version="3.11"), + pys=select_pys(min_version="3.9", max_version="3.11"), pkgs={"starlette": [latest], "httpx": "~=0.22.0"}, ), ], @@ -1556,7 +1489,7 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT }, venvs=[ Venv( - pys=select_pys(min_version="3.8", max_version="3.12"), + pys=select_pys(min_version="3.9", max_version="3.12"), pkgs={ "greenlet": "==3.0.3", "sqlalchemy": ["~=1.3.0", latest], @@ -1585,16 +1518,6 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT "requests-mock": ">=1.4", }, venvs=[ - Venv( - # requests added support for Python 3.8 in 2.23 - pys="3.8", - pkgs={ - "requests": [ - "~=2.20.0", - latest, - ], - }, - ), Venv( # requests added support for Python 3.9 in 2.25 pys="3.9", @@ -1669,7 +1592,7 
@@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT pkgs={"botocore": "==1.34.49", "boto3": "==1.34.49"}, venvs=[ Venv( - pys=select_pys(min_version="3.8"), + pys=select_pys(), ), ], ), @@ -1677,32 +1600,12 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT pkgs={"vcrpy": "==7.0.0", "botocore": "==1.38.26", "boto3": "==1.38.26"}, venvs=[ Venv( - pys=select_pys(min_version="3.9"), + pys=select_pys(), ), ], ), ], ), - Venv( - name="mongoengine", - command="pytest {cmdargs} tests/contrib/mongoengine", - pkgs={ - # pymongo v4.9.0 introduced breaking changes that are not yet supported by mongoengine - "pymongo": "<4.9.0", - "pytest-randomly": latest, - }, - venvs=[ - Venv( - pys="3.8", - pkgs={"mongoengine": ["~=0.23.0", latest]}, - ), - Venv( - # mongoengine added support for Python 3.9/3.10 in 0.24 - pys=select_pys(min_version="3.9"), - pkgs={"mongoengine": ["~=0.24.0", "~=0.24", latest]}, - ), - ], - ), Venv( name="asgi", pkgs={ @@ -1711,7 +1614,7 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT "asgiref": ["~=3.0.0", "~=3.0", latest], "pytest-randomly": latest, }, - pys=select_pys(min_version="3.8"), + pys=select_pys(), command="pytest {cmdargs} tests/contrib/asgi", ), Venv( @@ -1722,7 +1625,7 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT }, venvs=[ Venv( - pys=select_pys(min_version="3.8", max_version="3.10"), + pys=select_pys(min_version="3.9", max_version="3.10"), pkgs={ "mariadb": [ "~=1.0.0", @@ -1742,12 +1645,12 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT }, venvs=[ Venv( - # pymysql added support for Python 3.8/3.9 in 0.10 - pys=select_pys(min_version="3.8", max_version="3.9"), + # pymysql added support for Python 3.9 in 0.10 + pys="3.9", pkgs={"pymysql": "~=0.10"}, ), Venv( - pys=select_pys(min_version="3.8", max_version="3.12"), + pys=select_pys(min_version="3.9", 
max_version="3.12"), pkgs={ "pymysql": [ "~=1.0", @@ -1776,7 +1679,7 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT }, venvs=[ Venv( - pys=select_pys(min_version="3.8", max_version="3.9"), + pys="3.9", pkgs={ "pyramid": [ "~=1.10", @@ -1814,7 +1717,7 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT }, venvs=[ Venv( - pys=select_pys(min_version="3.8", max_version="3.11"), + pys=select_pys(min_version="3.9", max_version="3.11"), pkgs={ "aiobotocore": ["~=1.0.0", "~=1.4.2", "~=2.0.0", latest], }, @@ -1838,7 +1741,7 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT }, venvs=[ Venv( - pys=select_pys(min_version="3.8", max_version="3.10"), + pys=select_pys(min_version="3.9", max_version="3.10"), pkgs={"fastapi": ["~=0.64.0", "~=0.90.0", latest]}, ), Venv( @@ -1857,7 +1760,7 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT command="pytest {cmdargs} tests/contrib/aiomysql", venvs=[ Venv( - pys=select_pys(min_version="3.8", max_version="3.12"), + pys=select_pys(min_version="3.9", max_version="3.12"), pkgs={ "pytest-randomly": latest, "pytest-asyncio": "==0.21.1", @@ -1886,7 +1789,7 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT }, venvs=[ Venv( - pys=select_pys(min_version="3.8", max_version="3.9"), + pys="3.9", pkgs={ "pytest": [ ">=6.0,<7.0", @@ -1952,7 +1855,7 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT }, venvs=[ Venv( - pys=select_pys(min_version="3.8", max_version="3.9"), + pys="3.9", pkgs={ "pytest": [ ">=6.0,<7.0", @@ -1978,7 +1881,7 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT }, venvs=[ Venv( - pys=select_pys(min_version="3.8", max_version="3.9"), + pys="3.9", pkgs={ "pytest-bdd": [ ">=4.0,<5.0", @@ -2000,7 +1903,7 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT ), Venv( 
name="pytest_benchmark", - pys=select_pys(min_version="3.8"), + pys=select_pys(), command="pytest {cmdargs} --no-ddtrace --no-cov tests/contrib/pytest_benchmark/", pkgs={ "msgpack": latest, @@ -2018,7 +1921,7 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT ), Venv( name="pytest:flaky", - pys=select_pys(min_version="3.8"), + pys=select_pys(), command="pytest {cmdargs} --no-ddtrace --no-cov -p no:flaky tests/contrib/pytest_flaky/", pkgs={ "flaky": latest, @@ -2036,7 +1939,7 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT # Versions between 1.14 and 1.20 have known threading issues # See https://github.com/grpc/grpc/issues/18994 Venv( - pys=select_pys(min_version="3.8", max_version="3.9"), + pys="3.9", pkgs={"grpcio": ["~=1.34.0", latest]}, ), Venv( @@ -2083,7 +1986,7 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT env={"_DD_TRACE_GRPC_AIO_ENABLED": "true"}, venvs=[ Venv( - pys=select_pys(min_version="3.8", max_version="3.9"), + pys="3.9", pkgs={ "grpcio": ["~=1.34.0", "~=1.59.0"], "pytest-asyncio": "==0.23.7", @@ -2117,7 +2020,7 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT }, venvs=[ Venv( - pys=select_pys(min_version="3.8", max_version="3.13"), + pys=select_pys(min_version="3.9", max_version="3.13"), pkgs={ "graphene": ["~=3.0.0", latest], "pytest-asyncio": "==0.21.1", @@ -2135,7 +2038,7 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT Venv( name="graphql", command="pytest {cmdargs} tests/contrib/graphql", - pys=select_pys(min_version="3.8"), + pys=select_pys(), pkgs={ "pytest-asyncio": "==0.21.1", "graphql-core": ["~=3.2.0", latest], @@ -2150,18 +2053,6 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT "pytest-randomly": latest, }, venvs=[ - Venv( - pys="3.8", - pkgs={ - "rq": [ - "~=1.8.0", - "~=1.10.0", - latest, - ], - # 
https://github.com/rq/rq/issues/1469 rq [1.0,1.8] is incompatible with click 8.0+ - "click": "==7.1.2", - }, - ), Venv( # rq added support for Python 3.9 in 1.8.1 pys="3.9", @@ -2211,11 +2102,6 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT "pytest-randomly": latest, }, venvs=[ - Venv( - # Support added for Python 3.8 in 1.25.0 - pys="3.8", - pkgs={"urllib3": ["==1.25.0", latest]}, - ), Venv( # Support added for Python 3.9 in 1.25.8 pys="3.9", @@ -2238,24 +2124,14 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT ), ], ), - Venv( - name="cassandra", - pys="3.8", # see https://github.com/r4fek/django-cassandra-engine/issues/104 - pkgs={"cassandra-driver": ["~=3.24.0", latest], "pytest-randomly": latest}, - command="pytest {cmdargs} tests/contrib/cassandra", - ), Venv( name="algoliasearch", command="pytest {cmdargs} tests/contrib/algoliasearch", pkgs={"urllib3": "~=1.26.15", "pytest-randomly": latest}, venvs=[ - Venv( - pys="3.8", - pkgs={"algoliasearch": ["~=2.5.0", "~=2.6"]}, - ), Venv( # algoliasearch added support for Python 3.9, 3.10, 3.11 in 3.0 - pys=select_pys(min_version="3.9"), + pys=select_pys(), pkgs={"algoliasearch": "~=2.6"}, ), ], @@ -2269,13 +2145,13 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT }, venvs=[ Venv( - pys=select_pys(min_version="3.8", max_version="3.9"), + pys="3.9", pkgs={ "aiopg": ["~=0.16.0"], }, ), Venv( - pys=select_pys(min_version="3.8"), + pys=select_pys(), pkgs={ "aiopg": ["~=1.0", "~=1.4.0"], }, @@ -2298,7 +2174,7 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT # only test a subset of files for older aiohttp versions command="pytest {cmdargs} tests/contrib/aiohttp/test_aiohttp_client.py \ tests/contrib/aiohttp/test_aiohttp_patch.py", - pys=select_pys(min_version="3.8", max_version="3.9"), + pys="3.9", pkgs={ "pytest-aiohttp": ["<=1.0.5"], "aiohttp": ["~=3.7.0"], @@ -2306,7 +2182,7 @@ def 
select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT }, ), Venv( - pys=select_pys(min_version="3.8", max_version="3.12"), + pys=select_pys(min_version="3.9", max_version="3.12"), pkgs={ "pytest-asyncio": ["==0.23.7"], "pytest-aiohttp": ["==1.0.5"], @@ -2339,7 +2215,7 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT }, venvs=[ Venv( - pys=select_pys(min_version="3.8", max_version="3.12"), + pys=select_pys(min_version="3.9", max_version="3.12"), pkgs={ "pytest-asyncio": ["==0.23.7"], }, @@ -2359,7 +2235,7 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT }, venvs=[ Venv( - pys=select_pys(max_version="3.9"), + pys="3.9", pkgs={ "jinja2": "~=2.10.0", # https://github.com/pallets/markupsafe/issues/282 @@ -2368,7 +2244,7 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT }, ), Venv( - pys=select_pys(min_version="3.8"), + pys=select_pys(), pkgs={ "jinja2": ["~=3.0.0", latest], }, @@ -2401,7 +2277,7 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT }, venvs=[ Venv( - pys=select_pys(min_version="3.8", max_version="3.10"), + pys=select_pys(min_version="3.9", max_version="3.10"), pkgs={ "pytest-asyncio": "==0.23.7", }, @@ -2437,7 +2313,7 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT ), Venv( name="aredis", - pys=select_pys(min_version="3.8", max_version="3.9"), + pys="3.9", command="pytest {cmdargs} tests/contrib/aredis", pkgs={ "pytest-asyncio": "==0.21.1", @@ -2447,7 +2323,7 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT ), Venv( name="avro", - pys=select_pys(min_version="3.8"), + pys=select_pys(), command="pytest {cmdargs} tests/contrib/avro", pkgs={ "avro": latest, @@ -2457,7 +2333,7 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT Venv( name="protobuf", command="pytest {cmdargs} tests/contrib/protobuf", - 
pys=select_pys(min_version="3.8"), + pys=select_pys(), pkgs={ "protobuf": latest, "pytest-randomly": latest, @@ -2472,7 +2348,7 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT }, venvs=[ Venv( - pys=select_pys(min_version="3.8", max_version="3.9"), + pys="3.9", pkgs={"yaaredis": ["~=2.0.0", latest]}, ), Venv( @@ -2494,14 +2370,14 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT venvs=[ Venv( # sanic added support for Python 3.9 in 20.12 - pys=select_pys(min_version="3.8", max_version="3.9"), + pys="3.9", pkgs={ "sanic": "~=20.12", "pytest-sanic": "~=1.6.2", }, ), Venv( - pys=select_pys(min_version="3.8", max_version="3.9"), + pys="3.9", pkgs={ "sanic": [ "~=21.3", @@ -2519,7 +2395,7 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT }, ), Venv( - pys=select_pys(min_version="3.8", max_version="3.10"), + pys=select_pys(min_version="3.9", max_version="3.10"), pkgs={ "sanic": ["~=22.3", "~=22.12"], "sanic-testing": "~=22.3.0", @@ -2551,10 +2427,6 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT "pytest-randomly": latest, }, venvs=[ - Venv( - pys="3.8", - pkgs={"snowflake-connector-python": ["~=2.3.0", "~=2.9.0", latest]}, - ), Venv( # snowflake-connector-python added support for Python 3.9 in 2.4.0 pys="3.9", @@ -2588,11 +2460,6 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT "pytest-randomly": latest, }, venvs=[ - # our test_asyncpg.py uses `yield` in an async function and is not compatible with Python 3.5 - Venv( - pys="3.8", - pkgs={"asyncpg": ["~=0.22.0", latest]}, - ), Venv( # asyncpg added support for Python 3.9 in 0.22 pys="3.9", @@ -2634,7 +2501,6 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT # To test a range of versions without updating Python, we use Linux only pysqlite3-binary package # Remove pysqlite3-binary on Python 3.9+ locally on non-linux 
machines Venv(pys=select_pys(min_version="3.9", max_version="3.12"), pkgs={"pysqlite3-binary": [latest]}), - Venv(pys=select_pys(max_version="3.8"), pkgs={"importlib-metadata": latest}), ], ), Venv( @@ -2661,7 +2527,7 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT "pytest-randomly": latest, }, venvs=[ - Venv(pys=select_pys(min_version="3.8", max_version="3.10")), + Venv(pys=select_pys(min_version="3.9", max_version="3.10")), Venv(pys=select_pys(min_version="3.11"), pkgs={"attrs": latest}), ], ), @@ -2673,7 +2539,7 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT }, venvs=[ Venv( - pys=select_pys(min_version="3.8", max_version="3.10"), + pys=select_pys(min_version="3.9", max_version="3.10"), pkgs={ "dogpile.cache": [ "~=0.6.0", @@ -2722,29 +2588,16 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT "markupsafe": "==2.0.1", "mock": latest, "flask": latest, - "gevent": latest, # gevent>22.12 is not compatible with py3.8 + "gevent": latest, "requests": "==2.28.1", # specific version expected by tests }, venvs=[ - Venv( - pys="3.8", - # Ensure we test against versions of opentelemetry-api that broke compatibility with ddtrace - # gevent>24.2.1 is not compatible with py3.8 so we pin it to the last compatible version - pkgs={"gevent": "<=24.2.1", "opentelemetry-api": ["~=1.0.0", "~=1.15.0", "~=1.26.0", latest]}, - ), Venv( # opentelemetry-api doesn't yet work with Python 3.14 pys=select_pys(min_version="3.9", max_version="3.13"), # Ensure we test against versions of opentelemetry-api that broke compatibility with ddtrace pkgs={"opentelemetry-api": ["~=1.0.0", "~=1.15.0", "~=1.26.0", latest]}, ), - Venv( - pys="3.8", - # Ensure we test against versions of opentelemetry-api that broke compatibility with ddtrace - # gevent>24.2.1 is not compatible with py3.8 so we pin it to the last compatible version - pkgs={"gevent": "<=24.2.1", "opentelemetry-exporter-otlp": 
["~=1.15.0", latest]}, - env={"SDK_EXPORTER_INSTALLED": "1"}, - ), Venv( # opentelemetry-exporter-otlp doesn't yet work with Python 3.14 pys=select_pys(min_version="3.9", max_version="3.13"), @@ -2801,7 +2654,7 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT }, venvs=[ Venv( - pys=select_pys(min_version="3.8", max_version="3.11"), + pys=select_pys(min_version="3.9", max_version="3.11"), pkgs={ "openai[embeddings,datalib]": ["==1.0.0", "==1.30.1"], "pillow": "==9.5.0", @@ -2809,7 +2662,7 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT }, ), Venv( - pys=select_pys(min_version="3.8", max_version="3.13"), + pys=select_pys(min_version="3.9", max_version="3.13"), pkgs={ "openai": [latest, "<2.0.0", "~=1.76.2", "==1.66.0"], "pillow": latest, @@ -2826,12 +2679,12 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT command="pytest {cmdargs} tests/opentracer/core", ), Venv( - pys=select_pys(min_version="3.8"), + pys=select_pys(), command="pytest {cmdargs} tests/opentracer/test_tracer_asyncio.py", pkgs={"pytest-asyncio": "==0.21.1"}, ), Venv( - pys=select_pys(min_version="3.8", max_version="3.11"), + pys=select_pys(min_version="3.9", max_version="3.11"), command="pytest {cmdargs} tests/opentracer/test_tracer_tornado.py", # TODO: update opentracing tests to be compatible with Tornado v6. 
# https://github.com/opentracing/opentracing-python/issues/136 @@ -2842,13 +2695,6 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT Venv( command="pytest {cmdargs} tests/opentracer/test_tracer_gevent.py", venvs=[ - Venv( - pys="3.8", - pkgs={ - "gevent": latest, - "greenlet": latest, - }, - ), Venv( pys="3.9", pkgs={"gevent": latest, "greenlet": latest}, @@ -2878,10 +2724,6 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT command="pytest {cmdargs} tests/contrib/pyodbc", pkgs={"pytest-randomly": latest}, venvs=[ - Venv( - pys=select_pys(max_version="3.8"), - pkgs={"pyodbc": ["~=4.0.31", latest]}, - ), Venv( # pyodbc added support for Python 3.9/3.10 in 4.0.34 pys=select_pys(min_version="3.9", max_version="3.10"), @@ -2900,8 +2742,8 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT pkgs={"pytest-randomly": latest}, venvs=[ Venv( - # pylibmc added support for Python 3.8/3.9/3.10 in 1.6.2 - pys=select_pys(min_version="3.8", max_version="3.10"), + # pylibmc added support for Python 3.9/3.10 in 1.6.2 + pys=select_pys(min_version="3.9", max_version="3.10"), pkgs={ "pylibmc": ["~=1.6.2", latest], }, @@ -2920,7 +2762,7 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT pkgs={"pytest-randomly": latest}, venvs=[ Venv( - pys=select_pys(min_version="3.8", max_version="3.9"), + pys="3.9", pkgs={ "kombu": [">=4.6,<4.7", ">=5.0,<5.1", latest], }, @@ -2941,8 +2783,8 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT pkgs={"pytest-randomly": latest}, venvs=[ Venv( - # tornado added support for Python 3.8/3.9 in 6.1 - pys=select_pys(min_version="3.8", max_version="3.9"), + # tornado added support for Python 3.9 in 6.1 + pys="3.9", # tornado 6.0.x and pytest 8.x have a compatibility bug pkgs={"tornado": ["~=6.0.0", "~=6.2"], "pytest": "<=8"}, ), @@ -2964,7 +2806,7 @@ def select_pys(min_version: str = 
MIN_PYTHON_VERSION, max_version: str = MAX_PYT pkgs={"pytest-randomly": latest}, venvs=[ Venv( - pys=select_pys(min_version="3.8", max_version="3.9"), + pys="3.9", pkgs={"mysqlclient": ["~=2.0", "~=2.1", latest]}, ), Venv( @@ -3090,31 +2932,15 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT }, venvs=[ Venv( - pys=select_pys(min_version="3.8", max_version="3.12"), + pys=select_pys(min_version="3.9", max_version="3.12"), pkgs={"anthropic": "~=0.28.0", "httpx": "~=0.27.0"}, ), Venv( - pys=select_pys(min_version="3.8"), + pys=select_pys(), pkgs={"anthropic": latest, "httpx": "<0.28.0"}, ), ], ), - Venv( - name="google_generativeai", - command="pytest {cmdargs} tests/contrib/google_generativeai", - venvs=[ - Venv( - pys=select_pys(min_version="3.9", max_version="3.13"), - pkgs={ - "pytest-asyncio": latest, - "google-generativeai": ["~=0.7.0", latest], - "pillow": latest, - "google-ai-generativelanguage": [latest], - "vertexai": [latest], - }, - ) - ], - ), Venv( name="vertexai", command="pytest {cmdargs} tests/contrib/vertexai", @@ -3136,7 +2962,7 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT }, venvs=[ Venv( - pys=select_pys(min_version="3.9"), + pys=select_pys(), ), ], ), @@ -3149,7 +2975,7 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT }, venvs=[ Venv( - pys=select_pys(min_version="3.9"), + pys=select_pys(), ), ], ), @@ -3174,7 +3000,7 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT }, venvs=[ Venv( - pys=select_pys(min_version="3.9"), + pys=select_pys(), pkgs={ "pydantic-ai": ["==0.3.0", "==0.4.4"], "pydantic": "==2.12.0a1", @@ -3244,7 +3070,7 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT command="pytest {cmdargs} -vv tests/contrib/kafka", venvs=[ Venv( - pys=select_pys(min_version="3.8", max_version="3.10"), + pys=select_pys(min_version="3.9", max_version="3.10"), 
pkgs={"confluent-kafka": ["~=1.9.2", latest]}, ), # confluent-kafka added support for Python 3.11 in 2.0.2 @@ -3256,7 +3082,7 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT Venv( name="aws_lambda", command="pytest --no-ddtrace {cmdargs} tests/contrib/aws_lambda", - pys=select_pys(min_version="3.8", max_version="3.13"), + pys=select_pys(min_version="3.9", max_version="3.13"), pkgs={ "boto3": latest, "datadog-lambda": [">=6.105.0", latest], @@ -3267,7 +3093,7 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT Venv( name="azure_eventhubs", command="pytest {cmdargs} tests/contrib/azure_eventhubs", - pys=select_pys(min_version="3.8", max_version="3.13"), + pys=select_pys(min_version="3.9", max_version="3.13"), pkgs={ "azure.eventhub": ["~=5.12.0", latest], "pytest-asyncio": "==0.23.7", @@ -3276,7 +3102,7 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT Venv( name="azure_functions", command="pytest {cmdargs} tests/contrib/azure_functions", - pys=select_pys(min_version="3.8", max_version="3.11"), + pys=select_pys(min_version="3.9", max_version="3.11"), pkgs={ "azure.functions": ["~=1.10.1", latest], "requests": latest, @@ -3285,7 +3111,7 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT Venv( name="azure_functions:eventhubs", command="pytest {cmdargs} tests/contrib/azure_functions_eventhubs", - pys=select_pys(min_version="3.8", max_version="3.11"), + pys=select_pys(min_version="3.9", max_version="3.11"), pkgs={ "azure.functions": ["~=1.10.1", latest], "azure.eventhub": latest, @@ -3295,7 +3121,7 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT Venv( name="azure_functions:servicebus", command="pytest {cmdargs} tests/contrib/azure_functions_servicebus", - pys=select_pys(min_version="3.8", max_version="3.11"), + pys=select_pys(min_version="3.9", max_version="3.11"), pkgs={ "azure.functions": ["~=1.10.1", 
latest], "azure.servicebus": latest, @@ -3343,12 +3169,6 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT "DD_AGENT_PORT": "9126", }, venvs=[ - Venv( - pys=["3.8"], - pkgs={"greenlet": "==3.1.0"}, - # Prevent segfaults from zope.interface c optimizations - env={"PURE_PYTHON": "1"}, - ), Venv( pys=select_pys(min_version="3.9", max_version="3.13"), ), @@ -3385,7 +3205,7 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT "langchain": latest, "pandas": latest, }, - pys=select_pys(min_version="3.8", max_version="3.13"), + pys=select_pys(min_version="3.9", max_version="3.13"), ), Venv( name="valkey", @@ -3395,113 +3215,12 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT "pytest-randomly": latest, "pytest-asyncio": "==0.23.7", }, - pys=select_pys(min_version="3.8"), + pys=select_pys(), ), Venv( name="profile", # NB riot commands that use this Venv must include --pass-env to work properly - command="python -m tests.profiling.run pytest -v --no-cov --capture=no --benchmark-disable {cmdargs} tests/profiling", # noqa: E501 - env={ - "DD_PROFILING_ENABLE_ASSERTS": "1", - "DD_PROFILING_STACK_V2_ENABLED": "0", - "CPUCOUNT": "12", - # TODO: Remove once pkg_resources warnings are no longer emitted from this internal module - "PYTHONWARNINGS": "ignore::UserWarning:ddtrace.internal.module,ignore::UserWarning:gevent.events", - }, - pkgs={ - "gunicorn": latest, - "zstandard": latest, - # - # pytest-benchmark depends on cpuinfo which dropped support for Python<=3.6 in 9.0 - # See https://github.com/workhorsy/py-cpuinfo/issues/177 - "pytest-benchmark": latest, - "py-cpuinfo": "~=8.0.0", - "pytest-asyncio": "==0.21.1", - "pytest-randomly": latest, - }, - venvs=[ - # Python 3.8 + 3.9 - Venv( - pys=["3.8", "3.9"], - pkgs={"uwsgi": latest}, - venvs=[ - Venv( - pkgs={ - "protobuf": [">3", latest], - }, - ), - # Gevent - Venv( - env={ - "DD_PROFILE_TEST_GEVENT": "1", - }, - pkgs={ - 
"gunicorn[gevent]": latest, - "gevent": latest, - "protobuf": latest, - }, - ), - ], - ), - # Python 3.10 - Venv( - pys="3.10", - pkgs={"uwsgi": latest}, - venvs=[ - Venv( - pkgs={ - "protobuf": [">3", latest], - }, - ), - # Gevent - Venv( - env={ - "DD_PROFILE_TEST_GEVENT": "1", - }, - pkgs={ - "gunicorn[gevent]": latest, - "protobuf": latest, - }, - venvs=[ - Venv( - pkgs={ - "gevent": latest, - "greenlet": latest, - "protobuf": latest, - } - ), - Venv( - pkgs={"gevent": latest, "protobuf": latest}, - ), - ], - ), - ], - ), - # Python >= 3.11 - Venv( - pys=select_pys("3.11", "3.13"), - pkgs={"uwsgi": latest}, - venvs=[ - Venv( - pkgs={ - "protobuf": ["==4.22.0", latest], - }, - ), - # Gevent - Venv( - env={ - "DD_PROFILE_TEST_GEVENT": "1", - }, - pkgs={"gunicorn[gevent]": latest, "gevent": latest, "protobuf": latest}, - ), - ], - ), - ], - ), - Venv( - name="profile-v2", - # NB riot commands that use this Venv must include --pass-env to work properly - command="python -m tests.profiling.run pytest -v --no-cov --capture=no --benchmark-disable --ignore='tests/profiling_v2/collector/test_memalloc.py' {cmdargs} tests/profiling_v2", # noqa: E501 + command="python -m tests.profiling.run pytest -v --no-cov --capture=no --benchmark-disable --ignore='tests/profiling/collector/test_memalloc.py' {cmdargs} tests/profiling", # noqa: E501 env={ "DD_PROFILING_ENABLE_ASSERTS": "1", "CPUCOUNT": "12", @@ -3523,17 +3242,16 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT }, venvs=[ Venv( - name="profile-v2-uwsgi", - command="python -m tests.profiling.run pytest -v --no-cov --capture=no --benchmark-disable {cmdargs} tests/profiling_v2/test_uwsgi.py", # noqa: E501 + name="profile-uwsgi", + command="python -m tests.profiling.run pytest -v --no-cov --capture=no --benchmark-disable {cmdargs} tests/profiling/test_uwsgi.py", # noqa: E501 pys=select_pys(max_version="3.13"), pkgs={ "uwsgi": "<2.0.30", "protobuf": latest, }, ), - # Python 3.8 + 3.9 Venv( - 
pys=["3.8", "3.9"], + pys="3.9", pkgs={"uwsgi": latest}, venvs=[ Venv( @@ -3642,8 +3360,8 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT ], ), Venv( - name="profile-v2-memalloc", - command="python -m tests.profiling.run pytest -v --no-cov --capture=no --benchmark-disable {cmdargs} tests/profiling_v2/collector/test_memalloc.py", # noqa: E501 + name="profile-memalloc", + command="python -m tests.profiling.run pytest -v --no-cov --capture=no --benchmark-disable {cmdargs} tests/profiling/collector/test_memalloc.py", # noqa: E501 # skipping v3.14 for now due to an unstable `lz4 ` lib issue: https://gitlab.ddbuild.io/DataDog/apm-reliability/dd-trace-py/-/jobs/1163312347 pys=select_pys(max_version="3.13"), pkgs={ @@ -3686,21 +3404,6 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT ), ], ), - Venv( - name="freezegun", - command="pytest tests/contrib/freezegun {cmdargs}", - pkgs={ - "pytest-randomly": latest, - }, - venvs=[ - Venv( - pys=["3.10", "3.12"], - pkgs={ - "freezegun": ["~=1.3.0", "~=1.5.0"], - }, - ), - ], - ), Venv( name="appsec_integrations_flask", command="pytest -vvv {cmdargs} tests/appsec/integrations/flask_tests/", @@ -3722,7 +3425,7 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT }, venvs=[ Venv( - pys=["3.8", "3.9"], + pys="3.9", pkgs={ "flask": "~=1.1", "MarkupSafe": "~=1.1", @@ -3731,26 +3434,25 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT }, ), Venv( - pys=["3.8", "3.9", "3.10", "3.11"], + pys=select_pys(min_version="3.9", max_version="3.11"), pkgs={ "flask": "~=2.2", }, ), Venv( - pys=["3.8", "3.9", "3.10", "3.11", "3.12", "3.13"], + pys=select_pys(max_version="3.13"), pkgs={ "flask": "~=2.2", }, ), Venv( - pys=["3.8", "3.9", "3.10", "3.11", "3.12", "3.13"], + pys=select_pys(max_version="3.13"), pkgs={ "flask": "~=3.0", }, ), Venv( - # werkzeug 3.1 drops support for py3.8 - pys=["3.11", "3.12", "3.13"], + 
pys=select_pys(min_version="3.11", max_version="3.13"), pkgs={ "flask": "~=3.1", "Werkzeug": "~=3.1", @@ -3813,28 +3515,28 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT }, venvs=[ Venv( - pys=["3.8", "3.9"], + pys="3.9", pkgs={ "django": "~=2.2", }, venvs=_appsec_threats_iast_variants, ), Venv( - pys=["3.8", "3.9", "3.10"], + pys=["3.9", "3.10"], pkgs={ "django": "~=3.2", }, venvs=_appsec_threats_iast_variants, ), Venv( - pys=["3.8", "3.10"], + pys="3.10", pkgs={ "django": "==4.0.10", }, venvs=_appsec_threats_iast_variants, ), Venv( - pys=["3.8", "3.11", "3.13"], + pys=["3.11", "3.13"], pkgs={ "django": "~=4.2", }, @@ -3867,7 +3569,7 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT }, venvs=[ Venv( - pys=["3.8", "3.9"], + pys="3.9", pkgs={ "flask": "~=1.1", "MarkupSafe": "~=1.1", @@ -3875,7 +3577,7 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT venvs=_appsec_threats_iast_variants, ), Venv( - pys=["3.8", "3.9"], + pys="3.9", pkgs={ "flask": "==2.1.3", "Werkzeug": "<3.0", @@ -3883,14 +3585,14 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT venvs=_appsec_threats_iast_variants, ), Venv( - pys=["3.8", "3.10", "3.13"], + pys=["3.10", "3.13"], pkgs={ "flask": "~=2.3", }, venvs=_appsec_threats_iast_variants, ), Venv( - pys=["3.8", "3.11", "3.13"], + pys=["3.11", "3.13"], pkgs={ "flask": "~=3.0", }, @@ -3918,7 +3620,7 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT }, venvs=[ Venv( - pys=["3.8", "3.10", "3.13"], + pys=["3.10", "3.13"], pkgs={ "fastapi": "==0.86.0", "anyio": "==3.7.1", @@ -3926,14 +3628,14 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT venvs=_appsec_threats_iast_variants, ), Venv( - pys=["3.8", "3.10", "3.13"], + pys=["3.10", "3.13"], pkgs={ "fastapi": "==0.94.1", }, venvs=_appsec_threats_iast_variants, ), Venv( - pys=["3.8", "3.10", "3.13"], 
+ pys=["3.10", "3.13"], pkgs={ "fastapi": "~=0.114.2", }, diff --git a/setup.py b/setup.py index d1467ad1a8a..28accda990a 100644 --- a/setup.py +++ b/setup.py @@ -1173,16 +1173,6 @@ def get_exts_for(name): ["ddtrace/internal/telemetry/metrics_namespaces.pyx"], language="c", ), - Cython.Distutils.Extension( - "ddtrace.profiling.collector.stack", - sources=["ddtrace/profiling/collector/stack.pyx"], - language="c", - # cython generated code errors on build in toolchains that are strict about int->ptr conversion - # OTOH, the MSVC toolchain is different. In a perfect world we'd deduce the underlying - # toolchain and emit the right flags, but as a compromise we assume Windows implies MSVC and - # everything else is on a GNU-like toolchain - extra_compile_args=extra_compile_args + (["-Wno-int-conversion"] if CURRENT_OS != "Windows" else []), - ), Cython.Distutils.Extension( "ddtrace.profiling.collector._traceback", sources=["ddtrace/profiling/collector/_traceback.pyx"], diff --git a/supported_versions_output.json b/supported_versions_output.json index 0686fa78c7d..b44f5520451 100644 --- a/supported_versions_output.json +++ b/supported_versions_output.json @@ -45,7 +45,7 @@ { "dependency": "algoliasearch", "integration": "algoliasearch", - "minimum_tracer_supported": "2.5.0", + "minimum_tracer_supported": "2.6.3", "max_tracer_supported": "2.6.3", "pinned": "true", "auto-instrumented": true @@ -75,7 +75,7 @@ { "dependency": "asyncpg", "integration": "asyncpg", - "minimum_tracer_supported": "0.22.0", + "minimum_tracer_supported": "0.23.0", "max_tracer_supported": "0.30.0", "auto-instrumented": true }, @@ -106,7 +106,7 @@ "minimum_tracer_supported": "5.12.2", "max_tracer_supported": "5.15.0", "pinned": "true", - "auto-instrumented": false + "auto-instrumented": true }, { "dependency": "azure-functions", @@ -147,13 +147,6 @@ "max_tracer_supported": "0.13.4", "auto-instrumented": true }, - { - "dependency": "cassandra-driver", - "integration": "cassandra", - 
"minimum_tracer_supported": "3.24.0", - "max_tracer_supported": "3.28.0", - "auto-instrumented": true - }, { "dependency": "celery", "integration": "celery", @@ -308,18 +301,10 @@ "max_tracer_supported": "2.3.0", "auto-instrumented": false }, - { - "dependency": "freezegun", - "integration": "freezegun", - "minimum_tracer_supported": "1.3.1", - "max_tracer_supported": "1.5.2", - "pinned": "true", - "auto-instrumented": false - }, { "dependency": "gevent", "integration": "gevent", - "minimum_tracer_supported": "20.12.1", + "minimum_tracer_supported": "21.1.2", "max_tracer_supported": "25.5.1", "auto-instrumented": true }, @@ -337,13 +322,6 @@ "max_tracer_supported": "1.41.0", "auto-instrumented": true }, - { - "dependency": "google-generativeai", - "integration": "google_generativeai", - "minimum_tracer_supported": "0.7.2", - "max_tracer_supported": "0.8.5", - "auto-instrumented": true - }, { "dependency": "graphql-core", "integration": "graphql", @@ -464,13 +442,6 @@ "max_tracer_supported": "1.0.2", "auto-instrumented": true }, - { - "dependency": "mongoengine", - "integration": "mongoengine", - "minimum_tracer_supported": "0.23.1", - "max_tracer_supported": "0.29.1", - "auto-instrumented": true - }, { "dependency": "mysql-connector-python", "integration": "mysql", @@ -489,7 +460,7 @@ "dependency": "openai", "integration": "openai", "minimum_tracer_supported": "1.0.0", - "max_tracer_supported": "2.2.0", + "max_tracer_supported": "2.3.0", "auto-instrumented": true }, { @@ -502,7 +473,7 @@ { "dependency": "protobuf", "integration": "protobuf", - "minimum_tracer_supported": "5.29.3", + "minimum_tracer_supported": "6.30.1", "max_tracer_supported": "6.32.0", "auto-instrumented": false }, @@ -516,7 +487,7 @@ { "dependency": "psycopg2-binary", "integration": "psycopg", - "minimum_tracer_supported": "2.8.6", + "minimum_tracer_supported": "2.9.10", "max_tracer_supported": "2.9.10", "auto-instrumented": true }, @@ -559,7 +530,7 @@ { "dependency": "pynamodb", "integration": 
"pynamodb", - "minimum_tracer_supported": "5.0.3", + "minimum_tracer_supported": "5.5.1", "max_tracer_supported": "5.5.1", "pinned": "true", "auto-instrumented": true @@ -618,7 +589,7 @@ { "dependency": "requests", "integration": "requests", - "minimum_tracer_supported": "2.20.1", + "minimum_tracer_supported": "2.25.1", "max_tracer_supported": "2.32.5", "auto-instrumented": true }, @@ -639,7 +610,7 @@ { "dependency": "snowflake-connector-python", "integration": "snowflake", - "minimum_tracer_supported": "2.3.10", + "minimum_tracer_supported": "2.4.6", "max_tracer_supported": "3.17.2", "auto-instrumented": false }, @@ -682,7 +653,7 @@ { "dependency": "urllib3", "integration": "urllib3", - "minimum_tracer_supported": "1.25", + "minimum_tracer_supported": "1.25.8", "max_tracer_supported": "2.5.0", "auto-instrumented": false }, diff --git a/supported_versions_table.csv b/supported_versions_table.csv index 5c168a400ce..46e993b9e41 100644 --- a/supported_versions_table.csv +++ b/supported_versions_table.csv @@ -5,21 +5,20 @@ aiohttp-jinja2,aiohttp_jinja2,1.5.1,1.6,True aiohttp_jinja2,aiohttp_jinja2,1.5.1,1.6,True aiomysql,aiomysql,0.1.1,0.2.0,True aiopg,aiopg *,0.16.0,1.4.0,True -algoliasearch,algoliasearch *,2.5.0,2.6.3,True +algoliasearch,algoliasearch *,2.6.3,2.6.3,True anthropic,anthropic,0.28.1,0.69.0,True aredis,aredis,1.1.8,1.1.8,True pytest-asyncio,asyncio *,0.21.1,1.2.0,True -asyncpg,asyncpg,0.22.0,0.30.0,True +asyncpg,asyncpg,0.23.0,0.30.0,True avro,avro,1.12.0,1.12.0,True datadog-lambda,aws_lambda,6.105.0,6.105.0,True datadog_lambda,aws_lambda,6.105.0,6.105.0,True -azure-eventhub,azure_eventhubs *,5.12.2,5.15.0,False +azure-eventhub,azure_eventhubs *,5.12.2,5.15.0,True azure-functions,azure_functions *,1.10.1,1.23.0,True azure-servicebus,azure_servicebus *,7.14.2,7.14.2,True boto3,botocore *,1.34.49,1.38.26,True botocore,botocore *,1.34.49,1.38.26,True bottle,bottle,0.12.25,0.13.4,True -cassandra-driver,cassandra,3.24.0,3.28.0,True 
celery,celery,5.5.3,5.5.3,True cherrypy,cherrypy,17.0.0,18.10.0,False python-consul,consul,1.1.0,1.1.0,True @@ -42,11 +41,9 @@ fastapi,fastapi,0.64.0,0.118.0,True flask,flask,1.1.4,3.1.2,True flask-cache,flask_cache,0.13.1,0.13.1,False flask-caching,flask_cache,1.10.1,2.3.0,False -freezegun,freezegun *,1.3.1,1.5.2,False -gevent,gevent,20.12.1,25.5.1,True +gevent,gevent,21.1.2,25.5.1,True google-adk,google_adk,1.0.0,1.15.1,True google-genai,google_genai,1.21.1,1.41.0,True -google-generativeai,google_generativeai,0.7.2,0.8.5,True graphql-core,graphql,3.1.7,3.2.6,True grpcio,grpc,1.34.1,1.75.1,True httpx,httpx,0.17.1,0.28.1,True @@ -64,20 +61,19 @@ mako,mako,1.0.14,1.3.10,True mariadb,mariadb,1.0.11,1.1.13,True mcp,mcp,1.10.1,1.16.0,True molten,molten,1.0.2,1.0.2,True -mongoengine,mongoengine,0.23.1,0.29.1,True mysql-connector-python,mysql,8.0.5,9.4.0,True mysqlclient,mysqldb,2.2.1,2.2.6,True -openai,openai,1.0.0,2.2.0,True +openai,openai,1.0.0,2.3.0,True openai-agents,openai_agents,0.0.8,0.0.16,True -protobuf,protobuf,5.29.3,6.32.0,False +protobuf,protobuf,6.30.1,6.32.0,False psycopg,psycopg,3.0.18,3.2.10,True -psycopg2-binary,psycopg,2.8.6,2.9.10,True +psycopg2-binary,psycopg,2.9.10,2.9.10,True pydantic-ai-slim,pydantic_ai *,0.3.0,0.4.4,True pylibmc,pylibmc,1.6.3,1.6.3,True pymemcache,pymemcache,3.4.4,4.0.0,True pymongo,pymongo,3.8.0,4.15.0,True pymysql,pymysql,0.10.1,1.1.2,True -pynamodb,pynamodb *,5.0.3,5.5.1,True +pynamodb,pynamodb *,5.5.1,5.5.1,True pyodbc,pyodbc,4.0.39,5.2.0,True pyramid,pyramid,1.10.8,2.0.2,True pytest,pytest,6.2.5,8.4.2,False @@ -85,16 +81,16 @@ pytest-bdd,pytest_bdd *,4.1.0,6.0.1,False ray,ray *,2.46.0,2.49.2,False redis,redis,4.6.0,6.4.0,True redis-py-cluster,rediscluster,2.0.0,2.1.3,True -requests,requests,2.20.1,2.32.5,True +requests,requests,2.25.1,2.32.5,True rq,rq,1.8.1,1.16.2,True sanic,sanic,20.12.7,24.6.0,True -snowflake-connector-python,snowflake,2.3.10,3.17.2,False +snowflake-connector-python,snowflake,2.4.6,3.17.2,False 
sqlalchemy,sqlalchemy,1.3.24,2.0.43,False pysqlite3-binary,sqlite3,0.5.2.post3,0.5.2.post3,True starlette,starlette,0.14.2,0.48.0,True structlog,structlog,20.2.0,25.4.0,True tornado,tornado *,6.0.4,6.5.1,False -urllib3,urllib3,1.25,2.5.0,False +urllib3,urllib3,1.25.8,2.5.0,False valkey,valkey,6.0.2,6.1.1,True google-cloud-aiplatform,vertexai,1.71.1,1.71.1,True vertexai,vertexai,1.71.1,1.71.1,True diff --git a/tests/README.md b/tests/README.md index 8dcf61e07f3..7db61940905 100644 --- a/tests/README.md +++ b/tests/README.md @@ -57,13 +57,12 @@ suites: DD_TRACE_AGENT_URL: '' parallelism: 20 retry: 2 - pattern: profile$|profile-v2 + pattern: profile paths: - '@bootstrap' - '@core' - '@profiling' - tests/profiling/* - - tests/profiling_v2/* services: - redis ``` diff --git a/tests/appsec/ai_guard/api/test_api_client.py b/tests/appsec/ai_guard/api/test_api_client.py index a903dff578c..cc8a24c8b29 100644 --- a/tests/appsec/ai_guard/api/test_api_client.py +++ b/tests/appsec/ai_guard/api/test_api_client.py @@ -12,7 +12,7 @@ from ddtrace.appsec.ai_guard import Options from ddtrace.appsec.ai_guard import ToolCall from ddtrace.appsec.ai_guard import new_ai_guard_client -from ddtrace.settings.asm import ai_guard_config +from ddtrace.internal.settings.asm import ai_guard_config from tests.appsec.ai_guard.utils import assert_ai_guard_span from tests.appsec.ai_guard.utils import assert_mock_execute_request_call from tests.appsec.ai_guard.utils import find_ai_guard_span diff --git a/tests/appsec/ai_guard/utils.py b/tests/appsec/ai_guard/utils.py index cca4a2e3ef5..970b097f2d3 100644 --- a/tests/appsec/ai_guard/utils.py +++ b/tests/appsec/ai_guard/utils.py @@ -13,7 +13,7 @@ from ddtrace.appsec._constants import AI_GUARD from ddtrace.appsec.ai_guard import AIGuardClient from ddtrace.appsec.ai_guard._api_client import Message -from ddtrace.settings.asm import ai_guard_config +from ddtrace.internal.settings.asm import ai_guard_config from tests.utils import DummyTracer diff --git 
a/tests/appsec/appsec/test_remoteconfiguration.py b/tests/appsec/appsec/test_remoteconfiguration.py index 00729bb261f..e134b7d0118 100644 --- a/tests/appsec/appsec/test_remoteconfiguration.py +++ b/tests/appsec/appsec/test_remoteconfiguration.py @@ -22,8 +22,8 @@ from ddtrace.internal.remoteconfig.client import TargetFile from ddtrace.internal.remoteconfig.worker import remoteconfig_poller from ddtrace.internal.service import ServiceStatus +from ddtrace.internal.settings.asm import config as asm_config from ddtrace.internal.utils.formats import asbool -from ddtrace.settings.asm import config as asm_config import tests.appsec.rules as rules from tests.appsec.utils import asm_context from tests.appsec.utils import build_payload diff --git a/tests/appsec/architectures/mini.py b/tests/appsec/architectures/mini.py index bb10e976ed4..179c85bfb80 100644 --- a/tests/appsec/architectures/mini.py +++ b/tests/appsec/architectures/mini.py @@ -11,8 +11,8 @@ from flask import request # noqa: E402 import requests # noqa: E402 F401 +from ddtrace.internal.settings.asm import config as asm_config # noqa: E402 import ddtrace.internal.telemetry.writer # noqa: E402 -from ddtrace.settings.asm import config as asm_config # noqa: E402 from ddtrace.version import get_version # noqa: E402 diff --git a/tests/appsec/architectures/test_appsec_loading_modules.py b/tests/appsec/architectures/test_appsec_loading_modules.py index a363e7c3e81..1d34d76d8c9 100644 --- a/tests/appsec/architectures/test_appsec_loading_modules.py +++ b/tests/appsec/architectures/test_appsec_loading_modules.py @@ -9,7 +9,7 @@ import pytest -from ddtrace.settings.asm import config as asm_config +from ddtrace.internal.settings.asm import config as asm_config MODULES_ALWAYS_LOADED = ["ddtrace.appsec", "ddtrace.appsec._constants"] diff --git a/tests/appsec/contrib_appsec/conftest.py b/tests/appsec/contrib_appsec/conftest.py index dc5e5454a58..951465bedcf 100644 --- a/tests/appsec/contrib_appsec/conftest.py +++ 
b/tests/appsec/contrib_appsec/conftest.py @@ -8,7 +8,7 @@ import pytest # noqa: E402 -from ddtrace.settings.asm import config as asm_config # noqa: E402 +from ddtrace.internal.settings.asm import config as asm_config # noqa: E402 from tests.utils import TracerSpanContainer # noqa: E402 from tests.utils import _build_tree # noqa: E402 diff --git a/tests/appsec/contrib_appsec/utils.py b/tests/appsec/contrib_appsec/utils.py index 401c38995be..2c0da84a486 100644 --- a/tests/appsec/contrib_appsec/utils.py +++ b/tests/appsec/contrib_appsec/utils.py @@ -16,8 +16,8 @@ from ddtrace.appsec import _constants as asm_constants from ddtrace.appsec._utils import get_triggers from ddtrace.internal import constants +from ddtrace.internal.settings.asm import config as asm_config from ddtrace.internal.utils.http import _format_template -from ddtrace.settings.asm import config as asm_config import tests.appsec.rules as rules from tests.utils import DummyTracer from tests.utils import override_env @@ -89,7 +89,7 @@ def body(self, response) -> str: raise NotImplementedError def get_stack_trace(self, entry_span, namespace): - appsec_traces = entry_span().get_struct_tag(asm_constants.STACK_TRACE.TAG) or {} + appsec_traces = entry_span()._get_struct_tag(asm_constants.STACK_TRACE.TAG) or {} stacks = appsec_traces.get(namespace, []) return stacks @@ -148,7 +148,7 @@ def test_healthcheck(self, interface: Interface, get_entry_span_tag, asm_enabled response = interface.client.get("/") assert self.status(response) == 200, "healthcheck failed" assert self.body(response) == "ok ASM" - from ddtrace.settings.asm import config as asm_config + from ddtrace.internal.settings.asm import config as asm_config assert asm_config._asm_enabled is asm_enabled assert get_entry_span_tag("http.status_code") == "200" diff --git a/tests/appsec/iast/fixtures/integration/main_configure.py b/tests/appsec/iast/fixtures/integration/main_configure.py index 1ab365da869..b6c25d75afc 100644 --- 
a/tests/appsec/iast/fixtures/integration/main_configure.py +++ b/tests/appsec/iast/fixtures/integration/main_configure.py @@ -5,7 +5,7 @@ import ddtrace.auto # noqa: F401 from ddtrace.ext import SpanTypes -from ddtrace.settings.asm import config as asm_config +from ddtrace.internal.settings.asm import config as asm_config from ddtrace.trace import tracer diff --git a/tests/appsec/iast/iast_utils.py b/tests/appsec/iast/iast_utils.py index 952fe9000a8..ce59e25098c 100644 --- a/tests/appsec/iast/iast_utils.py +++ b/tests/appsec/iast/iast_utils.py @@ -154,7 +154,7 @@ def load_iast_report(span): else: iast_report_json = span.get_tag(IAST.JSON) if iast_report_json is None: - iast_report = span.get_struct_tag(IAST.STRUCT) + iast_report = span._get_struct_tag(IAST.STRUCT) else: iast_report = json.loads(iast_report_json) return iast_report diff --git a/tests/appsec/iast/taint_sinks/test_sql_injection_dbapi.py b/tests/appsec/iast/taint_sinks/test_sql_injection_dbapi.py index d16cb29cbcc..7721beecaf6 100644 --- a/tests/appsec/iast/taint_sinks/test_sql_injection_dbapi.py +++ b/tests/appsec/iast/taint_sinks/test_sql_injection_dbapi.py @@ -6,9 +6,9 @@ from ddtrace.appsec._iast import load_iast from ddtrace.appsec._iast._overhead_control_engine import oce from ddtrace.contrib.dbapi import TracedCursor -from ddtrace.settings._config import Config -from ddtrace.settings.asm import config as asm_config -from ddtrace.settings.integration import IntegrationConfig +from ddtrace.internal.settings._config import Config +from ddtrace.internal.settings.asm import config as asm_config +from ddtrace.internal.settings.integration import IntegrationConfig from tests.appsec.iast.iast_utils import _end_iast_context_and_oce from tests.appsec.iast.iast_utils import _start_iast_context_and_oce from tests.utils import TracerTestCase diff --git a/tests/appsec/iast/taint_tracking/test_multiprocessing_tracer_iast_env.py b/tests/appsec/iast/taint_tracking/test_multiprocessing_tracer_iast_env.py index 
6b52d77d8ce..b8c8387981a 100644 --- a/tests/appsec/iast/taint_tracking/test_multiprocessing_tracer_iast_env.py +++ b/tests/appsec/iast/taint_tracking/test_multiprocessing_tracer_iast_env.py @@ -20,7 +20,7 @@ def _child_check(q: Queue): Reports tracer and IAST status back to parent via Queue. """ try: - from ddtrace.settings.asm import config as asm_config + from ddtrace.internal.settings.asm import config as asm_config from ddtrace.trace import tracer # Start IAST context in child process diff --git a/tests/appsec/iast/test_fork_handler_regression.py b/tests/appsec/iast/test_fork_handler_regression.py index 8ea9632e24e..a7145e35878 100644 --- a/tests/appsec/iast/test_fork_handler_regression.py +++ b/tests/appsec/iast/test_fork_handler_regression.py @@ -30,7 +30,7 @@ def test_fork_handler_callable(iast_context_defaults): """Verify that _reset_iast_after_fork is callable and disables IAST.""" from ddtrace.appsec._iast import _disable_iast_after_fork - from ddtrace.settings.asm import config as asm_config + from ddtrace.internal.settings.asm import config as asm_config # Should not raise any exception try: @@ -48,7 +48,7 @@ def test_fork_handler_with_active_context(iast_context_defaults): """Verify fork handler disables IAST and clears context when active.""" from ddtrace.appsec._iast import _disable_iast_after_fork from ddtrace.appsec._iast._taint_tracking import is_tainted - from ddtrace.settings.asm import config as asm_config + from ddtrace.internal.settings.asm import config as asm_config _start_iast_context_and_oce() @@ -83,7 +83,7 @@ def child_process_work(queue): """Child process where IAST should be disabled.""" try: from ddtrace.appsec._iast._taint_tracking import is_tainted - from ddtrace.settings.asm import config as asm_config + from ddtrace.internal.settings.asm import config as asm_config # Start IAST in child (will be a no-op since IAST is disabled) _start_iast_context_and_oce() @@ -139,7 +139,7 @@ def 
test_multiple_fork_operations(iast_context_defaults): def simple_child_work(queue, child_id): """Simple child process work - IAST will be disabled.""" try: - from ddtrace.settings.asm import config as asm_config + from ddtrace.internal.settings.asm import config as asm_config # These should be safe no-ops since IAST is disabled _start_iast_context_and_oce() @@ -196,7 +196,7 @@ def test_fork_with_os_fork_no_segfault(iast_context_defaults): if pid == 0: # Child process - IAST should be disabled try: - from ddtrace.settings.asm import config as asm_config + from ddtrace.internal.settings.asm import config as asm_config # IAST should be disabled after fork if asm_config._iast_enabled: @@ -237,7 +237,7 @@ def test_fork_handler_clears_state(iast_context_defaults): """ from ddtrace.appsec._iast import _disable_iast_after_fork from ddtrace.appsec._iast._taint_tracking import is_tainted - from ddtrace.settings.asm import config as asm_config + from ddtrace.internal.settings.asm import config as asm_config _start_iast_context_and_oce() tainted = taint_pyobject("test", "source", "value", OriginType.PARAMETER) @@ -278,7 +278,7 @@ def child_eval_work(queue): """Child process with IAST disabled.""" try: from ddtrace.appsec._iast._taint_tracking import is_tainted - from ddtrace.settings.asm import config as asm_config + from ddtrace.internal.settings.asm import config as asm_config # IAST should be disabled, so this is a no-op _start_iast_context_and_oce() @@ -325,7 +325,7 @@ def test_early_fork_keeps_iast_enabled(): """ from ddtrace.appsec._iast import _disable_iast_after_fork from ddtrace.appsec._iast._taint_tracking import is_tainted - from ddtrace.settings.asm import config as asm_config + from ddtrace.internal.settings.asm import config as asm_config # Ensure IAST is enabled but NO context is active (simulating early fork) # Don't call _start_iast_context_and_oce() - this simulates pre-fork state diff --git a/tests/appsec/iast/test_loader.py 
b/tests/appsec/iast/test_loader.py index 8c91e725667..6942b4b6968 100644 --- a/tests/appsec/iast/test_loader.py +++ b/tests/appsec/iast/test_loader.py @@ -6,7 +6,7 @@ import ddtrace.appsec._iast._loader from ddtrace.internal.iast.product import post_preload -from ddtrace.settings.asm import config as asm_config +from ddtrace.internal.settings.asm import config as asm_config ASPECTS_MODULE = "ddtrace.appsec._iast._taint_tracking.aspects" diff --git a/tests/appsec/iast/test_overhead_control_engine.py b/tests/appsec/iast/test_overhead_control_engine.py index e40231951fe..3c28e472bed 100644 --- a/tests/appsec/iast/test_overhead_control_engine.py +++ b/tests/appsec/iast/test_overhead_control_engine.py @@ -7,7 +7,7 @@ from ddtrace.appsec._iast._taint_tracking._context import finish_request_context from ddtrace.appsec._iast._taint_tracking._context import start_request_context from ddtrace.appsec._iast.sampling.vulnerability_detection import reset_request_vulnerabilities -from ddtrace.settings.asm import config as asm_config +from ddtrace.internal.settings.asm import config as asm_config def function_with_vulnerabilities_3(tracer): diff --git a/tests/appsec/integrations/django_tests/test_appsec_django.py b/tests/appsec/integrations/django_tests/test_appsec_django.py index 850052368b9..1786ee542a8 100644 --- a/tests/appsec/integrations/django_tests/test_appsec_django.py +++ b/tests/appsec/integrations/django_tests/test_appsec_django.py @@ -13,7 +13,7 @@ from ddtrace.ext import http from ddtrace.ext import user from ddtrace.internal import constants -from ddtrace.settings.asm import config as asm_config +from ddtrace.internal.settings.asm import config as asm_config from tests.appsec.integrations.django_tests.utils import _aux_appsec_get_root_span import tests.appsec.rules as rules from tests.utils import override_global_config diff --git a/tests/appsec/integrations/django_tests/test_iast_django.py b/tests/appsec/integrations/django_tests/test_iast_django.py index 
39580863142..d35f91cda03 100644 --- a/tests/appsec/integrations/django_tests/test_iast_django.py +++ b/tests/appsec/integrations/django_tests/test_iast_django.py @@ -16,7 +16,7 @@ from ddtrace.appsec._iast.constants import VULN_SSRF from ddtrace.appsec._iast.constants import VULN_STACKTRACE_LEAK from ddtrace.appsec._iast.constants import VULN_UNVALIDATED_REDIRECT -from ddtrace.settings.asm import config as asm_config +from ddtrace.internal.settings.asm import config as asm_config from tests.appsec.iast.iast_utils import get_line_and_hash from tests.appsec.iast.iast_utils import load_iast_report from tests.appsec.integrations.django_tests.utils import _aux_appsec_get_root_span @@ -28,7 +28,7 @@ def get_iast_stack_trace(root_span): - appsec_traces = root_span.get_struct_tag(STACK_TRACE.TAG) or {} + appsec_traces = root_span._get_struct_tag(STACK_TRACE.TAG) or {} stacks = appsec_traces.get("vulnerability", []) return stacks diff --git a/tests/appsec/integrations/flask_tests/test_iast_flask.py b/tests/appsec/integrations/flask_tests/test_iast_flask.py index 27c88cd976a..f1664294412 100644 --- a/tests/appsec/integrations/flask_tests/test_iast_flask.py +++ b/tests/appsec/integrations/flask_tests/test_iast_flask.py @@ -22,7 +22,7 @@ from ddtrace.appsec._iast.taint_sinks.unvalidated_redirect import patch as patch_unvalidated_redirect from ddtrace.appsec._iast.taint_sinks.xss import patch as patch_xss_injection from ddtrace.contrib.internal.sqlite3.patch import patch as patch_sqlite_sqli -from ddtrace.settings.asm import config as asm_config +from ddtrace.internal.settings.asm import config as asm_config from tests.appsec.iast.iast_utils import get_line_and_hash from tests.appsec.iast.iast_utils import load_iast_report from tests.appsec.integrations.flask_tests.utils import flask_version diff --git a/tests/appsec/suitespec.yml b/tests/appsec/suitespec.yml index aed2eee8780..39782d2ef8b 100644 --- a/tests/appsec/suitespec.yml +++ b/tests/appsec/suitespec.yml @@ -2,7 +2,7 @@ 
components: appsec: - ddtrace/appsec/* - - ddtrace/settings/asm.py + - ddtrace/internal/settings/asm.py appsec_iast: - ddtrace/appsec/iast/* urllib: @@ -27,7 +27,7 @@ suites: runner: riot snapshot: true appsec_iast_default: - parallelism: 6 + parallelism: 4 paths: - '@bootstrap' - '@core' @@ -139,7 +139,7 @@ suites: retry: 2 runner: riot appsec_integrations_flask: - parallelism: 17 + parallelism: 13 paths: - '@bootstrap' - '@core' @@ -154,7 +154,7 @@ suites: - testagent timeout: 40m appsec_integrations_django: - parallelism: 22 + parallelism: 16 paths: - '@bootstrap' - '@core' @@ -169,7 +169,7 @@ suites: - testagent timeout: 30m appsec_integrations_fastapi: - parallelism: 21 + parallelism: 17 paths: - '@bootstrap' - '@core' @@ -183,7 +183,7 @@ suites: services: - testagent appsec_threats_django: - parallelism: 12 + parallelism: 8 paths: - '@bootstrap' - '@core' @@ -199,7 +199,7 @@ suites: retry: 2 runner: riot appsec_threats_fastapi: - parallelism: 9 + parallelism: 6 paths: - '@bootstrap' - '@core' @@ -216,7 +216,7 @@ suites: retry: 2 runner: riot appsec_threats_flask: - parallelism: 10 + parallelism: 4 paths: - '@bootstrap' - '@core' @@ -270,4 +270,4 @@ suites: retry: 2 runner: riot services: - - testagent \ No newline at end of file + - testagent diff --git a/tests/ci_visibility/api_client/test_ci_visibility_api_client.py b/tests/ci_visibility/api_client/test_ci_visibility_api_client.py index 0188848c20d..170c77dfcbb 100644 --- a/tests/ci_visibility/api_client/test_ci_visibility_api_client.py +++ b/tests/ci_visibility/api_client/test_ci_visibility_api_client.py @@ -17,7 +17,7 @@ from ddtrace.internal.ci_visibility.git_data import GitData from ddtrace.internal.evp_proxy.constants import EVP_PROXY_AGENT_BASE_PATH from ddtrace.internal.evp_proxy.constants import EVP_PROXY_AGENT_BASE_PATH_V4 -from ddtrace.settings._config import Config +from ddtrace.internal.settings._config import Config from tests.ci_visibility.api_client._util import _AGENTLESS from 
tests.ci_visibility.api_client._util import _EVP_PROXY from tests.ci_visibility.api_client._util import TestTestVisibilityAPIClientBase @@ -491,7 +491,7 @@ def test_civisibility_api_client_evp_proxy_config_success(self, env_vars, expect "ddtrace.internal.ci_visibility.recorder.CIVisibility._agent_evp_proxy_base_url", return_value=EVP_PROXY_AGENT_BASE_PATH, ), mock.patch( - "ddtrace.settings._agent.config.trace_agent_url", return_value="http://shouldntbeused:6218" + "ddtrace.internal.settings._agent.config.trace_agent_url", return_value="http://shouldntbeused:6218" ), mock.patch( "ddtrace.internal.ci_visibility.recorder.ddtrace.tracer._span_aggregator.writer.intake_url", "http://patchedagenturl:6218", @@ -600,7 +600,7 @@ def test_civisibility_api_client_evp_respects_agent_default_config(self): ), mock.patch( "ddtrace.internal.agent.info", return_value=agent_info_response ), mock.patch( - "ddtrace.settings._agent.config.trace_agent_url", + "ddtrace.internal.settings._agent.config.trace_agent_url", new_callable=mock.PropertyMock, return_value="http://shouldntbeused:6218", ), mock.patch( diff --git a/tests/ci_visibility/suitespec.yml b/tests/ci_visibility/suitespec.yml index 99b565bcd10..6046b518c31 100644 --- a/tests/ci_visibility/suitespec.yml +++ b/tests/ci_visibility/suitespec.yml @@ -14,8 +14,6 @@ components: - ddtrace/contrib/internal/selenium/* unittest: - ddtrace/contrib/internal/unittest/* - freezegun: - - ddtrace/contrib/internal/freezegun/* suites: ci_visibility: parallelism: 4 @@ -28,14 +26,12 @@ suites: - '@pytest' - '@codeowners' - '@unittest' - - '@freezegun' - - '@tracing' - tests/ci_visibility/* - tests/snapshots/test_api_fake_runners.* runner: riot snapshot: true dd_coverage: - parallelism: 5 + parallelism: 3 paths: - '@bootstrap' - '@core' @@ -55,7 +51,6 @@ suites: - '@ci_visibility' - '@coverage' - '@codeowners' - - '@freezegun' - tests/contrib/pytest/* - tests/contrib/pytest_benchmark/* - tests/contrib/pytest_bdd/* @@ -85,8 +80,6 @@ suites: - 
'@unittest' - '@ci_visibility' - '@coverage' - - '@freezegun' - - '@tracing' - tests/contrib/unittest/* - tests/snapshots/tests.contrib.unittest.* runner: riot diff --git a/tests/ci_visibility/test_ci_visibility.py b/tests/ci_visibility/test_ci_visibility.py index dcce718e19d..6ea211e4c54 100644 --- a/tests/ci_visibility/test_ci_visibility.py +++ b/tests/ci_visibility/test_ci_visibility.py @@ -32,9 +32,9 @@ from ddtrace.internal.ci_visibility.recorder import _is_item_itr_skippable from ddtrace.internal.evp_proxy.constants import EVP_PROXY_AGENT_BASE_PATH from ddtrace.internal.evp_proxy.constants import EVP_PROXY_AGENT_BASE_PATH_V4 +from ddtrace.internal.settings._config import Config from ddtrace.internal.test_visibility._library_capabilities import LibraryCapabilities from ddtrace.internal.utils.http import Response -from ddtrace.settings._config import Config from ddtrace.trace import Span from tests.ci_visibility.api_client._util import _make_fqdn_suite_ids from tests.ci_visibility.api_client._util import _make_fqdn_test_ids @@ -728,7 +728,7 @@ def test_civisibilitywriter_coverage_evp_proxy_url(self): DD_API_KEY="foobar.baz", ) ), mock.patch( - "ddtrace.settings._agent.config.trace_agent_url", + "ddtrace.internal.settings._agent.config.trace_agent_url", new_callable=mock.PropertyMock, return_value="http://arandomhost:9126", ) as agent_url_mock, mock.patch( @@ -773,10 +773,10 @@ def test_civisibilitywriter_evp_proxy_url(self): DD_API_KEY="foobar.baz", ) ), mock.patch( - "ddtrace.settings._agent.config.trace_agent_url", + "ddtrace.internal.settings._agent.config.trace_agent_url", new_callable=mock.PropertyMock, return_value="http://evpproxy.bar:1234", - ), mock.patch("ddtrace.settings._config.Config", _get_default_civisibility_ddconfig()), mock.patch( + ), mock.patch("ddtrace.internal.settings._config.Config", _get_default_civisibility_ddconfig()), mock.patch( "ddtrace.tracer", CIVisibilityTracer() ), mock.patch( 
"ddtrace.internal.ci_visibility.recorder.CIVisibility._agent_evp_proxy_base_url", @@ -797,7 +797,7 @@ def test_civisibilitywriter_only_traces(self): DD_API_KEY="foobar.baz", ) ), mock.patch( - "ddtrace.settings._agent.config.trace_agent_url", + "ddtrace.internal.settings._agent.config.trace_agent_url", new_callable=mock.PropertyMock, return_value="http://onlytraces:1234", ), mock.patch("ddtrace.tracer", CIVisibilityTracer()), mock.patch( diff --git a/tests/ci_visibility/test_cli.py b/tests/ci_visibility/test_cli.py index a34f0f389be..9b879873656 100644 --- a/tests/ci_visibility/test_cli.py +++ b/tests/ci_visibility/test_cli.py @@ -31,8 +31,6 @@ def test_thing(): ], ["pytest", "-p", "no:ddtrace"], ["pytest", "-p", "ddtrace"], - ["pytest", "-p", "ddtrace", "-p", "ddtrace.pytest_bdd", "-p", "ddtrace.pytest_benchmark"], - ["pytest", "-p", "no:ddtrace", "-p", "no:ddtrace.pytest_bdd", "-p", "no:ddtrace.pytest_benchmark"], ] for command_args in commands_to_test: diff --git a/tests/ci_visibility/util.py b/tests/ci_visibility/util.py index df9fe8c2f20..3992597c544 100644 --- a/tests/ci_visibility/util.py +++ b/tests/ci_visibility/util.py @@ -14,7 +14,7 @@ from ddtrace.internal.ci_visibility.git_client import CIVisibilityGitClient from ddtrace.internal.ci_visibility.recorder import CIVisibility from ddtrace.internal.ci_visibility.recorder import CIVisibilityTracer -from ddtrace.settings._config import Config +from ddtrace.internal.settings._config import Config from tests.utils import DummyCIVisibilityWriter from tests.utils import override_env diff --git a/tests/commands/ddtrace_run_app_name.py b/tests/commands/ddtrace_run_app_name.py deleted file mode 100644 index 4cf41192e79..00000000000 --- a/tests/commands/ddtrace_run_app_name.py +++ /dev/null @@ -1,6 +0,0 @@ -from ddtrace.opentracer import Tracer - - -if __name__ == "__main__": - tracer = Tracer() - print(tracer._service_name) diff --git a/tests/commands/ddtrace_run_global_tags.py 
b/tests/commands/ddtrace_run_global_tags.py deleted file mode 100644 index 2441d80f93a..00000000000 --- a/tests/commands/ddtrace_run_global_tags.py +++ /dev/null @@ -1,8 +0,0 @@ -from ddtrace.trace import tracer - - -if __name__ == "__main__": - assert tracer._tags.get("a") == "True" - assert tracer._tags.get("b") == "0" - assert tracer._tags.get("c") == "C" - print("Test success") diff --git a/tests/commands/test_runner.py b/tests/commands/test_runner.py index f4bc9faebd9..71c11722905 100644 --- a/tests/commands/test_runner.py +++ b/tests/commands/test_runner.py @@ -197,19 +197,6 @@ def test_argv_passed(self): out = subprocess.check_output(["ddtrace-run", "python", "tests/commands/ddtrace_run_argv.py", "foo", "bar"]) assert out.startswith(b"Test success") - def test_got_app_name(self): - """ - apps run with ddtrace-run have a proper app name - """ - out = subprocess.check_output(["ddtrace-run", "python", "tests/commands/ddtrace_run_app_name.py"]) - assert out.startswith(b"ddtrace_run_app_name.py") - - def test_global_trace_tags(self): - """Ensure global tags are passed in from environment""" - with self.override_env(dict(DD_TRACE_GLOBAL_TAGS="a:True,b:0,c:C")): - out = subprocess.check_output(["ddtrace-run", "python", "tests/commands/ddtrace_run_global_tags.py"]) - assert out.startswith(b"Test success") - def test_logs_injection(self): """Ensure logs injection works""" with self.override_env(dict(DD_TAGS="service:my-service,env:my-env,version:my-version")): @@ -522,23 +509,6 @@ def test_ddtrace_run_and_auto_sitecustomize(): assert final_modules - starting_modules == set(["ddtrace.auto"]) -@pytest.mark.subprocess(env=dict(DD_TRACE_GLOBAL_TAGS="a:True"), err=None) -def test_global_trace_tags_deprecation_warning(): - """Ensure DD_TRACE_GLOBAL_TAGS deprecation warning shows""" - import warnings - - with warnings.catch_warnings(record=True) as warns: - warnings.simplefilter("always") - import ddtrace.auto # noqa: F401 - - assert len(warns) >= 1 - warning_messages = 
[str(warn.message) for warn in warns] - assert ( - "DD_TRACE_GLOBAL_TAGS is deprecated and will be removed in version '4.0.0': Please migrate to using " - "DD_TAGS instead" in warning_messages - ), warning_messages - - @pytest.mark.subprocess(ddtrace_run=False, err="") def test_ddtrace_auto_atexit(): """When ddtrace-run is used, ensure atexit hooks are registered exactly once""" diff --git a/tests/contrib/aiobotocore/test.py b/tests/contrib/aiobotocore/test.py index 5e7151797b0..0fdf25414e8 100644 --- a/tests/contrib/aiobotocore/test.py +++ b/tests/contrib/aiobotocore/test.py @@ -303,92 +303,6 @@ async def test_double_patch(tracer): assert len(traces[0]) == 1 -@pytest.mark.asyncio -async def test_opentraced_client(tracer): - from tests.opentracer.utils import init_tracer - - ot_tracer = init_tracer("my_svc", tracer) - - with ot_tracer.start_active_span("ot_outer_span"): - async with aiobotocore_client("ec2", tracer) as ec2: - await ec2.describe_instances() - - traces = tracer.pop_traces() - assert len(traces) == 1 - assert len(traces[0]) == 2 - ot_span = traces[0][0] - dd_span = traces[0][1] - - assert ot_span.resource == "ot_outer_span" - assert ot_span.service == "my_svc" - - # confirm the parenting - assert ot_span.parent_id is None - assert dd_span.parent_id == ot_span.span_id - - assert_is_measured(dd_span) - assert dd_span.get_tag("aws.agent") == "aiobotocore" - assert dd_span.get_tag("aws.region") == "us-west-2" - assert dd_span.get_tag("region") == "us-west-2" - assert dd_span.get_tag("aws.operation") == "DescribeInstances" - assert_span_http_status_code(dd_span, 200) - assert dd_span.get_metric("retry_attempts") == 0 - assert dd_span.service == "aws.ec2" - assert dd_span.resource == "ec2.describeinstances" - assert dd_span.name == "ec2.command" - assert dd_span.get_tag("component") == "aiobotocore" - assert dd_span.get_tag("span.kind") == "client" - - -@pytest.mark.asyncio -async def test_opentraced_s3_client(tracer): - from tests.opentracer.utils import 
init_tracer - - ot_tracer = init_tracer("my_svc", tracer) - - with ot_tracer.start_active_span("ot_outer_span"): - async with aiobotocore_client("s3", tracer) as s3: - await s3.list_buckets() - with ot_tracer.start_active_span("ot_inner_span1"): - await s3.list_buckets() - with ot_tracer.start_active_span("ot_inner_span2"): - pass - - traces = tracer.pop_traces() - assert len(traces) == 1 - assert len(traces[0]) == 5 - ot_outer_span = traces[0][0] - dd_span = traces[0][1] - ot_inner_span = traces[0][2] - dd_span2 = traces[0][3] - ot_inner_span2 = traces[0][4] - - assert ot_outer_span.resource == "ot_outer_span" - assert ot_inner_span.resource == "ot_inner_span1" - assert ot_inner_span2.resource == "ot_inner_span2" - - # confirm the parenting - assert ot_outer_span.parent_id is None - assert dd_span.parent_id == ot_outer_span.span_id - assert ot_inner_span.parent_id == ot_outer_span.span_id - assert dd_span2.parent_id == ot_inner_span.span_id - assert ot_inner_span2.parent_id == ot_outer_span.span_id - - assert_is_measured(dd_span) - assert dd_span.get_tag("aws.operation") == "ListBuckets" - assert_span_http_status_code(dd_span, 200) - assert dd_span.service == "aws.s3" - assert dd_span.resource == "s3.listbuckets" - assert dd_span.name == "s3.command" - - assert dd_span2.get_tag("aws.operation") == "ListBuckets" - assert_span_http_status_code(dd_span2, 200) - assert dd_span2.service == "aws.s3" - assert dd_span2.resource == "s3.listbuckets" - assert dd_span2.name == "s3.command" - assert dd_span.get_tag("component") == "aiobotocore" - - @pytest.mark.asyncio async def test_user_specified_service(tracer): """ diff --git a/tests/contrib/aiohttp/test_aiohttp_client.py b/tests/contrib/aiohttp/test_aiohttp_client.py index 76595f6c408..c8d18c485f8 100644 --- a/tests/contrib/aiohttp/test_aiohttp_client.py +++ b/tests/contrib/aiohttp/test_aiohttp_client.py @@ -101,7 +101,7 @@ async def test_distributed_tracing_disabled(ddtrace_run_python_code_in_subproces import asyncio 
import sys import aiohttp -from ddtrace.trace import Pin +from ddtrace._trace.pin import Pin from tests.contrib.aiohttp.test_aiohttp_client import URL async def test(): @@ -184,7 +184,7 @@ def test_configure_service_name_pin(ddtrace_run_python_code_in_subprocess): import asyncio import sys import aiohttp -from ddtrace.trace import Pin +from ddtrace._trace.pin import Pin from tests.contrib.aiohttp.test_aiohttp_client import URL_200 async def test(): diff --git a/tests/contrib/aiohttp/test_middleware.py b/tests/contrib/aiohttp/test_middleware.py index 37e6ea2e3de..e067c197685 100644 --- a/tests/contrib/aiohttp/test_middleware.py +++ b/tests/contrib/aiohttp/test_middleware.py @@ -1,6 +1,5 @@ import os -from opentracing.scope_managers.asyncio import AsyncioScopeManager import pytest import pytest_asyncio @@ -14,7 +13,6 @@ from ddtrace.contrib.internal.aiohttp.middlewares import trace_middleware from ddtrace.ext import http from ddtrace.internal.utils.version import parse_version -from tests.opentracer.utils import init_tracer from tests.tracer.utils_inferred_spans.test_helpers import assert_web_and_inferred_aws_api_gateway_span_data from tests.utils import assert_span_http_status_code from tests.utils import override_global_config @@ -545,22 +543,6 @@ async def test_parenting_200_dd(app_tracer, aiohttp_client): _assert_200_parenting(client, traces) -async def test_parenting_200_ot(app_tracer, aiohttp_client): - """OpenTracing version of test_handler.""" - app, tracer = app_tracer - client = await aiohttp_client(app) - ot_tracer = init_tracer("aiohttp_svc", tracer, scope_manager=AsyncioScopeManager()) - - with ot_tracer.start_active_span("aiohttp_op"): - request = await client.request("GET", "/") - assert 200 == request.status - text = await request.text() - - assert "What's tracing?" 
== text - traces = tracer.pop_traces() - _assert_200_parenting(client, traces) - - @pytest.mark.parametrize( "test_app", [ diff --git a/tests/contrib/aiopg/test.py b/tests/contrib/aiopg/test.py index b60e5989dda..63f2b89a379 100644 --- a/tests/contrib/aiopg/test.py +++ b/tests/contrib/aiopg/test.py @@ -11,7 +11,6 @@ from ddtrace.internal.schema import DEFAULT_SPAN_SERVICE_NAME from tests.contrib.asyncio.utils import AsyncioTestCase from tests.contrib.config import POSTGRES_CONFIG -from tests.opentracer.utils import init_tracer from tests.subprocesstest import run_in_subprocess from tests.utils import assert_is_measured @@ -75,29 +74,6 @@ async def assert_conn_is_traced(self, tracer, db, service): assert span.get_tag("component") == "aiopg" assert span.get_tag("span.kind") == "client" - # Ensure OpenTracing compatibility - ot_tracer = init_tracer("aiopg_svc", tracer) - with ot_tracer.start_active_span("aiopg_op"): - cursor = await db.cursor() - await cursor.execute(q) - rows = await cursor.fetchall() - assert rows == [("foobarblah",)] - spans = self.pop_spans() - assert len(spans) == 2 - ot_span, dd_span = spans - # confirm the parenting - assert ot_span.parent_id is None - assert dd_span.parent_id == ot_span.span_id - assert ot_span.name == "aiopg_op" - assert ot_span.service == "aiopg_svc" - assert dd_span.name == "postgres.query" - assert dd_span.resource == q - assert dd_span.service == service - assert dd_span.error == 0 - assert dd_span.span_type == "sql" - assert dd_span.get_tag("component") == "aiopg" - assert span.get_tag("span.kind") == "client" - # run a query with an error and ensure all is well q = "select * from some_non_existant_table" cur = await db.cursor() diff --git a/tests/contrib/aredis/test_aredis.py b/tests/contrib/aredis/test_aredis.py index c1ae507aae5..f374c67668a 100644 --- a/tests/contrib/aredis/test_aredis.py +++ b/tests/contrib/aredis/test_aredis.py @@ -9,7 +9,6 @@ from ddtrace.contrib.internal.aredis.patch import unpatch from 
ddtrace.internal.compat import is_wrapted from tests.conftest import DEFAULT_DDTRACE_SUBPROCESS_TEST_SERVICE_NAME -from tests.opentracer.utils import init_tracer from tests.utils import override_config from ..config import REDIS_CONFIG @@ -152,7 +151,7 @@ def test_schematization_of_service_and_operation(ddtrace_run_python_code_in_subp import pytest import sys from tests.conftest import * -from ddtrace.trace import Pin +from ddtrace._trace.pin import Pin import aredis from tests.contrib.config import REDIS_CONFIG from tests.contrib.aredis.test_aredis import traced_aredis @@ -185,19 +184,6 @@ async def test(tracer, test_spans): assert err == b"", err.decode() -@pytest.mark.asyncio -async def test_opentracing(tracer, snapshot_context): - """Ensure OpenTracing works with redis.""" - - with snapshot_context(): - r = aredis.StrictRedis(port=REDIS_CONFIG["port"]) - pin = Pin.get_from(r) - ot_tracer = init_tracer("redis_svc", pin.tracer) - - with ot_tracer.start_active_span("redis_get"): - await r.get("cheese") - - @pytest.mark.subprocess(ddtrace_run=True, env=dict(DD_REDIS_RESOURCE_ONLY_COMMAND="false")) @pytest.mark.snapshot def test_full_command_in_resource_env(): diff --git a/tests/contrib/asyncio/test_propagation.py b/tests/contrib/asyncio/test_propagation.py index fd962e544ea..fc976d59ea8 100644 --- a/tests/contrib/asyncio/test_propagation.py +++ b/tests/contrib/asyncio/test_propagation.py @@ -7,7 +7,6 @@ from ddtrace.contrib.internal.asyncio.patch import patch from ddtrace.contrib.internal.asyncio.patch import unpatch from ddtrace.trace import Context -from tests.opentracer.utils import init_tracer _orig_create_task = asyncio.BaseEventLoop.create_task @@ -115,59 +114,3 @@ async def test_propagation_with_new_context(tracer): span = traces[0][0] assert span.trace_id == 100 assert span.parent_id == 101 - - -@pytest.mark.asyncio -async def test_trace_multiple_coroutines_ot_outer(tracer): - """OpenTracing version of test_trace_multiple_coroutines.""" - - # if multiple 
coroutines have nested tracing, they must belong - # to the same trace - async def coro(): - # another traced coroutine - with tracer.trace("coroutine_2"): - return 42 - - ot_tracer = init_tracer("asyncio_svc", tracer) - with ot_tracer.start_active_span("coroutine_1"): - value = await coro() - - # the coroutine has been called correctly - assert 42 == value - # a single trace has been properly reported - traces = tracer.pop_traces() - assert 1 == len(traces) - assert 2 == len(traces[0]) - assert "coroutine_1" == traces[0][0].name - assert "coroutine_2" == traces[0][1].name - # the parenting is correct - assert traces[0][0] == traces[0][1]._parent - assert traces[0][0].trace_id == traces[0][1].trace_id - - -@pytest.mark.asyncio -async def test_trace_multiple_coroutines_ot_inner(tracer): - """OpenTracing version of test_trace_multiple_coroutines.""" - # if multiple coroutines have nested tracing, they must belong - # to the same trace - ot_tracer = init_tracer("asyncio_svc", tracer) - - async def coro(): - # another traced coroutine - with ot_tracer.start_active_span("coroutine_2"): - return 42 - - with tracer.trace("coroutine_1"): - value = await coro() - - # the coroutine has been called correctly - assert 42 == value - # a single trace has been properly reported - traces = tracer.pop_traces() - assert 1 == len(traces) - assert 2 == len(traces[0]) - assert "coroutine_1" == traces[0][0].name - assert "coroutine_2" == traces[0][1].name - # the parenting is correct - assert traces[0][0] == traces[0][1]._parent - assert traces[0][0].trace_id == traces[0][1].trace_id diff --git a/tests/contrib/boto/test.py b/tests/contrib/boto/test.py index 91c626e6cbc..76dd974692a 100644 --- a/tests/contrib/boto/test.py +++ b/tests/contrib/boto/test.py @@ -20,7 +20,6 @@ from ddtrace.contrib.internal.boto.patch import unpatch from ddtrace.ext import http from ddtrace.internal.schema import DEFAULT_SPAN_SERVICE_NAME -from tests.opentracer.utils import init_tracer from tests.utils import 
TracerTestCase from tests.utils import assert_is_measured from tests.utils import assert_span_http_status_code @@ -759,56 +758,3 @@ def test_elasticache_client(self): self.assertEqual(span.get_tag("span.kind"), "client") self.assertEqual(span.service, "test-boto-tracing.elasticache") self.assertEqual(span.resource, "elasticache") - - @mock_ec2 - def test_ec2_client_ot(self): - """OpenTracing compatibility check of the test_ec2_client test.""" - ec2 = boto.ec2.connect_to_region("us-west-2") - ot_tracer = init_tracer("my_svc", self.tracer) - pin = Pin(service=self.TEST_SERVICE) - pin._tracer = self.tracer - pin.onto(ec2) - - with ot_tracer.start_active_span("ot_span"): - ec2.get_all_instances() - spans = self.pop_spans() - assert spans - self.assertEqual(len(spans), 2) - ot_span, dd_span = spans - - # confirm the parenting - self.assertIsNone(ot_span.parent_id) - self.assertEqual(dd_span.parent_id, ot_span.span_id) - - self.assertEqual(ot_span.resource, "ot_span") - self.assertEqual(dd_span.get_tag("aws.operation"), "DescribeInstances") - self.assertEqual(dd_span.get_tag("component"), "boto") - self.assertEqual(dd_span.get_tag("span.kind"), "client") - assert_span_http_status_code(dd_span, 200) - self.assertEqual(dd_span.get_tag(http.METHOD), "POST") - self.assertEqual(dd_span.get_tag("aws.region"), "us-west-2") - self.assertEqual(dd_span.get_tag("region"), "us-west-2") - self.assertEqual(dd_span.get_tag("aws.partition"), "aws") - - with ot_tracer.start_active_span("ot_span"): - ec2.run_instances(21) - spans = self.pop_spans() - assert spans - self.assertEqual(len(spans), 2) - ot_span, dd_span = spans - - # confirm the parenting - self.assertIsNone(ot_span.parent_id) - self.assertEqual(dd_span.parent_id, ot_span.span_id) - - self.assertEqual(dd_span.get_tag("aws.operation"), "RunInstances") - assert_span_http_status_code(dd_span, 200) - self.assertEqual(dd_span.get_tag(http.METHOD), "POST") - self.assertEqual(dd_span.get_tag("aws.region"), "us-west-2") - 
self.assertEqual(dd_span.get_tag("region"), "us-west-2") - self.assertEqual(dd_span.get_tag("aws.partition"), "aws") - self.assertEqual(dd_span.get_tag("component"), "boto") - self.assertEqual(dd_span.get_tag("span.kind"), "client") - self.assertEqual(dd_span.service, "test-boto-tracing.ec2") - self.assertEqual(dd_span.resource, "ec2.runinstances") - self.assertEqual(dd_span.name, "ec2.command") diff --git a/tests/contrib/botocore/test.py b/tests/contrib/botocore/test.py index 5270f69f021..1cfdb2306fd 100644 --- a/tests/contrib/botocore/test.py +++ b/tests/contrib/botocore/test.py @@ -47,7 +47,6 @@ from ddtrace.internal.utils.version import parse_version from ddtrace.propagation.http import HTTP_HEADER_PARENT_ID from ddtrace.propagation.http import HTTP_HEADER_TRACE_ID -from tests.opentracer.utils import init_tracer from tests.utils import TracerTestCase from tests.utils import assert_is_measured from tests.utils import assert_span_http_status_code @@ -2245,43 +2244,6 @@ def test_schematized_unspecified_service_kms_client_v1(self): assert span.service == DEFAULT_SPAN_SERVICE_NAME assert span.name == "aws.kms.request" - @mock_ec2 - def test_traced_client_ot(self): - """OpenTracing version of test_traced_client.""" - ot_tracer = init_tracer("ec2_svc", self.tracer) - - with ot_tracer.start_active_span("ec2_op"): - ec2 = self.session.create_client("ec2", region_name="us-west-2") - pin = Pin(service=self.TEST_SERVICE) - pin._tracer = self.tracer - pin.onto(ec2) - ec2.describe_instances() - - spans = self.get_spans() - assert spans - assert len(spans) == 2 - - ot_span, dd_span = spans - - # confirm the parenting - assert ot_span.parent_id is None - assert dd_span.parent_id == ot_span.span_id - - assert ot_span.name == "ec2_op" - assert ot_span.service == "ec2_svc" - - assert dd_span.get_tag("aws.agent") == "botocore" - assert dd_span.get_tag("aws.region") == "us-west-2" - assert dd_span.get_tag("region") == "us-west-2" - assert dd_span.get_tag("aws.operation") == 
"DescribeInstances" - assert dd_span.get_tag("component") == "botocore" - assert dd_span.get_tag("span.kind"), "client" - assert_span_http_status_code(dd_span, 200) - assert dd_span.get_metric("retry_attempts") == 0 - assert dd_span.service == "test-botocore-tracing.ec2" - assert dd_span.resource == "ec2.describeinstances" - assert dd_span.name == "ec2.command" - @unittest.skipIf(BOTOCORE_VERSION < (1, 9, 0), "Skipping for older versions of botocore without Stubber") def test_stubber_no_response_metadata(self): """When no ResponseMetadata key is provided in the response""" diff --git a/tests/contrib/bottle/test.py b/tests/contrib/bottle/test.py index 5a274802d85..d74ceb4935d 100644 --- a/tests/contrib/bottle/test.py +++ b/tests/contrib/bottle/test.py @@ -7,7 +7,6 @@ from ddtrace.contrib.internal.bottle.patch import TracePlugin from ddtrace.ext import http from ddtrace.internal.schema import DEFAULT_SPAN_SERVICE_NAME -from tests.opentracer.utils import init_tracer from tests.tracer.utils_inferred_spans.test_helpers import assert_web_and_inferred_aws_api_gateway_span_data from tests.utils import TracerTestCase from tests.utils import assert_is_measured @@ -316,44 +315,6 @@ def home(): assert s.get_tag("span.kind") == "server" assert s.get_tag("http.route") == "/home/" - def test_200_ot(self): - ot_tracer = init_tracer("my_svc", self.tracer) - - # setup our test app - @self.app.route("/hi/") - def hi(name): - return "hi %s" % name - - self._trace_app(self.tracer) - - # make a request - with ot_tracer.start_active_span("ot_span"): - resp = self.app.get("/hi/dougie") - - assert resp.status_int == 200 - assert resp.body.decode("utf-8", errors="ignore") == "hi dougie" - # validate it's traced - spans = self.pop_spans() - assert len(spans) == 2 - ot_span, dd_span = spans - - # confirm the parenting - assert ot_span.parent_id is None - assert dd_span.parent_id == ot_span.span_id - - assert ot_span.resource == "ot_span" - - assert_is_measured(dd_span) - assert dd_span.name 
== "bottle.request" - assert dd_span.service == "bottle-app" - assert dd_span.resource == "GET /hi/" - assert_span_http_status_code(dd_span, 200) - assert dd_span.get_tag("http.method") == "GET" - assert dd_span.get_tag(http.URL) == "http://localhost:80/hi/dougie" - assert dd_span.get_tag("component") == "bottle" - assert dd_span.get_tag("span.kind") == "server" - assert dd_span.get_tag("http.route") == "/hi/" - @TracerTestCase.run_in_subprocess(env_overrides=dict(DD_SERVICE="mysvc")) def test_user_specified_service_default_schema(self): """ diff --git a/tests/contrib/cassandra/test.py b/tests/contrib/cassandra/test.py deleted file mode 100644 index 708d599040a..00000000000 --- a/tests/contrib/cassandra/test.py +++ /dev/null @@ -1,546 +0,0 @@ -import contextlib -import logging -from threading import Event -import unittest - -from cassandra.cluster import Cluster -from cassandra.cluster import ResultSet -from cassandra.query import BatchStatement -from cassandra.query import SimpleStatement -import mock - -from ddtrace import config -from ddtrace._trace.pin import Pin -from ddtrace.constants import ERROR_MSG -from ddtrace.constants import ERROR_TYPE -from ddtrace.contrib.internal.cassandra.patch import patch -from ddtrace.contrib.internal.cassandra.patch import unpatch -from ddtrace.contrib.internal.cassandra.session import SERVICE -from ddtrace.ext import cassandra as cassx -from ddtrace.ext import net -from ddtrace.internal.schema import DEFAULT_SPAN_SERVICE_NAME -from tests.contrib.config import CASSANDRA_CONFIG -from tests.opentracer.utils import init_tracer -from tests.utils import DummyTracer -from tests.utils import TracerTestCase -from tests.utils import assert_is_measured - - -# Oftentimes our tests fails because Cassandra connection timeouts during keyspace drop. Slowness in keyspace drop -# is known and is due to 'auto_snapshot' configuration. 
In our test env we should disable it, but the official cassandra -# image that we are using only allows us to configure a few configs: -# https://github.com/docker-library/cassandra/blob/4474c6c5cc2a81ee57c5615aae00555fca7e26a6/3.11/docker-entrypoint.sh#L51 -# So for now we just increase the timeout, if this is not enough we may want to extend the official image with our own -# custom image. -CONNECTION_TIMEOUT_SECS = 20 # override the default value of 5 - -logging.getLogger("cassandra").setLevel(logging.INFO) - - -def _setup(testObject): - self = testObject or mock.Mock() - - # skip all the modules if the Cluster is not available - if not Cluster: - raise unittest.SkipTest("cassandra.cluster.Cluster is not available.") - - # create the KEYSPACE for this test module - self.cluster = Cluster(port=CASSANDRA_CONFIG["port"], connect_timeout=CONNECTION_TIMEOUT_SECS) - self.session = self.cluster.connect() - self.session.execute("DROP KEYSPACE IF EXISTS test", timeout=10) - self.session.execute( - "CREATE KEYSPACE if not exists test WITH REPLICATION = { 'class' : 'SimpleStrategy', 'replication_factor': 1};" # noqa:E501 - ) - self.session.execute("CREATE TABLE if not exists test.person (name text PRIMARY KEY, age int, description text)") - self.session.execute( - "CREATE TABLE if not exists test.person_write (name text PRIMARY KEY, age int, description text)" - ) - self.session.execute( - "INSERT INTO test.person (name, age, description) VALUES ('Cassandra', 100, 'A cruel mistress')" - ) - self.session.execute( - "INSERT INTO test.person (name, age, description) VALUES ('Athena', 100, 'Whose shield is thunder')" - ) - self.session.execute( - "INSERT INTO test.person (name, age, description) VALUES ('Calypso', 100, 'Softly-braided nymph')" - ) - - -def _teardown(testObject): - self = testObject or mock.Mock() - # destroy the KEYSPACE - self.session.execute("DROP TABLE IF EXISTS test.person") - self.session.execute("DROP TABLE IF EXISTS test.person_write") - 
self.session.execute("DROP KEYSPACE IF EXISTS test", timeout=10) - - -def setUpModule(): - _setup(None) - - -def tearDownModule(): - _teardown(None) - - -class CassandraBase(object): - """ - Needs a running Cassandra - """ - - TEST_QUERY = "SELECT * from test.person WHERE name = 'Cassandra'" - TEST_QUERY_PAGINATED = "SELECT * from test.person" - TEST_KEYSPACE = "test" - TEST_PORT = CASSANDRA_CONFIG["port"] - TEST_SERVICE = "test-cassandra" - - def setUp(self): - _setup(self) - - def tearDown(self): - _teardown(self) - - @contextlib.contextmanager - def override_config(self, integration, values): - """ - Temporarily override an integration configuration value - >>> with self.override_config('flask', dict(service_name='test-service')): - ... # Your test - """ - options = getattr(config, integration) - - original = dict((key, options.get(key)) for key in values.keys()) - - options.update(values) - try: - yield - finally: - options.update(original) - - def _assert_result_correct(self, result): - assert len(result.current_rows) == 1 - for r in result: - assert r.name == "Cassandra" - assert r.age == 100 - assert r.description == "A cruel mistress" - - def _test_query_base(self, execute_fn): - session, tracer = self._traced_session() - - result = execute_fn(session, self.TEST_QUERY) - self._assert_result_correct(result) - - spans = tracer.pop() - assert spans, spans - - # another for the actual query - assert len(spans) == 1 - - query = spans[0] - - assert_is_measured(query) - assert query.service == self.TEST_SERVICE - assert query.resource == self.TEST_QUERY - assert query.span_type == "cassandra" - - assert query.get_tag(cassx.KEYSPACE) == self.TEST_KEYSPACE - assert query.get_metric("db.row_count") == 1 - assert query.get_metric("network.destination.port") == self.TEST_PORT - assert query.get_tag(cassx.PAGE_NUMBER) is None - assert query.get_tag(cassx.PAGINATED) == "False" - assert query.get_tag(net.TARGET_HOST) == "127.0.0.1" - assert 
query.get_tag(net.SERVER_ADDRESS) == "127.0.0.1" - assert query.get_tag("component") == "cassandra" - assert query.get_tag("span.kind") == "client" - assert query.get_tag("db.system") == "cassandra" - - def test_query(self): - def execute_fn(session, query): - return session.execute(query) - - self._test_query_base(execute_fn) - - def test_query_ot(self): - """Ensure that cassandra works with the opentracer.""" - - def execute_fn(session, query): - return session.execute(query) - - session, tracer = self._traced_session() - ot_tracer = init_tracer("cass_svc", tracer) - - with ot_tracer.start_active_span("cass_op"): - result = execute_fn(session, self.TEST_QUERY) - self._assert_result_correct(result) - - spans = tracer.pop() - assert spans, spans - - # another for the actual query - assert len(spans) == 2 - ot_span, dd_span = spans - - # confirm parenting - assert ot_span.parent_id is None - assert dd_span.parent_id == ot_span.span_id - - assert ot_span.name == "cass_op" - assert ot_span.service == "cass_svc" - - assert dd_span.service == self.TEST_SERVICE - assert dd_span.resource == self.TEST_QUERY - assert dd_span.span_type == "cassandra" - - assert dd_span.get_tag(cassx.KEYSPACE) == self.TEST_KEYSPACE - assert dd_span.get_metric("db.row_count") == 1 - assert dd_span.get_metric("network.destination.port") == self.TEST_PORT - assert dd_span.get_tag(cassx.PAGE_NUMBER) is None - assert dd_span.get_tag(cassx.PAGINATED) == "False" - assert dd_span.get_tag(net.TARGET_HOST) == "127.0.0.1" - assert dd_span.get_tag(net.SERVER_ADDRESS) == "127.0.0.1" - assert dd_span.get_tag("component") == "cassandra" - assert dd_span.get_tag("span.kind") == "client" - assert dd_span.get_tag("db.system") == "cassandra" - - def test_query_async(self): - def execute_fn(session, query): - event = Event() - result = [] - future = session.execute_async(query) - - def callback(results): - result.append(ResultSet(future, results)) - event.set() - - future.add_callback(callback) - event.wait() - 
return result[0] - - self._test_query_base(execute_fn) - - def test_query_async_clearing_callbacks(self): - def execute_fn(session, query): - future = session.execute_async(query) - future.clear_callbacks() - return future.result() - - self._test_query_base(execute_fn) - - def test_span_is_removed_from_future(self): - session, tracer = self._traced_session() - future = session.execute_async(self.TEST_QUERY) - future.result() - span = getattr(future, "_ddtrace_current_span", None) - assert span is None - - def test_paginated_query(self): - session, tracer = self._traced_session() - - statement = SimpleStatement(self.TEST_QUERY_PAGINATED, fetch_size=1) - result = session.execute(statement) - # iterate over all pages - results = list(result) - assert len(results) == 3 - - spans = tracer.pop() - assert spans, spans - - # There are 4 spans for 3 results since the driver makes a request with - # no result to check that it has reached the last page - assert len(spans) == 4 - - for i in range(4): - query = spans[i] - assert query.service == self.TEST_SERVICE - assert query.resource == self.TEST_QUERY_PAGINATED - assert query.span_type == "cassandra" - - assert query.get_tag(cassx.KEYSPACE) == self.TEST_KEYSPACE - assert query.get_metric("network.destination.port") == self.TEST_PORT - if i == 3: - assert query.get_metric("db.row_count") == 0 - else: - assert query.get_metric("db.row_count") == 1 - assert query.get_tag(net.TARGET_HOST) == "127.0.0.1" - assert query.get_tag(net.SERVER_ADDRESS) == "127.0.0.1" - assert query.get_tag(cassx.PAGINATED) == "True" - assert query.get_metric(cassx.PAGE_NUMBER) == i + 1 - assert query.get_tag("db.system") == "cassandra" - - def test_trace_with_service(self): - session, tracer = self._traced_session() - - session.execute(self.TEST_QUERY) - spans = tracer.pop() - assert spans - assert len(spans) == 1 - query = spans[0] - assert query.service == self.TEST_SERVICE - - def test_trace_error(self): - session, tracer = self._traced_session() - 
- try: - session.execute("select * from test.i_dont_exist limit 1") - except Exception: - pass - else: - assert 0 - - spans = tracer.pop() - assert spans - query = spans[0] - assert query.error == 1 - for k in (ERROR_MSG, ERROR_TYPE): - assert query.get_tag(k) - - def test_bound_statement(self): - session, tracer = self._traced_session() - - query = "INSERT INTO test.person_write (name, age, description) VALUES (?, ?, ?)" - prepared = session.prepare(query) - session.execute(prepared, ("matt", 34, "can")) - - prepared = session.prepare(query) - bound_stmt = prepared.bind(("leo", 16, "fr")) - session.execute(bound_stmt) - - spans = tracer.pop() - assert len(spans) == 2 - for s in spans: - assert s.resource == query - - def test_batch_statement(self): - session, tracer = self._traced_session() - - batch = BatchStatement() - batch.add( - SimpleStatement("INSERT INTO test.person_write (name, age, description) VALUES (%s, %s, %s)"), - ("Joe", 1, "a"), - ) - batch.add( - SimpleStatement("INSERT INTO test.person_write (name, age, description) VALUES (%s, %s, %s)"), - ("Jane", 2, "b"), - ) - session.execute(batch) - - spans = tracer.pop() - assert len(spans) == 1 - s = spans[0] - assert s.resource == "BatchStatement" - assert s.get_metric("cassandra.batch_size") == 2 - assert "test.person" in s.get_tag("cassandra.query") - - def test_batched_bound_statement(self): - session, tracer = self._traced_session() - - batch = BatchStatement() - - prepared_statement = session.prepare("INSERT INTO test.person_write (name, age, description) VALUES (?, ?, ?)") - batch.add(prepared_statement.bind(("matt", 34, "can"))) - session.execute(batch) - - spans = tracer.pop() - assert len(spans) == 1 - s = spans[0] - assert s.resource == "BatchStatement" - assert s.get_tag("cassandra.query") == "" - - -class TestCassPatchDefault(unittest.TestCase, CassandraBase): - """Test Cassandra instrumentation with patching and default configuration""" - - TEST_SERVICE = SERVICE - - def tearDown(self): - 
unpatch() - - def setUp(self): - CassandraBase.setUp(self) - patch() - - def _traced_session(self): - tracer = DummyTracer() - Pin.get_from(self.cluster)._clone(tracer=tracer).onto(self.cluster) - return self.cluster.connect(self.TEST_KEYSPACE), tracer - - -class TestCassPatchAll(TestCassPatchDefault): - """Test Cassandra instrumentation with patching and custom service on all clusters""" - - TEST_SERVICE = "test-cassandra-patch-all" - - def tearDown(self): - unpatch() - - def setUp(self): - CassandraBase.setUp(self) - patch() - - def _traced_session(self): - tracer = DummyTracer() - # pin the global Cluster to test if they will conflict - pin = Pin(service=self.TEST_SERVICE) - pin._tracer = tracer - pin.onto(Cluster) - self.cluster = Cluster(port=CASSANDRA_CONFIG["port"]) - - return self.cluster.connect(self.TEST_KEYSPACE), tracer - - -class TestCassPatchOne(TestCassPatchDefault): - """Test Cassandra instrumentation with patching and custom service on one cluster""" - - TEST_SERVICE = "test-cassandra-patch-one" - - def tearDown(self): - unpatch() - - def setUp(self): - CassandraBase.setUp(self) - patch() - - def _traced_session(self): - tracer = DummyTracer() - # pin the global Cluster to test if they will conflict - Pin(service="not-%s" % self.TEST_SERVICE).onto(Cluster) - self.cluster = Cluster(port=CASSANDRA_CONFIG["port"]) - - pin = Pin(service=self.TEST_SERVICE) - pin._tracer = tracer - pin.onto(self.cluster) - return self.cluster.connect(self.TEST_KEYSPACE), tracer - - def test_patch_unpatch(self): - # Test patch idempotence - patch() - patch() - - tracer = DummyTracer() - Pin.get_from(Cluster)._clone(tracer=tracer).onto(Cluster) - - session = Cluster(port=CASSANDRA_CONFIG["port"]).connect(self.TEST_KEYSPACE) - session.execute(self.TEST_QUERY) - - spans = tracer.pop() - assert spans, spans - assert len(spans) == 1 - - # Test unpatch - unpatch() - - session = Cluster(port=CASSANDRA_CONFIG["port"]).connect(self.TEST_KEYSPACE) - session.execute(self.TEST_QUERY) 
- - spans = tracer.pop() - assert not spans, spans - - # Test patch again - patch() - Pin.get_from(Cluster)._clone(tracer=tracer).onto(Cluster) - - session = Cluster(port=CASSANDRA_CONFIG["port"]).connect(self.TEST_KEYSPACE) - session.execute(self.TEST_QUERY) - - spans = tracer.pop() - assert spans, spans - - -class TestCassandraConfig(TracerTestCase): - """ - Test various configurations of the Cassandra integration. - """ - - TEST_QUERY = "SELECT * from test.person WHERE name = 'Cassandra'" - TEST_KEYSPACE = "test" - - def setUp(self): - super(TestCassandraConfig, self).setUp() - patch() - self.tracer = DummyTracer() - self.cluster = Cluster(port=CASSANDRA_CONFIG["port"]) - Pin.get_from(self.cluster)._clone(tracer=self.tracer).onto(self.cluster) - self.session = self.cluster.connect(self.TEST_KEYSPACE) - - def tearDown(self): - unpatch() - super(TestCassandraConfig, self).tearDown() - - @TracerTestCase.run_in_subprocess(env_overrides=dict(DD_SERVICE="mysvc", DD_TRACE_SPAN_ATTRIBUTE_SCHEMA="v0")) - def test_user_specified_service_v0(self): - """ - v0: When a user specifies a service for the app - The cassandra integration should not use it. - """ - # Ensure that the service name was configured - from ddtrace import config - - assert config.service == "mysvc" - - self.session.execute(self.TEST_QUERY) - spans = self.pop_spans() - assert spans - assert len(spans) == 1 - query = spans[0] - assert query.service != "mysvc" - - @TracerTestCase.run_in_subprocess(env_overrides=dict(DD_SERVICE="mysvc", DD_TRACE_SPAN_ATTRIBUTE_SCHEMA="v1")) - def test_user_specified_service_v1(self): - """ - v1: When a user specifies a service for the app - The cassandra integration should use it. 
- """ - # Ensure that the service name was configured - from ddtrace import config - - assert config.service == "mysvc" - - self.session.execute(self.TEST_QUERY) - spans = self.pop_spans() - assert spans - assert len(spans) == 1 - query = spans[0] - assert query.service == "mysvc" - - @TracerTestCase.run_in_subprocess(env_overrides=dict(DD_TRACE_SPAN_ATTRIBUTE_SCHEMA="v1")) - def test_unspecified_service_v1(self): - """ - v1: When a user does not specify a service for the app - dd-trace-py should default to internal.schema.DEFAULT_SPAN_SERVICE_NAME - """ - # Ensure that the service name was configured - from ddtrace import config - - assert config.service == DEFAULT_SPAN_SERVICE_NAME - - self.session.execute(self.TEST_QUERY) - spans = self.pop_spans() - assert spans - assert len(spans) == 1 - query = spans[0] - assert query.service == DEFAULT_SPAN_SERVICE_NAME - - @TracerTestCase.run_in_subprocess(env_overrides=dict(DD_TRACE_SPAN_ATTRIBUTE_SCHEMA="v0")) - def test_span_name_v0_schema(self): - """ - When a user specifies a service for the app - The cassandra integration should not use it. - """ - self.session.execute(self.TEST_QUERY) - spans = self.pop_spans() - assert spans - assert len(spans) == 1 - query = spans[0] - assert query.name == "cassandra.query" - - @TracerTestCase.run_in_subprocess(env_overrides=dict(DD_TRACE_SPAN_ATTRIBUTE_SCHEMA="v1")) - def test_span_name_v1_schema(self): - """ - When a user specifies a service for the app - The cassandra integration should not use it. 
- """ - self.session.execute(self.TEST_QUERY) - spans = self.pop_spans() - assert spans - assert len(spans) == 1 - query = spans[0] - assert query.name == "cassandra.query" diff --git a/tests/contrib/cassandra/test_cassandra_patch.py b/tests/contrib/cassandra/test_cassandra_patch.py deleted file mode 100644 index 19a09daccf4..00000000000 --- a/tests/contrib/cassandra/test_cassandra_patch.py +++ /dev/null @@ -1,31 +0,0 @@ -# This test script was automatically generated by the contrib-patch-tests.py -# script. If you want to make changes to it, you should make sure that you have -# removed the ``_generated`` suffix from the file name, to prevent the content -# from being overwritten by future re-generations. - -from ddtrace.contrib.internal.cassandra.patch import patch -from ddtrace.contrib.internal.cassandra.session import get_version - - -try: - from ddtrace.contrib.internal.cassandra.patch import unpatch -except ImportError: - unpatch = None -from tests.contrib.patch import PatchTestCase - - -class TestCassandraPatch(PatchTestCase.Base): - __integration_name__ = "cassandra" - __module_name__ = "cassandra.cluster" - __patch_func__ = patch - __unpatch_func__ = unpatch - __get_version__ = get_version - - def assert_module_patched(self, cassandra_cluster): - pass - - def assert_not_module_patched(self, cassandra_cluster): - pass - - def assert_not_module_double_patched(self, cassandra_cluster): - pass diff --git a/tests/contrib/celery/test_integration.py b/tests/contrib/celery/test_integration.py index 9646c0aceda..8a831e2c709 100644 --- a/tests/contrib/celery/test_integration.py +++ b/tests/contrib/celery/test_integration.py @@ -15,7 +15,6 @@ import ddtrace.internal.forksafe as forksafe from ddtrace.propagation.http import HTTPPropagator from ddtrace.trace import Context -from tests.opentracer.utils import init_tracer from ...utils import override_global_config from .base import CeleryBaseTestCase @@ -599,55 +598,6 @@ def fn_task(): assert run_trace[1].name == "test" 
assert run_trace[1].parent_id == run_trace[0].span_id - def test_fn_task_apply_async_ot(self): - """OpenTracing version of test_fn_task_apply_async.""" - ot_tracer = init_tracer("celery_svc", self.tracer) - - # it should execute a traced async task that has parameters - @self.app.task - def fn_task_parameters(user, force_logout=False): - return (user, force_logout) - - with ot_tracer.start_active_span("celery_op"): - t = fn_task_parameters.apply_async(args=["user"], kwargs={"force_logout": True}) - assert tuple(t.get(timeout=self.ASYNC_GET_TIMEOUT)) == ("user", True) - - ot_span = self.find_span(name="celery_op") - assert ot_span.parent_id is None - assert ot_span.name == "celery_op" - assert ot_span.service == "celery_svc" - - if self.ASYNC_USE_CELERY_FIXTURES: - async_span = self.find_span(name="celery.apply") - self.assert_is_measured(async_span) - assert async_span.error == 0 - - # confirm the parenting - assert async_span.parent_id == ot_span.span_id - assert async_span.name == "celery.apply" - assert async_span.resource == "tests.contrib.celery.test_integration.fn_task_parameters" - assert async_span.service == "celery-producer" - assert async_span.get_tag("celery.id") == t.task_id - assert async_span.get_tag("celery.action") == "apply_async" - assert async_span.get_tag("celery.routing_key") == "celery" - assert async_span.get_tag("component") == "celery" - assert async_span.get_tag("span.kind") == "producer" - assert async_span.get_tag("out.host") == "memory://" - - run_span = self.find_span(name="celery.run") - assert run_span.name == "celery.run" - assert run_span.parent_id is None - assert run_span.resource == "tests.contrib.celery.test_integration.fn_task_parameters" - assert run_span.service == "celery-worker" - assert run_span.get_tag("celery.id") == t.task_id - assert run_span.get_tag("celery.action") == "run" - assert run_span.get_tag("component") == "celery" - assert run_span.get_tag("span.kind") == "consumer" - - traces = self.pop_traces() - assert 
len(traces) == 2 - assert len(traces[0]) + len(traces[1]) == 3 - @pytest.mark.no_getattr_patch # this mark is added to prevent patching of getattr necessary for integration registry update # see: https://github.com/DataDog/dd-trace-py/pull/13215 diff --git a/tests/contrib/dbapi/test_dbapi.py b/tests/contrib/dbapi/test_dbapi.py index 71ddaff78e6..a1299c575bc 100644 --- a/tests/contrib/dbapi/test_dbapi.py +++ b/tests/contrib/dbapi/test_dbapi.py @@ -5,9 +5,9 @@ from ddtrace.contrib.dbapi import FetchTracedCursor from ddtrace.contrib.dbapi import TracedConnection from ddtrace.contrib.dbapi import TracedCursor +from ddtrace.internal.settings._config import Config +from ddtrace.internal.settings.integration import IntegrationConfig from ddtrace.propagation._database_monitoring import _DBM_Propagator -from ddtrace.settings._config import Config -from ddtrace.settings.integration import IntegrationConfig from ddtrace.trace import Span # noqa:F401 from tests.utils import TracerTestCase from tests.utils import assert_is_measured diff --git a/tests/contrib/dbapi_async/test_dbapi_async.py b/tests/contrib/dbapi_async/test_dbapi_async.py index 794af1ebae4..4b615043268 100644 --- a/tests/contrib/dbapi_async/test_dbapi_async.py +++ b/tests/contrib/dbapi_async/test_dbapi_async.py @@ -5,9 +5,9 @@ from ddtrace.contrib.dbapi_async import FetchTracedAsyncCursor from ddtrace.contrib.dbapi_async import TracedAsyncConnection from ddtrace.contrib.dbapi_async import TracedAsyncCursor +from ddtrace.internal.settings._config import Config +from ddtrace.internal.settings.integration import IntegrationConfig from ddtrace.propagation._database_monitoring import _DBM_Propagator -from ddtrace.settings._config import Config -from ddtrace.settings.integration import IntegrationConfig from ddtrace.trace import Span # noqa:F401 from tests.contrib.asyncio.utils import AsyncioTestCase from tests.contrib.asyncio.utils import mark_asyncio diff --git a/tests/contrib/django/test_django.py 
b/tests/contrib/django/test_django.py index 7b58471c1d7..a10478fb143 100644 --- a/tests/contrib/django/test_django.py +++ b/tests/contrib/django/test_django.py @@ -36,7 +36,6 @@ from ddtrace.propagation.http import HTTP_HEADER_SAMPLING_PRIORITY from ddtrace.propagation.http import HTTP_HEADER_TRACE_ID from tests.conftest import DEFAULT_DDTRACE_SUBPROCESS_TEST_SERVICE_NAME -from tests.opentracer.utils import init_tracer from tests.tracer.utils_inferred_spans.test_helpers import assert_web_and_inferred_aws_api_gateway_span_data from tests.utils import assert_dict_issuperset from tests.utils import override_config @@ -1952,38 +1951,6 @@ def test_template_name(test_spans): assert span.resource == "/my-template" -""" -OpenTracing tests -""" - - -@pytest.mark.django_db -def test_middleware_trace_request_ot(client, test_spans, tracer): - """OpenTracing version of test_middleware_trace_request.""" - ot_tracer = init_tracer("my_svc", tracer) - - # ensures that the internals are properly traced - with ot_tracer.start_active_span("ot_span"): - assert client.get("/users/").status_code == 200 - - # check for spans - spans = test_spans.get_spans() - ot_span = spans[0] - sp_request = spans[1] - - # confirm parenting - assert ot_span.parent_id is None - assert sp_request.parent_id == ot_span.span_id - - assert ot_span.resource == "ot_span" - assert ot_span.service == "my_svc" - - assert sp_request.get_tag("http.status_code") == "200" - assert sp_request.get_tag(http.URL) == "http://testserver/users/" - assert sp_request.get_tag("django.user.is_authenticated") == "False" - assert sp_request.get_tag("http.method") == "GET" - - def test_collecting_requests_handles_improperly_configured_error(client, test_spans): """ Since it's difficult to reproduce the ImproperlyConfigured error via django (server setup), will instead diff --git a/tests/contrib/falcon/test_suite.py b/tests/contrib/falcon/test_suite.py index 2cb912ee760..9771158e34d 100644 --- a/tests/contrib/falcon/test_suite.py +++ 
b/tests/contrib/falcon/test_suite.py @@ -3,7 +3,6 @@ from ddtrace.constants import USER_KEEP from ddtrace.contrib.internal.falcon.patch import FALCON_VERSION from ddtrace.ext import http as httpx -from tests.opentracer.utils import init_tracer from tests.tracer.utils_inferred_spans.test_helpers import assert_web_and_inferred_aws_api_gateway_span_data from tests.utils import assert_is_measured from tests.utils import assert_span_http_status_code @@ -225,37 +224,6 @@ def test_404_exception_no_stacktracer(self): assert span.get_tag("component") == "falcon" assert span.get_tag("span.kind") == "server" - def test_200_ot(self): - """OpenTracing version of test_200.""" - writer = self.tracer._span_aggregator.writer - ot_tracer = init_tracer("my_svc", self.tracer) - ot_tracer._dd_tracer._span_aggregator.writer = writer - ot_tracer._dd_tracer._recreate() - - with ot_tracer.start_active_span("ot_span"): - out = self.make_test_call("/200", expected_status_code=200) - assert out.content.decode("utf-8") == "Success" - - traces = self.tracer.pop_traces() - assert len(traces) == 1 - assert len(traces[0]) == 2 - ot_span, dd_span = traces[0] - - # confirm the parenting - assert ot_span.parent_id is None - assert dd_span.parent_id == ot_span.span_id - - assert ot_span.service == "my_svc" - assert ot_span.resource == "ot_span" - - assert_is_measured(dd_span) - assert dd_span.name == "falcon.request" - assert dd_span.service == self._service - assert dd_span.resource == "GET tests.contrib.falcon.app.resources.Resource200" - assert_span_http_status_code(dd_span, 200) - assert dd_span.get_tag(httpx.URL) == "http://falconframework.org/200" - assert dd_span.error == 0 - def test_falcon_request_hook(self): @config.falcon.hooks.on("request") def on_falcon_request(span, request, response): diff --git a/tests/contrib/flask_cache/test.py b/tests/contrib/flask_cache/test.py index 25ed861dbe2..6e23414eace 100644 --- a/tests/contrib/flask_cache/test.py +++ b/tests/contrib/flask_cache/test.py @@ 
-5,7 +5,6 @@ from ddtrace.contrib.internal.flask_cache.patch import get_traced_cache from ddtrace.ext import net from ddtrace.internal.schema import DEFAULT_SPAN_SERVICE_NAME -from tests.opentracer.utils import init_tracer from tests.utils import TracerTestCase from tests.utils import assert_dict_issuperset from tests.utils import assert_is_measured @@ -317,44 +316,6 @@ def test_default_span_tags_memcached(self): self.assertEqual(span.get_tag(net.TARGET_HOST), "127.0.0.1") self.assertEqual(span.get_metric("network.destination.port"), self.TEST_MEMCACHED_PORT) - def test_simple_cache_get_ot(self): - """OpenTracing version of test_simple_cache_get.""" - ot_tracer = init_tracer("my_svc", self.tracer) - - # create the TracedCache instance for a Flask app - Cache = get_traced_cache(self.tracer, service=self.SERVICE) - app = Flask(__name__) - cache = Cache(app, config={"CACHE_TYPE": "simple"}) - - with ot_tracer.start_active_span("ot_span"): - cache.get("Γ‘_complex_operation") - - spans = self.get_spans() - self.assertEqual(len(spans), 2) - ot_span, dd_span = spans - - # confirm the parenting - self.assertIsNone(ot_span.parent_id) - self.assertEqual(dd_span.parent_id, ot_span.span_id) - - self.assertEqual(ot_span.resource, "ot_span") - self.assertEqual(ot_span.service, "my_svc") - - assert_is_measured(dd_span) - self.assertEqual(dd_span.service, self.SERVICE) - self.assertEqual(dd_span.resource, "get") - self.assertEqual(dd_span.name, "flask_cache.cmd") - self.assertEqual(dd_span.span_type, "cache") - self.assertEqual(dd_span.error, 0) - - expected_meta = { - "flask_cache.key": "Γ‘_complex_operation", - "flask_cache.backend": "simple", - "component": "flask_cache", - } - - assert_dict_issuperset(dd_span.get_tags(), expected_meta) - class TestFlaskCacheSchematization(TracerTestCase): TEST_REDIS_PORT = REDIS_CONFIG["port"] diff --git a/tests/contrib/freezegun/test_freezegun.py b/tests/contrib/freezegun/test_freezegun.py deleted file mode 100644 index 
aeb08c6edfb..00000000000 --- a/tests/contrib/freezegun/test_freezegun.py +++ /dev/null @@ -1,99 +0,0 @@ -import datetime -import os -import time - -import pytest - -from ddtrace.internal.utils.time import StopWatch -from ddtrace.trace import tracer as dd_tracer -from tests.contrib.pytest.test_pytest import PytestTestCaseBase - - -class TestFreezegunTestCase: - @pytest.fixture(autouse=True) - def _patch_freezegun(self): - from ddtrace.contrib.internal.freezegun.patch import patch - from ddtrace.contrib.internal.freezegun.patch import unpatch - - patch() - yield - unpatch() - - def test_freezegun_does_not_freeze_tracing(self): - import freezegun - - with freezegun.freeze_time("2020-01-01"): - with dd_tracer.trace("freezegun.test") as span: - time.sleep(1) - - assert span.duration >= 1 - - def test_freezegun_fast_forward_does_not_affect_tracing(self): - import freezegun - - with freezegun.freeze_time("2020-01-01") as frozen_time: - with dd_tracer.trace("freezegun.test") as span: - time.sleep(1) - frozen_time.tick(delta=datetime.timedelta(days=10)) - assert 1 <= span.duration <= 5 - - def test_freezegun_does_not_freeze_stopwatch(self): - import freezegun - - with freezegun.freeze_time("2020-01-01"): - with StopWatch() as sw: - time.sleep(1) - assert sw.elapsed() >= 1 - - def test_freezegun_configure_default_ignore_list_continues_to_ignore_ddtrace(self): - import freezegun - from freezegun.config import DEFAULT_IGNORE_LIST - - try: - freezegun.configure(default_ignore_list=[]) - - with freezegun.freeze_time("2020-01-01"): - with dd_tracer.trace("freezegun.test") as span: - time.sleep(1) - - assert span.duration >= 1 - finally: - # Reset the ignore list to its default value after the test - freezegun.configure(default_ignore_list=DEFAULT_IGNORE_LIST) - - -class PytestFreezegunTestCase(PytestTestCaseBase): - def test_freezegun_pytest_plugin(self): - """Tests that pytest's patching of freezegun in the v1 plugin version works""" - import sys - - from 
ddtrace.contrib.internal.freezegun.patch import unpatch - - unpatch() - if "freezegun" in sys.modules: - del sys.modules["freezegun"] - - py_file = self.testdir.makepyfile( - """ - import datetime - import time - - import freezegun - - from ddtrace.trace import tracer as dd_tracer - - def test_pytest_patched_freezegun(): - with freezegun.freeze_time("2020-01-01"): - with dd_tracer.trace("freezegun.test") as span: - time.sleep(1) - assert span.duration >= 1 - - """ - ) - file_name = os.path.basename(py_file.strpath) - self.inline_run("--ddtrace", "-s", file_name) - spans = self.pop_spans() - - assert len(spans) == 4 - for span in spans: - assert span.get_tag("test.status") == "pass" diff --git a/tests/contrib/futures/test_propagation.py b/tests/contrib/futures/test_propagation.py index 77a9e2f25a1..763052dda0c 100644 --- a/tests/contrib/futures/test_propagation.py +++ b/tests/contrib/futures/test_propagation.py @@ -6,7 +6,6 @@ from ddtrace.contrib.internal.futures.patch import patch from ddtrace.contrib.internal.futures.patch import unpatch -from tests.opentracer.utils import init_tracer from tests.utils import DummyTracer from tests.utils import TracerTestCase @@ -408,33 +407,6 @@ def fn(): assert spans[1].trace_id == spans[0].trace_id assert spans[1].parent_id == spans[0].span_id - def test_propagation_ot(self): - """OpenTracing version of test_propagation.""" - # it must propagate the tracing context if available - ot_tracer = init_tracer("my_svc", self.tracer) - - def fn(): - # an active context must be available - self.assertTrue(self.tracer.context_provider.active() is not None) - with self.tracer.trace("executor.thread"): - return 42 - - with self.override_global_tracer(): - with ot_tracer.start_active_span("main.thread"): - with concurrent.futures.ThreadPoolExecutor(max_workers=2) as executor: - future = executor.submit(fn) - result = future.result() - # assert the right result - self.assertEqual(result, 42) - - # the trace must be completed - 
self.assert_span_count(2) - spans = self.get_spans() - assert spans[0].name == "main.thread" - assert spans[1].name == "executor.thread" - assert spans[1].trace_id == spans[0].trace_id - assert spans[1].parent_id == spans[0].span_id - @pytest.mark.skipif(sys.version_info > (3, 12), reason="Fails on 3.13") @pytest.mark.subprocess(ddtrace_run=True, timeout=5) diff --git a/tests/contrib/gevent/test_tracer.py b/tests/contrib/gevent/test_tracer.py index dc72ccc08ca..a7505d56f7d 100644 --- a/tests/contrib/gevent/test_tracer.py +++ b/tests/contrib/gevent/test_tracer.py @@ -2,7 +2,6 @@ import gevent import gevent.pool -from opentracing.scope_managers.gevent import GeventScopeManager import ddtrace from ddtrace.constants import ERROR_MSG @@ -11,7 +10,6 @@ from ddtrace.trace import Context from ddtrace.contrib.internal.gevent.patch import patch from ddtrace.contrib.internal.gevent.patch import unpatch -from tests.opentracer.utils import init_tracer from tests.utils import TracerTestCase from .utils import silence_errors @@ -356,34 +354,6 @@ def green_2(): spans = self.pop_spans() self._assert_spawn_multiple_greenlets(spans) - def test_trace_spawn_multiple_greenlets_multiple_traces_ot(self): - """OpenTracing version of the same test.""" - - ot_tracer = init_tracer("my_svc", self.tracer, scope_manager=GeventScopeManager()) - - def entrypoint(): - with ot_tracer.start_active_span("greenlet.main") as span: - span.resource = "base" - jobs = [gevent.spawn(green_1), gevent.spawn(green_2)] - gevent.joinall(jobs) - - def green_1(): - with self.tracer.trace("greenlet.worker1") as span: - span.set_tag("worker_id", "1") - gevent.sleep(0.01) - - # note that replacing the `tracer.trace` call here with the - # OpenTracing equivalent will cause the checks to fail - def green_2(): - with ot_tracer.start_active_span("greenlet.worker2") as scope: - scope.span.set_tag("worker_id", "2") - gevent.sleep(0.01) - - gevent.spawn(entrypoint).join() - - spans = self.pop_spans() - 
self._assert_spawn_multiple_greenlets(spans) - def test_ddtracerun(self): """ Regression test case for the following issue. diff --git a/tests/contrib/google_generativeai/conftest.py b/tests/contrib/google_generativeai/conftest.py deleted file mode 100644 index b30aa1c0fc8..00000000000 --- a/tests/contrib/google_generativeai/conftest.py +++ /dev/null @@ -1,87 +0,0 @@ -import os - -import mock -import pytest - -from ddtrace._trace.pin import Pin -from ddtrace.contrib.internal.google_generativeai.patch import patch -from ddtrace.contrib.internal.google_generativeai.patch import unpatch -from ddtrace.llmobs import LLMObs -from tests.contrib.google_generativeai.utils import MockGenerativeModelAsyncClient -from tests.contrib.google_generativeai.utils import MockGenerativeModelClient -from tests.utils import DummyTracer -from tests.utils import DummyWriter -from tests.utils import override_config -from tests.utils import override_env -from tests.utils import override_global_config - - -def default_global_config(): - return {"_dd_api_key": ""} - - -@pytest.fixture -def ddtrace_global_config(): - return {} - - -@pytest.fixture -def ddtrace_config_google_generativeai(): - return {} - - -@pytest.fixture -def mock_tracer(ddtrace_global_config, genai): - try: - pin = Pin.get_from(genai) - mock_tracer = DummyTracer(writer=DummyWriter(trace_flush_enabled=False)) - pin._override(genai, tracer=mock_tracer) - if ddtrace_global_config.get("_llmobs_enabled", False): - # Have to disable and re-enable LLMObs to use to mock tracer. 
- LLMObs.disable() - LLMObs.enable(_tracer=mock_tracer, integrations_enabled=False) - yield mock_tracer - except Exception: - yield - - -@pytest.fixture -def mock_llmobs_writer(): - patcher = mock.patch("ddtrace.llmobs._llmobs.LLMObsSpanWriter") - try: - LLMObsSpanWriterMock = patcher.start() - m = mock.MagicMock() - LLMObsSpanWriterMock.return_value = m - yield m - finally: - patcher.stop() - - -@pytest.fixture -def mock_client(): - yield MockGenerativeModelClient() - - -@pytest.fixture -def mock_client_async(): - yield MockGenerativeModelAsyncClient() - - -@pytest.fixture -def genai(ddtrace_global_config, ddtrace_config_google_generativeai, mock_client, mock_client_async): - global_config = default_global_config() - global_config.update(ddtrace_global_config) - with override_global_config(global_config): - with override_config("google_generativeai", ddtrace_config_google_generativeai): - with override_env( - dict(GOOGLE_GENERATIVEAI_API_KEY=os.getenv("GOOGLE_GENERATIVEAI_API_KEY", "")) - ): - patch() - import google.generativeai as genai - from google.generativeai import client as client_lib - - client_lib._client_manager.clients["generative"] = mock_client - client_lib._client_manager.clients["generative_async"] = mock_client_async - - yield genai - unpatch() diff --git a/tests/contrib/google_generativeai/test_data/apple.jpg b/tests/contrib/google_generativeai/test_data/apple.jpg deleted file mode 100644 index f921762ae07..00000000000 Binary files a/tests/contrib/google_generativeai/test_data/apple.jpg and /dev/null differ diff --git a/tests/contrib/google_generativeai/test_google_generativeai.py b/tests/contrib/google_generativeai/test_google_generativeai.py deleted file mode 100644 index 1b081c799cc..00000000000 --- a/tests/contrib/google_generativeai/test_google_generativeai.py +++ /dev/null @@ -1,384 +0,0 @@ -import os - -from google.api_core.exceptions import InvalidArgument -import mock -from PIL import Image -import pytest - -from 
ddtrace.contrib.internal.google_generativeai.patch import get_version -from tests.contrib.google_generativeai.utils import MOCK_CHAT_COMPLETION_TOOL_RESPONSE -from tests.contrib.google_generativeai.utils import MOCK_COMPLETION_IMG_CALL -from tests.contrib.google_generativeai.utils import MOCK_COMPLETION_SIMPLE_1 -from tests.contrib.google_generativeai.utils import MOCK_COMPLETION_SIMPLE_2 -from tests.contrib.google_generativeai.utils import MOCK_COMPLETION_SIMPLE_SYSTEM -from tests.contrib.google_generativeai.utils import MOCK_COMPLETION_STREAM_CHUNKS -from tests.contrib.google_generativeai.utils import MOCK_COMPLETION_TOOL_CALL -from tests.contrib.google_generativeai.utils import MOCK_COMPLETION_TOOL_CALL_STREAM_CHUNKS -from tests.contrib.google_generativeai.utils import _async_streamed_response -from tests.contrib.google_generativeai.utils import _mock_completion_response -from tests.contrib.google_generativeai.utils import _mock_completion_stream_chunk -from tests.contrib.google_generativeai.utils import set_light_values -from tests.utils import override_global_config - - -def test_global_tags(genai, mock_client, mock_tracer): - """ - When the global config UST tags are set - The service name should be used for all data - The env should be used for all data - The version should be used for all data - """ - mock_client.responses["generate_content"].append(_mock_completion_response(MOCK_COMPLETION_SIMPLE_1)) - llm = genai.GenerativeModel("gemini-1.5-flash") - with override_global_config(dict(service="test-svc", env="staging", version="1234")): - llm.generate_content( - "What is the argument for LeBron James being the GOAT?", - generation_config=genai.types.GenerationConfig(stop_sequences=["x"], max_output_tokens=35, temperature=1.0), - ) - - span = mock_tracer.pop_traces()[0][0] - assert span.resource == "GenerativeModel.generate_content" - assert span.service == "test-svc" - assert span.get_tag("env") == "staging" - assert span.get_tag("version") == "1234" - 
assert span.get_tag("google_generativeai.request.model") == "gemini-1.5-flash" - - -SNAPSHOT_IGNORES = [] -if get_version().split(".")[0:2] == ["0", "7"]: - # ignore the function call args because it comes in with dict keys in a different order than expected - # for 0.7 versions of google-generativeai. - SNAPSHOT_IGNORES = [ - "meta.google_generativeai.response.candidates.0.content.parts.0.function_call.args", - "meta.google_generativeai.request.contents.1.parts.0.function_call.args", - ] - - -# ignore the function call arg because it comes in with dict keys in a different order than expected -@pytest.mark.snapshot(ignores=[*SNAPSHOT_IGNORES]) -def test_gemini_completion(genai, mock_client): - mock_client.responses["generate_content"].append(_mock_completion_response(MOCK_COMPLETION_SIMPLE_1)) - llm = genai.GenerativeModel("gemini-1.5-flash") - llm.generate_content( - "What is the argument for LeBron James being the GOAT?", - generation_config=genai.types.GenerationConfig(stop_sequences=["x"], max_output_tokens=35, temperature=1.0), - ) - - -@pytest.mark.snapshot( - token="tests.contrib.google_generativeai.test_google_generativeai.test_gemini_completion", - ignores=["resource", *SNAPSHOT_IGNORES], -) -async def test_gemini_completion_async(genai, mock_client_async): - mock_client_async.responses["generate_content"].append(_mock_completion_response(MOCK_COMPLETION_SIMPLE_1)) - llm = genai.GenerativeModel("gemini-1.5-flash") - await llm.generate_content_async( - "What is the argument for LeBron James being the GOAT?", - generation_config=genai.types.GenerationConfig(stop_sequences=["x"], max_output_tokens=35, temperature=1.0), - ) - - -@pytest.mark.snapshot(ignores=["meta.error.stack", *SNAPSHOT_IGNORES]) -def test_gemini_completion_error(genai, mock_client): - llm = genai.GenerativeModel("gemini-1.5-flash") - llm._client = mock.Mock() - llm._client.generate_content.side_effect = InvalidArgument("Invalid API key. 
Please pass a valid API key.") - with pytest.raises(InvalidArgument): - llm.generate_content( - "What is the argument for LeBron James being the GOAT?", - generation_config=genai.types.GenerationConfig(stop_sequences=["x"], max_output_tokens=30, temperature=1.0), - ) - - -@pytest.mark.snapshot( - token="tests.contrib.google_generativeai.test_google_generativeai.test_gemini_completion_error", - ignores=[ - "resource", - "meta.error.stack", - *SNAPSHOT_IGNORES, - ], -) -async def test_gemini_completion_error_async(genai, mock_client): - llm = genai.GenerativeModel("gemini-1.5-flash") - llm._async_client = mock.Mock() - llm._async_client.generate_content.side_effect = InvalidArgument("Invalid API key. Please pass a valid API key.") - with pytest.raises(InvalidArgument): - await llm.generate_content_async( - "What is the argument for LeBron James being the GOAT?", - generation_config=genai.types.GenerationConfig(stop_sequences=["x"], max_output_tokens=30, temperature=1.0), - ) - - -@pytest.mark.snapshot(ignores=[*SNAPSHOT_IGNORES]) -def test_gemini_completion_multiple_messages(genai, mock_client): - mock_client.responses["generate_content"].append(_mock_completion_response(MOCK_COMPLETION_SIMPLE_2)) - llm = genai.GenerativeModel("gemini-1.5-flash") - llm.generate_content( - [ - {"role": "user", "parts": [{"text": "Hello world!"}]}, - {"role": "model", "parts": [{"text": "Great to meet you. 
What would you like to know?"}]}, - {"role": "user", "parts": [{"text": "Why is the sky blue?"}]}, - ], - generation_config=genai.types.GenerationConfig(stop_sequences=["x"], max_output_tokens=35, temperature=1.0), - ) - - -@pytest.mark.snapshot( - token="tests.contrib.google_generativeai.test_google_generativeai.test_gemini_completion_multiple_messages", - ignores=["resource", *SNAPSHOT_IGNORES], -) -async def test_gemini_completion_multiple_messages_async(genai, mock_client_async): - mock_client_async.responses["generate_content"].append(_mock_completion_response(MOCK_COMPLETION_SIMPLE_2)) - llm = genai.GenerativeModel("gemini-1.5-flash") - await llm.generate_content_async( - [ - {"role": "user", "parts": [{"text": "Hello world!"}]}, - {"role": "model", "parts": [{"text": "Great to meet you. What would you like to know?"}]}, - {"role": "user", "parts": [{"text": "Why is the sky blue?"}]}, - ], - generation_config=genai.types.GenerationConfig(stop_sequences=["x"], max_output_tokens=35, temperature=1.0), - ) - - -@pytest.mark.snapshot( - token="tests.contrib.google_generativeai.test_google_generativeai.test_gemini_completion_multiple_messages", - ignores=[ # send_message does not include all config options by default - "meta.google_generativeai.request.generation_config.candidate_count", - "meta.google_generativeai.request.generation_config.top_k", - "meta.google_generativeai.request.generation_config.top_p", - "meta.google_generativeai.request.generation_config.response_mime_type", - "meta.google_generativeai.request.generation_config.response_schema", - *SNAPSHOT_IGNORES, - ], -) -def test_gemini_chat_completion(genai, mock_client): - mock_client.responses["generate_content"].append(_mock_completion_response(MOCK_COMPLETION_SIMPLE_2)) - llm = genai.GenerativeModel("gemini-1.5-flash") - chat = llm.start_chat( - history=[ - {"role": "user", "parts": "Hello world!"}, - {"role": "model", "parts": "Great to meet you. 
What would you like to know?"}, - ] - ) - chat.send_message( - "Why is the sky blue?", - generation_config=genai.types.GenerationConfig(stop_sequences=["x"], max_output_tokens=35, temperature=1.0), - ) - - -@pytest.mark.snapshot( - token="tests.contrib.google_generativeai.test_google_generativeai.test_gemini_completion_multiple_messages", - ignores=[ # send_message does not include all config options by default - "resource", - "meta.google_generativeai.request.generation_config.candidate_count", - "meta.google_generativeai.request.generation_config.top_k", - "meta.google_generativeai.request.generation_config.top_p", - "meta.google_generativeai.request.generation_config.response_mime_type", - "meta.google_generativeai.request.generation_config.response_schema", - *SNAPSHOT_IGNORES, - ], -) -async def test_gemini_chat_completion_async(genai, mock_client_async): - mock_client_async.responses["generate_content"].append(_mock_completion_response(MOCK_COMPLETION_SIMPLE_2)) - llm = genai.GenerativeModel("gemini-1.5-flash") - chat = llm.start_chat( - history=[ - {"role": "user", "parts": "Hello world!"}, - {"role": "model", "parts": "Great to meet you. 
What would you like to know?"}, - ] - ) - await chat.send_message_async( - "Why is the sky blue?", - generation_config=genai.types.GenerationConfig(stop_sequences=["x"], max_output_tokens=35, temperature=1.0), - ) - - -@pytest.mark.snapshot(ignores=[*SNAPSHOT_IGNORES]) -def test_gemini_completion_system_prompt(genai, mock_client): - mock_client.responses["generate_content"].append(_mock_completion_response(MOCK_COMPLETION_SIMPLE_SYSTEM)) - llm = genai.GenerativeModel( - "gemini-1.5-flash", - system_instruction="You are a die-hard Michael Jordan fan that always brings stats to the discussion.", - ) - llm.generate_content( - "What is the argument for LeBron James being the GOAT?", - generation_config=genai.types.GenerationConfig(stop_sequences=["x"], max_output_tokens=50, temperature=1.0), - ) - - -@pytest.mark.snapshot( - token="tests.contrib.google_generativeai.test_google_generativeai.test_gemini_completion_system_prompt", - ignores=["resource", *SNAPSHOT_IGNORES], -) -async def test_gemini_completion_system_prompt_async(genai, mock_client_async): - mock_client_async.responses["generate_content"].append(_mock_completion_response(MOCK_COMPLETION_SIMPLE_SYSTEM)) - llm = genai.GenerativeModel( - "gemini-1.5-flash", - system_instruction="You are a die-hard Michael Jordan fan that always brings stats to the discussion.", - ) - await llm.generate_content_async( - "What is the argument for LeBron James being the GOAT?", - generation_config=genai.types.GenerationConfig(stop_sequences=["x"], max_output_tokens=50, temperature=1.0), - ) - - -@pytest.mark.snapshot(ignores=[*SNAPSHOT_IGNORES]) -def test_gemini_completion_stream(genai, mock_client): - mock_client.responses["stream_generate_content"] = [ - (_mock_completion_stream_chunk(chunk) for chunk in MOCK_COMPLETION_STREAM_CHUNKS) - ] - llm = genai.GenerativeModel("gemini-1.5-flash") - response = llm.generate_content( - "Can you recite the alphabet?", - generation_config=genai.types.GenerationConfig(stop_sequences=["x"], 
max_output_tokens=60, temperature=1.0), - stream=True, - ) - for _ in response: - pass - - -@pytest.mark.snapshot( - token="tests.contrib.google_generativeai.test_google_generativeai.test_gemini_completion_stream", - ignores=["resource", *SNAPSHOT_IGNORES], -) -async def test_gemini_completion_stream_async(genai, mock_client_async): - mock_client_async.responses["stream_generate_content"] = [_async_streamed_response(MOCK_COMPLETION_STREAM_CHUNKS)] - llm = genai.GenerativeModel("gemini-1.5-flash") - response = await llm.generate_content_async( - "Can you recite the alphabet?", - generation_config=genai.types.GenerationConfig(stop_sequences=["x"], max_output_tokens=60, temperature=1.0), - stream=True, - ) - async for _ in response: - pass - - -@pytest.mark.snapshot(ignores=[*SNAPSHOT_IGNORES]) -def test_gemini_tool_completion(genai, mock_client): - mock_client.responses["generate_content"].append(_mock_completion_response(MOCK_COMPLETION_TOOL_CALL)) - llm = genai.GenerativeModel("gemini-1.5-flash", tools=[set_light_values]) - llm.generate_content( - "Dim the lights so the room feels cozy and warm.", - generation_config=genai.types.GenerationConfig(stop_sequences=["x"], max_output_tokens=30, temperature=1.0), - ) - - -@pytest.mark.snapshot( - token="tests.contrib.google_generativeai.test_google_generativeai.test_gemini_tool_completion", - ignores=["resource", *SNAPSHOT_IGNORES], -) -async def test_gemini_tool_completion_async(genai, mock_client_async): - mock_client_async.responses["generate_content"].append(_mock_completion_response(MOCK_COMPLETION_TOOL_CALL)) - llm = genai.GenerativeModel("gemini-1.5-flash", tools=[set_light_values]) - await llm.generate_content_async( - "Dim the lights so the room feels cozy and warm.", - generation_config=genai.types.GenerationConfig(stop_sequences=["x"], max_output_tokens=30, temperature=1.0), - ) - - -@pytest.mark.snapshot(ignores=[*SNAPSHOT_IGNORES]) -def test_gemini_tool_chat_completion(genai, mock_client): - 
mock_client.responses["generate_content"].append(_mock_completion_response(MOCK_COMPLETION_TOOL_CALL)) - mock_client.responses["generate_content"].append(_mock_completion_response(MOCK_CHAT_COMPLETION_TOOL_RESPONSE)) - model = genai.GenerativeModel(model_name="gemini-1.5-flash", tools=[set_light_values]) - chat = model.start_chat() - chat.send_message("Dim the lights so the room feels cozy and warm.") - response_parts = [ - genai.protos.Part( - function_response=genai.protos.FunctionResponse( - name="set_light_values", response={"result": {"brightness": 50, "color_temperature": "warm"}} - ) - ) - ] - chat.send_message(response_parts) - - -@pytest.mark.snapshot( - token="tests.contrib.google_generativeai.test_google_generativeai.test_gemini_tool_chat_completion", - ignores=["resource", *SNAPSHOT_IGNORES], -) -async def test_gemini_tool_chat_completion_async(genai, mock_client_async): - mock_client_async.responses["generate_content"].append(_mock_completion_response(MOCK_COMPLETION_TOOL_CALL)) - mock_client_async.responses["generate_content"].append( - _mock_completion_response(MOCK_CHAT_COMPLETION_TOOL_RESPONSE) - ) - model = genai.GenerativeModel(model_name="gemini-1.5-flash", tools=[set_light_values]) - chat = model.start_chat() - await chat.send_message_async("Dim the lights so the room feels cozy and warm.") - response_parts = [ - genai.protos.Part( - function_response=genai.protos.FunctionResponse( - name="set_light_values", response={"result": {"brightness": 50, "color_temperature": "warm"}} - ) - ) - ] - await chat.send_message_async(response_parts) - - -@pytest.mark.snapshot(ignores=[*SNAPSHOT_IGNORES]) -def test_gemini_completion_tool_stream(genai, mock_client): - mock_client.responses["stream_generate_content"] = [ - (_mock_completion_stream_chunk(chunk) for chunk in MOCK_COMPLETION_TOOL_CALL_STREAM_CHUNKS) - ] - llm = genai.GenerativeModel("gemini-1.5-flash", tools=[set_light_values]) - response = llm.generate_content( - "Dim the lights so the room feels 
cozy and warm.", - generation_config=genai.types.GenerationConfig(stop_sequences=["x"], max_output_tokens=30, temperature=1.0), - stream=True, - ) - for _ in response: - pass - - -@pytest.mark.snapshot( - token="tests.contrib.google_generativeai.test_google_generativeai.test_gemini_completion_tool_stream", - ignores=["resource", *SNAPSHOT_IGNORES], -) -async def test_gemini_completion_tool_stream_async(genai, mock_client_async): - mock_client_async.responses["stream_generate_content"] = [ - _async_streamed_response(MOCK_COMPLETION_TOOL_CALL_STREAM_CHUNKS) - ] - llm = genai.GenerativeModel("gemini-1.5-flash", tools=[set_light_values]) - response = await llm.generate_content_async( - "Dim the lights so the room feels cozy and warm.", - generation_config=genai.types.GenerationConfig(stop_sequences=["x"], max_output_tokens=30, temperature=1.0), - stream=True, - ) - async for _ in response: - pass - - -@pytest.mark.snapshot( - ignores=[ - "meta.google_generativeai.request.contents.0.text", - *SNAPSHOT_IGNORES, - ] -) -def test_gemini_completion_image(genai, mock_client): - """Ensure passing images to generate_content() won't break patching.""" - img = Image.open(os.path.join(os.path.dirname(__file__), "test_data/apple.jpg")) - mock_client.responses["generate_content"].append(_mock_completion_response(MOCK_COMPLETION_IMG_CALL)) - llm = genai.GenerativeModel("gemini-1.5-flash") - llm.generate_content( - [img, "Return a bounding box for the apple. 
\n [ymin, xmin, ymax, xmax]"], - generation_config=genai.types.GenerationConfig(stop_sequences=["x"], max_output_tokens=30, temperature=1.0), - ) - - -@pytest.mark.snapshot( - token="tests.contrib.google_generativeai.test_google_generativeai.test_gemini_completion_image", - ignores=[ - "resource", - "meta.google_generativeai.request.contents.0.text", - *SNAPSHOT_IGNORES, - ], -) -async def test_gemini_completion_image_async(genai, mock_client_async): - """Ensure passing images to generate_content() won't break patching.""" - img = Image.open(os.path.join(os.path.dirname(__file__), "test_data/apple.jpg")) - mock_client_async.responses["generate_content"].append(_mock_completion_response(MOCK_COMPLETION_IMG_CALL)) - llm = genai.GenerativeModel("gemini-1.5-flash") - await llm.generate_content_async( - [img, "Return a bounding box for the apple. \n [ymin, xmin, ymax, xmax]"], - generation_config=genai.types.GenerationConfig(stop_sequences=["x"], max_output_tokens=30, temperature=1.0), - ) diff --git a/tests/contrib/google_generativeai/test_google_generativeai_llmobs.py b/tests/contrib/google_generativeai/test_google_generativeai_llmobs.py deleted file mode 100644 index b9e6f6fa39c..00000000000 --- a/tests/contrib/google_generativeai/test_google_generativeai_llmobs.py +++ /dev/null @@ -1,610 +0,0 @@ -import os - -from google.api_core.exceptions import InvalidArgument -import mock -from PIL import Image -import pytest - -from tests.contrib.google_generativeai.utils import MOCK_COMPLETION_IMG_CALL -from tests.contrib.google_generativeai.utils import MOCK_COMPLETION_SIMPLE_1 -from tests.contrib.google_generativeai.utils import MOCK_COMPLETION_SIMPLE_2 -from tests.contrib.google_generativeai.utils import MOCK_COMPLETION_SIMPLE_SYSTEM -from tests.contrib.google_generativeai.utils import MOCK_COMPLETION_STREAM_CHUNKS -from tests.contrib.google_generativeai.utils import MOCK_COMPLETION_TOOL_CALL -from tests.contrib.google_generativeai.utils import 
MOCK_COMPLETION_TOOL_CALL_STREAM_CHUNKS -from tests.contrib.google_generativeai.utils import _async_streamed_response -from tests.contrib.google_generativeai.utils import _mock_completion_response -from tests.contrib.google_generativeai.utils import _mock_completion_stream_chunk -from tests.contrib.google_generativeai.utils import set_light_values -from tests.llmobs._utils import _expected_llmobs_llm_span_event - - -@pytest.mark.parametrize( - "ddtrace_global_config", [dict(_llmobs_enabled=True, _llmobs_sample_rate=1.0, _llmobs_ml_app="")] -) -class TestLLMObsGemini: - def test_completion(self, genai, ddtrace_global_config, mock_llmobs_writer, mock_client, mock_tracer): - mock_client.responses["generate_content"].append(_mock_completion_response(MOCK_COMPLETION_SIMPLE_1)) - llm = genai.GenerativeModel("gemini-1.5-flash") - llm.generate_content( - "What is the argument for LeBron James being the GOAT?", - generation_config=genai.types.GenerationConfig(stop_sequences=["x"], max_output_tokens=35, temperature=1.0), - ) - span = mock_tracer.pop_traces()[0][0] - assert mock_llmobs_writer.enqueue.call_count == 1 - expected_llmobs_span_event = _expected_llmobs_llm_span_event( - span, - model_name="gemini-1.5-flash", - model_provider="google", - input_messages=[{"content": "What is the argument for LeBron James being the GOAT?"}], - output_messages=[ - {"content": MOCK_COMPLETION_SIMPLE_1["candidates"][0]["content"]["parts"][0]["text"], "role": "model"}, - ], - metadata={"temperature": 1.0, "max_output_tokens": 35}, - token_metrics={"input_tokens": 12, "output_tokens": 30, "total_tokens": 42}, - tags={"ml_app": "", "service": "tests.contrib.google_generativeai"}, - ) - mock_llmobs_writer.enqueue.assert_called_with(expected_llmobs_span_event) - - async def test_completion_async( - self, genai, ddtrace_global_config, mock_llmobs_writer, mock_client_async, mock_tracer - ): - 
mock_client_async.responses["generate_content"].append(_mock_completion_response(MOCK_COMPLETION_SIMPLE_1)) - llm = genai.GenerativeModel("gemini-1.5-flash") - await llm.generate_content_async( - "What is the argument for LeBron James being the GOAT?", - generation_config=genai.types.GenerationConfig(stop_sequences=["x"], max_output_tokens=35, temperature=1.0), - ) - span = mock_tracer.pop_traces()[0][0] - assert mock_llmobs_writer.enqueue.call_count == 1 - expected_llmobs_span_event = _expected_llmobs_llm_span_event( - span, - model_name="gemini-1.5-flash", - model_provider="google", - input_messages=[{"content": "What is the argument for LeBron James being the GOAT?"}], - output_messages=[ - {"content": MOCK_COMPLETION_SIMPLE_1["candidates"][0]["content"]["parts"][0]["text"], "role": "model"} - ], - metadata={"temperature": 1.0, "max_output_tokens": 35}, - token_metrics={"input_tokens": 12, "output_tokens": 30, "total_tokens": 42}, - tags={"ml_app": "", "service": "tests.contrib.google_generativeai"}, - ) - mock_llmobs_writer.enqueue.assert_called_with(expected_llmobs_span_event) - - def test_completion_error(self, genai, ddtrace_global_config, mock_llmobs_writer, mock_client, mock_tracer): - llm = genai.GenerativeModel("gemini-1.5-flash") - llm._client = mock.Mock() - llm._client.generate_content.side_effect = InvalidArgument("Invalid API key. 
Please pass a valid API key.") - with pytest.raises(InvalidArgument): - llm.generate_content( - "What is the argument for LeBron James being the GOAT?", - generation_config=genai.types.GenerationConfig( - stop_sequences=["x"], max_output_tokens=35, temperature=1.0 - ), - ) - span = mock_tracer.pop_traces()[0][0] - assert mock_llmobs_writer.enqueue.call_count == 1 - mock_llmobs_writer.enqueue.assert_called_with( - _expected_llmobs_llm_span_event( - span, - model_name="gemini-1.5-flash", - model_provider="google", - input_messages=[{"content": "What is the argument for LeBron James being the GOAT?"}], - output_messages=[{"content": ""}], - error="google.api_core.exceptions.InvalidArgument", - error_message=span.get_tag("error.message"), - error_stack=span.get_tag("error.stack"), - metadata={"temperature": 1.0, "max_output_tokens": 35}, - tags={"ml_app": "", "service": "tests.contrib.google_generativeai"}, - ) - ) - - async def test_completion_error_async( - self, genai, ddtrace_global_config, mock_llmobs_writer, mock_client_async, mock_tracer - ): - llm = genai.GenerativeModel("gemini-1.5-flash") - llm._async_client = mock.Mock() - llm._async_client.generate_content.side_effect = InvalidArgument( - "Invalid API key. Please pass a valid API key." 
- ) - with pytest.raises(InvalidArgument): - await llm.generate_content_async( - "What is the argument for LeBron James being the GOAT?", - generation_config=genai.types.GenerationConfig( - stop_sequences=["x"], max_output_tokens=35, temperature=1.0 - ), - ) - span = mock_tracer.pop_traces()[0][0] - assert mock_llmobs_writer.enqueue.call_count == 1 - mock_llmobs_writer.enqueue.assert_called_with( - _expected_llmobs_llm_span_event( - span, - model_name="gemini-1.5-flash", - model_provider="google", - input_messages=[{"content": "What is the argument for LeBron James being the GOAT?"}], - output_messages=[{"content": ""}], - error="google.api_core.exceptions.InvalidArgument", - error_message=span.get_tag("error.message"), - error_stack=span.get_tag("error.stack"), - metadata={"temperature": 1.0, "max_output_tokens": 35}, - tags={"ml_app": "", "service": "tests.contrib.google_generativeai"}, - ) - ) - - def test_completion_multiple_messages( - self, genai, ddtrace_global_config, mock_llmobs_writer, mock_client, mock_tracer - ): - mock_client.responses["generate_content"].append(_mock_completion_response(MOCK_COMPLETION_SIMPLE_2)) - llm = genai.GenerativeModel("gemini-1.5-flash") - llm.generate_content( - [ - {"role": "user", "parts": [{"text": "Hello world!"}]}, - {"role": "model", "parts": [{"text": "Great to meet you. What would you like to know?"}]}, - {"role": "user", "parts": [{"text": "Why is the sky blue?"}]}, - ], - generation_config=genai.types.GenerationConfig(stop_sequences=["x"], max_output_tokens=35, temperature=1.0), - ) - span = mock_tracer.pop_traces()[0][0] - assert mock_llmobs_writer.enqueue.call_count == 1 - expected_llmobs_span_event = _expected_llmobs_llm_span_event( - span, - model_name="gemini-1.5-flash", - model_provider="google", - input_messages=[ - {"content": "Hello world!", "role": "user"}, - {"content": "Great to meet you. 
What would you like to know?", "role": "model"}, - {"content": "Why is the sky blue?", "role": "user"}, - ], - output_messages=[ - {"content": MOCK_COMPLETION_SIMPLE_2["candidates"][0]["content"]["parts"][0]["text"], "role": "model"} - ], - metadata={"temperature": 1.0, "max_output_tokens": 35}, - token_metrics={"input_tokens": 24, "output_tokens": 35, "total_tokens": 59}, - tags={"ml_app": "", "service": "tests.contrib.google_generativeai"}, - ) - mock_llmobs_writer.enqueue.assert_called_with(expected_llmobs_span_event) - - async def test_completion_multiple_messages_async( - self, genai, ddtrace_global_config, mock_llmobs_writer, mock_client_async, mock_tracer - ): - mock_client_async.responses["generate_content"].append(_mock_completion_response(MOCK_COMPLETION_SIMPLE_2)) - llm = genai.GenerativeModel("gemini-1.5-flash") - await llm.generate_content_async( - [ - {"role": "user", "parts": [{"text": "Hello world!"}]}, - {"role": "model", "parts": [{"text": "Great to meet you. What would you like to know?"}]}, - {"role": "user", "parts": [{"text": "Why is the sky blue?"}]}, - ], - generation_config=genai.types.GenerationConfig(stop_sequences=["x"], max_output_tokens=35, temperature=1.0), - ) - span = mock_tracer.pop_traces()[0][0] - assert mock_llmobs_writer.enqueue.call_count == 1 - expected_llmobs_span_event = _expected_llmobs_llm_span_event( - span, - model_name="gemini-1.5-flash", - model_provider="google", - input_messages=[ - {"content": "Hello world!", "role": "user"}, - {"content": "Great to meet you. 
What would you like to know?", "role": "model"}, - {"content": "Why is the sky blue?", "role": "user"}, - ], - output_messages=[ - {"content": MOCK_COMPLETION_SIMPLE_2["candidates"][0]["content"]["parts"][0]["text"], "role": "model"} - ], - metadata={"temperature": 1.0, "max_output_tokens": 35}, - token_metrics={"input_tokens": 24, "output_tokens": 35, "total_tokens": 59}, - tags={"ml_app": "", "service": "tests.contrib.google_generativeai"}, - ) - mock_llmobs_writer.enqueue.assert_called_with(expected_llmobs_span_event) - - def test_chat_completion(self, genai, ddtrace_global_config, mock_llmobs_writer, mock_client, mock_tracer): - mock_client.responses["generate_content"].append(_mock_completion_response(MOCK_COMPLETION_SIMPLE_2)) - llm = genai.GenerativeModel("gemini-1.5-flash") - chat = llm.start_chat( - history=[ - {"role": "user", "parts": "Hello world!"}, - {"role": "model", "parts": "Great to meet you. What would you like to know?"}, - ] - ) - chat.send_message( - "Why is the sky blue?", - generation_config=genai.types.GenerationConfig(stop_sequences=["x"], max_output_tokens=35, temperature=1.0), - ) - span = mock_tracer.pop_traces()[0][0] - assert mock_llmobs_writer.enqueue.call_count == 1 - expected_llmobs_span_event = _expected_llmobs_llm_span_event( - span, - model_name="gemini-1.5-flash", - model_provider="google", - input_messages=[ - {"content": "Hello world!", "role": "user"}, - {"content": "Great to meet you. 
What would you like to know?", "role": "model"}, - {"content": "Why is the sky blue?", "role": "user"}, - ], - output_messages=[ - {"content": MOCK_COMPLETION_SIMPLE_2["candidates"][0]["content"]["parts"][0]["text"], "role": "model"} - ], - metadata={"temperature": 1.0, "max_output_tokens": 35}, - token_metrics={"input_tokens": 24, "output_tokens": 35, "total_tokens": 59}, - tags={"ml_app": "", "service": "tests.contrib.google_generativeai"}, - ) - mock_llmobs_writer.enqueue.assert_called_with(expected_llmobs_span_event) - - async def test_chat_completion_async( - self, genai, ddtrace_global_config, mock_llmobs_writer, mock_client_async, mock_tracer - ): - mock_client_async.responses["generate_content"].append(_mock_completion_response(MOCK_COMPLETION_SIMPLE_2)) - llm = genai.GenerativeModel("gemini-1.5-flash") - chat = llm.start_chat( - history=[ - {"role": "user", "parts": "Hello world!"}, - {"role": "model", "parts": "Great to meet you. What would you like to know?"}, - ] - ) - await chat.send_message_async( - "Why is the sky blue?", - generation_config=genai.types.GenerationConfig(stop_sequences=["x"], max_output_tokens=35, temperature=1.0), - ) - span = mock_tracer.pop_traces()[0][0] - assert mock_llmobs_writer.enqueue.call_count == 1 - expected_llmobs_span_event = _expected_llmobs_llm_span_event( - span, - model_name="gemini-1.5-flash", - model_provider="google", - input_messages=[ - {"content": "Hello world!", "role": "user"}, - {"content": "Great to meet you. 
What would you like to know?", "role": "model"}, - {"content": "Why is the sky blue?", "role": "user"}, - ], - output_messages=[ - {"content": MOCK_COMPLETION_SIMPLE_2["candidates"][0]["content"]["parts"][0]["text"], "role": "model"} - ], - metadata={"temperature": 1.0, "max_output_tokens": 35}, - token_metrics={"input_tokens": 24, "output_tokens": 35, "total_tokens": 59}, - tags={"ml_app": "", "service": "tests.contrib.google_generativeai"}, - ) - mock_llmobs_writer.enqueue.assert_called_with(expected_llmobs_span_event) - - def test_completion_system_prompt(self, genai, ddtrace_global_config, mock_llmobs_writer, mock_client, mock_tracer): - mock_client.responses["generate_content"].append(_mock_completion_response(MOCK_COMPLETION_SIMPLE_SYSTEM)) - llm = genai.GenerativeModel( - "gemini-1.5-flash", - system_instruction="You are a die-hard Michael Jordan fan that always brings stats to the discussion.", - ) - llm.generate_content( - "What is the argument for LeBron James being the GOAT?", - generation_config=genai.types.GenerationConfig(stop_sequences=["x"], max_output_tokens=50, temperature=1.0), - ) - span = mock_tracer.pop_traces()[0][0] - assert mock_llmobs_writer.enqueue.call_count == 1 - expected_llmobs_span_event = _expected_llmobs_llm_span_event( - span, - model_name="gemini-1.5-flash", - model_provider="google", - input_messages=[ - { - "content": "You are a die-hard Michael Jordan fan that always brings stats to the discussion.", - "role": "system", - }, - {"content": "What is the argument for LeBron James being the GOAT?"}, - ], - output_messages=[ - { - "content": MOCK_COMPLETION_SIMPLE_SYSTEM["candidates"][0]["content"]["parts"][0]["text"], - "role": "model", - } - ], - metadata={"temperature": 1.0, "max_output_tokens": 50}, - token_metrics={"input_tokens": 29, "output_tokens": 45, "total_tokens": 74}, - tags={"ml_app": "", "service": "tests.contrib.google_generativeai"}, - ) - mock_llmobs_writer.enqueue.assert_called_with(expected_llmobs_span_event) - 
- async def test_completion_system_prompt_async( - self, genai, ddtrace_global_config, mock_llmobs_writer, mock_client_async, mock_tracer - ): - mock_client_async.responses["generate_content"].append(_mock_completion_response(MOCK_COMPLETION_SIMPLE_SYSTEM)) - llm = genai.GenerativeModel( - "gemini-1.5-flash", - system_instruction="You are a die-hard Michael Jordan fan that always brings stats to the discussion.", - ) - await llm.generate_content_async( - "What is the argument for LeBron James being the GOAT?", - generation_config=genai.types.GenerationConfig(stop_sequences=["x"], max_output_tokens=50, temperature=1.0), - ) - span = mock_tracer.pop_traces()[0][0] - assert mock_llmobs_writer.enqueue.call_count == 1 - expected_llmobs_span_event = _expected_llmobs_llm_span_event( - span, - model_name="gemini-1.5-flash", - model_provider="google", - input_messages=[ - { - "content": "You are a die-hard Michael Jordan fan that always brings stats to the discussion.", - "role": "system", - }, - {"content": "What is the argument for LeBron James being the GOAT?"}, - ], - output_messages=[ - { - "content": MOCK_COMPLETION_SIMPLE_SYSTEM["candidates"][0]["content"]["parts"][0]["text"], - "role": "model", - }, - ], - metadata={"temperature": 1.0, "max_output_tokens": 50}, - token_metrics={"input_tokens": 29, "output_tokens": 45, "total_tokens": 74}, - tags={"ml_app": "", "service": "tests.contrib.google_generativeai"}, - ) - mock_llmobs_writer.enqueue.assert_called_with(expected_llmobs_span_event) - - def test_completion_stream(self, genai, ddtrace_global_config, mock_llmobs_writer, mock_client, mock_tracer): - mock_client.responses["stream_generate_content"] = [ - (_mock_completion_stream_chunk(chunk) for chunk in MOCK_COMPLETION_STREAM_CHUNKS) - ] - llm = genai.GenerativeModel("gemini-1.5-flash") - response = llm.generate_content( - "Can you recite the alphabet?", - generation_config=genai.types.GenerationConfig(stop_sequences=["x"], max_output_tokens=60, temperature=1.0), - 
stream=True, - ) - for _ in response: - pass - span = mock_tracer.pop_traces()[0][0] - assert mock_llmobs_writer.enqueue.call_count == 1 - expected_llmobs_span_event = _expected_llmobs_llm_span_event( - span, - model_name="gemini-1.5-flash", - model_provider="google", - input_messages=[{"content": "Can you recite the alphabet?"}], - output_messages=[ - {"content": "".join(chunk["text"] for chunk in MOCK_COMPLETION_STREAM_CHUNKS), "role": "model"} - ], - metadata={"temperature": 1.0, "max_output_tokens": 60}, - token_metrics={"input_tokens": 6, "output_tokens": 52, "total_tokens": 58}, - tags={"ml_app": "", "service": "tests.contrib.google_generativeai"}, - ) - mock_llmobs_writer.enqueue.assert_called_with(expected_llmobs_span_event) - - async def test_completion_stream_async( - self, genai, ddtrace_global_config, mock_llmobs_writer, mock_client_async, mock_tracer - ): - mock_client_async.responses["stream_generate_content"] = [ - _async_streamed_response(MOCK_COMPLETION_STREAM_CHUNKS) - ] - llm = genai.GenerativeModel("gemini-1.5-flash") - response = await llm.generate_content_async( - "Can you recite the alphabet?", - generation_config=genai.types.GenerationConfig(stop_sequences=["x"], max_output_tokens=60, temperature=1.0), - stream=True, - ) - async for _ in response: - pass - span = mock_tracer.pop_traces()[0][0] - assert mock_llmobs_writer.enqueue.call_count == 1 - expected_llmobs_span_event = _expected_llmobs_llm_span_event( - span, - model_name="gemini-1.5-flash", - model_provider="google", - input_messages=[{"content": "Can you recite the alphabet?"}], - output_messages=[ - {"content": "".join(chunk["text"] for chunk in MOCK_COMPLETION_STREAM_CHUNKS), "role": "model"} - ], - metadata={"temperature": 1.0, "max_output_tokens": 60}, - token_metrics={"input_tokens": 6, "output_tokens": 52, "total_tokens": 58}, - tags={"ml_app": "", "service": "tests.contrib.google_generativeai"}, - ) - mock_llmobs_writer.enqueue.assert_called_with(expected_llmobs_span_event) - 
- def test_completion_tool_call(self, genai, ddtrace_global_config, mock_llmobs_writer, mock_client, mock_tracer): - mock_client.responses["generate_content"].append(_mock_completion_response(MOCK_COMPLETION_TOOL_CALL)) - llm = genai.GenerativeModel("gemini-1.5-flash", tools=[set_light_values]) - llm.generate_content( - "Dim the lights so the room feels cozy and warm.", - generation_config=genai.types.GenerationConfig(stop_sequences=["x"], max_output_tokens=30, temperature=1.0), - ) - span = mock_tracer.pop_traces()[0][0] - assert mock_llmobs_writer.enqueue.call_count == 1 - expected_llmobs_span_event = _expected_llmobs_llm_span_event( - span, - model_name="gemini-1.5-flash", - model_provider="google", - input_messages=[{"content": "Dim the lights so the room feels cozy and warm."}], - output_messages=[ - { - "content": "", - "role": "model", - "tool_calls": [ - { - "name": "set_light_values", - "arguments": { - "fields": [{"key": "color_temp", "value": "warm"}, {"key": "brightness", "value": 50.0}] - }, - "tool_id": "", - "type": "function_call", - } - ], - } - ], - metadata={"temperature": 1.0, "max_output_tokens": 30}, - token_metrics={"input_tokens": 150, "output_tokens": 25, "total_tokens": 175}, - tags={"ml_app": "", "service": "tests.contrib.google_generativeai"}, - ) - mock_llmobs_writer.enqueue.assert_called_with(expected_llmobs_span_event) - - async def test_completion_tool_call_async( - self, genai, ddtrace_global_config, mock_llmobs_writer, mock_client_async, mock_tracer - ): - mock_client_async.responses["generate_content"].append(_mock_completion_response(MOCK_COMPLETION_TOOL_CALL)) - llm = genai.GenerativeModel("gemini-1.5-flash", tools=[set_light_values]) - await llm.generate_content_async( - "Dim the lights so the room feels cozy and warm.", - generation_config=genai.types.GenerationConfig(stop_sequences=["x"], max_output_tokens=30, temperature=1.0), - ) - span = mock_tracer.pop_traces()[0][0] - assert mock_llmobs_writer.enqueue.call_count == 1 - 
expected_llmobs_span_event = _expected_llmobs_llm_span_event( - span, - model_name="gemini-1.5-flash", - model_provider="google", - input_messages=[{"content": "Dim the lights so the room feels cozy and warm."}], - output_messages=[ - { - "content": "", - "role": "model", - "tool_calls": [ - { - "name": "set_light_values", - "arguments": { - "fields": [{"key": "color_temp", "value": "warm"}, {"key": "brightness", "value": 50.0}] - }, - "tool_id": "", - "type": "function_call", - } - ], - } - ], - metadata={"temperature": 1.0, "max_output_tokens": 30}, - token_metrics={"input_tokens": 150, "output_tokens": 25, "total_tokens": 175}, - tags={"ml_app": "", "service": "tests.contrib.google_generativeai"}, - ) - mock_llmobs_writer.enqueue.assert_called_with(expected_llmobs_span_event) - - def test_gemini_completion_tool_stream( - self, genai, ddtrace_global_config, mock_llmobs_writer, mock_client, mock_tracer - ): - mock_client.responses["stream_generate_content"] = [ - (_mock_completion_stream_chunk(chunk) for chunk in MOCK_COMPLETION_TOOL_CALL_STREAM_CHUNKS) - ] - llm = genai.GenerativeModel("gemini-1.5-flash", tools=[set_light_values]) - response = llm.generate_content( - "Dim the lights so the room feels cozy and warm.", - generation_config=genai.types.GenerationConfig(stop_sequences=["x"], max_output_tokens=30, temperature=1.0), - stream=True, - ) - for _ in response: - pass - span = mock_tracer.pop_traces()[0][0] - assert mock_llmobs_writer.enqueue.call_count == 1 - expected_llmobs_span_event = _expected_llmobs_llm_span_event( - span, - model_name="gemini-1.5-flash", - model_provider="google", - input_messages=[{"content": "Dim the lights so the room feels cozy and warm."}], - output_messages=[ - { - "content": "", - "role": "model", - "tool_calls": [ - { - "name": "set_light_values", - "arguments": { - "fields": [{"key": "color_temp", "value": "warm"}, {"key": "brightness", "value": 50.0}] - }, - "tool_id": "", - "type": "function_call", - } - ], - } - ], - 
metadata={"temperature": 1.0, "max_output_tokens": 30}, - token_metrics={"input_tokens": 150, "output_tokens": 25, "total_tokens": 175}, - tags={"ml_app": "", "service": "tests.contrib.google_generativeai"}, - ) - mock_llmobs_writer.enqueue.assert_called_with(expected_llmobs_span_event) - - async def test_gemini_completion_tool_stream_async( - self, genai, ddtrace_global_config, mock_llmobs_writer, mock_client_async, mock_tracer - ): - mock_client_async.responses["stream_generate_content"] = [ - _async_streamed_response(MOCK_COMPLETION_TOOL_CALL_STREAM_CHUNKS) - ] - llm = genai.GenerativeModel("gemini-1.5-flash", tools=[set_light_values]) - response = await llm.generate_content_async( - "Dim the lights so the room feels cozy and warm.", - generation_config=genai.types.GenerationConfig(stop_sequences=["x"], max_output_tokens=30, temperature=1.0), - stream=True, - ) - async for _ in response: - pass - span = mock_tracer.pop_traces()[0][0] - assert mock_llmobs_writer.enqueue.call_count == 1 - expected_llmobs_span_event = _expected_llmobs_llm_span_event( - span, - model_name="gemini-1.5-flash", - model_provider="google", - input_messages=[{"content": "Dim the lights so the room feels cozy and warm."}], - output_messages=[ - { - "content": "", - "role": "model", - "tool_calls": [ - { - "name": "set_light_values", - "arguments": { - "fields": [{"key": "color_temp", "value": "warm"}, {"key": "brightness", "value": 50.0}] - }, - "tool_id": "", - "type": "function_call", - } - ], - } - ], - metadata={"temperature": 1.0, "max_output_tokens": 30}, - token_metrics={"input_tokens": 150, "output_tokens": 25, "total_tokens": 175}, - tags={"ml_app": "", "service": "tests.contrib.google_generativeai"}, - ) - mock_llmobs_writer.enqueue.assert_called_with(expected_llmobs_span_event) - - def test_gemini_completion_image(self, genai, ddtrace_global_config, mock_llmobs_writer, mock_client, mock_tracer): - """Ensure passing images to generate_content() won't break patching.""" - img = 
Image.open(os.path.join(os.path.dirname(__file__), "test_data/apple.jpg")) - mock_client.responses["generate_content"].append(_mock_completion_response(MOCK_COMPLETION_IMG_CALL)) - llm = genai.GenerativeModel("gemini-1.5-flash") - llm.generate_content( - [img, "Return a bounding box for the apple. \n [ymin, xmin, ymax, xmax]"], - generation_config=genai.types.GenerationConfig(stop_sequences=["x"], max_output_tokens=30, temperature=1.0), - ) - span = mock_tracer.pop_traces()[0][0] - assert mock_llmobs_writer.enqueue.call_count == 1 - expected_llmobs_span_event = _expected_llmobs_llm_span_event( - span, - model_name="gemini-1.5-flash", - model_provider="google", - input_messages=[ - {"content": "[Non-text content object: {}]".format(repr(img))}, - {"content": "Return a bounding box for the apple. \n [ymin, xmin, ymax, xmax]"}, - ], - output_messages=[{"content": "57 100 900 911", "role": "model"}], - metadata={"temperature": 1.0, "max_output_tokens": 30}, - token_metrics={"input_tokens": 277, "output_tokens": 14, "total_tokens": 291}, - tags={"ml_app": "", "service": "tests.contrib.google_generativeai"}, - ) - mock_llmobs_writer.enqueue.assert_called_with(expected_llmobs_span_event) - - async def test_gemini_completion_image_async( - self, genai, ddtrace_global_config, mock_llmobs_writer, mock_client_async, mock_tracer - ): - """Ensure passing images to generate_content() won't break patching.""" - img = Image.open(os.path.join(os.path.dirname(__file__), "test_data/apple.jpg")) - mock_client_async.responses["generate_content"].append(_mock_completion_response(MOCK_COMPLETION_IMG_CALL)) - llm = genai.GenerativeModel("gemini-1.5-flash") - await llm.generate_content_async( - [img, "Return a bounding box for the apple. 
\n [ymin, xmin, ymax, xmax]"], - generation_config=genai.types.GenerationConfig(stop_sequences=["x"], max_output_tokens=30, temperature=1.0), - ) - span = mock_tracer.pop_traces()[0][0] - assert mock_llmobs_writer.enqueue.call_count == 1 - expected_llmobs_span_event = _expected_llmobs_llm_span_event( - span, - model_name="gemini-1.5-flash", - model_provider="google", - input_messages=[ - {"content": "[Non-text content object: {}]".format(repr(img))}, - {"content": "Return a bounding box for the apple. \n [ymin, xmin, ymax, xmax]"}, - ], - output_messages=[{"content": "57 100 900 911", "role": "model"}], - metadata={"temperature": 1.0, "max_output_tokens": 30}, - token_metrics={"input_tokens": 277, "output_tokens": 14, "total_tokens": 291}, - tags={"ml_app": "", "service": "tests.contrib.google_generativeai"}, - ) - mock_llmobs_writer.enqueue.assert_called_with(expected_llmobs_span_event) diff --git a/tests/contrib/google_generativeai/test_google_generativeai_patch.py b/tests/contrib/google_generativeai/test_google_generativeai_patch.py deleted file mode 100644 index a98ad7e2d6a..00000000000 --- a/tests/contrib/google_generativeai/test_google_generativeai_patch.py +++ /dev/null @@ -1,24 +0,0 @@ -from ddtrace.contrib.internal.google_generativeai.patch import get_version -from ddtrace.contrib.internal.google_generativeai.patch import patch -from ddtrace.contrib.internal.google_generativeai.patch import unpatch -from tests.contrib.patch import PatchTestCase - - -class TestGoogleGenerativeAIPatch(PatchTestCase.Base): - __integration_name__ = "google_generativeai" - __module_name__ = "google.generativeai" - __patch_func__ = patch - __unpatch_func__ = unpatch - __get_version__ = get_version - - def assert_module_patched(self, genai): - self.assert_wrapped(genai.GenerativeModel.generate_content) - self.assert_wrapped(genai.GenerativeModel.generate_content_async) - - def assert_not_module_patched(self, genai): - 
self.assert_not_wrapped(genai.GenerativeModel.generate_content) - self.assert_not_wrapped(genai.GenerativeModel.generate_content_async) - - def assert_not_module_double_patched(self, genai): - self.assert_not_double_wrapped(genai.GenerativeModel.generate_content) - self.assert_not_double_wrapped(genai.GenerativeModel.generate_content_async) diff --git a/tests/contrib/google_generativeai/utils.py b/tests/contrib/google_generativeai/utils.py deleted file mode 100644 index c2319d50327..00000000000 --- a/tests/contrib/google_generativeai/utils.py +++ /dev/null @@ -1,192 +0,0 @@ -import collections - -from google.generativeai import protos -import mock - - -MOCK_COMPLETION_SIMPLE_1 = { - "candidates": [ - { - "content": { - "parts": [ - { - "text": "The argument for LeBron James being the 'Greatest of All Time' (" - "GOAT) is multifaceted and involves a variety of factors. Here's a " - "breakdown" - } - ], - "role": "model", - }, - "finish_reason": 2, - } - ], - "usage_metadata": {"prompt_token_count": 12, "candidates_token_count": 30, "total_token_count": 42}, -} -MOCK_COMPLETION_SIMPLE_2 = { - "candidates": [ - { - "content": { - "parts": [ - { - "text": "The sky appears blue due to a phenomenon called **Rayleigh " - "scattering**. \nHere's how it works:* **Sunlight is made up of " - "all colors of the" - } - ], - "role": "model", - }, - "finish_reason": 2, - } - ], - "usage_metadata": {"prompt_token_count": 24, "candidates_token_count": 35, "total_token_count": 59}, -} -MOCK_COMPLETION_SIMPLE_SYSTEM = { - "candidates": [ - { - "content": { - "parts": [ - { - "text": "Look, I respect LeBron James. He's a phenomenal player, " - "an incredible athlete, and a great ambassador for the game. But " - "when it comes to the GOAT, the crown belongs to His Airness, " - "Michael Jordan!" 
- } - ], - "role": "model", - }, - "finish_reason": 2, - } - ], - "usage_metadata": {"prompt_token_count": 29, "candidates_token_count": 45, "total_token_count": 74}, -} -MOCK_COMPLETION_STREAM_CHUNKS = ( - {"text": "A", "usage_metadata": {"prompt_token_count": 6, "candidates_token_count": 1, "total_token_count": 7}}, - { - "text": ", B, C, D, E, F, G, H, I", - "usage_metadata": {"prompt_token_count": 6, "candidates_token_count": 17, "total_token_count": 23}, - }, - { - "text": ", J, K, L, M, N, O, P, Q", - "usage_metadata": {"prompt_token_count": 6, "candidates_token_count": 33, "total_token_count": 39}, - }, - { - "text": ", R, S, T, U, V, W, X, Y, Z.\n", - "usage_metadata": {"prompt_token_count": 6, "candidates_token_count": 52, "total_token_count": 58}, - }, -) -MOCK_COMPLETION_TOOL_CALL = { - "candidates": [ - { - "content": { - "parts": [ - { - "function_call": { - "name": "set_light_values", - "args": { - "fields": [{"key": "color_temp", "value": "warm"}, {"key": "brightness", "value": 50}] - }, - } - } - ], - "role": "model", - }, - "finish_reason": 2, - } - ], - "usage_metadata": {"prompt_token_count": 150, "candidates_token_count": 25, "total_token_count": 175}, -} -MOCK_CHAT_COMPLETION_TOOL_RESPONSE = { - "candidates": [ - { - "content": { - "parts": [ - {"text": "OK. I've dimmed the lights to 50% and set the color temperature to warm. How's that? 
\n"} - ], - "role": "model", - }, - "finish_reason": 2, - }, - ], - "usage_metadata": {"prompt_token_count": 206, "candidates_token_count": 27, "total_token_count": 233}, -} -MOCK_COMPLETION_TOOL_CALL_STREAM_CHUNKS = ( - { - "function_call": { - "name": "set_light_values", - "args": {"fields": [{"key": "color_temp", "value": "warm"}, {"key": "brightness", "value": 50}]}, - }, - "usage_metadata": {"prompt_token_count": 150, "candidates_token_count": 25, "total_token_count": 175}, - }, -) -MOCK_COMPLETION_IMG_CALL = { - "candidates": [{"content": {"parts": [{"text": "57 100 900 911"}], "role": "model"}, "finish_reason": 2}], - "usage_metadata": {"prompt_token_count": 277, "candidates_token_count": 14, "total_token_count": 291}, -} - - -class MockGenerativeModelClient: - def __init__(self): - self.responses = collections.defaultdict(list) - self._client_options = mock.Mock() - self._client_options.api_key = "" - - def generate_content(self, request, **kwargs): - return self.responses["generate_content"].pop(0) - - def stream_generate_content(self, request, **kwargs): - return self.responses["stream_generate_content"].pop(0) - - -class MockGenerativeModelAsyncClient: - def __init__(self): - self.responses = collections.defaultdict(list) - self._client = mock.Mock() - self._client_options = mock.Mock() - self._client._client_options = self._client_options - self._client_options.api_key = "" - - async def generate_content(self, request, **kwargs): - return self.responses["generate_content"].pop(0) - - async def stream_generate_content(self, request, **kwargs): - return self.responses["stream_generate_content"].pop(0) - - -def set_light_values(brightness, color_temp): - """Set the brightness and color temperature of a room light. (mock API). - Args: - brightness: Light level from 0 to 100. Zero is off and 100 is full brightness - color_temp: Color temperature of the light fixture, which can be `daylight`, `cool` or `warm`. 
- Returns: - A dictionary containing the set brightness and color temperature. - """ - return {"brightness": brightness, "colorTemperature": color_temp} - - -async def _async_streamed_response(mock_chunks): - """Return async streamed response chunks to be processed by the mock async client.""" - for chunk in mock_chunks: - yield _mock_completion_stream_chunk(chunk) - - -def _mock_completion_response(mock_completion_dict): - mock_content = protos.Content(mock_completion_dict["candidates"][0]["content"]) - return protos.GenerateContentResponse( - { - "candidates": [ - {"content": mock_content, "finish_reason": mock_completion_dict["candidates"][0]["finish_reason"]} - ], - "usage_metadata": mock_completion_dict["usage_metadata"], - } - ) - - -def _mock_completion_stream_chunk(chunk): - mock_content = None - if chunk.get("text"): - mock_content = protos.Content({"parts": [{"text": chunk["text"]}], "role": "model"}) - elif chunk.get("function_call"): - mock_content = protos.Content({"parts": [{"function_call": chunk["function_call"]}], "role": "model"}) - return protos.GenerateContentResponse( - {"candidates": [{"content": mock_content, "finish_reason": 2}], "usage_metadata": chunk["usage_metadata"]} - ) diff --git a/tests/contrib/httplib/test_httplib.py b/tests/contrib/httplib/test_httplib.py index 7a2f4a07463..4a8c01c7eea 100644 --- a/tests/contrib/httplib/test_httplib.py +++ b/tests/contrib/httplib/test_httplib.py @@ -18,7 +18,6 @@ from ddtrace.ext import http from ddtrace.internal.constants import _HTTPLIB_NO_TRACE_REQUEST from ddtrace.internal.schema import DEFAULT_SPAN_SERVICE_NAME -from tests.opentracer.utils import init_tracer from tests.utils import TracerTestCase from tests.utils import assert_span_http_status_code from tests.utils import override_global_tracer @@ -396,7 +395,7 @@ def test_httplib_request_and_response_headers(self): # Enabled when configured with self.override_config("httplib", {}): - from ddtrace.settings.integration import IntegrationConfig 
# noqa:F401 + from ddtrace.internal.settings.integration import IntegrationConfig # noqa:F401 integration_config = config.httplib # type: IntegrationConfig integration_config.http.trace_headers(["my-header", "access-control-allow-origin"]) @@ -529,37 +528,6 @@ def test_urllib_request_opener(self): self.assertEqual(span.get_tag("span.kind"), "client") self.assertEqual(span.get_tag("out.host"), "localhost") - def test_httplib_request_get_request_ot(self): - """OpenTracing version of test with same name.""" - ot_tracer = init_tracer("my_svc", self.tracer) - - with ot_tracer.start_active_span("ot_span"): - conn = self.get_http_connection(SOCKET) - with contextlib.closing(conn): - conn.request("GET", "/status/200") - resp = conn.getresponse() - self.assertEqual(self.to_str(resp.read()), "") - self.assertEqual(resp.status, 200) - - spans = self.pop_spans() - self.assertEqual(len(spans), 2) - ot_span, dd_span = spans - - # confirm the parenting - self.assertEqual(ot_span.parent_id, None) - self.assertEqual(dd_span.parent_id, ot_span.span_id) - - self.assertEqual(ot_span.service, "my_svc") - self.assertEqual(ot_span.name, "ot_span") - - self.assert_is_not_measured(dd_span) - self.assertEqual(dd_span.span_type, "http") - self.assertEqual(dd_span.name, self.SPAN_NAME) - self.assertEqual(dd_span.error, 0) - assert dd_span.get_tag("http.method") == "GET" - assert_span_http_status_code(dd_span, 200) - assert dd_span.get_tag("http.url") == URL_200 - def test_httplib_bad_url(self): conn = self.get_http_connection("DNE", "80") with contextlib.closing(conn): diff --git a/tests/contrib/httpx/test_httpx.py b/tests/contrib/httpx/test_httpx.py index 6fbbbee9427..a343add72e1 100644 --- a/tests/contrib/httpx/test_httpx.py +++ b/tests/contrib/httpx/test_httpx.py @@ -7,7 +7,7 @@ from ddtrace.contrib.internal.httpx.patch import patch from ddtrace.contrib.internal.httpx.patch import unpatch from ddtrace.internal.compat import is_wrapted -from ddtrace.settings.http import HttpConfig +from 
ddtrace.internal.settings.http import HttpConfig from tests.utils import override_config from tests.utils import override_http_config diff --git a/tests/contrib/httpx/test_httpx_pre_0_11.py b/tests/contrib/httpx/test_httpx_pre_0_11.py index dff425f1635..3b1c5132637 100644 --- a/tests/contrib/httpx/test_httpx_pre_0_11.py +++ b/tests/contrib/httpx/test_httpx_pre_0_11.py @@ -7,7 +7,7 @@ from ddtrace.contrib.internal.httpx.patch import patch from ddtrace.contrib.internal.httpx.patch import unpatch from ddtrace.internal.compat import is_wrapted -from ddtrace.settings.http import HttpConfig +from ddtrace.internal.settings.http import HttpConfig from tests.utils import override_config from tests.utils import override_http_config diff --git a/tests/contrib/integration_registry/registry_update_helpers/integration_registry_manager.py b/tests/contrib/integration_registry/registry_update_helpers/integration_registry_manager.py index f5990c6c6c2..4e6659b6321 100644 --- a/tests/contrib/integration_registry/registry_update_helpers/integration_registry_manager.py +++ b/tests/contrib/integration_registry/registry_update_helpers/integration_registry_manager.py @@ -39,7 +39,7 @@ def _is_valid_patch_call(self, tb_string): """Checks if the patch call originated from ddtrace.contrib.internal/*/patch.py.""" # reverse the lines to check the most recent patch call first since some integrations call # other integrations patches: - # e.g. mongoengine calls pymongo's patch + # e.g. 
django calls postgres's patch return any( "ddtrace/contrib/internal" in line and "/patch.py" in line for line in reversed(tb_string.splitlines()) ) diff --git a/tests/contrib/mongoengine/__init__.py b/tests/contrib/mongoengine/__init__.py deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/tests/contrib/mongoengine/test.py b/tests/contrib/mongoengine/test.py deleted file mode 100644 index d47c5a397e6..00000000000 --- a/tests/contrib/mongoengine/test.py +++ /dev/null @@ -1,415 +0,0 @@ -import time - -import mongoengine -import pymongo - -from ddtrace._trace.pin import Pin -from ddtrace.contrib.internal.mongoengine.patch import patch -from ddtrace.contrib.internal.mongoengine.patch import unpatch -from ddtrace.ext import mongo as mongox -from ddtrace.internal.schema import DEFAULT_SPAN_SERVICE_NAME -from tests.opentracer.utils import init_tracer -from tests.utils import DummyTracer -from tests.utils import TracerTestCase -from tests.utils import assert_is_measured - -from ..config import MONGO_CONFIG - - -class Artist(mongoengine.Document): - first_name = mongoengine.StringField(max_length=50) - last_name = mongoengine.StringField(max_length=50) - - -class MongoEngineCore(object): - # Define the service at the class level, so that each test suite can use a different service - # and therefore catch any sneaky badly-unpatched stuff. 
- TEST_SERVICE = "deadbeef" - - def get_tracer_and_connect(self): - # implement me - pass - - def test_insert_update_delete_query(self): - tracer = self.get_tracer_and_connect() - - start = time.time() - Artist.drop_collection() - end = time.time() - - # ensure we get a drop collection span - spans = tracer.pop() - assert len(spans) == 2 - span = spans[1] - assert span.name == "pymongo.cmd" - - assert_is_measured(span) - assert span.resource == "drop artist" - assert span.span_type == "mongodb" - assert span.service == self.TEST_SERVICE - _assert_timing(span, start, end) - - start = end - joni = Artist() - joni.first_name = "Joni" - joni.last_name = "Mitchell" - joni.save() - end = time.time() - - # ensure we get an insert span - spans = tracer.pop() - assert len(spans) == 2 - span = spans[1] - assert span.name == "pymongo.cmd" - assert_is_measured(span) - assert span.resource == "insert artist" - assert span.span_type == "mongodb" - assert span.service == self.TEST_SERVICE - _assert_timing(span, start, end) - - # ensure full scans work - start = time.time() - artists = [a for a in Artist.objects] - end = time.time() - assert len(artists) == 1 - assert artists[0].first_name == "Joni" - assert artists[0].last_name == "Mitchell" - - # query names should be used in pymongo>3.1 - name = "find" if pymongo.version_tuple >= (3, 1, 0) else "query" - - spans = tracer.pop() - assert len(spans) == 2 - span = spans[1] - assert span.name == "pymongo.cmd" - assert_is_measured(span) - assert span.resource == "{} artist".format(name) - assert span.span_type == "mongodb" - assert span.service == self.TEST_SERVICE - _assert_timing(span, start, end) - - # ensure filtered queries work - start = time.time() - artists = [a for a in Artist.objects(first_name="Joni")] - end = time.time() - assert len(artists) == 1 - joni = artists[0] - assert artists[0].first_name == "Joni" - assert artists[0].last_name == "Mitchell" - - spans = tracer.pop() - assert len(spans) == 2 - span = spans[1] - 
assert span.name == "pymongo.cmd" - assert_is_measured(span) - assert span.resource == '{} artist {{"first_name": "?"}}'.format(name) - assert span.span_type == "mongodb" - assert span.service == self.TEST_SERVICE - _assert_timing(span, start, end) - - # ensure updates work - start = time.time() - joni.last_name = "From Saskatoon" - joni.save() - end = time.time() - - spans = tracer.pop() - assert len(spans) == 2 - span = spans[1] - assert span.name == "pymongo.cmd" - assert_is_measured(span) - assert span.resource == 'update artist {"_id": "?"}' - assert span.span_type == "mongodb" - assert span.service == self.TEST_SERVICE - _assert_timing(span, start, end) - - # ensure deletes - start = time.time() - joni.delete() - end = time.time() - - spans = tracer.pop() - assert len(spans) == 2 - span = spans[1] - assert span.name == "pymongo.cmd" - assert_is_measured(span) - assert span.resource == 'delete artist {"_id": "?"}' - assert span.span_type == "mongodb" - assert span.service == self.TEST_SERVICE - assert span.get_tag("component") == "pymongo" - assert span.get_tag("span.kind") == "client" - assert span.get_tag("db.system") == "mongodb" - _assert_timing(span, start, end) - - def test_opentracing(self): - """Ensure the opentracer works with mongoengine.""" - tracer = self.get_tracer_and_connect() - ot_tracer = init_tracer("my_svc", tracer) - - with ot_tracer.start_active_span("ot_span"): - start = time.time() - Artist.drop_collection() - end = time.time() - - # ensure we get a drop collection span - spans = tracer.pop() - assert len(spans) == 3 - ot_span, dd_server_span, dd_cmd_span = spans - - # confirm the parenting - assert ot_span.parent_id is None - assert dd_server_span.parent_id == ot_span.span_id - - assert ot_span.name == "ot_span" - assert ot_span.service == "my_svc" - - assert_is_measured(dd_cmd_span) - assert dd_cmd_span.resource == "drop artist" - assert dd_cmd_span.span_type == "mongodb" - assert dd_cmd_span.service == self.TEST_SERVICE - 
_assert_timing(dd_cmd_span, start, end) - - -class TestMongoEnginePatchConnectDefault(TracerTestCase, MongoEngineCore): - """Test suite with a global Pin for the connect function with the default configuration""" - - TEST_SERVICE = mongox.SERVICE - - def setUp(self): - patch() - - def tearDown(self): - unpatch() - # Disconnect and remove the client - mongoengine.connection.disconnect() - - def get_tracer_and_connect(self): - tracer = DummyTracer() - client = mongoengine.connect(port=MONGO_CONFIG["port"]) - Pin.get_from(client)._clone(tracer=tracer).onto(client) - return tracer - - -class TestMongoEnginePatchConnectSchematization(TestMongoEnginePatchConnectDefault): - @TracerTestCase.run_in_subprocess(env_overrides=dict(DD_SERVICE="mysvc")) - def test_user_specified_service_default(self): - """ - : When a user specifies a service for the app - The mongoengine integration should not use it. - """ - from ddtrace import config - - assert config.service == "mysvc" - - tracer = self.get_tracer_and_connect() - Artist.drop_collection() - - spans = tracer.pop() - assert len(spans) == 2 - assert spans[1].name == "pymongo.cmd" - assert spans[1].service != "mysvc" - - @TracerTestCase.run_in_subprocess(env_overrides=dict(DD_TRACE_SPAN_ATTRIBUTE_SCHEMA="v0", DD_SERVICE="mysvc")) - def test_user_specified_service_v0(self): - """ - v0: When a user specifies a service for the app - The mongoengine integration should not use it. 
- """ - from ddtrace import config - - assert config.service == "mysvc" - - tracer = self.get_tracer_and_connect() - Artist.drop_collection() - - spans = tracer.pop() - assert len(spans) == 2 - assert spans[1].name == "pymongo.cmd" - assert spans[1].service != "mysvc" - - @TracerTestCase.run_in_subprocess(env_overrides=dict(DD_TRACE_SPAN_ATTRIBUTE_SCHEMA="v1", DD_SERVICE="mysvc")) - def test_user_specified_service_v1(self): - """ - In v1 of the span attribute schema, when a user specifies a service for the app - The mongoengine integration should use it as the default. - """ - from ddtrace import config - - assert config.service == "mysvc" - - tracer = self.get_tracer_and_connect() - Artist.drop_collection() - - spans = tracer.pop() - assert len(spans) == 2 - assert spans[1].name == "mongodb.query" - assert spans[1].service == "mysvc" - - @TracerTestCase.run_in_subprocess(env_overrides=dict(DD_TRACE_SPAN_ATTRIBUTE_SCHEMA="v0")) - def test_unspecified_service_v0(self): - """ - In v0 of the span attribute schema, when there is no specified DD_SERVICE - The mongoengine integration should use None as the default. - """ - from ddtrace import config - - assert config.service is DEFAULT_SPAN_SERVICE_NAME - - tracer = self.get_tracer_and_connect() - Artist.drop_collection() - - spans = tracer.pop() - assert len(spans) == 2 - assert spans[0].service == "mongodb" - - @TracerTestCase.run_in_subprocess(env_overrides=dict(DD_TRACE_SPAN_ATTRIBUTE_SCHEMA="v1")) - def test_unspecified_service_v1(self): - """ - In v1 of the span attribute schema, when there is no specified DD_SERVICE - The mongoengine integration should use DEFAULT_SPAN_SERVICE_NAME as the default. 
- """ - from ddtrace import config - - assert config.service == DEFAULT_SPAN_SERVICE_NAME - - tracer = self.get_tracer_and_connect() - Artist.drop_collection() - - spans = tracer.pop() - assert len(spans) == 2 - assert spans[0].service == DEFAULT_SPAN_SERVICE_NAME - - @TracerTestCase.run_in_subprocess(env_overrides=dict(DD_TRACE_SPAN_ATTRIBUTE_SCHEMA="v0")) - def test_span_name_v0_schema(self): - """ - When a user specifies a service for the app - The mongoengine integration should not use it. - """ - tracer = self.get_tracer_and_connect() - Artist.drop_collection() - - spans = tracer.pop() - assert len(spans) == 2 - assert spans[0].name == "pymongo.checkout" or spans[0].name == "pymongo.get_socket" - assert spans[1].name == "pymongo.cmd" - - @TracerTestCase.run_in_subprocess(env_overrides=dict(DD_TRACE_SPAN_ATTRIBUTE_SCHEMA="v1")) - def test_span_name_v1_schema(self): - """ - When a user specifies a service for the app - The mongoengine integration should not use it. - """ - tracer = self.get_tracer_and_connect() - Artist.drop_collection() - - spans = tracer.pop() - assert len(spans) == 2 - assert spans[0].name == "pymongo.checkout" or spans[0].name == "pymongo.get_socket" - assert spans[1].name == "mongodb.query" - - -class TestMongoEnginePatchConnect(TestMongoEnginePatchConnectDefault): - """Test suite with a global Pin for the connect function with custom service""" - - TEST_SERVICE = "test-mongo-patch-connect" - - def get_tracer_and_connect(self): - tracer = TestMongoEnginePatchConnectDefault.get_tracer_and_connect(self) - pin = Pin(service=self.TEST_SERVICE) - pin._tracer = tracer - pin.onto(mongoengine.connect) - mongoengine.connect(port=MONGO_CONFIG["port"]) - - return tracer - - -class TestMongoEnginePatchClientDefault(TracerTestCase, MongoEngineCore): - """Test suite with a Pin local to a specific client with default configuration""" - - TEST_SERVICE = mongox.SERVICE - - def setUp(self): - patch() - - def tearDown(self): - unpatch() - # Disconnect and 
remove the client - mongoengine.connection.disconnect() - - def get_tracer_and_connect(self): - tracer = DummyTracer() - client = mongoengine.connect(port=MONGO_CONFIG["port"]) - Pin.get_from(client)._clone(tracer=tracer).onto(client) - - return tracer - - -class TestMongoEnginePatchClient(TestMongoEnginePatchClientDefault): - """Test suite with a Pin local to a specific client with custom service""" - - TEST_SERVICE = "test-mongo-patch-client" - - def get_tracer_and_connect(self): - tracer = DummyTracer() - # Set a connect-level service, to check that we properly override it - Pin(service="not-%s" % self.TEST_SERVICE).onto(mongoengine.connect) - client = mongoengine.connect(port=MONGO_CONFIG["port"]) - pin = Pin(service=self.TEST_SERVICE) - pin._tracer = tracer - pin.onto(client) - - return tracer - - def test_patch_unpatch(self): - tracer = DummyTracer() - - # Test patch idempotence - patch() - patch() - - client = mongoengine.connect(port=MONGO_CONFIG["port"]) - Pin.get_from(client)._clone(tracer=tracer).onto(client) - - Artist.drop_collection() - spans = tracer.pop() - assert spans, spans - assert len(spans) == 2 - - mongoengine.connection.disconnect() - tracer.pop() - - # Test unpatch - unpatch() - - mongoengine.connect(port=MONGO_CONFIG["port"]) - - Artist.drop_collection() - spans = tracer.pop() - assert not spans, spans - - # Disconnect so a new pymongo client can be created, - # connections are patched on instantiation - mongoengine.connection.disconnect() - # Test patch again - patch() - client = mongoengine.connect(port=MONGO_CONFIG["port"]) - Pin.get_from(client)._clone(tracer=tracer).onto(client) - - Artist.drop_collection() - spans = tracer.pop() - assert spans, spans - assert len(spans) == 2 - - def test_multiple_connect_no_double_patching(self): - """Ensure we do not double patch client._topology - - Regression test for https://github.com/DataDog/dd-trace-py/issues/2474 - """ - client = mongoengine.connect(port=MONGO_CONFIG["port"]) - assert 
Pin.get_from(client) is Pin.get_from(client._topology) - client.close() - - -def _assert_timing(span, start, end): - assert start < span.start < end - assert span.duration < end - start diff --git a/tests/contrib/mongoengine/test_mongoengine_patch.py b/tests/contrib/mongoengine/test_mongoengine_patch.py deleted file mode 100644 index 6f219d1566e..00000000000 --- a/tests/contrib/mongoengine/test_mongoengine_patch.py +++ /dev/null @@ -1,31 +0,0 @@ -# This test script was automatically generated by the contrib-patch-tests.py -# script. If you want to make changes to it, you should make sure that you have -# removed the ``_generated`` suffix from the file name, to prevent the content -# from being overwritten by future re-generations. - -from ddtrace.contrib.internal.mongoengine.patch import get_version -from ddtrace.contrib.internal.mongoengine.patch import patch - - -try: - from ddtrace.contrib.internal.mongoengine.patch import unpatch -except ImportError: - unpatch = None -from tests.contrib.patch import PatchTestCase - - -class TestMongoenginePatch(PatchTestCase.Base): - __integration_name__ = "mongoengine" - __module_name__ = "mongoengine" - __patch_func__ = patch - __unpatch_func__ = unpatch - __get_version__ = get_version - - def assert_module_patched(self, mongoengine): - pass - - def assert_not_module_patched(self, mongoengine): - pass - - def assert_not_module_double_patched(self, mongoengine): - pass diff --git a/tests/contrib/mysql/test_mysql.py b/tests/contrib/mysql/test_mysql.py index a071731d470..edb7fed2076 100644 --- a/tests/contrib/mysql/test_mysql.py +++ b/tests/contrib/mysql/test_mysql.py @@ -6,7 +6,6 @@ from ddtrace.contrib.internal.mysql.patch import unpatch from tests.contrib import shared_tests from tests.contrib.config import MYSQL_CONFIG -from tests.opentracer.utils import init_tracer from tests.utils import TracerTestCase from tests.utils import assert_dict_issuperset from tests.utils import assert_is_measured @@ -253,93 +252,6 @@ def 
test_query_proc(self): ) assert span.get_tag("sql.query") is None - def test_simple_query_ot(self): - """OpenTracing version of test_simple_query.""" - conn, tracer = self._get_conn_tracer() - - ot_tracer = init_tracer("mysql_svc", tracer) - - with ot_tracer.start_active_span("mysql_op"): - cursor = conn.cursor() - cursor.execute("SELECT 1") - rows = cursor.fetchall() - assert len(rows) == 1 - - spans = tracer.pop() - assert len(spans) == 2 - - ot_span, dd_span = spans - - # confirm parenting - assert ot_span.parent_id is None - assert dd_span.parent_id == ot_span.span_id - - assert ot_span.service == "mysql_svc" - assert ot_span.name == "mysql_op" - - assert_is_measured(dd_span) - assert dd_span.service == "mysql" - assert dd_span.name == "mysql.query" - assert dd_span.span_type == "sql" - assert dd_span.error == 0 - assert dd_span.get_metric("network.destination.port") == 3306 - assert_dict_issuperset( - dd_span.get_tags(), - { - "out.host": "127.0.0.1", - "db.name": "test", - "db.system": "mysql", - "db.user": "test", - "component": "mysql", - "span.kind": "client", - }, - ) - - def test_simple_query_ot_fetchall(self): - """OpenTracing version of test_simple_query.""" - with self.override_config("mysql", dict(trace_fetch_methods=True)): - conn, tracer = self._get_conn_tracer() - - ot_tracer = init_tracer("mysql_svc", tracer) - - with ot_tracer.start_active_span("mysql_op"): - cursor = conn.cursor() - cursor.execute("SELECT 1") - rows = cursor.fetchall() - assert len(rows) == 1 - - spans = tracer.pop() - assert len(spans) == 3 - - ot_span, dd_span, fetch_span = spans - - # confirm parenting - assert ot_span.parent_id is None - assert dd_span.parent_id == ot_span.span_id - - assert ot_span.service == "mysql_svc" - assert ot_span.name == "mysql_op" - - assert_is_measured(dd_span) - assert dd_span.service == "mysql" - assert dd_span.name == "mysql.query" - assert dd_span.span_type == "sql" - assert dd_span.error == 0 - assert 
dd_span.get_metric("network.destination.port") == 3306 - assert_dict_issuperset( - dd_span.get_tags(), - { - "out.host": "127.0.0.1", - "db.name": "test", - "db.system": "mysql", - "db.user": "test", - "component": "mysql", - "span.kind": "client", - }, - ) - - assert fetch_span.name == "mysql.query.fetchall" - def test_commit(self): conn, tracer = self._get_conn_tracer() conn.commit() diff --git a/tests/contrib/mysqldb/test_mysqldb.py b/tests/contrib/mysqldb/test_mysqldb.py index 344e42c46ad..82c99afd968 100644 --- a/tests/contrib/mysqldb/test_mysqldb.py +++ b/tests/contrib/mysqldb/test_mysqldb.py @@ -7,7 +7,6 @@ from ddtrace.contrib.internal.mysqldb.patch import unpatch from ddtrace.internal.schema import DEFAULT_SPAN_SERVICE_NAME from tests.contrib import shared_tests -from tests.opentracer.utils import init_tracer from tests.utils import TracerTestCase from tests.utils import assert_dict_issuperset from tests.utils import assert_is_measured @@ -323,89 +322,6 @@ def test_query_proc(self): ) assert span.get_tag("sql.query") is None - def test_simple_query_ot(self): - """OpenTracing version of test_simple_query.""" - conn, tracer = self._get_conn_tracer() - - ot_tracer = init_tracer("mysql_svc", tracer) - with ot_tracer.start_active_span("mysql_op"): - cursor = conn.cursor() - cursor.execute("SELECT 1") - rows = cursor.fetchall() - assert len(rows) == 1 - - spans = tracer.pop() - assert len(spans) == 2 - ot_span, dd_span = spans - - # confirm parenting - assert ot_span.parent_id is None - assert dd_span.parent_id == ot_span.span_id - - assert ot_span.service == "mysql_svc" - assert ot_span.name == "mysql_op" - - assert_is_measured(dd_span) - assert dd_span.service == "mysql" - assert dd_span.name == "mysql.query" - assert dd_span.span_type == "sql" - assert dd_span.error == 0 - assert dd_span.get_metric("network.destination.port") == 3306 - assert_dict_issuperset( - dd_span.get_tags(), - { - "out.host": "127.0.0.1", - "db.name": "test", - "db.system": "mysql", - 
"db.user": "test", - "component": "mysqldb", - "span.kind": "client", - }, - ) - - def test_simple_query_ot_fetchall(self): - """OpenTracing version of test_simple_query.""" - with self.override_config("mysqldb", dict(trace_fetch_methods=True)): - conn, tracer = self._get_conn_tracer() - - ot_tracer = init_tracer("mysql_svc", tracer) - with ot_tracer.start_active_span("mysql_op"): - cursor = conn.cursor() - cursor.execute("SELECT 1") - rows = cursor.fetchall() - assert len(rows) == 1 - - spans = tracer.pop() - assert len(spans) == 3 - ot_span, dd_span, fetch_span = spans - - # confirm parenting - assert ot_span.parent_id is None - assert dd_span.parent_id == ot_span.span_id - - assert ot_span.service == "mysql_svc" - assert ot_span.name == "mysql_op" - - assert_is_measured(dd_span) - assert dd_span.service == "mysql" - assert dd_span.name == "mysql.query" - assert dd_span.span_type == "sql" - assert dd_span.error == 0 - assert dd_span.get_metric("network.destination.port") == 3306 - assert_dict_issuperset( - dd_span.get_tags(), - { - "out.host": "127.0.0.1", - "db.name": "test", - "db.system": "mysql", - "db.user": "test", - "component": "mysqldb", - "span.kind": "client", - }, - ) - - assert fetch_span.name == "mysql.query.fetchall" - def test_commit(self): conn, tracer = self._get_conn_tracer() diff --git a/tests/contrib/openai/test_openai_v1.py b/tests/contrib/openai/test_openai_v1.py index b492fd114d4..5021b8b0a28 100644 --- a/tests/contrib/openai/test_openai_v1.py +++ b/tests/contrib/openai/test_openai_v1.py @@ -860,7 +860,7 @@ def test_integration_sync(openai_api_key, ddtrace_run_python_code_in_subprocess) import ddtrace from tests.contrib.openai.conftest import FilterOrg from tests.contrib.openai.test_openai_v1 import get_openai_vcr -pin = ddtrace.trace.Pin.get_from(openai) +pin = ddtrace._trace.pin.Pin.get_from(openai) pin.tracer.configure(trace_processors=[FilterOrg()]) with get_openai_vcr(subdirectory_name="v1").use_cassette("completion.yaml"): client = 
openai.OpenAI() @@ -901,7 +901,7 @@ def test_integration_async(openai_api_key, ddtrace_run_python_code_in_subprocess import ddtrace from tests.contrib.openai.conftest import FilterOrg from tests.contrib.openai.test_openai_v1 import get_openai_vcr -pin = ddtrace.trace.Pin.get_from(openai) +pin = ddtrace._trace.pin.Pin.get_from(openai) pin.tracer.configure(trace_processors=[FilterOrg()]) async def task(): with get_openai_vcr(subdirectory_name="v1").use_cassette("completion.yaml"): @@ -1104,7 +1104,7 @@ def test_integration_service_name(openai_api_key, ddtrace_run_python_code_in_sub import ddtrace from tests.contrib.openai.conftest import FilterOrg from tests.contrib.openai.test_openai_v1 import get_openai_vcr -pin = ddtrace.trace.Pin.get_from(openai) +pin = ddtrace._trace.pin.Pin.get_from(openai) pin.tracer.configure(trace_processors=[FilterOrg()]) with get_openai_vcr(subdirectory_name="v1").use_cassette("completion.yaml"): client = openai.OpenAI() diff --git a/tests/contrib/psycopg/test_psycopg.py b/tests/contrib/psycopg/test_psycopg.py index 987890dbbd7..b433bcb68f3 100644 --- a/tests/contrib/psycopg/test_psycopg.py +++ b/tests/contrib/psycopg/test_psycopg.py @@ -14,7 +14,6 @@ from ddtrace.internal.schema import DEFAULT_SPAN_SERVICE_NAME from ddtrace.internal.utils.version import parse_version from tests.contrib.config import POSTGRES_CONFIG -from tests.opentracer.utils import init_tracer from tests.utils import TracerTestCase from tests.utils import assert_is_measured from tests.utils import snapshot @@ -142,47 +141,6 @@ def test_psycopg3_connection_with_string(self): Pin.get_from(conn)._clone(service="postgres", tracer=self.tracer).onto(conn) self.assert_conn_is_traced(conn, "postgres") - def test_opentracing_propagation(self): - # ensure OpenTracing plays well with our integration - query = """SELECT 'tracing'""" - - db = self._get_conn() - ot_tracer = init_tracer("psycopg-svc", self.tracer) - - with ot_tracer.start_active_span("db.access"): - cursor = 
db.cursor() - cursor.execute(query) - rows = cursor.fetchall() - - self.assertEqual(rows, [("tracing",)]) - - self.assert_structure( - dict(name="db.access", service="psycopg-svc"), - (dict(name="postgres.query", resource=query, service="postgres", error=0, span_type="sql"),), - ) - assert_is_measured(self.get_spans()[1]) - self.reset() - - with self.override_config("psycopg", dict(trace_fetch_methods=True)): - db = self._get_conn() - ot_tracer = init_tracer("psycopg-svc", self.tracer) - - with ot_tracer.start_active_span("db.access"): - cursor = db.cursor() - cursor.execute(query) - rows = cursor.fetchall() - - self.assertEqual(rows, [("tracing",)]) - - self.assert_structure( - dict(name="db.access", service="psycopg-svc"), - ( - dict(name="postgres.query", resource=query, service="postgres", error=0, span_type="sql"), - dict(name="postgres.query.fetchall", resource=query, service="postgres", error=0, span_type="sql"), - ), - ) - assert_is_measured(self.get_spans()[1]) - def test_cursor_ctx_manager(self): # ensure cursors work with context managers # https://github.com/DataDog/dd-trace-py/issues/228 diff --git a/tests/contrib/psycopg/test_psycopg_async.py b/tests/contrib/psycopg/test_psycopg_async.py index b4778e0693a..a21dc2d794c 100644 --- a/tests/contrib/psycopg/test_psycopg_async.py +++ b/tests/contrib/psycopg/test_psycopg_async.py @@ -10,7 +10,6 @@ from ddtrace.contrib.internal.psycopg.patch import unpatch from tests.contrib.asyncio.utils import AsyncioTestCase from tests.contrib.config import POSTGRES_CONFIG -from tests.opentracer.utils import init_tracer from tests.utils import assert_is_measured @@ -127,47 +126,6 @@ async def assert_conn_is_traced_async(self, db, service): self.assertIsNone(root.get_tag("sql.query")) self.reset() - async def test_opentracing_propagation(self): - # ensure OpenTracing plays well with our integration - query = """SELECT 'tracing'""" - - db = await self._get_conn() - ot_tracer = init_tracer("psycopg-svc", self.tracer) - - with 
ot_tracer.start_active_span("db.access"): - cursor = db.cursor() - await cursor.execute(query) - rows = await cursor.fetchall() - - self.assertEqual(rows, [("tracing",)]) - - self.assert_structure( - dict(name="db.access", service="psycopg-svc"), - (dict(name="postgres.query", resource=query, service="postgres", error=0, span_type="sql"),), - ) - assert_is_measured(self.get_spans()[1]) - self.reset() - - with self.override_config("psycopg", dict(trace_fetch_methods=True)): - db = await self._get_conn() - ot_tracer = init_tracer("psycopg-svc", self.tracer) - - with ot_tracer.start_active_span("db.access"): - cursor = db.cursor() - await cursor.execute(query) - rows = await cursor.fetchall() - - self.assertEqual(rows, [("tracing",)]) - - self.assert_structure( - dict(name="db.access", service="psycopg-svc"), - ( - dict(name="postgres.query", resource=query, service="postgres", error=0, span_type="sql"), - dict(name="postgres.query.fetchall", resource=query, service="postgres", error=0, span_type="sql"), - ), - ) - assert_is_measured(self.get_spans()[1]) - async def test_cursor_ctx_manager(self): # ensure cursors work with context managers # https://github.com/DataDog/dd-trace-py/issues/228 diff --git a/tests/contrib/psycopg2/test_psycopg.py b/tests/contrib/psycopg2/test_psycopg.py index 209de02c880..10051da0cff 100644 --- a/tests/contrib/psycopg2/test_psycopg.py +++ b/tests/contrib/psycopg2/test_psycopg.py @@ -13,7 +13,6 @@ from ddtrace.internal.schema import DEFAULT_SPAN_SERVICE_NAME from ddtrace.internal.utils.version import parse_version from tests.contrib.config import POSTGRES_CONFIG -from tests.opentracer.utils import init_tracer from tests.utils import TracerTestCase from tests.utils import assert_is_measured from tests.utils import snapshot @@ -148,47 +147,6 @@ def test_psycopg2_connection_with_string(self): Pin.get_from(conn)._clone(service="postgres", tracer=self.tracer).onto(conn) self.assert_conn_is_traced(conn, "postgres") - def 
test_opentracing_propagation(self): - # ensure OpenTracing plays well with our integration - query = """SELECT 'tracing'""" - - db = self._get_conn() - ot_tracer = init_tracer("psycopg-svc", self.tracer) - - with ot_tracer.start_active_span("db.access"): - cursor = db.cursor() - cursor.execute(query) - rows = cursor.fetchall() - - self.assertEqual(rows, [("tracing",)]) - - self.assert_structure( - dict(name="db.access", service="psycopg-svc"), - (dict(name="postgres.query", resource=query, service="postgres", error=0, span_type="sql"),), - ) - assert_is_measured(self.get_spans()[1]) - self.reset() - - with self.override_config("psycopg", dict(trace_fetch_methods=True)): - db = self._get_conn() - ot_tracer = init_tracer("psycopg-svc", self.tracer) - - with ot_tracer.start_active_span("db.access"): - cursor = db.cursor() - cursor.execute(query) - rows = cursor.fetchall() - - self.assertEqual(rows, [("tracing",)]) - - self.assert_structure( - dict(name="db.access", service="psycopg-svc"), - ( - dict(name="postgres.query", resource=query, service="postgres", error=0, span_type="sql"), - dict(name="postgres.query.fetchall", resource=query, service="postgres", error=0, span_type="sql"), - ), - ) - assert_is_measured(self.get_spans()[1]) - @skipIf(PSYCOPG2_VERSION < (2, 5), "context manager not available in psycopg2==2.4") def test_cursor_ctx_manager(self): # ensure cursors work with context managers diff --git a/tests/contrib/pylibmc/test.py b/tests/contrib/pylibmc/test.py index da5823fc6b3..91242bbe871 100644 --- a/tests/contrib/pylibmc/test.py +++ b/tests/contrib/pylibmc/test.py @@ -12,7 +12,6 @@ from ddtrace.contrib.internal.pylibmc.patch import unpatch from ddtrace.ext import memcached from tests.contrib.config import MEMCACHED_CONFIG as cfg -from tests.opentracer.utils import init_tracer from tests.utils import TracerTestCase from tests.utils import assert_is_measured @@ -78,33 +77,6 @@ def test_incr_decr(self): resources = sorted(s.resource for s in spans) assert 
expected_resources == resources - def test_incr_decr_ot(self): - """OpenTracing version of test_incr_decr.""" - client, tracer = self.get_client() - ot_tracer = init_tracer("memcached", tracer) - - start = time.time() - with ot_tracer.start_active_span("mc_ops"): - client.set("a", 1) - client.incr("a", 2) - client.decr("a", 1) - v = client.get("a") - assert v == 2 - end = time.time() - - # verify spans - spans = tracer.pop() - ot_span = spans[0] - - assert ot_span.name == "mc_ops" - - for s in spans[1:]: - assert s.parent_id == ot_span.span_id - self._verify_cache_span(s, start, end) - expected_resources = sorted(["get", "set", "incr", "decr"]) - resources = sorted(s.resource for s in spans[1:]) - assert expected_resources == resources - def test_clone(self): # ensure cloned connections are traced as well. client, tracer = self.get_client() diff --git a/tests/contrib/pymongo/test.py b/tests/contrib/pymongo/test.py index 236fa582910..9eb066cdae0 100644 --- a/tests/contrib/pymongo/test.py +++ b/tests/contrib/pymongo/test.py @@ -10,7 +10,6 @@ from ddtrace.contrib.internal.pymongo.patch import patch from ddtrace.contrib.internal.pymongo.patch import unpatch from ddtrace.ext import SpanTypes -from tests.opentracer.utils import init_tracer from tests.utils import DummyTracer from tests.utils import TracerTestCase from tests.utils import assert_is_measured @@ -298,67 +297,6 @@ def test_insert_find(self): assert spans[-1].resource == 'find teams {"name": "?"}' assert spans[-1].get_tag("mongodb.query") == '{"name": "?"}' - def test_update_ot(self): - """OpenTracing version of test_update.""" - tracer, client = self.get_tracer_and_client() - ot_tracer = init_tracer("mongo_svc", tracer) - - with ot_tracer.start_active_span("mongo_op"): - db = client["testdb"] - db.drop_collection("songs") - input_songs = [ - {"name": "Powderfinger", "artist": "Neil"}, - {"name": "Harvest", "artist": "Neil"}, - {"name": "Suzanne", "artist": "Leonard"}, - {"name": "Partisan", "artist": 
"Leonard"}, - ] - db.songs.insert_many(input_songs) - result = db.songs.update_many( - {"artist": "Neil"}, - {"$set": {"artist": "Shakey"}}, - ) - - assert result.matched_count == 2 - assert result.modified_count == 2 - - # ensure all is traced. - spans = tracer.pop() - assert spans, spans - assert len(spans) == 7 - - ot_span = spans[0] - assert ot_span.parent_id is None - assert ot_span.name == "mongo_op" - assert ot_span.service == "mongo_svc" - - # remove pymongo.get_socket and pymongo.checkout spans - spans = [s for s in spans if s.name == "pymongo.cmd"] - assert len(spans) == 3 - for span in spans: - # ensure all the of the common metadata is set - assert_is_measured(span) - assert span.service == "pymongo" - assert span.span_type == "mongodb" - assert span.get_tag("component") == "pymongo" - assert span.get_tag("span.kind") == "client" - assert span.get_tag("db.system") == "mongodb" - assert span.get_tag("mongodb.collection") == "songs" - assert span.get_tag("mongodb.db") == "testdb" - assert span.get_tag("out.host") - assert span.get_metric("network.destination.port") - - expected_resources = set( - [ - "drop songs", - 'update songs {"artist": "?"}', - "insert songs", - "pymongo.get_socket", - "pymongo.checkout", - ] - ) - - assert {s.resource for s in spans[1:]}.issubset(expected_resources) - def test_rowcount(self): tracer, client = self.get_tracer_and_client() db = client["testdb"] @@ -939,7 +877,7 @@ def test_dbm_propagation_full_mode(self): if pymongo.version_tuple < (3, 9): self.skipTest("DBM propagation requires PyMongo 3.9+") - from ddtrace.settings._database_monitoring import dbm_config + from ddtrace.internal.settings._database_monitoring import dbm_config assert dbm_config.propagation_mode == "full" @@ -992,7 +930,7 @@ def test_dbm_propagation_full_mode(self): @TracerTestCase.run_in_subprocess(env_overrides=dict(DD_DBM_PROPAGATION_MODE="disabled")) def test_dbm_propagation_disabled(self): """Test that DBM comment is not injected when propagation 
mode is 'disabled'""" - from ddtrace.settings._database_monitoring import dbm_config + from ddtrace.internal.settings._database_monitoring import dbm_config assert dbm_config.propagation_mode == "disabled" @@ -1034,7 +972,7 @@ def test_dbm_propagation_service_mode(self): if pymongo.version_tuple < (3, 9): self.skipTest("DBM propagation requires PyMongo 3.9+") - from ddtrace.settings._database_monitoring import dbm_config + from ddtrace.internal.settings._database_monitoring import dbm_config assert dbm_config.propagation_mode == "service" @@ -1107,7 +1045,7 @@ def test_dbm_propagation_disabled_on_old_pymongo(self): if pymongo.version_tuple >= (3, 9): self.skipTest("Only test on PyMongo versions < 3.9") - from ddtrace.settings._database_monitoring import dbm_config + from ddtrace.internal.settings._database_monitoring import dbm_config assert dbm_config.propagation_mode == "service" diff --git a/tests/contrib/pymysql/test_pymysql.py b/tests/contrib/pymysql/test_pymysql.py index 762f55bed08..8fb5ef78621 100644 --- a/tests/contrib/pymysql/test_pymysql.py +++ b/tests/contrib/pymysql/test_pymysql.py @@ -6,7 +6,6 @@ from ddtrace.contrib.internal.pymysql.patch import unpatch from ddtrace.internal.schema import DEFAULT_SPAN_SERVICE_NAME from tests.contrib import shared_tests -from tests.opentracer.utils import init_tracer from tests.utils import TracerTestCase from tests.utils import assert_dict_issuperset from tests.utils import assert_is_measured @@ -249,73 +248,6 @@ def test_query_proc(self): meta.update(self.DB_INFO) assert_dict_issuperset(span.get_tags(), meta) - def test_simple_query_ot(self): - """OpenTracing version of test_simple_query.""" - conn, tracer = self._get_conn_tracer() - - ot_tracer = init_tracer("mysql_svc", tracer) - with ot_tracer.start_active_span("mysql_op"): - cursor = conn.cursor() - cursor.execute("SELECT 1") - rows = cursor.fetchall() - assert len(rows) == 1 - - spans = tracer.pop() - assert len(spans) == 2 - ot_span, dd_span = spans - - # 
confirm parenting - assert ot_span.parent_id is None - assert dd_span.parent_id == ot_span.span_id - - assert ot_span.service == "mysql_svc" - assert ot_span.name == "mysql_op" - - assert_is_measured(dd_span) - assert dd_span.service == "pymysql" - assert dd_span.name == "pymysql.query" - assert dd_span.span_type == "sql" - assert dd_span.error == 0 - assert dd_span.get_metric("network.destination.port") == MYSQL_CONFIG.get("port") - meta = {} - meta.update(self.DB_INFO) - assert_dict_issuperset(dd_span.get_tags(), meta) - - def test_simple_query_ot_fetchall(self): - """OpenTracing version of test_simple_query.""" - with self.override_config("pymysql", dict(trace_fetch_methods=True)): - conn, tracer = self._get_conn_tracer() - - ot_tracer = init_tracer("mysql_svc", tracer) - with ot_tracer.start_active_span("mysql_op"): - cursor = conn.cursor() - cursor.execute("SELECT 1") - rows = cursor.fetchall() - assert len(rows) == 1 - - spans = tracer.pop() - assert len(spans) == 3 - ot_span, dd_span, fetch_span = spans - - # confirm parenting - assert ot_span.parent_id is None - assert dd_span.parent_id == ot_span.span_id - - assert ot_span.service == "mysql_svc" - assert ot_span.name == "mysql_op" - - assert_is_measured(dd_span) - assert dd_span.service == "pymysql" - assert dd_span.name == "pymysql.query" - assert dd_span.span_type == "sql" - assert dd_span.error == 0 - assert dd_span.get_metric("network.destination.port") == MYSQL_CONFIG.get("port") - meta = {} - meta.update(self.DB_INFO) - assert_dict_issuperset(dd_span.get_tags(), meta) - - assert fetch_span.name == "pymysql.query.fetchall" - def test_commit(self): conn, tracer = self._get_conn_tracer() diff --git a/tests/contrib/pyramid/utils.py b/tests/contrib/pyramid/utils.py index 3dec370d500..2f663258ac5 100644 --- a/tests/contrib/pyramid/utils.py +++ b/tests/contrib/pyramid/utils.py @@ -11,7 +11,6 @@ from tests.utils import assert_is_measured from tests.utils import assert_span_http_status_code -from 
...opentracer.utils import init_tracer from .app import create_app @@ -273,33 +272,3 @@ def test_include_conflicts(self): self.app.get("/404", status=404) spans = self.pop_spans() assert len(spans) == 1 - - def test_200_ot(self): - """OpenTracing version of test_200.""" - ot_tracer = init_tracer("pyramid_svc", self.tracer) - - with ot_tracer.start_active_span("pyramid_get"): - res = self.app.get("/", status=200) - assert b"idx" in res.body - - spans = self.pop_spans() - assert len(spans) == 2 - - ot_span, dd_span = spans - - # confirm the parenting - assert ot_span.parent_id is None - assert dd_span.parent_id == ot_span.span_id - - assert ot_span.name == "pyramid_get" - assert ot_span.service == "pyramid_svc" - - assert_is_measured(dd_span) - assert dd_span.service == "foobar" - assert dd_span.resource == "GET index" - assert dd_span.error == 0 - assert dd_span.span_type == "web" - assert dd_span.get_tag("http.method") == "GET" - assert_span_http_status_code(dd_span, 200) - assert dd_span.get_tag(http.URL) == "http://localhost/" - assert dd_span.get_tag("pyramid.route.name") == "index" diff --git a/tests/contrib/pytest/test_pytest.py b/tests/contrib/pytest/test_pytest.py index 6fa1f262b3b..3413ae55e2d 100644 --- a/tests/contrib/pytest/test_pytest.py +++ b/tests/contrib/pytest/test_pytest.py @@ -769,7 +769,7 @@ def test_dd_origin_tag_propagated_to_every_span(self): """ import pytest import ddtrace - from ddtrace.trace import Pin + from ddtrace._trace.pin import Pin def test_service(ddtracer): with ddtracer.trace("SPAN2") as span2: @@ -4557,7 +4557,7 @@ def test_pytest_disables_telemetry_dependency_collection(self): def test_dependency_collection_disabled(): # Check that the config is set to disable telemetry dependency collection # The pytest plugin should have done this earlier in the process - from ddtrace.settings._telemetry import config as telemetry_config + from ddtrace.internal.settings._telemetry import config as telemetry_config assert 
telemetry_config.DEPENDENCY_COLLECTION is False, "Dependency collection should be disabled" """ ) diff --git a/tests/contrib/pytest/test_pytest_early_config.py b/tests/contrib/pytest/test_pytest_early_config.py index b2c21a62ad5..9b1b2bfd7ad 100644 --- a/tests/contrib/pytest/test_pytest_early_config.py +++ b/tests/contrib/pytest/test_pytest_early_config.py @@ -44,7 +44,8 @@ def test_coverage_enabled_via_command_line_option(self): [suite_span] = _get_spans_from_list(spans, "suite") [test_span] = _get_spans_from_list(spans, "test") assert ( - suite_span.get_struct_tag(COVERAGE_TAG_NAME) is not None or test_span.get_tag(COVERAGE_TAG_NAME) is not None + suite_span._get_struct_tag(COVERAGE_TAG_NAME) is not None + or test_span.get_tag(COVERAGE_TAG_NAME) is not None ) def test_coverage_enabled_via_pytest_addopts_env_var(self): @@ -54,7 +55,8 @@ def test_coverage_enabled_via_pytest_addopts_env_var(self): [suite_span] = _get_spans_from_list(spans, "suite") [test_span] = _get_spans_from_list(spans, "test") assert ( - suite_span.get_struct_tag(COVERAGE_TAG_NAME) is not None or test_span.get_tag(COVERAGE_TAG_NAME) is not None + suite_span._get_struct_tag(COVERAGE_TAG_NAME) is not None + or test_span.get_tag(COVERAGE_TAG_NAME) is not None ) def test_coverage_enabled_via_addopts_ini_file_option(self): @@ -65,7 +67,8 @@ def test_coverage_enabled_via_addopts_ini_file_option(self): [suite_span] = _get_spans_from_list(spans, "suite") [test_span] = _get_spans_from_list(spans, "test") assert ( - suite_span.get_struct_tag(COVERAGE_TAG_NAME) is not None or test_span.get_tag(COVERAGE_TAG_NAME) is not None + suite_span._get_struct_tag(COVERAGE_TAG_NAME) is not None + or test_span.get_tag(COVERAGE_TAG_NAME) is not None ) def test_coverage_enabled_via_ddtrace_ini_file_option(self): @@ -76,5 +79,6 @@ def test_coverage_enabled_via_ddtrace_ini_file_option(self): [suite_span] = _get_spans_from_list(spans, "suite") [test_span] = _get_spans_from_list(spans, "test") assert ( - 
suite_span.get_struct_tag(COVERAGE_TAG_NAME) is not None or test_span.get_tag(COVERAGE_TAG_NAME) is not None + suite_span._get_struct_tag(COVERAGE_TAG_NAME) is not None + or test_span.get_tag(COVERAGE_TAG_NAME) is not None ) diff --git a/tests/contrib/pytest/utils.py b/tests/contrib/pytest/utils.py index 657c2f0b58c..b51058bf56e 100644 --- a/tests/contrib/pytest/utils.py +++ b/tests/contrib/pytest/utils.py @@ -34,7 +34,7 @@ def _get_tuples_from_segments(segments): def _get_span_coverage_data(span, use_plugin_v2=False): """Returns an abstracted view of the coverage data from the span that is independent of the coverage format.""" if use_plugin_v2: - tag_data = span.get_struct_tag(COVERAGE_TAG_NAME) + tag_data = span._get_struct_tag(COVERAGE_TAG_NAME) assert tag_data is not None, f"Coverage data not found in span {span}" return { file_data["filename"]: _get_tuples_from_bytearray(file_data["bitmap"]) for file_data in tag_data["files"] diff --git a/tests/contrib/redis/test_redis.py b/tests/contrib/redis/test_redis.py index 31fe287fdf1..f2a42b83d19 100644 --- a/tests/contrib/redis/test_redis.py +++ b/tests/contrib/redis/test_redis.py @@ -4,12 +4,10 @@ import pytest import redis -import ddtrace from ddtrace._trace.pin import Pin from ddtrace.contrib.internal.redis.patch import patch from ddtrace.contrib.internal.redis.patch import unpatch from ddtrace.internal.schema import DEFAULT_SPAN_SERVICE_NAME -from tests.opentracer.utils import init_tracer from tests.utils import DummyTracer from tests.utils import TracerTestCase from tests.utils import snapshot @@ -238,39 +236,6 @@ def test_patch_unpatch(self): assert spans, spans assert len(spans) == 1 - def test_opentracing(self): - """Ensure OpenTracing works with redis.""" - ot_tracer = init_tracer("redis_svc", self.tracer) - - with ot_tracer.start_active_span("redis_get"): - us = self.r.get("cheese") - assert us is None - - spans = self.get_spans() - assert len(spans) == 2 - ot_span, dd_span = spans - - # confirm the 
parenting - assert ot_span.parent_id is None - assert dd_span.parent_id == ot_span.span_id - - assert ot_span.name == "redis_get" - assert ot_span.service == "redis_svc" - - self.assert_is_measured(dd_span) - assert dd_span.service == "redis" - assert dd_span.name == "redis.command" - assert dd_span.span_type == "redis" - assert dd_span.error == 0 - assert dd_span.get_metric("out.redis_db") == 0 - assert dd_span.get_tag("out.host") == "localhost" - assert dd_span.get_tag("redis.raw_command") == "GET cheese" - assert dd_span.get_tag("component") == "redis" - assert dd_span.get_tag("span.kind") == "client" - assert dd_span.get_tag("db.system") == "redis" - assert dd_span.get_metric("redis.args_length") == 2 - assert dd_span.resource == "GET" - def test_redis_rowcount_all_keys_valid(self): self.r.set("key1", "value1") @@ -540,20 +505,6 @@ def test_patch_unpatch(self): assert spans, spans assert len(spans) == 1 - @snapshot() - def test_opentracing(self): - """Ensure OpenTracing works with redis.""" - writer = ddtrace.tracer._span_aggregator.writer - ot_tracer = init_tracer("redis_svc", ddtrace.tracer) - # FIXME: OpenTracing always overrides the hostname/port and creates a new - # writer so we have to reconfigure with the previous one - ddtrace.tracer._span_aggregator.writer = writer - ddtrace.tracer._recreate() - - with ot_tracer.start_active_span("redis_get"): - us = self.r.get("cheese") - assert us is None - @TracerTestCase.run_in_subprocess(env_overrides=dict(DD_SERVICE="mysvc")) @snapshot() def test_user_specified_service(self): diff --git a/tests/contrib/requests/test_requests.py b/tests/contrib/requests/test_requests.py index e1a8d2672d6..f7f7c24bc07 100644 --- a/tests/contrib/requests/test_requests.py +++ b/tests/contrib/requests/test_requests.py @@ -18,7 +18,6 @@ from ddtrace.contrib.internal.requests.patch import unpatch from ddtrace.ext import http from ddtrace.internal.schema import DEFAULT_SPAN_SERVICE_NAME -from tests.opentracer.utils import init_tracer 
from tests.utils import TracerTestCase from tests.utils import assert_is_measured from tests.utils import assert_span_http_status_code @@ -580,35 +579,6 @@ def test_global_config_service(self): spans = self.pop_spans() assert spans[0].service == "override" - def test_200_ot(self): - """OpenTracing version of test_200.""" - - ot_tracer = init_tracer("requests_svc", self.tracer) - - with ot_tracer.start_active_span("requests_get"): - out = self.session.get(URL_200) - assert out.status_code == 200 - - # validation - spans = self.pop_spans() - assert len(spans) == 2 - - ot_span, dd_span = spans - - # confirm the parenting - assert ot_span.parent_id is None - assert dd_span.parent_id == ot_span.span_id - - assert ot_span.name == "requests_get" - assert ot_span.service == "requests_svc" - - assert_is_measured(dd_span) - assert dd_span.get_tag(http.METHOD) == "GET" - assert_span_http_status_code(dd_span, 200) - assert dd_span.error == 0 - assert dd_span.span_type == "http" - assert dd_span.resource == "GET /status/200" - def test_request_and_response_headers(self): # Disabled when not configured self.session.get(URL_200, headers={"my-header": "my_value"}) diff --git a/tests/contrib/requests/test_requests_distributed.py b/tests/contrib/requests/test_requests_distributed.py index 9cbeb3ab2ba..03d4caf00da 100644 --- a/tests/contrib/requests/test_requests_distributed.py +++ b/tests/contrib/requests/test_requests_distributed.py @@ -1,7 +1,7 @@ from requests_mock import Adapter from ddtrace._trace.pin import Pin -from ddtrace.settings.asm import config as asm_config +from ddtrace.internal.settings.asm import config as asm_config from tests.utils import TracerTestCase from tests.utils import get_128_bit_trace_id_from_headers diff --git a/tests/contrib/snowflake/test_snowflake.py b/tests/contrib/snowflake/test_snowflake.py index f4995255b4c..67ce242b8e4 100644 --- a/tests/contrib/snowflake/test_snowflake.py +++ b/tests/contrib/snowflake/test_snowflake.py @@ -9,8 +9,6 @@ from 
ddtrace._trace.pin import Pin from ddtrace.contrib.internal.snowflake.patch import patch from ddtrace.contrib.internal.snowflake.patch import unpatch -from ddtrace.trace import tracer -from tests.opentracer.utils import init_tracer from tests.utils import override_config from tests.utils import snapshot @@ -93,13 +91,6 @@ def client(): yield ctx -@contextlib.contextmanager -def ot_trace(): - ot = init_tracer("snowflake_svc", tracer) - with ot.start_active_span("snowflake_op"): - yield - - @snapshot() @req_mock.activate def test_snowflake_fetchone(client): @@ -224,72 +215,6 @@ def test_snowflake_executemany_insert(client): assert res.rowcount == 2 -@snapshot() -@req_mock.activate -def test_snowflake_ot_fetchone(client): - add_snowflake_query_response( - rowtype=["TEXT"], - rows=[("4.30.2",)], - ) - with ot_trace(): - with client.cursor() as cur: - res = cur.execute("select current_version();") - assert res == cur - assert cur.fetchone() == ("4.30.2",) - - -@snapshot() -@req_mock.activate -def test_snowflake_ot_fetchall(client): - add_snowflake_query_response( - rowtype=["TEXT"], - rows=[("4.30.2",)], - ) - with ot_trace(): - with client.cursor() as cur: - res = cur.execute("select current_version();") - assert res == cur - assert cur.fetchall() == [("4.30.2",)] - - -@snapshot() -@req_mock.activate -def test_snowflake_ot_fetchall_multiple_rows(client): - add_snowflake_query_response( - rowtype=["TEXT", "TEXT"], - rows=[("1a", "1b"), ("2a", "2b")], - ) - with ot_trace(): - with client.cursor() as cur: - res = cur.execute("select a, b from t;") - assert res == cur - assert cur.fetchall() == [ - ("1a", "1b"), - ("2a", "2b"), - ] - - -@snapshot() -@req_mock.activate -def test_snowflake_ot_executemany_insert(client): - add_snowflake_query_response( - rowtype=[], - rows=[], - total=2, - ) - with ot_trace(): - with client.cursor() as cur: - res = cur.executemany( - "insert into t (a, b) values (%s, %s);", - [ - ("1a", "1b"), - ("2a", "2b"), - ], - ) - assert res == cur - 
assert res.rowcount == 2 - - @pytest.mark.snapshot() @pytest.mark.parametrize( "service_schema", diff --git a/tests/contrib/sqlalchemy/mixins.py b/tests/contrib/sqlalchemy/mixins.py index 18b180db2d3..031c9ca3aea 100644 --- a/tests/contrib/sqlalchemy/mixins.py +++ b/tests/contrib/sqlalchemy/mixins.py @@ -9,7 +9,6 @@ from sqlalchemy.orm import sessionmaker from ddtrace.contrib.internal.sqlalchemy.engine import trace_engine -from tests.opentracer.utils import init_tracer Base = declarative_base() @@ -166,36 +165,3 @@ def test_engine_connect_execute(self): assert span.span_type == "sql" assert span.error == 0 assert span.duration > 0 - - def test_opentracing(self): - """Ensure that sqlalchemy works with the opentracer.""" - ot_tracer = init_tracer("sqlalch_svc", self.tracer) - - with ot_tracer.start_active_span("sqlalch_op"): - with self.connection() as conn: - rows = conn.execute(text("SELECT * FROM players")).fetchall() - assert len(rows) == 0 - - traces = self.pop_traces() - # trace composition - assert len(traces) == 1 - assert len(traces[0]) == 2 - ot_span, dd_span = traces[0] - - # confirm the parenting - assert ot_span.parent_id is None - assert dd_span.parent_id == ot_span.span_id - - assert ot_span.name == "sqlalch_op" - assert ot_span.service == "sqlalch_svc" - - # span fields - assert dd_span.name == "{}.query".format(self.VENDOR) - assert dd_span.service == self.SERVICE - assert dd_span.resource == "SELECT * FROM players" - assert dd_span.get_tag("sql.db") == self.SQL_DB - assert dd_span.get_tag("component") == "sqlalchemy" - assert dd_span.get_tag("span.kind") == "client" - assert dd_span.span_type == "sql" - assert dd_span.error == 0 - assert dd_span.duration > 0 diff --git a/tests/contrib/sqlite3/test_sqlite3.py b/tests/contrib/sqlite3/test_sqlite3.py index e4b12d7b4e8..de2b18f72b4 100644 --- a/tests/contrib/sqlite3/test_sqlite3.py +++ b/tests/contrib/sqlite3/test_sqlite3.py @@ -21,7 +21,6 @@ from ddtrace.contrib.internal.sqlite3.patch import patch from 
ddtrace.contrib.internal.sqlite3.patch import unpatch from ddtrace.internal.schema import DEFAULT_SPAN_SERVICE_NAME -from tests.opentracer.utils import init_tracer from tests.utils import TracerTestCase from tests.utils import assert_is_measured from tests.utils import assert_is_not_measured @@ -206,47 +205,6 @@ def test_sqlite_fetchmany_is_traced(self): self.assertIsNone(fetchmany_span.get_tag("sql.query")) self.assertEqual(fetchmany_span.get_tag("db.system"), "sqlite") - def test_sqlite_ot(self): - """Ensure sqlite works with the opentracer.""" - ot_tracer = init_tracer("sqlite_svc", self.tracer) - - # Ensure we can run a query and it's correctly traced - q = "select * from sqlite_master" - with ot_tracer.start_active_span("sqlite_op"): - db = sqlite3.connect(":memory:") - pin = Pin.get_from(db) - assert pin - pin._clone(tracer=self.tracer).onto(db) - cursor = db.execute(q) - rows = cursor.fetchall() - assert not rows - - self.assert_structure( - dict(name="sqlite_op", service="sqlite_svc"), - (dict(name="sqlite.query", service="sqlite", span_type="sql", resource=q, error=0),), - ) - assert_is_measured(self.get_spans()[1]) - self.reset() - - with self.override_config("sqlite", dict(trace_fetch_methods=True)): - with ot_tracer.start_active_span("sqlite_op"): - db = sqlite3.connect(":memory:") - pin = Pin.get_from(db) - assert pin - pin._clone(tracer=self.tracer).onto(db) - cursor = db.execute(q) - rows = cursor.fetchall() - assert not rows - - self.assert_structure( - dict(name="sqlite_op", service="sqlite_svc"), - ( - dict(name="sqlite.query", span_type="sql", resource=q, error=0), - dict(name="sqlite.query.fetchall", span_type="sql", resource=q, error=0), - ), - ) - assert_is_measured(self.get_spans()[1]) - def test_commit(self): connection = self._given_a_traced_connection(self.tracer) connection.commit() diff --git a/tests/contrib/subprocess/test_subprocess_patch.py b/tests/contrib/subprocess/test_subprocess_patch.py index 33e77698ceb..b2f65324c75 100644 --- 
a/tests/contrib/subprocess/test_subprocess_patch.py +++ b/tests/contrib/subprocess/test_subprocess_patch.py @@ -1,6 +1,6 @@ from ddtrace.contrib.internal.subprocess.patch import get_version from ddtrace.contrib.internal.subprocess.patch import patch -from ddtrace.settings.asm import config as asm_config +from ddtrace.internal.settings.asm import config as asm_config try: diff --git a/tests/contrib/suitespec.yml b/tests/contrib/suitespec.yml index 830cc5511b3..48246288682 100644 --- a/tests/contrib/suitespec.yml +++ b/tests/contrib/suitespec.yml @@ -31,9 +31,6 @@ components: bottle: - ddtrace/contrib/bottle.py - ddtrace/contrib/internal/bottle/* - cassandra: - - ddtrace/contrib/internal/cassandra/* - - ddtrace/ext/cassandra.py celery: - ddtrace/contrib/celery.py - ddtrace/contrib/internal/celery/* @@ -57,7 +54,7 @@ components: - ddtrace/ext/test.py - ddtrace/ext/user.py - ddtrace/propagation/* - - ddtrace/settings/_database_monitoring.py + - ddtrace/internal/settings/_database_monitoring.py - tests/contrib/patch.py - tests/contrib/config.py - tests/contrib/__init__.py @@ -89,8 +86,6 @@ components: - ddtrace/contrib/internal/flask/* - ddtrace/contrib/flask_cache.py - ddtrace/contrib/internal/flask_cache/* - freezegun: - - ddtrace/contrib/internal/freezegun/* futures: - ddtrace/contrib/internal/futures/* gevent: @@ -129,7 +124,6 @@ components: - ddtrace/contrib/internal/molten/* mongo: - ddtrace/contrib/internal/pymongo/* - - ddtrace/contrib/internal/mongoengine/* - ddtrace/ext/mongo.py mysql: - ddtrace/contrib/internal/mysql/* @@ -459,19 +453,6 @@ suites: - tests/contrib/bottle/* runner: riot snapshot: true - cassandra: - paths: - - '@bootstrap' - - '@core' - - '@contrib' - - '@tracing' - - '@cassandra' - - tests/contrib/cassandra/* - runner: riot - snapshot: true - parallelism: 2 - services: - - cassandra celery: env: DD_DISABLE_ERROR_RESPONSES: true @@ -694,16 +675,6 @@ suites: - memcached - redis snapshot: true - freezegun: - paths: - - '@bootstrap' - - '@core' - 
- '@contrib' - - '@tracing' - - '@freezegun' - - tests/contrib/freezegun/* - runner: riot - snapshot: true gevent: paths: - '@bootstrap' @@ -898,18 +869,6 @@ suites: - tests/contrib/molten/* runner: riot snapshot: true - mongoengine: - paths: - - '@bootstrap' - - '@core' - - '@contrib' - - '@tracing' - - '@mongo' - - tests/contrib/mongoengine/* - runner: riot - snapshot: true - services: - - mongo mysqlpython: paths: - '@bootstrap' @@ -937,15 +896,6 @@ suites: - tests/snapshots/tests.opentelemetry.* runner: riot snapshot: true - opentracer: - parallelism: 1 - paths: - - '@bootstrap' - - '@core' - - '@tracing' - - '@opentracer' - - tests/opentracer/* - runner: riot protobuf: parallelism: 1 paths: diff --git a/tests/contrib/tornado/test_tornado_web.py b/tests/contrib/tornado/test_tornado_web.py index 642246aa244..cbb4d2c7785 100644 --- a/tests/contrib/tornado/test_tornado_web.py +++ b/tests/contrib/tornado/test_tornado_web.py @@ -1,6 +1,3 @@ -import pytest -import tornado - from ddtrace import config from ddtrace.constants import _ORIGIN_KEY from ddtrace.constants import _SAMPLING_PRIORITY_KEY @@ -8,7 +5,6 @@ from ddtrace.constants import USER_KEEP from ddtrace.ext import http from ddtrace.internal.schema import DEFAULT_SPAN_SERVICE_NAME -from tests.opentracer.utils import init_tracer from tests.tracer.utils_inferred_spans.test_helpers import assert_web_and_inferred_aws_api_gateway_span_data from tests.utils import assert_is_measured from tests.utils import assert_span_http_status_code @@ -385,46 +381,6 @@ def test_propagation(self): assert request_span.get_tag("component") == "tornado" assert request_span.get_tag("span.kind") == "server" - # Opentracing support depends on new AsyncioScopeManager - # See: https://github.com/opentracing/opentracing-python/pull/118 - @pytest.mark.skipif( - tornado.version_info >= (5, 0), reason="Opentracing ScopeManager not available for Tornado >= 5" - ) - def test_success_handler_ot(self): - """OpenTracing version of 
test_success_handler.""" - from opentracing.scope_managers.tornado import TornadoScopeManager - - ot_tracer = init_tracer("tornado_svc", self.tracer, scope_manager=TornadoScopeManager()) - - with ot_tracer.start_active_span("tornado_op"): - response = self.fetch("/success/") - assert 200 == response.code - - traces = self.pop_traces() - assert 1 == len(traces) - assert 2 == len(traces[0]) - # dd_span will start and stop before the ot_span finishes - ot_span, dd_span = traces[0] - - # confirm the parenting - assert ot_span.parent_id is None - assert dd_span.parent_id == ot_span.span_id - - assert ot_span.name == "tornado_op" - assert ot_span.service == "tornado_svc" - - assert_is_measured(dd_span) - assert "tornado-web" == dd_span.service - assert "tornado.request" == dd_span.name - assert "web" == dd_span.span_type - assert "tests.contrib.tornado.web.app.SuccessHandler" == dd_span.resource - assert "GET" == dd_span.get_tag("http.method") - assert_span_http_status_code(dd_span, 200) - assert self.get_url("/success/") == dd_span.get_tag(http.URL) - assert 0 == dd_span.error - assert dd_span.get_tag("component") == "tornado" - assert dd_span.get_tag("span.kind") == "server" - class TestNoPropagationTornadoWebViaSetting(TornadoTestCase): """ diff --git a/tests/contrib/urllib3/test_urllib3.py b/tests/contrib/urllib3/test_urllib3.py index 01d3b87893c..3598ffb2608 100644 --- a/tests/contrib/urllib3/test_urllib3.py +++ b/tests/contrib/urllib3/test_urllib3.py @@ -12,9 +12,8 @@ from ddtrace.contrib.internal.urllib3.patch import unpatch from ddtrace.ext import http from ddtrace.internal.schema import DEFAULT_SPAN_SERVICE_NAME -from ddtrace.settings.asm import config as asm_config +from ddtrace.internal.settings.asm import config as asm_config from tests.contrib.config import HTTPBIN_CONFIG -from tests.opentracer.utils import init_tracer from tests.utils import TracerTestCase from tests.utils import snapshot @@ -399,34 +398,6 @@ def test_split_by_domain_includes_port(self): 
assert s.error == 1 assert s.service == "httpbin.org:8000" - def test_200_ot(self): - """OpenTracing version of test_200.""" - - ot_tracer = init_tracer("urllib3_svc", self.tracer) - - with ot_tracer.start_active_span("urllib3_get"): - out = self.http.request("GET", URL_200) - assert out.status == 200 - - spans = self.pop_spans() - assert len(spans) == 2 - - ot_span, dd_span = spans - - # confirm the parenting - assert ot_span.parent_id is None - assert dd_span.parent_id == ot_span.span_id - - assert ot_span.name == "urllib3_get" - assert ot_span.service == "urllib3_svc" - - assert dd_span.get_tag(http.METHOD) == "GET" - assert dd_span.get_tag(http.STATUS_CODE) == "200" - assert dd_span.get_tag("component") == "urllib3" - assert dd_span.get_tag("span.kind") == "client" - assert dd_span.error == 0 - assert dd_span.span_type == "http" - def test_request_and_response_headers(self): """Tests the headers are added as tag when the headers are whitelisted""" self.http.request("GET", URL_200, headers={"my-header": "my_value"}) diff --git a/tests/contrib/valkey/test_valkey.py b/tests/contrib/valkey/test_valkey.py index 867f435939e..447ae932771 100644 --- a/tests/contrib/valkey/test_valkey.py +++ b/tests/contrib/valkey/test_valkey.py @@ -4,12 +4,10 @@ import pytest import valkey -import ddtrace from ddtrace._trace.pin import Pin from ddtrace.contrib.internal.valkey.patch import patch from ddtrace.contrib.internal.valkey.patch import unpatch from ddtrace.internal.schema import DEFAULT_SPAN_SERVICE_NAME -from tests.opentracer.utils import init_tracer from tests.utils import DummyTracer from tests.utils import TracerTestCase from tests.utils import snapshot @@ -238,39 +236,6 @@ def test_patch_unpatch(self): assert spans, spans assert len(spans) == 1 - def test_opentracing(self): - """Ensure OpenTracing works with valkey.""" - ot_tracer = init_tracer("valkey_svc", self.tracer) - - with ot_tracer.start_active_span("valkey_get"): - us = self.r.get("cheese") - assert us is None - - 
spans = self.get_spans() - assert len(spans) == 2 - ot_span, dd_span = spans - - # confirm the parenting - assert ot_span.parent_id is None - assert dd_span.parent_id == ot_span.span_id - - assert ot_span.name == "valkey_get" - assert ot_span.service == "valkey_svc" - - self.assert_is_measured(dd_span) - assert dd_span.service == "valkey" - assert dd_span.name == "valkey.command" - assert dd_span.span_type == "valkey" - assert dd_span.error == 0 - assert dd_span.get_metric("out.valkey_db") == 0 - assert dd_span.get_tag("out.host") == "localhost" - assert dd_span.get_tag("valkey.raw_command") == "GET cheese" - assert dd_span.get_tag("component") == "valkey" - assert dd_span.get_tag("span.kind") == "client" - assert dd_span.get_tag("db.system") == "valkey" - assert dd_span.get_metric("valkey.args_length") == 2 - assert dd_span.resource == "GET" - def test_valkey_rowcount_all_keys_valid(self): self.r.set("key1", "value1") @@ -540,15 +505,6 @@ def test_patch_unpatch(self): assert spans, spans assert len(spans) == 1 - @snapshot() - def test_opentracing(self): - """Ensure OpenTracing works with valkey.""" - ot_tracer = init_tracer("valkey_svc", ddtrace.tracer) - - with ot_tracer.start_active_span("valkey_get"): - us = self.r.get("cheese") - assert us is None - @TracerTestCase.run_in_subprocess(env_overrides=dict(DD_SERVICE="mysvc")) @snapshot() def test_user_specified_service(self): diff --git a/tests/contrib/vertica/test_vertica.py b/tests/contrib/vertica/test_vertica.py index 1f3becdb8bf..e9404efeda1 100644 --- a/tests/contrib/vertica/test_vertica.py +++ b/tests/contrib/vertica/test_vertica.py @@ -10,9 +10,8 @@ from ddtrace.contrib.internal.vertica.patch import unpatch from ddtrace.internal.compat import is_wrapted from ddtrace.internal.schema import DEFAULT_SPAN_SERVICE_NAME -from ddtrace.settings._config import _deepmerge +from ddtrace.internal.settings._config import _deepmerge from tests.contrib.config import VERTICA_CONFIG -from tests.opentracer.utils import 
init_tracer from tests.utils import DummyTracer from tests.utils import TracerTestCase from tests.utils import assert_is_measured @@ -367,37 +366,6 @@ def test_copy(self): assert spans[1].name == "vertica.query" assert spans[1].resource == "COMMIT;" - def test_opentracing(self): - """Ensure OpenTracing works with vertica.""" - conn, cur = self.test_conn - - ot_tracer = init_tracer("vertica_svc", self.test_tracer) - - with ot_tracer.start_active_span("vertica_execute"): - cur.execute("INSERT INTO {} (a, b) VALUES (1, 'aa');".format(TEST_TABLE)) - conn.close() - - spans = self.test_tracer.pop() - assert len(spans) == 2 - ot_span, dd_span = spans - - # confirm the parenting - assert ot_span.parent_id is None - assert dd_span.parent_id == ot_span.span_id - - assert_is_measured(dd_span) - assert dd_span.service == "vertica" - assert dd_span.span_type == "sql" - assert dd_span.name == "vertica.query" - assert dd_span.get_metric("db.row_count") == -1 - query = "INSERT INTO test_table (a, b) VALUES (1, 'aa');" - assert dd_span.resource == query - assert dd_span.get_tag("out.host") == "127.0.0.1" - assert dd_span.get_tag("span.kind") == "client" - assert dd_span.get_metric("network.destination.port") == 5433 - assert dd_span.get_tag("db.system") == "vertica" - assert dd_span.get_tag("component") == "vertica" - @TracerTestCase.run_in_subprocess(env_overrides=dict(DD_SERVICE="mysvc"), use_pytest=True) @pytest.mark.usefixtures("test_tracer", "test_conn") def test_user_specified_service_default(self): diff --git a/tests/contrib/yaaredis/test_yaaredis.py b/tests/contrib/yaaredis/test_yaaredis.py index 472612f11ca..d3fa5743b70 100644 --- a/tests/contrib/yaaredis/test_yaaredis.py +++ b/tests/contrib/yaaredis/test_yaaredis.py @@ -9,7 +9,6 @@ from ddtrace.contrib.internal.yaaredis.patch import patch from ddtrace.contrib.internal.yaaredis.patch import unpatch from ddtrace.internal.compat import is_wrapted -from tests.opentracer.utils import init_tracer from tests.utils import 
override_config from ..config import REDIS_CONFIG @@ -149,18 +148,6 @@ async def test_service_name_config(tracer, test_spans, traced_yaaredis): assert test_spans.spans[0].service == service -@pytest.mark.asyncio -async def test_opentracing(tracer, snapshot_context, traced_yaaredis): - """Ensure OpenTracing works with redis.""" - - with snapshot_context(): - pin = Pin.get_from(traced_yaaredis) - ot_tracer = init_tracer("redis_svc", pin.tracer) - - with ot_tracer.start_active_span("redis_get"): - await traced_yaaredis.get("cheese") - - @pytest.mark.parametrize( "service_schema", [ diff --git a/tests/debugging/exception/test_replay.py b/tests/debugging/exception/test_replay.py index e7f5f1e1a66..65055504d27 100644 --- a/tests/debugging/exception/test_replay.py +++ b/tests/debugging/exception/test_replay.py @@ -8,7 +8,7 @@ from ddtrace.debugging._exception import replay from ddtrace.internal.packages import _third_party_packages from ddtrace.internal.rate_limiter import BudgetRateLimiterWithJitter as RateLimiter -from ddtrace.settings.exception_replay import ExceptionReplayConfig +from ddtrace.internal.settings.exception_replay import ExceptionReplayConfig from tests.debugging.mocking import exception_replay from tests.utils import TracerTestCase from tests.utils import override_third_party_packages @@ -57,18 +57,6 @@ def test_exception_replay_config_enabled(monkeypatch): assert er_config.enabled -def test_exception_replay_config_enabled_deprecated(monkeypatch): - monkeypatch.setenv("DD_EXCEPTION_DEBUGGING_ENABLED", "1") - - er_config = ExceptionReplayConfig() - assert er_config.enabled - - monkeypatch.setenv("DD_EXCEPTION_REPLAY_ENABLED", "false") - - er_config = ExceptionReplayConfig() - assert not er_config.enabled - - def test_exception_chain_ident(): def a(v, d=None): if not v: diff --git a/tests/debugging/exploration/_config.py b/tests/debugging/exploration/_config.py index 5307125e2f8..1d6b7f8dfcd 100644 --- a/tests/debugging/exploration/_config.py +++ 
b/tests/debugging/exploration/_config.py @@ -5,7 +5,7 @@ from warnings import warn from ddtrace.debugging._probe.model import CaptureLimits -from ddtrace.settings._core import DDConfig +from ddtrace.internal.settings._core import DDConfig def parse_venv(value: str) -> t.Optional[Path]: diff --git a/tests/debugging/mocking.py b/tests/debugging/mocking.py index 23381e06c71..746f9cd2691 100644 --- a/tests/debugging/mocking.py +++ b/tests/debugging/mocking.py @@ -20,7 +20,7 @@ from ddtrace.debugging._signal.collector import SignalCollector from ddtrace.debugging._signal.snapshot import Snapshot from ddtrace.debugging._uploader import SignalUploader -from ddtrace.settings._core import DDConfig +from ddtrace.internal.settings._core import DDConfig from tests.debugging.probe.test_status import DummyProbeStatusLogger diff --git a/tests/debugging/suitespec.yml b/tests/debugging/suitespec.yml index 54ea27c76e2..2ed214de3ba 100644 --- a/tests/debugging/suitespec.yml +++ b/tests/debugging/suitespec.yml @@ -2,8 +2,8 @@ components: debugging: - ddtrace/debugging/* - - ddtrace/settings/dynamic_instrumentation.py - - ddtrace/settings/exception_replay.py + - ddtrace/internal/settings/dynamic_instrumentation.py + - ddtrace/internal/settings/exception_replay.py suites: debugger: parallelism: 1 diff --git a/tests/debugging/test_config.py b/tests/debugging/test_config.py index 490d64fda0f..07c8ef7d739 100644 --- a/tests/debugging/test_config.py +++ b/tests/debugging/test_config.py @@ -2,9 +2,9 @@ import pytest +from ddtrace.internal.settings._agent import config as agent_config +from ddtrace.internal.settings.dynamic_instrumentation import DynamicInstrumentationConfig from ddtrace.internal.utils.formats import parse_tags_str -from ddtrace.settings._agent import config as agent_config -from ddtrace.settings.dynamic_instrumentation import DynamicInstrumentationConfig from ddtrace.version import get_version from tests.utils import override_env @@ -12,22 +12,22 @@ @contextmanager def 
debugger_config(**kwargs): with override_env(kwargs, replace_os_env=True): - from ddtrace.settings._config import Config - import ddtrace.settings.dynamic_instrumentation + from ddtrace.internal.settings._config import Config + import ddtrace.internal.settings.dynamic_instrumentation - old_config = ddtrace.settings.dynamic_instrumentation.ddconfig - old_di_config = ddtrace.settings.dynamic_instrumentation.config.__dict__ + old_config = ddtrace.internal.settings.dynamic_instrumentation.ddconfig + old_di_config = ddtrace.internal.settings.dynamic_instrumentation.config.__dict__ try: - ddtrace.settings.dynamic_instrumentation.ddconfig = Config() + ddtrace.internal.settings.dynamic_instrumentation.ddconfig = Config() new_config = DynamicInstrumentationConfig() - ddtrace.settings.dynamic_instrumentation.config.__dict__ = new_config.__dict__ + ddtrace.internal.settings.dynamic_instrumentation.config.__dict__ = new_config.__dict__ - yield ddtrace.settings.dynamic_instrumentation.config + yield ddtrace.internal.settings.dynamic_instrumentation.config finally: - ddtrace.settings.dynamic_instrumentation.config.__dict__ = old_di_config - ddtrace.settings.dynamic_instrumentation.ddconfig = old_config + ddtrace.internal.settings.dynamic_instrumentation.config.__dict__ = old_di_config + ddtrace.internal.settings.dynamic_instrumentation.ddconfig = old_config def test_tags(): diff --git a/tests/debugging/test_debugger.py b/tests/debugging/test_debugger.py index 54335f7a972..6ab1c33bc83 100644 --- a/tests/debugging/test_debugger.py +++ b/tests/debugging/test_debugger.py @@ -743,7 +743,7 @@ def test_debugger_function_probe_duration(duration): def test_debugger_condition_eval_then_rate_limit(stuff): - with debugger(upload_flush_interval=float("inf")) as d: + with debugger(upload_interval_seconds=float("inf")) as d: d.add_probes( create_snapshot_line_probe( probe_id="foo", @@ -771,7 +771,7 @@ def test_debugger_condition_eval_then_rate_limit(stuff): def 
test_debugger_condition_eval_error_get_reported_once(stuff): - with debugger(upload_flush_interval=float("inf")) as d: + with debugger(upload_interval_seconds=float("inf")) as d: d.add_probes( create_snapshot_line_probe( probe_id="foo", @@ -889,7 +889,7 @@ def __init__(self, age, name): def test_debugger_log_line_probe_generate_messages(stuff): - with debugger(upload_flush_interval=float("inf")) as d: + with debugger(upload_interval_seconds=float("inf")) as d: d.add_probes( create_log_line_probe( probe_id="foo", @@ -1073,7 +1073,7 @@ def test_debugger_function_probe_ordering(self): def test_debugger_modified_probe(stuff): - with debugger(upload_flush_interval=float("inf")) as d: + with debugger(upload_interval_seconds=float("inf")) as d: d.add_probes( create_log_line_probe( probe_id="foo", @@ -1131,7 +1131,7 @@ def test_debugger_continue_wrapping_after_first_failure(): def test_debugger_redacted_identifiers(): import tests.submod.stuff as stuff - with debugger(upload_flush_interval=float("inf")) as d: + with debugger(upload_interval_seconds=float("inf")) as d: d.add_probes( create_snapshot_line_probe( probe_id="foo", @@ -1230,7 +1230,7 @@ def test_debugger_redacted_identifiers(): def test_debugger_redaction_excluded_identifiers(): import tests.submod.stuff as stuff - with debugger(upload_flush_interval=float("inf"), redaction_excluded_identifiers=frozenset(["token"])) as d: + with debugger(upload_interval_seconds=float("inf"), redaction_excluded_identifiers=frozenset(["token"])) as d: d.add_probes( create_snapshot_line_probe( probe_id="foo", diff --git a/tests/errortracking/suitespec.yml b/tests/errortracking/suitespec.yml index 40e391dd63a..bfb10577e72 100644 --- a/tests/errortracking/suitespec.yml +++ b/tests/errortracking/suitespec.yml @@ -2,7 +2,7 @@ components: errortracking: - ddtrace/errortracking/* - - ddtrace/settings/errortracking.py + - ddtrace/internal/settings/errortracking.py suites: errortracker: parallelism: 1 diff --git 
a/tests/integration/test_integration.py b/tests/integration/test_integration.py index 94a8b95a77b..fa1b90ab9df 100644 --- a/tests/integration/test_integration.py +++ b/tests/integration/test_integration.py @@ -817,7 +817,7 @@ def test_logging_during_tracer_init_succeeds_when_debug_logging_and_logs_injecti ), "stderr should not contain any exception logs" -@pytest.mark.skipif(PYTHON_VERSION_INFO < (3, 9), reason="Python 3.8 throws a deprecation warning") +@pytest.mark.skipif(PYTHON_VERSION_INFO < (3, 10), reason="ddtrace under Python 3.9 is deprecated") def test_no_warnings_when_Wall(): env = os.environ.copy() # Have to disable sqlite3 as coverage uses it on process shutdown diff --git a/tests/integration/test_integration_civisibility.py b/tests/integration/test_integration_civisibility.py index db44d67e536..974b8a0f7cc 100644 --- a/tests/integration/test_integration_civisibility.py +++ b/tests/integration/test_integration_civisibility.py @@ -10,7 +10,7 @@ from ddtrace.internal.evp_proxy.constants import EVP_PROXY_AGENT_ENDPOINT from ddtrace.internal.evp_proxy.constants import EVP_SUBDOMAIN_HEADER_EVENT_VALUE from ddtrace.internal.evp_proxy.constants import EVP_SUBDOMAIN_HEADER_NAME -from ddtrace.settings._agent import config as agent_config +from ddtrace.internal.settings._agent import config as agent_config from tests.ci_visibility.util import _get_default_civisibility_ddconfig from tests.utils import override_env diff --git a/tests/integration/test_integration_snapshots.py b/tests/integration/test_integration_snapshots.py index 8bb70cf70a6..2047386cbe2 100644 --- a/tests/integration/test_integration_snapshots.py +++ b/tests/integration/test_integration_snapshots.py @@ -1,4 +1,5 @@ # -*- coding: utf-8 -*- +import logging import os import mock @@ -216,42 +217,54 @@ def test_wrong_span_name_type_not_sent(): ({"env": "my-env", "tag1": "some_str_1", "tag2": "some_str_2", "tag3": [1, 2, 3]}), ({"env": "test-env", b"tag1": {"wrong_type": True}, b"tag2": "some_str_2", 
b"tag3": "some_str_3"}), ({"env": "my-test-env", "😐": "some_str_1", b"tag2": "some_str_2", "unicode": 12345}), + ({"env": set([1, 2, 3])}), + ({"env": None}), + ({"env": True}), + ({"env": 1.0}), ], ) @pytest.mark.parametrize("encoding", ["v0.4", "v0.5"]) def test_trace_with_wrong_meta_types_not_sent(encoding, meta, monkeypatch): """Wrong meta types should raise TypeErrors during encoding and fail to send to the agent.""" with override_global_config(dict(_trace_api=encoding)): - with mock.patch("ddtrace._trace.span.log") as log: + logger = logging.getLogger("ddtrace.internal._encoding") + with mock.patch.object(logger, "warning") as log_warning: with tracer.trace("root") as root: root._meta = meta for _ in range(299): with tracer.trace("child") as child: child._meta = meta - log.exception.assert_called_once_with("error closing trace") + + assert log_warning.call_count == 300 + log_warning.assert_called_with( + "[span ID %d] Meta key %r has non-string value %r, skipping", mock.ANY, mock.ANY, mock.ANY + ) @pytest.mark.parametrize( - "metrics", + "metrics,expected_warning_count", [ - ({"num1": 12345, "num2": 53421, "num3": 1, "num4": "not-a-number"}), - ({b"num1": 123.45, b"num2": [1, 2, 3], b"num3": 11.0, b"num4": 1.20}), - ({"😐": "123.45", b"num2": "1", "num3": {"is_number": False}, "num4": "12345"}), + ({"num1": 12345, "num2": 53421, "num3": 1, "num4": "not-a-number"}, 300), + ({b"num1": 123.45, b"num2": [1, 2, 3], b"num3": 11.0, b"num4": 1.20}, 300), + ({"😐": "123.45", b"num2": "1", "num3": {"is_number": False}, "num4": "12345"}, 1200), ], ) @pytest.mark.parametrize("encoding", ["v0.4", "v0.5"]) -@snapshot() -@pytest.mark.xfail -def test_trace_with_wrong_metrics_types_not_sent(encoding, metrics, monkeypatch): +def test_trace_with_wrong_metrics_types_not_sent(encoding, metrics, expected_warning_count): """Wrong metric types should raise TypeErrors during encoding and fail to send to the agent.""" with override_global_config(dict(_trace_api=encoding)): - with 
mock.patch("ddtrace._trace.span.log") as log: + logger = logging.getLogger("ddtrace.internal._encoding") + with mock.patch.object(logger, "warning") as log_warning: with tracer.trace("root") as root: root._metrics = metrics for _ in range(299): with tracer.trace("child") as child: child._metrics = metrics - log.exception.assert_called_once_with("error closing trace") + + assert log_warning.call_count == expected_warning_count + log_warning.assert_called_with( + "[span ID %d] Metric key %r has non-numeric value %r, skipping", mock.ANY, mock.ANY, mock.ANY + ) @pytest.mark.subprocess() @@ -331,21 +344,6 @@ def test_encode_span_with_large_string_attributes(encoding): span.set_tag(key="c" * 25001, value="d" * 2000) -@pytest.mark.parametrize("encoding", ["v0.4", "v0.5"]) -@pytest.mark.snapshot() -def test_encode_span_with_large_bytes_attributes(encoding): - from ddtrace import tracer - - with override_global_config(dict(_trace_api=encoding)): - name = b"a" * 25000 - resource = b"b" * 25001 - key = b"c" * 25001 - value = b"d" * 2000 - - with tracer.trace(name=name, resource=resource) as span: - span.set_tag(key=key, value=value) - - @pytest.mark.parametrize("encoding", ["v0.4", "v0.5"]) @pytest.mark.snapshot() def test_encode_span_with_large_unicode_string_attributes(encoding): diff --git a/tests/internal/bytecode_injection/framework_injection/_config.py b/tests/internal/bytecode_injection/framework_injection/_config.py index 5af91592ece..370861ddc5e 100644 --- a/tests/internal/bytecode_injection/framework_injection/_config.py +++ b/tests/internal/bytecode_injection/framework_injection/_config.py @@ -4,7 +4,7 @@ import typing as t from warnings import warn -from ddtrace.settings._core import DDConfig +from ddtrace.internal.settings._core import DDConfig def parse_venv(value: str) -> t.Optional[Path]: diff --git a/tests/internal/crashtracker/test_crashtracker.py b/tests/internal/crashtracker/test_crashtracker.py index 412d29d1fbb..002ac16e564 100644 --- 
a/tests/internal/crashtracker/test_crashtracker.py +++ b/tests/internal/crashtracker/test_crashtracker.py @@ -36,7 +36,7 @@ def test_crashtracker_config_bytes(): import pytest from ddtrace.internal.core import crashtracking - from ddtrace.settings.crashtracker import config as crashtracker_config + from ddtrace.internal.settings.crashtracker import config as crashtracker_config from tests.internal.crashtracker.utils import read_files # Delete the stdout and stderr files if they exist diff --git a/tests/internal/crashtracker/utils.py b/tests/internal/crashtracker/utils.py index e26d05788dc..7ef880e1410 100644 --- a/tests/internal/crashtracker/utils.py +++ b/tests/internal/crashtracker/utils.py @@ -17,7 +17,7 @@ def start_crashtracker(port: int, stdout: Optional[str] = None, stderr: Optional ret = False try: from ddtrace.internal.core import crashtracking - from ddtrace.settings.crashtracker import config as crashtracker_config + from ddtrace.internal.settings.crashtracker import config as crashtracker_config crashtracker_config.debug_url = "http://localhost:%d" % port crashtracker_config.stdout_filename = stdout diff --git a/tests/internal/peer_service/test_processor.py b/tests/internal/peer_service/test_processor.py index d45b97e204e..2d5aeebea9e 100644 --- a/tests/internal/peer_service/test_processor.py +++ b/tests/internal/peer_service/test_processor.py @@ -6,7 +6,7 @@ from ddtrace.constants import SPAN_KIND from ddtrace.ext import SpanKind from ddtrace.internal.peer_service.processor import PeerServiceProcessor -from ddtrace.settings.peer_service import PeerServiceConfig +from ddtrace.internal.settings.peer_service import PeerServiceConfig from ddtrace.trace import Span @@ -96,7 +96,7 @@ def test_peer_service_enablement(schema_peer_enabled): schema_version, env_enabled, expected = schema_peer_enabled with mock.patch.dict(os.environ, {"DD_TRACE_PEER_SERVICE_DEFAULTS_ENABLED": env_enabled}): - with mock.patch("ddtrace.settings.peer_service.SCHEMA_VERSION", 
schema_version): + with mock.patch("ddtrace.internal.settings.peer_service.SCHEMA_VERSION", schema_version): assert PeerServiceConfig().set_defaults_enabled == expected @@ -104,7 +104,7 @@ def test_peer_service_enablement(schema_peer_enabled): def test_tracer_hooks(): from ddtrace.constants import SPAN_KIND from ddtrace.ext import SpanKind - from ddtrace.settings.peer_service import PeerServiceConfig + from ddtrace.internal.settings.peer_service import PeerServiceConfig from tests.utils import DummyTracer peer_service_config = PeerServiceConfig() diff --git a/tests/internal/service_name/test_inferred_base_service.py b/tests/internal/service_name/test_inferred_base_service.py index 0883be2aa22..ac323223564 100644 --- a/tests/internal/service_name/test_inferred_base_service.py +++ b/tests/internal/service_name/test_inferred_base_service.py @@ -8,9 +8,9 @@ import pytest -from ddtrace.settings._inferred_base_service import PythonDetector -from ddtrace.settings._inferred_base_service import _module_exists -from ddtrace.settings._inferred_base_service import detect_service +from ddtrace.internal.settings._inferred_base_service import PythonDetector +from ddtrace.internal.settings._inferred_base_service import _module_exists +from ddtrace.internal.settings._inferred_base_service import detect_service @pytest.fixture diff --git a/tests/internal/symbol_db/test_config.py b/tests/internal/symbol_db/test_config.py index ebaa713e0c6..a369cdab369 100644 --- a/tests/internal/symbol_db/test_config.py +++ b/tests/internal/symbol_db/test_config.py @@ -1,4 +1,4 @@ -from ddtrace.settings.symbol_db import SymbolDatabaseConfig +from ddtrace.internal.settings.symbol_db import SymbolDatabaseConfig def test_symbol_db_includes_pattern(monkeypatch): diff --git a/tests/internal/test_database_monitoring.py b/tests/internal/test_database_monitoring.py index 171118c7791..6d7e41a2dc8 100644 --- a/tests/internal/test_database_monitoring.py +++ b/tests/internal/test_database_monitoring.py @@ -2,8 
+2,8 @@ import pytest +from ddtrace.internal.settings import _database_monitoring from ddtrace.propagation._database_monitoring import default_sql_injector -from ddtrace.settings import _database_monitoring from tests.utils import override_env diff --git a/tests/internal/test_module.py b/tests/internal/test_module.py index 8ae177387bc..27de6444ef0 100644 --- a/tests/internal/test_module.py +++ b/tests/internal/test_module.py @@ -429,7 +429,7 @@ def ns_hook(module): ModuleWatchdog.uninstall() -@pytest.mark.skipif(PYTHON_VERSION_INFO < (3, 9), reason="Python 3.8 throws a deprecation warning") +@pytest.mark.skipif(PYTHON_VERSION_INFO < (3, 10), reason="ddtrace under Python 3.9 is deprecated") @pytest.mark.subprocess( ddtrace_run=True, env=dict( diff --git a/tests/internal/test_settings.py b/tests/internal/test_settings.py index bea7c6c1989..2e8369e1ea5 100644 --- a/tests/internal/test_settings.py +++ b/tests/internal/test_settings.py @@ -6,7 +6,7 @@ from ddtrace._trace.product import apm_tracing_rc from ddtrace.internal.remoteconfig import Payload -from ddtrace.settings._config import Config +from ddtrace.internal.settings._config import Config from tests.utils import remote_config_build_payload as build_payload @@ -606,7 +606,7 @@ def test_remoteconfig_header_tags(ddtrace_run_python_code_in_subprocess): def test_config_public_properties_and_methods(): # Regression test to prevent unexpected changes to public attributes in Config # By default most attributes should be private and set via Environment Variables - from ddtrace.settings._config import Config + from ddtrace.internal.settings._config import Config public_attrs = set() c = Config() diff --git a/tests/llmobs/suitespec.yml b/tests/llmobs/suitespec.yml index d3741825585..d7f40c7bb47 100644 --- a/tests/llmobs/suitespec.yml +++ b/tests/llmobs/suitespec.yml @@ -4,8 +4,6 @@ components: - ddtrace/contrib/internal/anthropic/* google_adk: - ddtrace/contrib/internal/google_adk/* - google_generativeai: - - 
ddtrace/contrib/internal/google_generativeai/* google_genai: - ddtrace/contrib/internal/google_genai/* vertexai: @@ -55,19 +53,6 @@ suites: - tests/contrib/google_adk/* runner: riot snapshot: true - google_generativeai: - parallelism: 1 - paths: - - '@bootstrap' - - '@core' - - '@tracing' - - '@contrib' - - '@google_generativeai' - - '@llmobs' - - tests/contrib/google_generativeai/* - - tests/snapshots/tests.contrib.google_generativeai.* - runner: riot - snapshot: true google_genai: parallelism: 1 paths: diff --git a/tests/llmobs/test_llmobs_eval_metric_agent_writer.py b/tests/llmobs/test_llmobs_eval_metric_agent_writer.py index fd05ec64c6f..bfd678ac12c 100644 --- a/tests/llmobs/test_llmobs_eval_metric_agent_writer.py +++ b/tests/llmobs/test_llmobs_eval_metric_agent_writer.py @@ -3,9 +3,9 @@ import mock from ddtrace.internal.evp_proxy.constants import EVP_PROXY_AGENT_BASE_PATH +from ddtrace.internal.settings._agent import config as agent_config from ddtrace.llmobs._constants import EVAL_ENDPOINT from ddtrace.llmobs._writer import LLMObsEvalMetricWriter -from ddtrace.settings._agent import config as agent_config from tests.llmobs.test_llmobs_eval_metric_agentless_writer import _categorical_metric_event from tests.llmobs.test_llmobs_eval_metric_agentless_writer import _score_metric_event diff --git a/tests/llmobs/test_llmobs_service.py b/tests/llmobs/test_llmobs_service.py index 884a79a44d8..38af3aa8e28 100644 --- a/tests/llmobs/test_llmobs_service.py +++ b/tests/llmobs/test_llmobs_service.py @@ -1930,8 +1930,8 @@ def test_submit_evaluation_invalid_reasoning_raises_warning(llmobs, mock_llmobs_ mock_llmobs_logs.warning.assert_called_once_with("Failed to parse reasoning. 
reasoning must be a string.") -def test_submit_evaluation_for_enqueues_writer_with_reasoning(llmobs, mock_llmobs_eval_metric_writer): - llmobs.submit_evaluation_for( +def test_submit_evaluation_enqueues_writer_with_reasoning(llmobs, mock_llmobs_eval_metric_writer): + llmobs.submit_evaluation( span={"span_id": "123", "trace_id": "456"}, label="toxicity", metric_type="categorical", @@ -1955,7 +1955,7 @@ def test_submit_evaluation_for_enqueues_writer_with_reasoning(llmobs, mock_llmob ) ) mock_llmobs_eval_metric_writer.reset() - llmobs.submit_evaluation_for( + llmobs.submit_evaluation( span={"span_id": "123", "trace_id": "456"}, label="toxicity", metric_type="categorical", diff --git a/tests/llmobs/test_llmobs_span_agent_writer.py b/tests/llmobs/test_llmobs_span_agent_writer.py index ea8597e3db8..291a04841d3 100644 --- a/tests/llmobs/test_llmobs_span_agent_writer.py +++ b/tests/llmobs/test_llmobs_span_agent_writer.py @@ -3,9 +3,9 @@ import mock from ddtrace.internal.evp_proxy.constants import EVP_PROXY_AGENT_BASE_PATH +from ddtrace.internal.settings._agent import config as agent_config from ddtrace.llmobs._constants import SPAN_ENDPOINT from ddtrace.llmobs._writer import LLMObsSpanWriter -from ddtrace.settings._agent import config as agent_config from tests.llmobs._utils import _chat_completion_event from tests.llmobs._utils import _completion_event from tests.llmobs._utils import _large_event diff --git a/tests/opentelemetry/test_span.py b/tests/opentelemetry/test_span.py index 3af064de795..61f9c6e0359 100644 --- a/tests/opentelemetry/test_span.py +++ b/tests/opentelemetry/test_span.py @@ -272,4 +272,3 @@ def test_otel_span_interoperability(oteltracer): otel_span_clone = Span(otel_span_og._ddspan) # Ensure all properties are consistent assert otel_span_clone.__dict__ == otel_span_og.__dict__ - assert otel_span_clone._ddspan._pprint() == otel_span_og._ddspan._pprint() diff --git a/tests/opentracer/__init__.py b/tests/opentracer/__init__.py deleted file mode 100644 
index e69de29bb2d..00000000000 diff --git a/tests/opentracer/conftest.py b/tests/opentracer/conftest.py deleted file mode 100644 index 09a4dad886c..00000000000 --- a/tests/opentracer/conftest.py +++ /dev/null @@ -1,60 +0,0 @@ -""" -pytest local plugin used to automatically make the following fixtures -available for all tests in this directory - -https://docs.pytest.org/en/latest/writing_plugins.html#testing-plugins -""" -import pytest - -from ddtrace.opentracer import Tracer as OTTracer -from ddtrace.opentracer import set_global_tracer -from tests.utils import DummyTracer -from tests.utils import TracerSpanContainer - - -@pytest.fixture() -def ot_tracer_factory(): - """Fixture which returns an opentracer ready to use for testing.""" - - def make_ot_tracer(service_name="my_svc", config=None, scope_manager=None, context_provider=None): - config = config or {} - tracer = OTTracer(service_name=service_name, config=config, scope_manager=scope_manager) - - # similar to how we test the ddtracer, use a dummy tracer - dd_tracer = DummyTracer() - if context_provider: - dd_tracer.context_provider = context_provider - - # attach the dummy tracer to the opentracer - tracer._dd_tracer = dd_tracer - return tracer - - return make_ot_tracer - - -@pytest.fixture() -def ot_tracer(ot_tracer_factory): - """Fixture for a default opentracer.""" - return ot_tracer_factory() - - -@pytest.fixture -def test_spans(ot_tracer): - container = TracerSpanContainer(ot_tracer._dd_tracer) - yield container - container.reset() - - -@pytest.fixture() -def global_tracer(ot_tracer): - """A function similar to one OpenTracing users would write to initialize - their OpenTracing tracer. 
- """ - set_global_tracer(ot_tracer) - - return ot_tracer - - -@pytest.fixture() -def dd_tracer(ot_tracer): - return ot_tracer._dd_tracer diff --git a/tests/opentracer/core/__init__.py b/tests/opentracer/core/__init__.py deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/tests/opentracer/core/test_dd_compatibility.py b/tests/opentracer/core/test_dd_compatibility.py deleted file mode 100644 index c68b5ca6d6c..00000000000 --- a/tests/opentracer/core/test_dd_compatibility.py +++ /dev/null @@ -1,180 +0,0 @@ -import opentracing -from opentracing import Format - -import ddtrace -from ddtrace.opentracer.span_context import SpanContext - - -class TestTracerCompatibility(object): - """Ensure that our opentracer produces results in the underlying ddtracer.""" - - def test_ottracer_uses_global_ddtracer(self): - """Ensure that the opentracer will by default use the global ddtracer - as its underlying Datadog tracer. - """ - tracer = ddtrace.opentracer.Tracer() - assert tracer._dd_tracer is ddtrace.tracer - - def test_ot_dd_global_tracers(self, global_tracer): - """Ensure our test function opentracer_init() prep""" - ot_tracer = global_tracer - dd_tracer = global_tracer._dd_tracer - - # check all the global references - assert ot_tracer is opentracing.tracer - assert ot_tracer._dd_tracer is dd_tracer - assert dd_tracer is ddtrace.tracer - - def test_ot_dd_nested_trace(self, ot_tracer, dd_tracer, test_spans): - """Ensure intertwined usage of the opentracer and ddtracer.""" - - with ot_tracer.start_span("my_ot_span") as ot_span: - with dd_tracer.trace("my_dd_span") as dd_span: - pass - spans = test_spans.pop() - assert len(spans) == 2 - - # confirm the ordering - assert spans[1] is ot_span._dd_span - assert spans[0] is dd_span - - # check the parenting - assert spans[0].parent_id is None - assert spans[1].parent_id is None - - def test_dd_ot_nested_trace(self, ot_tracer, dd_tracer, test_spans): - """Ensure intertwined usage of the opentracer and ddtracer.""" - 
with dd_tracer.trace("my_dd_span") as dd_span: - with ot_tracer.start_span("my_ot_span") as ot_span: - pass - spans = test_spans.pop() - assert len(spans) == 2 - - # confirm the ordering - assert spans[0] is dd_span - assert spans[1] is ot_span._dd_span - - # check the parenting - assert spans[0].parent_id is None - assert spans[1].parent_id is spans[0].span_id - - def test_ot_dd_ot_dd_nested_trace(self, ot_tracer, dd_tracer, test_spans): - """Ensure intertwined usage of the opentracer and ddtracer.""" - with ot_tracer.start_active_span("ot_span") as ot_scope: - with dd_tracer.trace("dd_span") as dd_span: - with ot_tracer.start_active_span("ot_span2") as ot_scope2: - with dd_tracer.trace("dd_span2") as dd_span2: - pass - - spans = test_spans.pop() - assert len(spans) == 4 - - spans = {span.name: span for span in spans} - assert spans["ot_span"] == ot_scope.span._dd_span - assert spans["dd_span"] == dd_span - assert spans["ot_span2"] == ot_scope2.span._dd_span - assert spans["dd_span2"] == dd_span2 - - # check the parenting - assert spans["ot_span"].parent_id is None - assert spans["dd_span"].parent_id is spans["ot_span"].span_id - assert spans["ot_span2"].parent_id is spans["dd_span"].span_id - assert spans["dd_span2"].parent_id is spans["ot_span2"].span_id - - def test_ot_ot_dd_ot_dd_nested_trace_active(self, ot_tracer, dd_tracer, test_spans): - """Ensure intertwined usage of the opentracer and ddtracer.""" - with ot_tracer.start_active_span("my_ot_span") as ot_scope: - with ot_tracer.start_active_span("my_ot_span") as ot_scope2: - with dd_tracer.trace("my_dd_span") as dd_span: - with ot_tracer.start_active_span("my_ot_span") as ot_scope3: - with dd_tracer.trace("my_dd_span") as dd_span2: - pass - - spans = test_spans.pop() - assert len(spans) == 5 - - # confirm the ordering - assert spans[0] is ot_scope.span._dd_span - assert spans[1] is ot_scope2.span._dd_span - assert spans[2] is dd_span - assert spans[3] is ot_scope3.span._dd_span - assert spans[4] is dd_span2 
- - # check the parenting - assert spans[0].parent_id is None - assert spans[1].parent_id == spans[0].span_id - assert spans[2].parent_id == spans[1].span_id - assert spans[3].parent_id == spans[2].span_id - assert spans[4].parent_id == spans[3].span_id - - def test_consecutive_trace(self, ot_tracer, dd_tracer, test_spans): - """Ensure consecutive usage of the opentracer and ddtracer.""" - with ot_tracer.start_active_span("my_ot_span") as ot_scope: - pass - - with dd_tracer.trace("my_dd_span") as dd_span: - pass - - with ot_tracer.start_active_span("my_ot_span") as ot_scope2: - pass - - with dd_tracer.trace("my_dd_span") as dd_span2: - pass - - spans = test_spans.pop() - assert len(spans) == 4 - - # confirm the ordering - assert spans[0] is ot_scope.span._dd_span - assert spans[1] is dd_span - assert spans[2] is ot_scope2.span._dd_span - assert spans[3] is dd_span2 - - # check the parenting - assert spans[0].parent_id is None - assert spans[1].parent_id is None - assert spans[2].parent_id is None - assert spans[3].parent_id is None - - def test_ddtrace_wrapped_fn(self, ot_tracer, dd_tracer, test_spans): - """Ensure ddtrace wrapped functions work with the opentracer""" - - @dd_tracer.wrap() - def fn(): - with ot_tracer.start_span("ot_span_inner"): - pass - - with ot_tracer.start_active_span("ot_span_outer"): - fn() - - spans = test_spans.pop() - assert len(spans) == 3 - - # confirm the ordering - assert spans[0].name == "ot_span_outer" - assert spans[1].name == "tests.opentracer.core.test_dd_compatibility.fn" - assert spans[2].name == "ot_span_inner" - - # check the parenting - assert spans[0].parent_id is None - assert spans[1].parent_id is spans[0].span_id - assert spans[2].parent_id is spans[1].span_id - - def test_distributed_trace_propagation(self, ot_tracer, dd_tracer, test_spans): - """Ensure that a propagated span context is properly activated.""" - span_ctx = SpanContext(trace_id=123, span_id=456) - carrier = {} - ot_tracer.inject(span_ctx, 
Format.HTTP_HEADERS, carrier) - - # extract should activate the span so that a subsequent start_span - # will inherit from the propagated span context - ot_tracer.extract(Format.HTTP_HEADERS, carrier) - - with dd_tracer.trace("test") as span: - pass - - assert span.parent_id == 456 - assert span.trace_id == 123 - - spans = test_spans.pop() - assert len(spans) == 1 diff --git a/tests/opentracer/core/test_span.py b/tests/opentracer/core/test_span.py deleted file mode 100644 index ea2fc3bbbc1..00000000000 --- a/tests/opentracer/core/test_span.py +++ /dev/null @@ -1,163 +0,0 @@ -import pytest - -from ddtrace.opentracer.span import Span -from tests.utils import DummyTracer - - -@pytest.fixture -def nop_tracer(): - from ddtrace.opentracer import Tracer - - tracer = Tracer(service_name="mysvc", config={}) - # use the same test tracer used by the primary tests - tracer._tracer = DummyTracer() - return tracer - - -@pytest.fixture -def nop_span_ctx(): - from ddtrace.constants import AUTO_KEEP - from ddtrace.opentracer.span_context import SpanContext - - return SpanContext(sampling_priority=AUTO_KEEP) - - -@pytest.fixture -def nop_span(nop_tracer, nop_span_ctx): - return Span(nop_tracer, nop_span_ctx, "my_op_name") - - -class TestSpan(object): - """Test the Datadog OpenTracing Span implementation.""" - - def test_init(self, nop_tracer, nop_span_ctx): - """Very basic test for skeleton code""" - span = Span(nop_tracer, nop_span_ctx, "my_op_name") - assert not span.finished - - def test_tags(self, nop_span): - """Set a tag and get it back.""" - r = nop_span.set_tag("test", 23) - assert nop_span._get_metric("test") == 23 - assert r is nop_span - - def test_set_baggage(self, nop_span): - """Test setting baggage.""" - r = nop_span.set_baggage_item("test", 23) - assert r is nop_span - - r = nop_span.set_baggage_item("1", 1).set_baggage_item("2", 2) - assert r is nop_span - - def test_get_baggage(self, nop_span): - """Test setting and getting baggage.""" - # test a single item - 
nop_span.set_baggage_item("test", 23) - assert int(nop_span.get_baggage_item("test")) == 23 - - # test multiple items - nop_span.set_baggage_item("1", "1").set_baggage_item("2", 2) - assert int(nop_span.get_baggage_item("test")) == 23 - assert nop_span.get_baggage_item("1") == "1" - assert int(nop_span.get_baggage_item("2")) == 2 - - def test_log_kv(self, nop_span): - """Ensure logging values doesn't break anything.""" - # just log a bunch of values - nop_span.log_kv({"myval": 2}) - nop_span.log_kv({"myval2": 3}) - nop_span.log_kv({"myval3": 5}) - nop_span.log_kv({"myval": 2}) - - def test_log_dd_kv(self, nop_span): - """Ensure keys that can be handled by our impl. are indeed handled.""" - import traceback - - from ddtrace.constants import ERROR_MSG - from ddtrace.constants import ERROR_STACK - from ddtrace.constants import ERROR_TYPE - - stack_trace = str(traceback.format_stack()) - nop_span.log_kv( - { - "event": "error", - "error": 3, - "message": "my error message", - "stack": stack_trace, - } - ) - - # Ensure error flag is set... 
- assert nop_span._dd_span.error - # ...and that error tags are set with the correct key - assert nop_span._get_tag(ERROR_STACK) == stack_trace - assert nop_span._get_tag(ERROR_MSG) == "my error message" - assert nop_span._get_metric(ERROR_TYPE) == 3 - - def test_operation_name(self, nop_span): - """Sanity check for setting the operation name.""" - # just try setting the operation name - r = nop_span.set_operation_name("new_op_name") - assert nop_span._dd_span.name == "new_op_name" - assert r is nop_span - - def test_context_manager(self, nop_span): - """Test the span context manager.""" - import time - - assert not nop_span.finished - # run the context manager but since the span has not been added - # to the span context, we will not get any traces - with nop_span: - time.sleep(0.005) - - # span should be finished when the context manager exits - assert nop_span.finished - - # there should be no traces (see above comment) - spans = nop_span.tracer._tracer.pop() - assert len(spans) == 0 - - def test_immutable_span_context(self, nop_span): - """Ensure span contexts are immutable.""" - before_ctx = nop_span._context - nop_span.set_baggage_item("key", "value") - after_ctx = nop_span._context - # should be different contexts - assert before_ctx is not after_ctx - - -class TestSpanCompatibility(object): - """Ensure our opentracer spans features correspond to datadog span features.""" - - def test_set_tag(self, nop_span): - nop_span.set_tag("test", 2) - assert nop_span._get_metric("test") == 2 - - def test_tag_resource_name(self, nop_span): - nop_span.set_tag("resource.name", "myresource") - assert nop_span._dd_span.resource == "myresource" - - def test_tag_span_type(self, nop_span): - nop_span.set_tag("span.type", "db") - assert nop_span._dd_span.span_type == "db" - - def test_tag_service_name(self, nop_span): - nop_span.set_tag("service.name", "mysvc234") - assert nop_span._dd_span.service == "mysvc234" - - def test_tag_db_statement(self, nop_span): - 
nop_span.set_tag("db.statement", "SELECT * FROM USERS") - assert nop_span._dd_span.resource == "SELECT * FROM USERS" - - def test_tag_peer_hostname(self, nop_span): - nop_span.set_tag("peer.hostname", "peername") - assert nop_span._dd_span.get_tag("out.host") == "peername" - - def test_tag_peer_port(self, nop_span): - nop_span.set_tag("peer.port", 55555) - assert nop_span._get_metric("network.destination.port") == 55555 - - def test_tag_sampling_priority(self, nop_span): - nop_span.set_tag("sampling.priority", "2") - assert nop_span._dd_span.context.sampling_priority == "2" diff --git a/tests/opentracer/core/test_span_context.py b/tests/opentracer/core/test_span_context.py deleted file mode 100644 index 2c7038fe327..00000000000 --- a/tests/opentracer/core/test_span_context.py +++ /dev/null @@ -1,38 +0,0 @@ -from ddtrace.opentracer.span_context import SpanContext - - -class TestSpanContext(object): - def test_init(self): - """Make sure span context creation is fine.""" - span_ctx = SpanContext() - assert span_ctx - - def test_baggage(self): - """Ensure baggage passed is the resulting baggage of the span context.""" - baggage = { - "some": "stuff", - } - - span_ctx = SpanContext(baggage=baggage) - - assert span_ctx.baggage == baggage - - def test_with_baggage_item(self): - """Should allow immutable extension of new span contexts.""" - baggage = { - "1": 1, - } - - first_ctx = SpanContext(baggage=baggage) - - second_ctx = first_ctx.with_baggage_item("2", 2) - - assert "2" not in first_ctx.baggage - assert second_ctx.baggage is not first_ctx.baggage - - def test_span_context_immutable_baggage(self): - """Ensure that two different span contexts do not share baggage.""" - ctx1 = SpanContext() - ctx1.set_baggage_item("test", 3) - ctx2 = SpanContext() - assert "test" not in ctx2._baggage diff --git a/tests/opentracer/core/test_tracer.py b/tests/opentracer/core/test_tracer.py deleted file mode 100644 index 5d9f11ab74f..00000000000 --- a/tests/opentracer/core/test_tracer.py 
+++ /dev/null @@ -1,585 +0,0 @@ -import time - -import mock -import opentracing -from opentracing import Format -from opentracing import InvalidCarrierException -from opentracing import UnsupportedFormatException -from opentracing import child_of -import pytest - -import ddtrace -from ddtrace.constants import AUTO_KEEP -from ddtrace.opentracer import Tracer -from ddtrace.opentracer import set_global_tracer -from ddtrace.opentracer.span_context import SpanContext -from ddtrace.propagation.http import HTTP_HEADER_TRACE_ID -from ddtrace.settings.exceptions import ConfigException - - -class TestTracerConfig(object): - def test_config(self): - """Test the configuration of the tracer""" - config = {"enabled": True} - tracer = Tracer(service_name="myservice", config=config) - - assert tracer._service_name == "myservice" - assert tracer._dd_tracer.enabled is True - - def test_no_service_name(self): - """A service_name should be generated if one is not provided.""" - tracer = Tracer() - assert tracer._service_name in {"pytest.py", "pytest", "__main__.py"} - - def test_multiple_tracer_configs(self): - """Ensure that a tracer config is a copy of the passed config.""" - config = {"enabled": True} - - tracer1 = Tracer(service_name="serv1", config=config) - assert tracer1._service_name == "serv1" - - config["enabled"] = False - tracer2 = Tracer(service_name="serv2", config=config) - - # Ensure tracer1's config was not mutated - assert tracer1._service_name == "serv1" - assert tracer2._service_name == "serv2" - - def test_invalid_config_key(self): - """A config with an invalid key should raise a ConfigException.""" - - config = {"enabeld": False} # codespell:ignore - - # No debug flag should not raise an error - tracer = Tracer(service_name="mysvc", config=config) - - # With debug flag should raise an error - config["debug"] = True - with pytest.raises(ConfigException) as ce_info: - tracer = Tracer(config=config) - assert "enabeld" in str(ce_info) # codespell:ignore - assert 
tracer is not None - - # Test with multiple incorrect keys - config["setttings"] = {} - with pytest.raises(ConfigException) as ce_info: - tracer = Tracer(service_name="mysvc", config=config) - assert ["enabeld", "setttings"] in str(ce_info) # codespell:ignore - assert tracer is not None - - def test_global_tags(self): - """Global tags should be passed from the opentracer to the tracer.""" - config = { - "global_tags": { - "tag1": "value1", - "tag2": 2, - }, - } - - tracer = Tracer(service_name="mysvc", config=config) - with tracer.start_span("myop") as span: - # global tags should be attached to generated all datadog spans - assert span._dd_span.get_tag("tag1") == "value1" - assert span._dd_span.get_metric("tag2") == 2 - - with tracer.start_span("myop2") as span2: - assert span2._dd_span.get_tag("tag1") == "value1" - assert span2._dd_span.get_metric("tag2") == 2 - - -class TestTracer(object): - def test_start_span(self, ot_tracer, test_spans): - """Start and finish a span.""" - with ot_tracer.start_span("myop") as span: - pass - - # span should be finished when the context manager exits - assert span.finished - - spans = test_spans.get_spans() - assert len(spans) == 1 - - def test_start_span_references(self, ot_tracer, test_spans): - """Start a span using references.""" - - with ot_tracer.start_span("one", references=[child_of()]): - pass - - spans = test_spans.pop() - assert spans[0].parent_id is None - - root = ot_tracer.start_active_span("root") - # create a child using a parent reference that is not the context parent - with ot_tracer.start_active_span("one"): - with ot_tracer.start_active_span("two", references=[child_of(root.span)]): - pass - root.close() - - spans = test_spans.pop() - assert spans[1].parent_id == spans[0].span_id - assert spans[2].parent_id == spans[0].span_id - - def test_start_span_custom_start_time(self, ot_tracer): - """Start a span with a custom start time.""" - t = 100 - with mock.patch("ddtrace._trace.span.Time.time_ns") as time: - 
time.return_value = 102 * 1e9 - with ot_tracer.start_span("myop", start_time=t) as span: - pass - - assert span._dd_span.start == t - assert span._dd_span.duration == 2 - - def test_start_span_with_spancontext(self, ot_tracer, test_spans): - """Start and finish a span using a span context as the child_of - reference. - """ - with ot_tracer.start_span("myop") as span: - with ot_tracer.start_span("myop", child_of=span.context) as span2: - pass - - # span should be finished when the context manager exits - assert span.finished - assert span2.finished - - spans = test_spans.pop() - assert len(spans) == 2 - - # ensure proper parenting - assert spans[1].parent_id is spans[0].span_id - - def test_start_span_with_tags(self, ot_tracer): - """Create a span with initial tags.""" - tags = {"key": "value", "key2": "value2"} - with ot_tracer.start_span("myop", tags=tags) as span: - pass - - assert span._dd_span.get_tag("key") == "value" - assert span._dd_span.get_tag("key2") == "value2" - - def test_start_span_with_resource_name_tag(self, ot_tracer): - """Create a span with the tag to set the resource name""" - tags = {"resource.name": "value", "key2": "value2"} - with ot_tracer.start_span("myop", tags=tags) as span: - pass - - # Span resource name should be set to tag value, and should not get set as - # a tag on the underlying span. - assert span._dd_span.resource == "value" - assert span._dd_span.get_tag("resource.name") is None - - # Other tags are set as normal - assert span._dd_span.get_tag("key2") == "value2" - - def test_start_active_span_multi_child(self, ot_tracer, test_spans): - """Start and finish multiple child spans. - This should ensure that child spans can be created 2 levels deep. 
- """ - with ot_tracer.start_active_span("myfirstop") as scope1: - time.sleep(0.009) - with ot_tracer.start_active_span("mysecondop") as scope2: - time.sleep(0.007) - with ot_tracer.start_active_span("mythirdop") as scope3: - time.sleep(0.005) - - # spans should be finished when the context manager exits - assert scope1.span.finished - assert scope2.span.finished - assert scope3.span.finished - - spans = test_spans.pop() - - # check spans are captured in the trace - assert scope1.span._dd_span is spans[0] - assert scope2.span._dd_span is spans[1] - assert scope3.span._dd_span is spans[2] - - # ensure proper parenting - assert spans[1].parent_id is spans[0].span_id - assert spans[2].parent_id is spans[1].span_id - - # sanity check a lower bound on the durations - assert spans[0].duration >= 0.009 + 0.007 + 0.005 - assert spans[1].duration >= 0.007 + 0.005 - assert spans[2].duration >= 0.005 - - def test_start_active_span_multi_child_siblings(self, ot_tracer, test_spans): - """Start and finish multiple span at the same level. - This should test to ensure a parent can have multiple child spans at the - same level. 
- """ - with ot_tracer.start_active_span("myfirstop") as scope1: - time.sleep(0.009) - with ot_tracer.start_active_span("mysecondop") as scope2: - time.sleep(0.007) - with ot_tracer.start_active_span("mythirdop") as scope3: - time.sleep(0.005) - - # spans should be finished when the context manager exits - assert scope1.span.finished - assert scope2.span.finished - assert scope3.span.finished - - spans = test_spans.pop() - - # check spans are captured in the trace - assert scope1.span._dd_span is spans[0] - assert scope2.span._dd_span is spans[1] - assert scope3.span._dd_span is spans[2] - - # ensure proper parenting - assert spans[1].parent_id is spans[0].span_id - assert spans[2].parent_id is spans[0].span_id - - # sanity check a lower bound on the durations - assert spans[0].duration >= 0.009 + 0.007 + 0.005 - assert spans[1].duration >= 0.007 - assert spans[2].duration >= 0.005 - - def test_start_span_manual_child_of(self, ot_tracer, test_spans): - """Start spans without using a scope manager. - Spans should be created without parents since there will be no call - for the active span. - """ - root = ot_tracer.start_span("zero") - - with ot_tracer.start_span("one", child_of=root): - with ot_tracer.start_span("two", child_of=root): - with ot_tracer.start_span("three", child_of=root): - pass - root.finish() - - spans = test_spans.pop() - - assert spans[0].parent_id is None - # ensure each child span is a child of root - assert spans[1].parent_id is root._dd_span.span_id - assert spans[2].parent_id is root._dd_span.span_id - assert spans[3].parent_id is root._dd_span.span_id - assert spans[0].trace_id == spans[1].trace_id and spans[1].trace_id == spans[2].trace_id - - def test_start_span_no_active_span(self, ot_tracer, test_spans): - """Start spans without using a scope manager. - Spans should be created without parents since there will be no call - for the active span. 
- """ - with ot_tracer.start_span("one", ignore_active_span=True): - with ot_tracer.start_span("two", ignore_active_span=True): - pass - with ot_tracer.start_span("three", ignore_active_span=True): - pass - - spans = test_spans.pop() - - # ensure each span does not have a parent - assert spans[0].parent_id is None - assert spans[1].parent_id is None - assert spans[2].parent_id is None - # and that each span is a new trace - assert ( - spans[0].trace_id != spans[1].trace_id - and spans[1].trace_id != spans[2].trace_id - and spans[0].trace_id != spans[2].trace_id - ) - - def test_start_active_span_child_finish_after_parent(self, ot_tracer, test_spans): - """Start a child span and finish it after its parent.""" - span1 = ot_tracer.start_active_span("one").span - span2 = ot_tracer.start_active_span("two").span - span1.finish() - time.sleep(0.005) - span2.finish() - - spans = test_spans.pop() - assert len(spans) == 2 - assert spans[0].parent_id is None - assert spans[1].parent_id is span1._dd_span.span_id - assert spans[1].duration > spans[0].duration - - def test_start_span_multi_intertwined(self, ot_tracer, test_spans): - """Start multiple spans at the top level intertwined. - Alternate calling between two traces. 
- """ - import threading - - # synchronize threads with a threading event object - event = threading.Event() - - def trace_one(): - _id = 11 - with ot_tracer.start_active_span(str(_id)): - _id += 1 - with ot_tracer.start_active_span(str(_id)): - _id += 1 - with ot_tracer.start_active_span(str(_id)): - pass - event.set() - - def trace_two(): - _id = 21 - event.wait() - with ot_tracer.start_active_span(str(_id)): - _id += 1 - with ot_tracer.start_active_span(str(_id)): - _id += 1 - with ot_tracer.start_active_span(str(_id)): - pass - - # the ordering should be - # t1.span1/t2.span1, t2.span2, t1.span2, t1.span3, t2.span3 - t1 = threading.Thread(target=trace_one) - t2 = threading.Thread(target=trace_two) - - t1.start() - t2.start() - # wait for threads to finish - t1.join() - t2.join() - - spans = test_spans.pop() - - # trace_one will finish before trace_two so its spans should be written - # before the spans from trace_two, let's confirm this - assert spans[0].name == "11" - assert spans[1].name == "12" - assert spans[2].name == "13" - assert spans[3].name == "21" - assert spans[4].name == "22" - assert spans[5].name == "23" - - # next let's ensure that each span has the correct parent: - # trace_one - assert spans[0].parent_id is None - assert spans[1].parent_id is spans[0].span_id - assert spans[2].parent_id is spans[1].span_id - # trace_two - assert spans[3].parent_id is None - assert spans[4].parent_id is spans[3].span_id - assert spans[5].parent_id is spans[3].span_id - - # finally we should ensure that the trace_ids are reasonable - # trace_one - assert spans[0].trace_id == spans[1].trace_id and spans[1].trace_id == spans[2].trace_id - # traces should be independent - assert spans[2].trace_id != spans[3].trace_id - # trace_two - assert spans[3].trace_id == spans[4].trace_id and spans[4].trace_id == spans[5].trace_id - - def test_start_active_span(self, ot_tracer, test_spans): - with ot_tracer.start_active_span("one") as scope: - pass - - assert 
scope.span._dd_span.name == "one" - assert scope.span.finished - spans = test_spans.pop() - assert spans - - def test_start_active_span_finish_on_close(self, ot_tracer, test_spans): - with ot_tracer.start_active_span("one", finish_on_close=False) as scope: - pass - - assert scope.span._dd_span.name == "one" - assert not scope.span.finished - spans = test_spans.pop() - assert not spans - scope.span.finish() - - def test_start_active_span_nested(self, ot_tracer): - """Test the active span of multiple nested calls of start_active_span.""" - with ot_tracer.start_active_span("one") as outer_scope: - assert ot_tracer.active_span == outer_scope.span - with ot_tracer.start_active_span("two") as inner_scope: - assert ot_tracer.active_span == inner_scope.span - with ot_tracer.start_active_span("three") as innest_scope: # why isn't it innest? innermost so verbose - assert ot_tracer.active_span == innest_scope.span - with ot_tracer.start_active_span("two") as inner_scope: - assert ot_tracer.active_span == inner_scope.span - assert ot_tracer.active_span == outer_scope.span - assert ot_tracer.active_span is None - - def test_start_active_span_trace(self, ot_tracer, test_spans): - """Test the active span of multiple nested calls of start_active_span.""" - with ot_tracer.start_active_span("one") as outer_scope: - outer_scope.span.set_tag("outer", 2) - with ot_tracer.start_active_span("two") as inner_scope: - inner_scope.span.set_tag("inner", 3) - with ot_tracer.start_active_span("two") as inner_scope: - inner_scope.span.set_tag("inner", 3) - with ot_tracer.start_active_span("three") as innest_scope: - innest_scope.span.set_tag("innerest", 4) - - spans = test_spans.pop() - - assert spans[0].parent_id is None - assert spans[1].parent_id is spans[0].span_id - assert spans[2].parent_id is spans[0].span_id - assert spans[3].parent_id is spans[2].span_id - - def test_interleave(self, dd_tracer, ot_tracer, test_spans): - with ot_tracer.start_active_span("ot_root_1", 
ignore_active_span=True): - with dd_tracer.trace("dd_child"): - with ot_tracer.start_active_span("ot_child_1"): - pass - with ot_tracer.start_active_span("ot_child_2"): - pass - - spans = test_spans.pop() - assert len(spans) == 4 - assert spans[0].name == "ot_root_1" and spans[0].parent_id is None - assert spans[1].name == "dd_child" and spans[1].parent_id == spans[0].span_id - assert spans[2].name == "ot_child_1" and spans[2].parent_id == spans[1].span_id - assert spans[3].name == "ot_child_2" and spans[3].parent_id == spans[0].span_id - - def test_active_span(self, ot_tracer, test_spans): - with ot_tracer._dd_tracer.trace("dd") as span: - assert ot_tracer.active_span is not None - assert ot_tracer.active_span._dd_span is span - - -@pytest.fixture -def nop_span_ctx(): - return SpanContext(sampling_priority=AUTO_KEEP) - - -class TestTracerSpanContextPropagation(object): - """Test the injection and extraction of a span context from a tracer.""" - - def test_invalid_format(self, ot_tracer, nop_span_ctx): - """An invalid format should raise an UnsupportedFormatException.""" - # test inject - with pytest.raises(UnsupportedFormatException): - ot_tracer.inject(nop_span_ctx, None, {}) - - # test extract - with pytest.raises(UnsupportedFormatException): - ot_tracer.extract(None, {}) - - def test_inject_invalid_carrier(self, ot_tracer, nop_span_ctx): - """Only dicts should be supported as a carrier.""" - with pytest.raises(InvalidCarrierException): - ot_tracer.inject(nop_span_ctx, Format.HTTP_HEADERS, None) - - def test_extract_invalid_carrier(self, ot_tracer): - """Only dicts should be supported as a carrier.""" - with pytest.raises(InvalidCarrierException): - ot_tracer.extract(Format.HTTP_HEADERS, None) - - def test_http_headers_base(self, ot_tracer): - """extract should undo inject for http headers.""" - - span_ctx = SpanContext(trace_id=123, span_id=456) - carrier = {} - - ot_tracer.inject(span_ctx, Format.HTTP_HEADERS, carrier) - assert len(carrier.keys()) > 0 - - 
ext_span_ctx = ot_tracer.extract(Format.HTTP_HEADERS, carrier) - assert ext_span_ctx._dd_context.trace_id == 123 - assert ext_span_ctx._dd_context.span_id == 456 - - def test_http_headers_baggage(self, ot_tracer): - """extract should undo inject for http headers.""" - span_ctx = SpanContext(trace_id=123, span_id=456, baggage={"test": 4, "test2": "string"}) - carrier = {} - - ot_tracer.inject(span_ctx, Format.HTTP_HEADERS, carrier) - assert len(carrier.keys()) > 0 - - ext_span_ctx = ot_tracer.extract(Format.HTTP_HEADERS, carrier) - assert ext_span_ctx._dd_context.trace_id == 123 - assert ext_span_ctx._dd_context.span_id == 456 - assert ext_span_ctx.baggage == span_ctx.baggage - - def test_empty_propagated_context(self, ot_tracer): - """An empty propagated context should not raise a - SpanContextCorruptedException when extracted. - """ - carrier = {} - ot_tracer.extract(Format.HTTP_HEADERS, carrier) - - def test_text(self, ot_tracer): - """extract should undo inject for http headers""" - span_ctx = SpanContext(trace_id=123, span_id=456, baggage={"test": 4, "test2": "string"}) - carrier = {} - - ot_tracer.inject(span_ctx, Format.TEXT_MAP, carrier) - assert len(carrier.keys()) > 0 - - ext_span_ctx = ot_tracer.extract(Format.TEXT_MAP, carrier) - assert ext_span_ctx._dd_context.trace_id == 123 - assert ext_span_ctx._dd_context.span_id == 456 - assert ext_span_ctx.baggage == span_ctx.baggage - - def test_corrupted_propagated_context(self, ot_tracer): - """Corrupted context should raise a SpanContextCorruptedException.""" - span_ctx = SpanContext(trace_id=123, span_id=456, baggage={"test": 4, "test2": "string"}) - carrier = {} - - ot_tracer.inject(span_ctx, Format.TEXT_MAP, carrier) - assert len(carrier.keys()) > 0 - - # manually alter a key in the carrier baggage - del carrier[HTTP_HEADER_TRACE_ID] - corrupted_key = HTTP_HEADER_TRACE_ID[2:] - carrier[corrupted_key] = 123 - - ot_tracer.extract(Format.TEXT_MAP, carrier) - - def test_immutable_span_context(self, ot_tracer): 
- """Span contexts should be immutable.""" - with ot_tracer.start_span("root") as root: - ctx_before = root.context - root.set_baggage_item("test", 2) - assert ctx_before is not root.context - with ot_tracer.start_span("child") as level1: - with ot_tracer.start_span("child") as level2: - pass - assert root.context is not level1.context - assert level2.context is not level1.context - assert level2.context is not root.context - - def test_inherited_baggage(self, ot_tracer): - """Baggage should be inherited by child spans.""" - with ot_tracer.start_active_span("root") as root: - # this should be passed down to the child - root.span.set_baggage_item("root", 1) - root.span.set_baggage_item("root2", 1) - with ot_tracer.start_active_span("child") as level1: - level1.span.set_baggage_item("level1", 1) - with ot_tracer.start_active_span("child") as level2: - level2.span.set_baggage_item("level2", 1) - # ensure immutability - assert level1.span.context is not root.span.context - assert level2.span.context is not level1.span.context - - # level1 should have inherited the baggage of root - assert level1.span.get_baggage_item("root") - assert level1.span.get_baggage_item("root2") - - # level2 should have inherited the baggage of both level1 and level2 - assert level2.span.get_baggage_item("root") - assert level2.span.get_baggage_item("root2") - assert level2.span.get_baggage_item("level1") - assert level2.span.get_baggage_item("level2") - - -class TestTracerCompatibility(object): - """Ensure that our opentracer produces results in the underlying datadog tracer.""" - - def test_required_dd_fields(self): - """Ensure required fields needed for successful tracing are possessed - by the underlying datadog tracer. 
- """ - # a service name is required - tracer = Tracer("service") - with tracer.start_span("my_span") as span: - assert span._dd_span.service - - -def test_set_global_tracer(): - """Sanity check for set_global_tracer""" - my_tracer = Tracer("service") - set_global_tracer(my_tracer) - - assert opentracing.tracer is my_tracer - assert ddtrace.tracer is my_tracer._dd_tracer diff --git a/tests/opentracer/core/test_utils.py b/tests/opentracer/core/test_utils.py deleted file mode 100644 index 37c9e9dd305..00000000000 --- a/tests/opentracer/core/test_utils.py +++ /dev/null @@ -1,17 +0,0 @@ -from opentracing.scope_managers import ThreadLocalScopeManager -from opentracing.scope_managers.asyncio import AsyncioScopeManager - -import ddtrace -from ddtrace.opentracer.utils import get_context_provider_for_scope_manager - - -class TestOpentracerUtils(object): - def test_get_context_provider_for_scope_manager_thread(self): - scope_manager = ThreadLocalScopeManager() - ctx_prov = get_context_provider_for_scope_manager(scope_manager) - assert isinstance(ctx_prov, ddtrace._trace.provider.DefaultContextProvider) - - def test_get_context_provider_for_asyncio_scope_manager(self): - scope_manager = AsyncioScopeManager() - ctx_prov = get_context_provider_for_scope_manager(scope_manager) - assert isinstance(ctx_prov, ddtrace._trace.provider.DefaultContextProvider) diff --git a/tests/opentracer/test_tracer_asyncio.py b/tests/opentracer/test_tracer_asyncio.py deleted file mode 100644 index 35ece48c126..00000000000 --- a/tests/opentracer/test_tracer_asyncio.py +++ /dev/null @@ -1,143 +0,0 @@ -import asyncio - -import pytest - -from ddtrace.constants import ERROR_MSG - - -@pytest.mark.asyncio -def test_trace_coroutine(test_spans): - # it should use the task context when invoked in a coroutine - with test_spans.tracer.start_span("coroutine"): - pass - - traces = test_spans.pop_traces() - - assert len(traces) == 1 - assert len(traces[0]) == 1 - assert traces[0][0].name == "coroutine" - - 
-@pytest.mark.asyncio -async def test_trace_multiple_coroutines(ot_tracer, test_spans): - # if multiple coroutines have nested tracing, they must belong - # to the same trace - - async def coro(): - # another traced coroutine - with ot_tracer.start_active_span("coroutine_2"): - return 42 - - with ot_tracer.start_active_span("coroutine_1"): - value = await coro() - - # the coroutine has been called correctly - assert value == 42 - # a single trace has been properly reported - traces = test_spans.pop_traces() - assert len(traces) == 1 - assert len(traces[0]) == 2 - assert traces[0][0].name == "coroutine_1" - assert traces[0][1].name == "coroutine_2" - # the parenting is correct - assert traces[0][0] == traces[0][1]._parent - assert traces[0][0].trace_id == traces[0][1].trace_id - - -@pytest.mark.asyncio -async def test_exception(ot_tracer, test_spans): - async def f1(): - with ot_tracer.start_span("f1"): - raise Exception("f1 error") - - with pytest.raises(Exception, match="f1 error"): - await f1() - - traces = test_spans.pop_traces() - assert len(traces) == 1 - spans = traces[0] - assert len(spans) == 1 - span = spans[0] - assert span.error == 1 - assert span.get_tag(ERROR_MSG) == "f1 error" - assert "Exception: f1 error" in span.get_tag("error.stack") - - -@pytest.mark.asyncio -async def test_trace_multiple_calls(ot_tracer, test_spans): - # create multiple futures so that we expect multiple - # traces instead of a single one (helper not used) - async def coro(): - # another traced coroutine - with ot_tracer.start_span("coroutine"): - await asyncio.sleep(0.01) - - futures = [asyncio.ensure_future(coro()) for x in range(10)] - for future in futures: - await future - - traces = test_spans.pop_traces() - - assert len(traces) == 10 - assert len(traces[0]) == 1 - assert traces[0][0].name == "coroutine" - - -@pytest.mark.asyncio -async def test_trace_multiple_coroutines_ot_dd(ot_tracer): - """ - Ensure we can trace from opentracer to ddtracer across asyncio - context 
switches. - """ - - # if multiple coroutines have nested tracing, they must belong - # to the same trace - async def coro(): - # another traced coroutine - with ot_tracer._dd_tracer.trace("coroutine_2"): - return 42 - - with ot_tracer.start_active_span("coroutine_1"): - value = await coro() - - # the coroutine has been called correctly - assert value == 42 - # a single trace has been properly reported - traces = ot_tracer._dd_tracer.pop_traces() - assert len(traces) == 1 - assert len(traces[0]) == 2 - assert traces[0][0].name == "coroutine_1" - assert traces[0][1].name == "coroutine_2" - # the parenting is correct - assert traces[0][0] == traces[0][1]._parent - assert traces[0][0].trace_id == traces[0][1].trace_id - - -@pytest.mark.asyncio -async def test_trace_multiple_coroutines_dd_ot(ot_tracer): - """ - Ensure we can trace from ddtracer to opentracer across asyncio - context switches. - """ - - # if multiple coroutines have nested tracing, they must belong - # to the same trace - async def coro(): - # another traced coroutine - with ot_tracer.start_span("coroutine_2"): - return 42 - - with ot_tracer._dd_tracer.trace("coroutine_1"): - value = await coro() - - # the coroutine has been called correctly - assert value == 42 - # a single trace has been properly reported - traces = ot_tracer._dd_tracer.pop_traces() - assert len(traces) == 1 - assert len(traces[0]) == 2 - assert traces[0][0].name == "coroutine_1" - assert traces[0][1].name == "coroutine_2" - # the parenting is correct - assert traces[0][0] == traces[0][1]._parent - assert traces[0][0].trace_id == traces[0][1].trace_id diff --git a/tests/opentracer/test_tracer_gevent.py b/tests/opentracer/test_tracer_gevent.py deleted file mode 100644 index 320b39ee997..00000000000 --- a/tests/opentracer/test_tracer_gevent.py +++ /dev/null @@ -1,193 +0,0 @@ -import gevent -from opentracing.scope_managers.gevent import GeventScopeManager -import pytest - -from ddtrace.contrib.internal.gevent.patch import patch -from 
ddtrace.contrib.internal.gevent.patch import unpatch - - -@pytest.fixture() -def ot_tracer(ot_tracer_factory): - """Fixture providing an opentracer configured for gevent usage.""" - # patch gevent - patch() - yield ot_tracer_factory("gevent_svc", {}, GeventScopeManager()) - # unpatch gevent - unpatch() - - -class TestTracerGevent(object): - """Converted Gevent tests for the regular tracer. - - Ensures that greenlets are properly traced when using - the opentracer. - """ - - def test_no_threading(self, ot_tracer): - with ot_tracer.start_span("span") as span: - span.set_tag("tag", "value") - - assert span.finished - - def test_greenlets(self, ot_tracer, test_spans): - def f(): - with ot_tracer.start_span("f") as span: - gevent.sleep(0.04) - span.set_tag("f", "yes") - - def g(): - with ot_tracer.start_span("g") as span: - gevent.sleep(0.03) - span.set_tag("g", "yes") - - with ot_tracer.start_active_span("root"): - gevent.joinall([gevent.spawn(f), gevent.spawn(g)]) - - traces = test_spans.pop_traces() - assert len(traces) == 1 - assert len(traces[0]) == 3 - - def test_trace_greenlet(self, ot_tracer, test_spans): - # a greenlet can be traced using the trace API - def greenlet(): - with ot_tracer.start_span("greenlet"): - pass - - gevent.spawn(greenlet).join() - traces = test_spans.pop_traces() - assert len(traces) == 1 - assert len(traces[0]) == 1 - assert traces[0][0].name == "greenlet" - - def test_trace_later_greenlet(self, ot_tracer, test_spans): - # a greenlet can be traced using the trace API - def greenlet(): - with ot_tracer.start_span("greenlet"): - pass - - gevent.spawn_later(0.01, greenlet).join() - traces = test_spans.pop_traces() - - assert len(traces) == 1 - assert len(traces[0]) == 1 - assert traces[0][0].name == "greenlet" - - def test_trace_concurrent_calls(self, ot_tracer, test_spans): - # create multiple futures so that we expect multiple - # traces instead of a single one - def greenlet(): - with ot_tracer.start_span("greenlet"): - gevent.sleep(0.01) 
- - jobs = [gevent.spawn(greenlet) for x in range(100)] - gevent.joinall(jobs) - - traces = test_spans.pop_traces() - - assert len(traces) == 100 - assert len(traces[0]) == 1 - assert traces[0][0].name == "greenlet" - - def test_trace_concurrent_spawn_later_calls(self, ot_tracer, test_spans): - # create multiple futures so that we expect multiple - # traces instead of a single one, even if greenlets - # are delayed - def greenlet(): - with ot_tracer.start_span("greenlet"): - gevent.sleep(0.01) - - jobs = [gevent.spawn_later(0.01, greenlet) for x in range(100)] - gevent.joinall(jobs) - - traces = test_spans.pop_traces() - assert len(traces) == 100 - assert len(traces[0]) == 1 - assert traces[0][0].name == "greenlet" - - -class TestTracerGeventCompatibility(object): - """Ensure the opentracer works in tandem with the ddtracer and gevent.""" - - def test_trace_spawn_multiple_greenlets_multiple_traces_ot_parent(self, ot_tracer, dd_tracer, test_spans): - """ - Copy of gevent test with the same name but testing with mixed usage of - the opentracer and datadog tracers. - - Uses an opentracer span as the parent span. 
- """ - - # multiple greenlets must be part of the same trace - def entrypoint(): - with ot_tracer.start_active_span("greenlet.main"): - jobs = [gevent.spawn(green_1), gevent.spawn(green_2)] - gevent.joinall(jobs) - - def green_1(): - with dd_tracer.trace("greenlet.worker") as span: - span.set_tag("worker_id", "1") - gevent.sleep(0.01) - - def green_2(): - with ot_tracer.start_span("greenlet.worker") as span: - span.set_tag("worker_id", "2") - gevent.sleep(0.01) - - gevent.spawn(entrypoint).join() - traces = test_spans.pop_traces() - assert len(traces) == 1 - assert len(traces[0]) == 3 - parent_span = traces[0][0] - worker_1 = traces[0][1] - worker_2 = traces[0][2] - # check spans data and hierarchy - assert parent_span.name == "greenlet.main" - assert worker_1.get_tag("worker_id") == "1" - assert worker_1.name == "greenlet.worker" - assert worker_1.resource == "greenlet.worker" - assert worker_1.parent_id == parent_span.span_id - assert worker_2.get_tag("worker_id") == "2" - assert worker_2.name == "greenlet.worker" - assert worker_2.resource == "greenlet.worker" - assert worker_2.parent_id == parent_span.span_id - - def test_trace_spawn_multiple_greenlets_multiple_traces_dd_parent(self, ot_tracer, dd_tracer, test_spans): - """ - Copy of gevent test with the same name but testing with mixed usage of - the opentracer and datadog tracers. - - Uses an opentracer span as the parent span. 
- """ - - # multiple greenlets must be part of the same trace - def entrypoint(): - with dd_tracer.trace("greenlet.main"): - jobs = [gevent.spawn(green_1), gevent.spawn(green_2)] - gevent.joinall(jobs) - - def green_1(): - with ot_tracer.start_span("greenlet.worker") as span: - span.set_tag("worker_id", "1") - gevent.sleep(0.01) - - def green_2(): - with dd_tracer.trace("greenlet.worker") as span: - span.set_tag("worker_id", "2") - gevent.sleep(0.01) - - gevent.spawn(entrypoint).join() - traces = test_spans.pop_traces() - assert len(traces) == 1 - assert len(traces[0]) == 3 - parent_span = traces[0][0] - worker_1 = traces[0][1] - worker_2 = traces[0][2] - # check spans data and hierarchy - assert parent_span.name == "greenlet.main" - assert worker_1.get_tag("worker_id") == "1" - assert worker_1.name == "greenlet.worker" - assert worker_1.resource == "greenlet.worker" - assert worker_1.parent_id == parent_span.span_id - assert worker_2.get_tag("worker_id") == "2" - assert worker_2.name == "greenlet.worker" - assert worker_2.resource == "greenlet.worker" - assert worker_2.parent_id == parent_span.span_id diff --git a/tests/opentracer/test_tracer_tornado.py b/tests/opentracer/test_tracer_tornado.py deleted file mode 100644 index d81541e0a52..00000000000 --- a/tests/opentracer/test_tracer_tornado.py +++ /dev/null @@ -1,30 +0,0 @@ -from opentracing.scope_managers.tornado import TornadoScopeManager -import pytest - - -@pytest.fixture() -def ot_tracer(ot_tracer_factory): - """Fixture providing an opentracer configured for tornado usage.""" - yield ot_tracer_factory("tornado_svc", {}, TornadoScopeManager()) - - -class TestTracerTornado(object): - """ - Since the ScopeManager is provided by OpenTracing we should simply test - whether it exists and works for a very simple use-case. 
- """ - - def test_sanity(self, ot_tracer, test_spans): - with ot_tracer.start_active_span("one"): - with ot_tracer.start_active_span("two"): - pass - - traces = test_spans.pop_traces() - assert len(traces) == 1 - assert len(traces[0]) == 2 - assert traces[0][0].name == "one" - assert traces[0][1].name == "two" - - # the parenting is correct - assert traces[0][0] == traces[0][1]._parent - assert traces[0][0].trace_id == traces[0][1].trace_id diff --git a/tests/opentracer/utils.py b/tests/opentracer/utils.py deleted file mode 100644 index 85b84865ad8..00000000000 --- a/tests/opentracer/utils.py +++ /dev/null @@ -1,11 +0,0 @@ -from ddtrace.opentracer import Tracer - - -def init_tracer(service_name, dd_tracer, scope_manager=None): - """A method that emulates what a user of OpenTracing would call to - initialize a Datadog opentracer. - - It accepts a Datadog tracer that should be the same one used for testing. - """ - ot_tracer = Tracer(service_name, scope_manager=scope_manager, _dd_tracer=dd_tracer) - return ot_tracer diff --git a/tests/profiling/_wrong_file b/tests/profiling/_wrong_file deleted file mode 100644 index c1c2fc2dab2..00000000000 --- a/tests/profiling/_wrong_file +++ /dev/null @@ -1 +0,0 @@ -this is definitely not good python, right? 
diff --git a/tests/profiling/collector/__init__.py b/tests/profiling/collector/__init__.py deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/tests/profiling/collector/conftest.py b/tests/profiling/collector/conftest.py index 8475c4d0a22..7dc1d816091 100644 --- a/tests/profiling/collector/conftest.py +++ b/tests/profiling/collector/conftest.py @@ -1,22 +1,8 @@ import pytest import ddtrace -from ddtrace.profiling import Profiler -from tests.utils import override_global_config @pytest.fixture def tracer(): - with override_global_config(dict(_startup_logs_enabled=False)): - yield ddtrace.trace.tracer - - -@pytest.fixture -def profiler(monkeypatch): - monkeypatch.setenv("DD_PROFILING_API_TIMEOUT_MS", "100") - p = Profiler() - p.start() - try: - yield p - finally: - p.stop() + return ddtrace.trace.tracer diff --git a/tests/profiling_v2/collector/test_asyncio.py b/tests/profiling/collector/test_asyncio.py similarity index 100% rename from tests/profiling_v2/collector/test_asyncio.py rename to tests/profiling/collector/test_asyncio.py diff --git a/tests/profiling/collector/test_collector.py b/tests/profiling/collector/test_collector.py index 6c0993ad757..71353e26e46 100644 --- a/tests/profiling/collector/test_collector.py +++ b/tests/profiling/collector/test_collector.py @@ -19,24 +19,6 @@ def _test_restart(collector, **kwargs): c.join() -def test_dynamic_interval(): - c = collector.PeriodicCollector(interval=1) - c.start() - assert c.interval == 1 - assert c._worker.interval == c.interval - c.interval = 2 - assert c.interval == 2 - assert c._worker.interval == c.interval - c.stop() - - -def test_thread_name(): - c = collector.PeriodicCollector(interval=1) - c.start() - assert c._worker.name == "ddtrace.profiling.collector:PeriodicCollector" - c.stop() - - def test_capture_sampler(): cs = collector.CaptureSampler(15) assert cs.capture() is False # 15 diff --git a/tests/profiling/collector/test_memalloc.py b/tests/profiling/collector/test_memalloc.py 
index 54f1997a46b..78118263b26 100644 --- a/tests/profiling/collector/test_memalloc.py +++ b/tests/profiling/collector/test_memalloc.py @@ -1,21 +1,21 @@ -# -*- encoding: utf-8 -*- import gc +import inspect import os import sys import threading import pytest +from ddtrace.internal.datadog.profiling import ddup +from ddtrace.internal.settings.profiling import ProfilingConfig +from ddtrace.internal.settings.profiling import _derive_default_heap_sample_size +from ddtrace.profiling.collector import _memalloc from ddtrace.profiling.collector import memalloc from ddtrace.profiling.event import DDFrame -from ddtrace.settings.profiling import ProfilingConfig -from ddtrace.settings.profiling import _derive_default_heap_sample_size +from tests.profiling.collector import pprof_utils -try: - from ddtrace.profiling.collector import _memalloc -except ImportError: - pytestmark = pytest.mark.skip("_memalloc not available") +PY_313_OR_ABOVE = sys.version_info[:2] >= (3, 13) def test_start_twice(): @@ -57,13 +57,13 @@ def _allocate_1k(): return [object() for _ in range(1000)] -_ALLOC_LINE_NUMBER = _allocate_1k.__code__.co_firstlineno + 1 - - def _pre_allocate_1k(): return _allocate_1k() +_ALLOC_LINE_NUMBER = _allocate_1k.__code__.co_firstlineno + 1 + + def test_iter_events(): max_nframe = 32 collector = memalloc.MemoryCollector(max_nframe=max_nframe, heap_sample_size=64) @@ -174,6 +174,785 @@ def test_iter_events_multi_thread(): assert count_thread >= 1000 +# This test is marked as subprocess as it changes default heap sample size +@pytest.mark.subprocess( + env=dict(DD_PROFILING_HEAP_SAMPLE_SIZE="1024", DD_PROFILING_OUTPUT_PPROF="/tmp/test_heap_samples_collected") +) +def test_heap_samples_collected(): + import os + + from ddtrace.profiling import Profiler + from tests.profiling.collector import pprof_utils + from tests.profiling.collector.test_memalloc import _allocate_1k + + # Test for https://github.com/DataDog/dd-trace-py/issues/11069 + pprof_prefix = 
os.environ["DD_PROFILING_OUTPUT_PPROF"] + output_filename = pprof_prefix + "." + str(os.getpid()) + + p = Profiler() + p.start() + x = _allocate_1k() # noqa: F841 + p.stop() + + profile = pprof_utils.parse_newest_profile(output_filename) + samples = pprof_utils.get_samples_with_value_type(profile, "heap-space") + assert len(samples) > 0 + + +def test_memory_collector(tmp_path): + test_name = "test_memory_collector" + pprof_prefix = str(tmp_path / test_name) + output_filename = pprof_prefix + "." + str(os.getpid()) + + ddup.config( + service=test_name, + version="test", + env="test", + output_filename=pprof_prefix, + ) + ddup.start() + + mc = memalloc.MemoryCollector(heap_sample_size=256) + with mc: + _allocate_1k() + mc.snapshot() + + ddup.upload() + + profile = pprof_utils.parse_newest_profile(output_filename) + # Gets samples with alloc-space > 0 + samples = pprof_utils.get_samples_with_value_type(profile, "alloc-space") + + assert len(samples) > 0 + + alloc_samples_idx = pprof_utils.get_sample_type_index(profile, "alloc-samples") + for sample in samples: + # We also want to check 'alloc-samples' is > 0. + assert sample.value[alloc_samples_idx] > 0 + + # We also want to assert that there's a sample that's coming from _allocate_1k() + # And also assert that it's actually coming from _allocate_1k() + pprof_utils.assert_profile_has_sample( + profile, + samples, + expected_sample=pprof_utils.StackEvent( + thread_name="MainThread", + thread_id=threading.main_thread().ident, + locations=[ + pprof_utils.StackLocation( + function_name="_allocate_1k", filename="test_memalloc.py", line_no=_ALLOC_LINE_NUMBER + ) + ], + ), + ) + + +def test_memory_collector_ignore_profiler(tmp_path): + test_name = "test_memory_collector_ignore_profiler" + pprof_prefix = str(tmp_path / test_name) + output_filename = pprof_prefix + "." 
+ str(os.getpid()) + + ddup.config( + service=test_name, + version="test", + env="test", + output_filename=pprof_prefix, + ) + ddup.start() + + mc = memalloc.MemoryCollector(ignore_profiler=True) + quit_thread = threading.Event() + + with mc: + + def alloc(): + _allocate_1k() + quit_thread.wait() + + alloc_thread = threading.Thread(name="allocator", target=alloc) + alloc_thread._ddtrace_profiling_ignore = True + alloc_thread.start() + + mc.snapshot() + + # We need to wait for the data collection to happen so it gets the `_ddtrace_profiling_ignore` Thread attribute from + # the global thread list. + quit_thread.set() + alloc_thread.join() + + ddup.upload() + + try: + pprof_utils.parse_newest_profile(output_filename) + except AssertionError as e: + assert "No samples found" in str(e) + + +@pytest.mark.subprocess( + env=dict(DD_PROFILING_HEAP_SAMPLE_SIZE="8", DD_PROFILING_OUTPUT_PPROF="/tmp/test_heap_profiler_large_heap_overhead") +) +def test_heap_profiler_large_heap_overhead(): + # TODO(nick): this test case used to crash due to integer arithmetic bugs. + # Now it doesn't crash, but it takes far too long to run to be useful in CI. + # Un-skip this test if/when we improve the worst-case performance of the + # heap profiler for large heaps + from ddtrace.profiling import Profiler + from tests.profiling.collector.test_memalloc import one + + p = Profiler() + p.start() + + count = 100_000 + thing_size = 32 + + junk = [] + for i in range(count): + b1 = one(thing_size) + b2 = one(2 * thing_size) + b3 = one(3 * thing_size) + b4 = one(4 * thing_size) + t = (b1, b2, b3, b4) + junk.append(t) + + del junk + + p.stop() + + +# one, two, three, and four exist to give us distinct things +# we can find in the profile without depending on something +# like the line number at which an allocation happens +# Python 3.13 changed bytearray to use an allocation domain that we don't +# currently profile, so we use None instead of bytearray to test. 
+def one(size): + return (None,) * size if PY_313_OR_ABOVE else bytearray(size) + + +def two(size): + return (None,) * size if PY_313_OR_ABOVE else bytearray(size) + + +def three(size): + return (None,) * size if PY_313_OR_ABOVE else bytearray(size) + + +def four(size): + return (None,) * size if PY_313_OR_ABOVE else bytearray(size) + + +def _create_allocation(size): + return (None,) * size if PY_313_OR_ABOVE else bytearray(size) + + +class HeapInfo: + def __init__(self, count, size): + self.count = count + self.size = size + + +def get_heap_info(heap, funcs): + got = {} + for event in heap: + (frames, _), in_use_size, alloc_size, count = event + + in_use = in_use_size > 0 + size = in_use_size if in_use_size > 0 else alloc_size + + if not in_use: + continue + func = frames[0].function_name + if func in funcs: + v = got.get(func, HeapInfo(0, 0)) + v.count += 1 + v.size += size + got[func] = v + return got + + +def has_function_in_traceback(frames, function_name): + return any(frame.function_name == function_name for frame in frames) + + +def get_tracemalloc_stats_per_func(stats, funcs): + source_to_func = {} + + for f in funcs: + file = inspect.getsourcefile(f) + line = inspect.getsourcelines(f)[1] + 1 + source_to_func[str(file) + str(line)] = f.__name__ + + actual_sizes = {} + actual_counts = {} + for stat in stats: + f = stat.traceback[0] + key = f.filename + str(f.lineno) + if key in source_to_func: + func_name = source_to_func[key] + actual_sizes[func_name] = stat.size + actual_counts[func_name] = stat.count + return actual_sizes, actual_counts + + +# TODO: higher sampling intervals have a lot more variance and are flaky +# but would be nice to test since our default is 1MiB +@pytest.mark.parametrize("sample_interval", (8, 512, 1024)) +def test_heap_profiler_sampling_accuracy(sample_interval): + # tracemalloc lets us get ground truth on how many allocations there were + import tracemalloc + + # TODO(nick): use Profiler instead of _memalloc + from 
ddtrace.profiling.collector import _memalloc + + # We seed the RNG to reduce flakiness. This doesn't actually diminish the + # quality of the test much. A broken sampling implementation is unlikely to + # pass for an arbitrary seed. + old = os.environ.get("_DD_MEMALLOC_DEBUG_RNG_SEED") + os.environ["_DD_MEMALLOC_DEBUG_RNG_SEED"] = "42" + _memalloc.start(32, sample_interval) + # Put the env var back in the state we found it + if old is not None: + os.environ["_DD_MEMALLOC_DEBUG_RNG_SEED"] = old + else: + del os.environ["_DD_MEMALLOC_DEBUG_RNG_SEED"] + + tracemalloc.start() + + junk = [] + for i in range(1000): + size = 256 + junk.append(one(size)) + junk.append(two(2 * size)) + junk.append(three(3 * size)) + junk.append(four(4 * size)) + + # TODO(nick): randomly remove things from junk to see if the profile is + # still accurate + + # Stop tracemalloc before collecting the heap sample, since tracemalloc + # is _really_ slow when the _memalloc.heap() call does lots of allocs for + # lower sample intervals (i.e. more sampled allocations) + stats = tracemalloc.take_snapshot().statistics("traceback") + tracemalloc.stop() + + heap = _memalloc.heap() + # Important: stop _memalloc _after_ tracemalloc. Need to remove allocator + # hooks in LIFO order. + _memalloc.stop() + + actual_sizes, _ = get_tracemalloc_stats_per_func(stats, (one, two, three, four)) + actual_total = sum(actual_sizes.values()) + + del junk + + sizes = get_heap_info(heap, {"one", "two", "three", "four"}) + + total = sum(v.size for v in sizes.values()) + print(f"observed total: {total} actual total: {actual_total} error: {abs(total - actual_total) / actual_total}") + # 20% error in actual size feels pretty generous + # TODO(nick): justify in terms of variance of sampling? 
+    assert abs(1 - total / actual_total) <= 0.20
+
+    print("func\tcount\tsize\tactual\trel\tactual\tdiff")
+    for func in ("one", "two", "three", "four"):
+        got = sizes[func]
+        actual_size = actual_sizes[func]
+
+        # Relative portion of the bytes in the profile for this function
+        # out of the functions we're interested in
+        rel = got.size / total
+        actual_rel = actual_size / actual_total
+
+        print(
+            f"{func}\t{got.count}\t{got.size}\t{actual_size}\t{rel:.3f}\t{actual_rel:.3f}\t{abs(rel - actual_rel):.3f}"
+        )
+
+        # Assert that the reported portion of this function in the profile is
+        # pretty close to the actual portion. So, if it's actually ~20% of the
+        # profile then we'd accept anything between 10% and 30%, which is
+        # probably too generous for low sampling intervals but at least won't be
+        # flaky.
+        assert abs(rel - actual_rel) < 0.10
+
+
+@pytest.mark.skip(reason="too slow, indeterministic")
+@pytest.mark.subprocess(
+    env=dict(
+        # Turn off other profilers so that we're just testing memalloc
+        DD_PROFILING_STACK_ENABLED="false",
+        DD_PROFILING_LOCK_ENABLED="false",
+        # Upload a lot, since rotating out memalloc profiler state can race with profiling
+        DD_PROFILING_UPLOAD_INTERVAL="1",
+    ),
+)
+def test_memealloc_data_race_regression():
+    import gc
+    import threading
+    import time
+
+    from ddtrace.profiling import Profiler
+
+    gc.enable()
+    # This threshold controls when garbage collection is triggered. The
+    # threshold is on the count of live allocations, which is checked when doing
+    # a new allocation. This test is ultimately trying to get the allocation of
+    # frame objects during the memory profiler's traceback function to trigger
+    # garbage collection. We want a lower threshold to improve the odds that
+    # this happens.
+ gc.set_threshold(100) + + class Thing: + def __init__(self): + # Self reference so this gets deallocated in GC vs via refcount + self.ref = self + + def __del__(self): + # Force GIL yield, so if/when memalloc triggers GC, this is + # deallocated, releasing GIL while memalloc is sampling and allowing + # something else to run and possibly modify memalloc's internal + # state concurrently + time.sleep(0) + + def do_alloc(): + def f(): + return Thing() + + return f + + def lotsa_allocs(ev): + while not ev.is_set(): + f = do_alloc() + f() + time.sleep(0.01) + + p = Profiler() + p.start() + + threads = [] + ev = threading.Event() + for i in range(4): + t = threading.Thread(target=lotsa_allocs, args=(ev,)) + t.start() + threads.append(t) + + # Arbitrary sleep. This typically crashes in about a minute. + # But for local development, either let it run way longer or + # figure out sanitizer instrumentation + time.sleep(120) + + p.stop() + + ev.set() + for t in threads: + t.join() + + +@pytest.mark.parametrize("sample_interval", (256, 512, 1024)) +def test_memory_collector_allocation_accuracy_with_tracemalloc(sample_interval): + import tracemalloc + + old = os.environ.get("_DD_MEMALLOC_DEBUG_RNG_SEED") + os.environ["_DD_MEMALLOC_DEBUG_RNG_SEED"] = "42" + + mc = memalloc.MemoryCollector(heap_sample_size=sample_interval) + + try: + with mc: + tracemalloc.start() + + junk = [] + for i in range(1000): + size = 256 + junk.append(one(size)) + junk.append(two(2 * size)) + junk.append(three(3 * size)) + junk.append(four(4 * size)) + + stats = tracemalloc.take_snapshot().statistics("traceback") + tracemalloc.stop() + + del junk + + samples = mc.test_snapshot() + + finally: + if old is not None: + os.environ["_DD_MEMALLOC_DEBUG_RNG_SEED"] = old + else: + if "_DD_MEMALLOC_DEBUG_RNG_SEED" in os.environ: + del os.environ["_DD_MEMALLOC_DEBUG_RNG_SEED"] + + allocation_samples = [s for s in samples if s.in_use_size == 0] + heap_samples = [s for s in samples if s.in_use_size > 0] + + 
print(f"Total samples: {len(samples)}") + print(f"Allocation samples (in_use_size=0): {len(allocation_samples)}") + print(f"Heap samples (in_use_size>0): {len(heap_samples)}") + + assert len(allocation_samples) > 0, "Should have captured allocation samples after deletion" + + total_allocation_count = 0 + for sample in allocation_samples: + assert sample.size > 0, f"Invalid allocation sample size: {sample.size}" + assert sample.count > 0, f"Invalid allocation sample count: {sample.count}" + assert sample.in_use_size == 0, f"Allocation sample should have in_use_size=0, got: {sample.in_use_size}" + assert sample.in_use_size >= 0, f"Invalid in_use_size: {sample.in_use_size}" + assert sample.alloc_size >= 0, f"Invalid alloc_size: {sample.alloc_size}" + total_allocation_count += sample.count + + print(f"Total allocation count: {total_allocation_count}") + assert total_allocation_count >= 1, "Should have captured at least 1 allocation sample" + + actual_sizes, actual_counts = get_tracemalloc_stats_per_func(stats, (one, two, three, four)) + actual_total = sum(actual_sizes.values()) + actual_count_total = sum(actual_counts.values()) + + def get_allocation_info(samples, funcs): + got = {} + for sample in samples: + if sample.in_use_size > 0: + continue + + for frame in sample.frames: + func = frame.function_name + if func in funcs: + v = got.get(func, HeapInfo(0, 0)) + v.count += sample.count + v.size += sample.alloc_size + got[func] = v + break + return got + + sizes = get_allocation_info(samples, {"one", "two", "three", "four"}) + + total = sum(v.size for v in sizes.values()) + total_count = sum(v.count for v in sizes.values()) + + print(f"observed total: {total} actual total: {actual_total} error: {abs(total - actual_total) / actual_total}") + assert abs(1 - total / actual_total) <= 0.20 + + count_error = abs(total_count - actual_count_total) / actual_count_total + print(f"observed count total: {total_count} actual count total: {actual_count_total} error: {count_error}") 
+ # Commenting out the total count assertions because we still have more work to do on this. + # Our reported counts differed from the actual count by more than we expected, while the reported sizes + # are accurate. Our counts seem to be consistently lower than expected for the sample intervals we're testing. + # We'll need to double-check our count scaling before making assertions about the actual values + # assert abs(1 - total_count / actual_count_total) <= 0.30 + + print("func\tcount\tsize\tactual_size\tactual_count\trel_size\tactual_rel_size\trel_count\tactual_rel_count") + for func in ("one", "two", "three", "four"): + got = sizes[func] + actual_size = actual_sizes[func] + actual_count = actual_counts[func] + + rel_size = got.size / total + actual_rel_size = actual_size / actual_total + + rel_count = got.count / total_count + actual_rel_count = actual_count / actual_count_total + + print( + f"{func}\t{got.count}\t{got.size}\t{actual_size}\t{actual_count}\t{rel_size:.3f}\t{actual_rel_size:.3f}\t{rel_count:.3f}\t{actual_rel_count:.3f}" + ) + + assert abs(rel_size - actual_rel_size) < 0.10 + assert abs(rel_count - actual_rel_count) < 0.15 + + print(f"Successfully validated allocation sampling accuracy for sample_interval={sample_interval}") + print(f"Captured {len(allocation_samples)} allocation samples representing {total_allocation_count} allocations") + + +def test_memory_collector_allocation_tracking_across_snapshots(): + mc = memalloc.MemoryCollector(heap_sample_size=64) + + with mc: + data_to_free = [] + for i in range(10): + data_to_free.append(one(256)) + + data_to_keep = [] + for i in range(10): + data_to_keep.append(two(512)) + + del data_to_free + + samples = mc.test_snapshot() + + assert all( + sample.alloc_size > 0 for sample in samples + ), "Initial snapshot should have alloc_size>0 (new allocations)" + + freed_samples = [s for s in samples if s.in_use_size == 0] + live_samples = [s for s in samples if s.in_use_size > 0] + + assert 
len(freed_samples) > 0, "Should have some freed samples after deletion" + + assert len(live_samples) > 0, "Should have some live samples" + + for sample in samples: + assert sample.size > 0, f"Invalid size: {sample.size}" + assert sample.count > 0, f"Invalid count: {sample.count}" + assert sample.in_use_size >= 0, f"Invalid in_use_size: {sample.in_use_size}" + assert sample.alloc_size >= 0, f"Invalid alloc_size: {sample.alloc_size}" + + one_freed_samples = [sample for sample in samples if has_function_in_traceback(sample.frames, "one")] + + assert len(one_freed_samples) > 0, "Should have freed samples from function 'one'" + assert all(sample.in_use_size == 0 and sample.alloc_size > 0 for sample in one_freed_samples) + + two_live_samples = [sample for sample in samples if has_function_in_traceback(sample.frames, "two")] + + assert len(two_live_samples) > 0, "Should have live samples from function 'two'" + assert all(sample.in_use_size > 0 and sample.alloc_size > 0 for sample in two_live_samples) + + del data_to_keep + + +def test_memory_collector_python_interface_with_allocation_tracking(): + mc = memalloc.MemoryCollector(heap_sample_size=128) + + with mc: + first_batch = [] + for i in range(20): + first_batch.append(one(256)) + + # We're taking a snapshot here to ensure that in the next snapshot, we don't see any "one" allocations + mc.test_snapshot() + + second_batch = [] + for i in range(15): + second_batch.append(two(512)) + + del first_batch + + final_samples = mc.test_snapshot() + + assert len(final_samples) >= 0, "Final snapshot should be valid" + + for sample in final_samples: + assert sample.size > 0, f"Size should be positive int, got {sample.size}" + assert sample.count > 0, f"Count should be positive int, got {sample.count}" + assert sample.in_use_size >= 0, f"in_use_size should be non-negative int, got {sample.in_use_size}" + assert sample.alloc_size >= 0, f"alloc_size should be non-negative int, got {sample.alloc_size}" + + one_samples_in_final = 
[sample for sample in final_samples if has_function_in_traceback(sample.frames, "one")] + + assert ( + len(one_samples_in_final) == 0 + ), f"Should have no samples with 'one' in traceback in final_samples, got {len(one_samples_in_final)}" + + batch_two_live_samples = [ + sample + for sample in final_samples + if has_function_in_traceback(sample.frames, "two") and sample.in_use_size > 0 + ] + + assert ( + len(batch_two_live_samples) > 0 + ), f"Should have live samples from batch two, got {len(batch_two_live_samples)}" + assert all(sample.in_use_size > 0 and sample.alloc_size > 0 for sample in batch_two_live_samples) + + del second_batch + + +def test_memory_collector_python_interface_with_allocation_tracking_no_deletion(): + mc = memalloc.MemoryCollector(heap_sample_size=128) + + with mc: + initial_samples = mc.test_snapshot() + initial_count = len(initial_samples) + + first_batch = [] + for i in range(20): + first_batch.append(one(256)) + + after_first_batch = mc.test_snapshot() + + second_batch = [] + for i in range(15): + second_batch.append(two(512)) + + final_samples = mc.test_snapshot() + + assert len(after_first_batch) >= initial_count, "Should have at least as many samples after first batch" + assert len(final_samples) >= 0, "Final snapshot should be valid" + + for samples in [initial_samples, after_first_batch, final_samples]: + for sample in samples: + assert sample.size > 0, f"Size should be positive int, got {sample.size}" + assert sample.count > 0, f"Count should be positive int, got {sample.count}" + assert sample.in_use_size >= 0, f"in_use_size should be non-negative int, got {sample.in_use_size}" + assert sample.alloc_size >= 0, f"alloc_size should be non-negative int, got {sample.alloc_size}" + + batch_one_live_samples = [ + sample + for sample in final_samples + if has_function_in_traceback(sample.frames, "one") and sample.in_use_size > 0 + ] + + batch_two_live_samples = [ + sample + for sample in final_samples + if 
has_function_in_traceback(sample.frames, "two") and sample.in_use_size > 0 + ] + + assert ( + len(batch_one_live_samples) > 0 + ), f"Should have live samples from batch one, got {len(batch_one_live_samples)}" + assert ( + len(batch_two_live_samples) > 0 + ), f"Should have live samples from batch two, got {len(batch_two_live_samples)}" + + assert all(sample.in_use_size > 0 and sample.alloc_size == 0 for sample in batch_one_live_samples) + assert all(sample.in_use_size > 0 and sample.alloc_size > 0 for sample in batch_two_live_samples) + + del first_batch + del second_batch + + +def test_memory_collector_exception_handling(): + mc = memalloc.MemoryCollector(heap_sample_size=256) + + with pytest.raises(ValueError): + with mc: + _allocate_1k() + samples = mc.test_snapshot() + assert isinstance(samples, tuple) + raise ValueError("Test exception") + + with mc: + _allocate_1k() + samples = mc.test_snapshot() + assert isinstance(samples, tuple) + + +def test_memory_collector_allocation_during_shutdown(): + """Test that verifies that when _memalloc.stop() is called while allocations are still + happening in another thread, the shutdown process completes without deadlocks or crashes. + """ + import time + + from ddtrace.profiling.collector import _memalloc + + _memalloc.start(32, 512) + + shutdown_event = threading.Event() + allocation_thread = None + + def allocate_continuously(): + while not shutdown_event.is_set(): + data = [0] * 100 + del data + time.sleep(0.001) + + try: + allocation_thread = threading.Thread(target=allocate_continuously) + allocation_thread.start() + + time.sleep(0.1) + + _memalloc.stop() + + finally: + shutdown_event.set() + if allocation_thread: + allocation_thread.join(timeout=1) + + +def test_memory_collector_buffer_pool_exhaustion(): + """Test that the memory collector handles buffer pool exhaustion. 
+ This test creates multiple threads that simultaneously allocate with very deep + stack traces, which could potentially exhaust internal buffer pools. + """ + mc = memalloc.MemoryCollector(heap_sample_size=64) + + with mc: + threads = [] + barrier = threading.Barrier(10) + + def allocate_with_traceback(): + barrier.wait() + + def deep_alloc(depth): + if depth == 0: + return _create_allocation(100) + return deep_alloc(depth - 1) + + data = deep_alloc(50) + del data + + for i in range(10): + t = threading.Thread(target=allocate_with_traceback) + threads.append(t) + t.start() + + for t in threads: + t.join() + + samples = mc.test_snapshot() + + deep_alloc_count = 0 + max_stack_depth = 0 + + for sample in samples: + assert sample.frames is not None, "Buffer pool test: All samples should have stack frames" + stack_depth = len(sample.frames) + max_stack_depth = max(max_stack_depth, stack_depth) + + for frame in sample.frames: + if frame.function_name == "deep_alloc": + deep_alloc_count += 1 + break + + assert ( + deep_alloc_count >= 10 + ), f"Buffer pool test: Expected many allocations from concurrent threads, got {deep_alloc_count}" + + assert max_stack_depth >= 50, ( + f"Buffer pool test: Stack traces should be preserved even under stress (expecting at least 50 frames), " + f"but max depth was only {max_stack_depth}" + ) + + +def test_memory_collector_thread_lifecycle(): + """Test that continuously creates and destroys threads while they perform allocations, + verifying that the collector can track allocations across changing thread contexts. 
+ """ + mc = memalloc.MemoryCollector(heap_sample_size=8) + + with mc: + threads = [] + + def worker(): + for i in range(10): + data = [i] * 100 + del data + + for i in range(20): + t = threading.Thread(target=worker) + t.start() + threads.append(t) + + if i > 5: + old_thread = threads.pop(0) + old_thread.join() + + for t in threads: + t.join() + + samples = mc.test_snapshot() + + worker_samples = 0 + for sample in samples: + for frame in sample.frames: + if frame.function_name == "worker": + worker_samples += 1 + break + + assert ( + worker_samples > 0 + ), "Thread lifecycle test: Should capture allocations even as threads are created/destroyed" + + def test_heap(): max_nframe = 32 collector = memalloc.MemoryCollector(max_nframe=max_nframe, heap_sample_size=1024) diff --git a/tests/profiling/collector/test_stack.py b/tests/profiling/collector/test_stack.py index 149f0635ba4..5c9ac25cf79 100644 --- a/tests/profiling/collector/test_stack.py +++ b/tests/profiling/collector/test_stack.py @@ -1,24 +1,19 @@ -# -*- encoding: utf-8 -*- import _thread import os import sys import threading import time -import timeit -import typing # noqa:F401 +from unittest.mock import patch import uuid import pytest -import ddtrace # noqa:F401 from ddtrace import ext from ddtrace.internal.datadog.profiling import ddup -from ddtrace.profiling import _threading from ddtrace.profiling.collector import stack from tests.conftest import get_original_test_name from tests.profiling.collector import pprof_utils - -from . 
import test_collector +from tests.profiling.collector import test_collector # Python 3.11.9 is not compatible with gevent, https://github.com/gevent/gevent/issues/2040 @@ -29,6 +24,24 @@ sys.version_info < (3, 11, 9) or sys.version_info >= (3, 12, 5) ) +# Function to use for stress-test of polling +MAX_FN_NUM = 30 +FN_TEMPLATE = """def _f{num}(): + return _f{nump1}()""" + +for num in range(MAX_FN_NUM): + exec(FN_TEMPLATE.format(num=num, nump1=num + 1)) + +exec( + """def _f{MAX_FN_NUM}(): + try: + raise ValueError('test') + except Exception: + time.sleep(2)""".format( + MAX_FN_NUM=MAX_FN_NUM + ) +) + def func1(): return func2() @@ -50,6 +63,7 @@ def func5(): return time.sleep(1) +# Use subprocess as ddup config persists across tests. @pytest.mark.subprocess( env=dict( DD_PROFILING_MAX_FRAMES="5", @@ -79,52 +93,230 @@ def test_collect_truncate(): samples = pprof_utils.get_samples_with_value_type(profile, "wall-time") assert len(samples) > 0 for sample in samples: - assert len(sample.location_id) <= max_nframes, len(sample.location_id) + # stack v2 adds one extra frame for "%d frames omitted" message + # Also, it allows max_nframes + 1 frames, so we add 2 here. + assert len(sample.location_id) <= max_nframes + 2, len(sample.location_id) -def test_collect_once(tmp_path): - test_name = "test_collect_once" +def test_stack_locations(tmp_path): + test_name = "test_stack_locations" pprof_prefix = str(tmp_path / test_name) output_filename = pprof_prefix + "." 
+ str(os.getpid()) + assert ddup.is_available ddup.config(env="test", service=test_name, version="my_version", output_filename=pprof_prefix) ddup.start() - s = stack.StackCollector() - s._init() - all_events = s.collect() + def baz(): + time.sleep(0.1) - ddup.upload() - # assert len(all_events) == 0 - assert len(all_events) == 2 + def bar(): + baz() - stack_events = all_events[0] - exc_events = all_events[1] - assert len(stack_events) == 0 - assert len(exc_events) == 0 + def foo(): + bar() + + with stack.StackCollector(): + for _ in range(10): + foo() + ddup.upload() profile = pprof_utils.parse_newest_profile(output_filename) samples = pprof_utils.get_samples_with_value_type(profile, "wall-time") assert len(samples) > 0 + expected_sample = pprof_utils.StackEvent( + thread_id=_thread.get_ident(), + thread_name="MainThread", + locations=[ + pprof_utils.StackLocation( + function_name="baz", + filename="test_stack.py", + line_no=baz.__code__.co_firstlineno + 1, + ), + pprof_utils.StackLocation( + function_name="bar", + filename="test_stack.py", + line_no=bar.__code__.co_firstlineno + 1, + ), + pprof_utils.StackLocation( + function_name="foo", + filename="test_stack.py", + line_no=foo.__code__.co_firstlineno + 1, + ), + ], + ) + + pprof_utils.assert_profile_has_sample(profile, samples=samples, expected_sample=expected_sample) + + +def test_push_span(tmp_path, tracer): + test_name = "test_push_span" + pprof_prefix = str(tmp_path / test_name) + output_filename = pprof_prefix + "." 
+ str(os.getpid()) + + tracer._endpoint_call_counter_span_processor.enable() + + assert ddup.is_available + ddup.config(env="test", service=test_name, version="my_version", output_filename=pprof_prefix) + ddup.start() + + resource = str(uuid.uuid4()) + span_type = ext.SpanTypes.WEB + + with stack.StackCollector(tracer=tracer): + with tracer.trace("foobar", resource=resource, span_type=span_type) as span: + span_id = span.span_id + local_root_span_id = span._local_root.span_id + for _ in range(10): + time.sleep(0.1) + ddup.upload(tracer=tracer) + + profile = pprof_utils.parse_newest_profile(output_filename) + samples = pprof_utils.get_samples_with_label_key(profile, "span id") + assert len(samples) > 0 + for sample in samples: + pprof_utils.assert_stack_event( + profile, + sample, + expected_event=pprof_utils.StackEvent( + span_id=span_id, + local_root_span_id=local_root_span_id, + trace_type=span_type, + trace_endpoint=resource, + ), + ) + + +def test_push_span_unregister_thread(tmp_path, monkeypatch, tracer): + with patch("ddtrace.internal.datadog.profiling.stack_v2.unregister_thread") as unregister_thread: + tracer._endpoint_call_counter_span_processor.enable() + + test_name = "test_push_span_unregister_thread" + pprof_prefix = str(tmp_path / test_name) + output_filename = pprof_prefix + "." 
+ str(os.getpid()) + + assert ddup.is_available + ddup.config(env="test", service=test_name, version="my_version", output_filename=pprof_prefix) + ddup.start() + + resource = str(uuid.uuid4()) + span_type = ext.SpanTypes.WEB + + def target_fun(): + for _ in range(10): + time.sleep(0.1) + + with stack.StackCollector( + tracer=tracer, + ): + with tracer.trace("foobar", resource=resource, span_type=span_type) as span: + span_id = span.span_id + local_root_span_id = span._local_root.span_id + t = threading.Thread(target=target_fun) + t.start() + t.join() + thread_id = t.ident + ddup.upload(tracer=tracer) + + profile = pprof_utils.parse_newest_profile(output_filename) + samples = pprof_utils.get_samples_with_label_key(profile, "span id") + assert len(samples) > 0 + for sample in samples: + pprof_utils.assert_stack_event( + profile, + sample, + expected_event=pprof_utils.StackEvent( + span_id=span_id, + local_root_span_id=local_root_span_id, + trace_type=span_type, + trace_endpoint=resource, + ), + ) + + unregister_thread.assert_called_with(thread_id) + + +def test_push_non_web_span(tmp_path, tracer): + tracer._endpoint_call_counter_span_processor.enable() + + test_name = "test_push_non_web_span" + pprof_prefix = str(tmp_path / test_name) + output_filename = pprof_prefix + "." 
+ str(os.getpid()) + + assert ddup.is_available + ddup.config(env="test", service=test_name, version="my_version", output_filename=pprof_prefix) + ddup.start() + + resource = str(uuid.uuid4()) + span_type = ext.SpanTypes.SQL + + with stack.StackCollector( + tracer=tracer, + ): + with tracer.trace("foobar", resource=resource, span_type=span_type) as span: + span_id = span.span_id + local_root_span_id = span._local_root.span_id + for _ in range(10): + time.sleep(0.1) + ddup.upload(tracer=tracer) + + profile = pprof_utils.parse_newest_profile(output_filename) + samples = pprof_utils.get_samples_with_label_key(profile, "span id") + assert len(samples) > 0 + for sample in samples: + pprof_utils.assert_stack_event( + profile, + sample, + expected_event=pprof_utils.StackEvent( + span_id=span_id, + local_root_span_id=local_root_span_id, + trace_type=span_type, + # trace_endpoint is not set for non-web spans + ), + ) + + +def test_push_span_none_span_type(tmp_path, tracer): + # Test for https://github.com/DataDog/dd-trace-py/issues/11141 + test_name = "test_push_span_none_span_type" + pprof_prefix = str(tmp_path / test_name) + output_filename = pprof_prefix + "." 
+ str(os.getpid()) + + assert ddup.is_available + ddup.config(env="test", service=test_name, version="my_version", output_filename=pprof_prefix) + ddup.start() -def _find_sleep_event(events, class_name): - class_method_found = False - class_classmethod_found = False + tracer._endpoint_call_counter_span_processor.enable() - for e in events: - for frame in e.frames: - if frame[0] == __file__.replace(".pyc", ".py") and frame[2] == "sleep_class" and frame[3] == class_name: - class_method_found = True - elif ( - frame[0] == __file__.replace(".pyc", ".py") and frame[2] == "sleep_instance" and frame[3] == class_name - ): - class_classmethod_found = True + resource = str(uuid.uuid4()) - if class_method_found and class_classmethod_found: - return True + with stack.StackCollector( + tracer=tracer, + ): + # Explicitly set None span_type as the default could change in the + # future. + with tracer.trace("foobar", resource=resource, span_type=None) as span: + span_id = span.span_id + local_root_span_id = span._local_root.span_id + for _ in range(10): + time.sleep(0.1) + ddup.upload(tracer=tracer) - return False + profile = pprof_utils.parse_newest_profile(output_filename) + samples = pprof_utils.get_samples_with_label_key(profile, "span id") + assert len(samples) > 0 + for sample in samples: + pprof_utils.assert_stack_event( + profile, + sample, + expected_event=pprof_utils.StackEvent( + span_id=span_id, + local_root_span_id=local_root_span_id, + # span_type is None + # trace_endpoint is not set for non-web spans + ), + ) def test_collect_once_with_class(tmp_path): @@ -247,7 +439,7 @@ def _fib(n): @pytest.mark.skipif(not TESTING_GEVENT, reason="Not testing gevent") @pytest.mark.subprocess(ddtrace_run=True) def test_collect_gevent_thread_task(): - from gevent import monkey # noqa:F401 + from gevent import monkey monkey.patch_all() @@ -263,6 +455,7 @@ def test_collect_gevent_thread_task(): test_name = "test_collect_gevent_thread_task" pprof_prefix = "/tmp/" + test_name 
output_filename = pprof_prefix + "." + str(os.getpid()) + nb_threads = 5 assert ddup.is_available ddup.config(env="test", service=test_name, version="my_version", output_filename=pprof_prefix) @@ -270,16 +463,22 @@ def test_collect_gevent_thread_task(): # Start some (green)threads def _dofib(): - for _ in range(10): + for _ in range(5): # spend some time in CPU so the profiler can catch something - _fib(28) + # On a Mac w/ Apple M3 MAX with Python 3.11 it takes about 200ms to calculate _fib(32) + # And _fib() is called 5 times so it should take about 1 second + # We use 5 threads below so it should take about 5 seconds + _fib(32) # Just make sure gevent switches threads/greenlets time.sleep(0) threads = [] + i_to_tid = {} + with stack.StackCollector(): - for i in range(10): + for i in range(nb_threads): t = threading.Thread(target=_dofib, name="TestThread %d" % i) + i_to_tid[i] = t.ident t.start() threads.append(t) for t in threads: @@ -287,220 +486,47 @@ def _dofib(): ddup.upload() - expected_task_ids = {thread.ident for thread in threads} - profile = pprof_utils.parse_newest_profile(output_filename) - samples = pprof_utils.get_samples_with_label_key(profile, "task id") + samples = pprof_utils.get_samples_with_label_key(profile, "task name") assert len(samples) > 0 - checked_thread = False - - for sample in samples: - task_id_label = pprof_utils.get_label_with_key(profile.string_table, sample, "task id") - task_id = int(task_id_label.num) - if task_id in expected_task_ids: - pprof_utils.assert_stack_event( - profile, - sample, - pprof_utils.StackEvent( - task_name=r"TestThread \d+$", - task_id=task_id, - ), - ) - checked_thread = True - - assert checked_thread, "No samples found for the expected threads" - - -def test_max_time_usage(): - with pytest.raises(ValueError): - stack.StackCollector(max_time_usage_pct=0) - - -def test_max_time_usage_over(): - with pytest.raises(ValueError): - stack.StackCollector(max_time_usage_pct=200) - - 
-@pytest.mark.parametrize("ignore_profiler", [True, False]) -def test_ignore_profiler(tmp_path, ignore_profiler): - test_name = "test_ignore_profiler" - pprof_prefix = str(tmp_path / test_name) - output_filename = pprof_prefix + "." + str(os.getpid()) - - assert ddup.is_available - ddup.config(env="test", service=test_name, version="my_version", output_filename=pprof_prefix) - ddup.start() - - s = stack.StackCollector(ignore_profiler=ignore_profiler) - collector_worker_thread_id = None - - with s: - for _ in range(10): - time.sleep(0.1) - collector_worker_thread_id = s._worker.ident - - ddup.upload() - - profile = pprof_utils.parse_newest_profile(output_filename) - samples = pprof_utils.get_samples_with_label_key(profile, "thread id") - - thread_ids = set() - - for sample in samples: - thread_id_label = pprof_utils.get_label_with_key(profile.string_table, sample, "thread id") - thread_id = int(thread_id_label.num) - thread_ids.add(thread_id) - - if ignore_profiler: - assert collector_worker_thread_id not in thread_ids, (collector_worker_thread_id, thread_ids) - else: - assert collector_worker_thread_id in thread_ids, (collector_worker_thread_id, thread_ids) - - -@pytest.mark.skipif(not TESTING_GEVENT, reason="Not testing gevent") -@pytest.mark.subprocess( - ddtrace_run=True, - env=dict( - DD_PROFILING_IGNORE_PROFILER="1", - DD_PROFILING_OUTPUT_PPROF="/tmp/test_ignore_profiler_gevent_task", - ), -) -def test_ignore_profiler_gevent_task(): - import gevent.monkey - - gevent.monkey.patch_all() - - import os - import time - - from ddtrace.internal.datadog.profiling import ddup - from ddtrace.profiling.collector import stack - from tests.profiling.collector import pprof_utils - - test_name = "test_ignore_profiler_gevent_task" - pprof_prefix = os.environ["DD_PROFILING_OUTPUT_PPROF"] - output_filename = pprof_prefix + "." 
+ str(os.getpid()) - - assert ddup.is_available - ddup.config(env="test", service=test_name, version="my_version", output_filename=pprof_prefix) - ddup.start() - - s = stack.StackCollector() - collector_worker_thread_id = None - - with s: - for _ in range(10): - time.sleep(0.1) - collector_worker_thread_id = s._worker.ident - - ddup.upload() - - profile = pprof_utils.parse_newest_profile(output_filename) - samples = pprof_utils.get_samples_with_label_key(profile, "thread id") - - thread_ids = set() - - for sample in samples: - thread_id_label = pprof_utils.get_label_with_key(profile.string_table, sample, "thread id") - thread_id = int(thread_id_label.num) - thread_ids.add(thread_id) - - assert collector_worker_thread_id not in thread_ids, (collector_worker_thread_id, thread_ids) - - -# def test_collect(): -# test_collector._test_collector_collect(stack.StackCollector, stack_event.StackSampleEvent) - - -# def test_restart(): -# test_collector._test_restart(stack.StackCollector) + for task_id in range(nb_threads): + pprof_utils.assert_profile_has_sample( + profile, + samples, + expected_sample=pprof_utils.StackEvent( + thread_name="MainThread", + task_name=r"Greenlet-\d+$", + task_id=i_to_tid[task_id], + thread_id=i_to_tid[task_id], + locations=[ + pprof_utils.StackLocation( + filename="test_stack.py", + function_name="_fib", + line_no=_fib.__code__.co_firstlineno + 6, + ), + pprof_utils.StackLocation( + filename="test_stack.py", + function_name="_fib", + line_no=_fib.__code__.co_firstlineno + 6, + ), + pprof_utils.StackLocation( + filename="test_stack.py", + function_name="_fib", + line_no=_fib.__code__.co_firstlineno + 6, + ), + ], + ), + ) def test_repr(): test_collector._test_repr( stack.StackCollector, - "StackCollector(status=, " - "min_interval_time=0.01, max_time_usage_pct=1.0, " - "nframes=64, ignore_profiler=False, endpoint_collection_enabled=None, tracer=None)", + "StackCollector(status=, nframes=64, tracer=None)", ) -def test_new_interval(): - c = 
stack.StackCollector(max_time_usage_pct=2) - new_interval = c._compute_new_interval(1000000) - assert new_interval == 0.049 - new_interval = c._compute_new_interval(2000000) - assert new_interval == 0.098 - c = stack.StackCollector(max_time_usage_pct=10) - new_interval = c._compute_new_interval(200000) - assert new_interval == 0.01 - new_interval = c._compute_new_interval(1) - assert new_interval == c.min_interval_time - - -# Function to use for stress-test of polling -MAX_FN_NUM = 30 -FN_TEMPLATE = """def _f{num}(): - return _f{nump1}()""" - -for num in range(MAX_FN_NUM): - exec(FN_TEMPLATE.format(num=num, nump1=num + 1)) - -exec( - """def _f{MAX_FN_NUM}(): - try: - raise ValueError('test') - except Exception: - time.sleep(2)""".format( - MAX_FN_NUM=MAX_FN_NUM - ) -) - - -def test_stress_threads(tmp_path): - test_name = "test_stress_threads" - pprof_prefix = str(tmp_path / test_name) - output_filename = pprof_prefix + "." + str(os.getpid()) - - assert ddup.is_available - ddup.config(env="test", service=test_name, version="my_version", output_filename=pprof_prefix) - ddup.start() - - with stack.StackCollector() as s: - NB_THREADS = 40 - - threads = [] - for _ in range(NB_THREADS): - t = threading.Thread(target=_f0) # noqa: E149,F821 - t.start() - threads.append(t) - number = 20000 - - exectime = timeit.timeit(s.collect, number=number) - # Threads are fake threads with gevent, so result is actually for one thread, not NB_THREADS - exectime_per_collect = exectime / number - print("%.3f ms per call" % (1000.0 * exectime_per_collect)) - print( - "CPU overhead for %d threads with %d functions long at %d Hz: %.2f%%" - % ( - NB_THREADS, - MAX_FN_NUM, - 1 / s.min_interval_time, - 100 * exectime_per_collect / s.min_interval_time, - ) - ) - - for t in threads: - t.join() - - ddup.upload() - - profile = pprof_utils.parse_newest_profile(output_filename) - samples = pprof_utils.get_samples_with_value_type(profile, "cpu-time") - assert len(samples) > 0 - - def 
test_stress_threads_run_as_thread(tmp_path): test_name = "test_stress_threads_run_as_thread" pprof_prefix = str(tmp_path / test_name) @@ -533,127 +559,10 @@ def wait_for_quit(): ddup.upload() profile = pprof_utils.parse_newest_profile(output_filename) - samples = pprof_utils.get_samples_with_value_type(profile, "cpu-time") + samples = pprof_utils.get_samples_with_value_type(profile, "wall-time") assert len(samples) > 0 -@pytest.mark.skipif(not stack.FEATURES["stack-exceptions"], reason="Stack exceptions not supported") -def test_exception_collection_threads(tmp_path): - test_name = "test_exception_collection_threads" - pprof_prefix = str(tmp_path / test_name) - output_filename = pprof_prefix + "." + str(os.getpid()) - - assert ddup.is_available - ddup.config(env="test", service=test_name, version="my_version", output_filename=pprof_prefix) - ddup.start() - - tids = [] - with stack.StackCollector(): - NB_THREADS = 5 - threads = [] - for _ in range(NB_THREADS): - t = threading.Thread(target=_f0) # noqa: E149,F821 - t.start() - threads.append(t) - tids.append(t.ident) - - for t in threads: - t.join() - - ddup.upload() - - profile = pprof_utils.parse_newest_profile(output_filename) - samples = pprof_utils.get_samples_with_value_type(profile, "exception-samples") - for tid in tids: - pprof_utils.assert_profile_has_sample( - profile, - samples, - expected_sample=pprof_utils.StackEvent( - exception_type="builtins.ValueError", - thread_id=tid, - locations=[pprof_utils.StackLocation(filename="", function_name="_f30", line_no=5)], - ), - ) - - -@pytest.mark.skipif(not stack.FEATURES["stack-exceptions"], reason="Stack exceptions not supported") -def test_exception_collection(tmp_path): - test_name = "test_exception_collection" - pprof_prefix = str(tmp_path / test_name) - output_filename = pprof_prefix + "." 
+ str(os.getpid()) - - assert ddup.is_available - ddup.config(env="test", service=test_name, version="my_version", output_filename=pprof_prefix) - ddup.start() - - with stack.StackCollector(): - try: - raise ValueError("hello") - except Exception: - time.sleep(1) - - ddup.upload() - - profile = pprof_utils.parse_newest_profile(output_filename) - samples = pprof_utils.get_samples_with_value_type(profile, "exception-samples") - pprof_utils.assert_profile_has_sample( - profile, - samples, - expected_sample=pprof_utils.StackEvent( - exception_type="builtins.ValueError", - thread_id=_thread.get_ident(), - locations=[ - pprof_utils.StackLocation( - filename=os.path.basename(__file__), - function_name=test_name, - # this sample is captured while we're in time.sleep, so - # the line number is the one of the time.sleep call - line_no=test_exception_collection.__code__.co_firstlineno + 14, - ) - ], - ), - ) - - -@pytest.mark.skipif(not stack.FEATURES["stack-exceptions"], reason="Stack exceptions not supported") -def test_exception_collection_trace(tmp_path, tracer): - test_name = "test_exception_collection_trace" - pprof_prefix = str(tmp_path / test_name) - output_filename = pprof_prefix + "." 
+ str(os.getpid()) - - assert ddup.is_available - ddup.config(env="test", service=test_name, version="my_version", output_filename=pprof_prefix) - ddup.start() - - with stack.StackCollector(tracer=tracer): - with tracer.trace("test123"): - try: - raise ValueError("hello") - except Exception: - time.sleep(1) - ddup.upload() - - profile = pprof_utils.parse_newest_profile(output_filename) - samples = pprof_utils.get_samples_with_value_type(profile, "exception-samples") - pprof_utils.assert_profile_has_sample( - profile, - samples, - expected_sample=pprof_utils.StackEvent( - exception_type="builtins.ValueError", - thread_id=_thread.get_ident(), - locations=[ - pprof_utils.StackLocation( - filename=os.path.basename(__file__), - function_name=test_name, - # this sample is captured while we're in time.sleep, so - # the line number is the one of the time.sleep call - line_no=test_exception_collection_trace.__code__.co_firstlineno + 15, - ) - ], - ), - ) - - # if you don't need to check the output profile, you can use this fixture @pytest.fixture def tracer_and_collector(tracer, request, tmp_path): @@ -673,75 +582,6 @@ def tracer_and_collector(tracer, request, tmp_path): ddup.upload(tracer=tracer) -def test_thread_to_span_thread_isolation(tracer_and_collector): - t, c = tracer_and_collector - root = t.start_span("root", activate=True) - thread_id = _thread.get_ident() - assert c._thread_span_links.get_active_span_from_thread_id(thread_id) == root - - quit_thread = threading.Event() - span_started = threading.Event() - - store = {} - - def start_span(): - store["span2"] = t.start_span("thread2", activate=True) - span_started.set() - quit_thread.wait() - - th = threading.Thread(target=start_span) - th.start() - span_started.wait() - assert c._thread_span_links.get_active_span_from_thread_id(thread_id) == root - assert c._thread_span_links.get_active_span_from_thread_id(th.ident) == store["span2"] - # Do not quit the thread before we test, otherwise the collector might clean 
up the thread from the list of spans - quit_thread.set() - th.join() - - -def test_thread_to_span_multiple(tracer_and_collector): - t, c = tracer_and_collector - root = t.start_span("root", activate=True) - thread_id = _thread.get_ident() - assert c._thread_span_links.get_active_span_from_thread_id(thread_id) == root - subspan = t.start_span("subtrace", child_of=root, activate=True) - assert c._thread_span_links.get_active_span_from_thread_id(thread_id) == subspan - subspan.finish() - assert c._thread_span_links.get_active_span_from_thread_id(thread_id) == root - root.finish() - assert c._thread_span_links.get_active_span_from_thread_id(thread_id) is None - - -def test_thread_to_child_span_multiple_unknown_thread(tracer_and_collector): - t, c = tracer_and_collector - t.start_span("root", activate=True) - assert c._thread_span_links.get_active_span_from_thread_id(3456789) is None - - -def test_thread_to_child_span_clear(tracer_and_collector): - t, c = tracer_and_collector - root = t.start_span("root", activate=True) - thread_id = _thread.get_ident() - assert c._thread_span_links.get_active_span_from_thread_id(thread_id) == root - c._thread_span_links.clear_threads(set()) - assert c._thread_span_links.get_active_span_from_thread_id(thread_id) is None - - -def test_thread_to_child_span_multiple_more_children(tracer_and_collector): - t, c = tracer_and_collector - root = t.start_span("root", activate=True) - thread_id = _thread.get_ident() - assert c._thread_span_links.get_active_span_from_thread_id(thread_id) == root - subspan = t.start_span("subtrace", child_of=root, activate=True) - subsubspan = t.start_span("subsubtrace", child_of=subspan, activate=True) - assert c._thread_span_links.get_active_span_from_thread_id(thread_id) == subsubspan - subsubspan2 = t.start_span("subsubtrace2", child_of=subspan, activate=True) - assert c._thread_span_links.get_active_span_from_thread_id(thread_id) == subsubspan2 - # ⚠ subspan is not supposed to finish before its children, but 
the API authorizes it - subspan.finish() - assert c._thread_span_links.get_active_span_from_thread_id(thread_id) == subsubspan2 - - def test_collect_span_id(tracer, tmp_path): test_name = "test_collect_span_id" pprof_prefix = str(tmp_path / test_name) @@ -794,7 +634,7 @@ def test_collect_span_resource_after_finish(tracer, tmp_path, request): ddup.start() tracer._endpoint_call_counter_span_processor.enable() - with stack.StackCollector(tracer=tracer, endpoint_collection_enabled=True): + with stack.StackCollector(tracer=tracer): resource = str(uuid.uuid4()) span_type = ext.SpanTypes.WEB span = tracer.start_span("foobar", activate=True, span_type=span_type, resource=resource) @@ -834,7 +674,7 @@ def test_resource_not_collected(tmp_path, tracer): ddup.config(env="test", service=test_name, version="my_version", output_filename=pprof_prefix) ddup.start() - with stack.StackCollector(endpoint_collection_enabled=False, tracer=tracer): + with stack.StackCollector(tracer=tracer): resource = str(uuid.uuid4()) span_type = ext.SpanTypes.WEB with tracer.start_span("foobar", activate=True, resource=resource, span_type=span_type) as span: @@ -871,7 +711,7 @@ def test_collect_nested_span_id(tmp_path, tracer, request): ddup.start() tracer._endpoint_call_counter_span_processor.enable() - with stack.StackCollector(tracer=tracer, endpoint_collection_enabled=True): + with stack.StackCollector(tracer=tracer): resource = str(uuid.uuid4()) span_type = ext.SpanTypes.WEB with tracer.start_span("foobar", activate=True, resource=resource, span_type=span_type): @@ -922,126 +762,3 @@ def _trace(): for t in threads: t.join() - - -def test_thread_time_cache(): - tt = stack._ThreadTime() - - lock = threading.Lock() - lock.acquire() - - t = threading.Thread(target=lock.acquire) - t.start() - - main_thread_id = threading.current_thread().ident - - threads = [ - main_thread_id, - t.ident, - ] - - cpu_time = tt(threads) - - assert sorted(k[0] for k in cpu_time.keys()) == sorted([main_thread_id, 
t.ident]) - assert all(t >= 0 for t in cpu_time.values()) - - cpu_time = tt(threads) - - assert sorted(k[0] for k in cpu_time.keys()) == sorted([main_thread_id, t.ident]) - assert all(t >= 0 for t in cpu_time.values()) - - if stack.FEATURES["cpu-time"]: - assert set(tt._get_last_thread_time().keys()) == set( - (pthread_id, _threading.get_thread_native_id(pthread_id)) for pthread_id in threads - ) - - lock.release() - - threads = { - main_thread_id: _threading.get_thread_native_id(main_thread_id), - } - - cpu_time = tt(threads) - assert sorted(k[0] for k in cpu_time.keys()) == sorted([main_thread_id]) - assert all(t >= 0 for t in cpu_time.values()) - - if stack.FEATURES["cpu-time"]: - assert set(tt._get_last_thread_time().keys()) == set( - (pthread_id, _threading.get_thread_native_id(pthread_id)) for pthread_id in threads - ) - - -@pytest.mark.skipif(not TESTING_GEVENT or sys.version_info < (3, 9), reason="Not testing gevent") -@pytest.mark.subprocess(ddtrace_run=True) -def test_collect_gevent_threads(): - import gevent.monkey - - gevent.monkey.patch_all() - - import os - import threading - import time - - from ddtrace.internal.datadog.profiling import ddup - from ddtrace.profiling.collector import stack - from tests.profiling.collector import pprof_utils - - iteration = 100 - sleep_time = 0.01 - nb_threads = 15 - - # Start some greenthreads: they do nothing we just keep switching between them. - def _nothing(): - for _ in range(iteration): - # Do nothing and just switch to another greenlet - time.sleep(sleep_time) - - test_name = "test_collect_gevent_threads" - pprof_prefix = "/tmp/" + test_name - output_filename = pprof_prefix + "." 
+ str(os.getpid()) - - assert ddup.is_available - ddup.config(env="test", service="test_collect_gevent_threads", version="my_version", output_filename=pprof_prefix) - ddup.start() - - with stack.StackCollector(max_time_usage_pct=100): - threads = [] - i_to_tid = {} - for i in range(nb_threads): - t = threading.Thread(target=_nothing, name="TestThread %d" % i) - i_to_tid[i] = t.ident - t.start() - threads.append(t) - for t in threads: - t.join() - - ddup.upload() - - profile = pprof_utils.parse_newest_profile(output_filename) - samples = pprof_utils.get_samples_with_label_key(profile, "task name") - assert len(samples) > 0 - - for task_id in range(nb_threads): - pprof_utils.assert_profile_has_sample( - profile, - samples, - expected_sample=pprof_utils.StackEvent( - task_name="TestThread %d" % task_id, - task_id=i_to_tid[task_id], - thread_id=i_to_tid[task_id], - locations=[ - pprof_utils.StackLocation( - filename="test_stack.py", - function_name="_nothing", - line_no=_nothing.__code__.co_firstlineno + 3, - ) - ], - ), - ) - pprof_utils.assert_profile_has_sample( - profile, - samples, - expected_sample=pprof_utils.StackEvent( - task_name="MainThread", - ), - ) diff --git a/tests/profiling/collector/test_stack_asyncio.py b/tests/profiling/collector/test_stack_asyncio.py index f7d4ebd6cac..a13ea2aaad9 100644 --- a/tests/profiling/collector/test_stack_asyncio.py +++ b/tests/profiling/collector/test_stack_asyncio.py @@ -7,6 +7,7 @@ ), err=None, ) +# For macOS: err=None ignores expected stderr from tracer failing to connect to agent (not relevant to this test) def test_asyncio(): import asyncio import os @@ -14,10 +15,13 @@ def test_asyncio(): import uuid from ddtrace import ext + from ddtrace.internal.datadog.profiling import stack_v2 from ddtrace.profiling import profiler from ddtrace.trace import tracer from tests.profiling.collector import pprof_utils + assert stack_v2.is_available, stack_v2.failure_msg + sleep_time = 0.2 loop_run_time = 3 @@ -37,7 +41,10 @@ async def 
hello(): p = profiler.Profiler(tracer=tracer) p.start() - with tracer.trace("test_asyncio", resource=resource, span_type=span_type): + with tracer.trace("test_asyncio", resource=resource, span_type=span_type) as span: + span_id = span.span_id + local_root_span_id = span._local_root.span_id + loop = asyncio.new_event_loop() asyncio.set_event_loop(loop) maintask = loop.create_task(hello(), name="main") @@ -60,6 +67,8 @@ async def hello(): # get samples with task_name samples = pprof_utils.get_samples_with_label_key(profile, "task name") + # The next fails if stack_v2 is not properly configured with asyncio task + # tracking via ddtrace.profiling._asyncio assert len(samples) > 0 pprof_utils.assert_profile_has_sample( @@ -68,10 +77,8 @@ async def hello(): expected_sample=pprof_utils.StackEvent( thread_name="MainThread", task_name="main", - # Noticed that these are not set in Stack v1 as it doesn't propagate - # span correlation information. Stack v2 does. - # span_id=span_id, - # local_root_span_id=local_root_span_id, + span_id=span_id, + local_root_span_id=local_root_span_id, locations=[ pprof_utils.StackLocation( function_name="hello", filename="test_stack_asyncio.py", line_no=hello.__code__.co_firstlineno + 3 @@ -86,8 +93,8 @@ async def hello(): expected_sample=pprof_utils.StackEvent( thread_name="MainThread", task_name=t1_name, - # span_id=span_id, - # local_root_span_id=local_root_span_id, + span_id=span_id, + local_root_span_id=local_root_span_id, locations=[ pprof_utils.StackLocation( function_name="stuff", filename="test_stack_asyncio.py", line_no=stuff.__code__.co_firstlineno + 3 @@ -102,8 +109,8 @@ async def hello(): expected_sample=pprof_utils.StackEvent( thread_name="MainThread", task_name=t2_name, - # span_id=span_id, - # local_root_span_id=local_root_span_id, + span_id=span_id, + local_root_span_id=local_root_span_id, locations=[ pprof_utils.StackLocation( function_name="stuff", filename="test_stack_asyncio.py", line_no=stuff.__code__.co_firstlineno + 3 
@@ -111,3 +118,755 @@ async def hello(): ], ), ) + + +@pytest.mark.subprocess( + env=dict( + DD_PROFILING_OUTPUT_PPROF="/tmp/test_asyncio_start_profiler_from_process_before_importing_asyncio", + ), + err=None, +) +# For macOS: err=None ignores expected stderr from tracer failing to connect to agent (not relevant to this test) +def test_asyncio_start_profiler_from_process_before_importing_asyncio(): + from ddtrace.internal.datadog.profiling import stack_v2 + from ddtrace.profiling import profiler + + assert stack_v2.is_available, stack_v2.failure_msg + + p = profiler.Profiler() + p.start() + + import asyncio + import os + import sys + import time + + # Start an asyncio loop BEFORE importing profiler modules + # This simulates the bug scenario where a loop exists before profiling is enabled + loop = asyncio.new_event_loop() + asyncio.set_event_loop(loop) + + async def my_function(): + async def background_task_func() -> None: + """Background task that runs in the existing loop.""" + await asyncio.sleep(1.5) + + # Create and start a task in the existing loop + background_task = loop.create_task(background_task_func(), name="background") + assert background_task is not None + + # Run tasks that should be tracked + sleep_time = 0.2 + loop_run_time = 0.75 + + async def tracked_task() -> None: + start_time = time.time() + while time.time() < start_time + loop_run_time: + await asyncio.sleep(sleep_time) + + async def main_task(): + t1 = asyncio.create_task(tracked_task(), name="tracked 1") + t2 = asyncio.create_task(tracked_task(), name="tracked 2") + await tracked_task() + await asyncio.sleep(0.25) + return t1, t2 + + result = await main_task() + + await background_task + + return tracked_task, background_task_func, result + + main_task = loop.create_task(my_function(), name="main") + tracked_task_def, background_task_def, (t1, t2) = loop.run_until_complete(main_task) + + p.stop() + + t1_name = t1.get_name() + t2_name = t2.get_name() + + assert t1_name == "tracked 1" + 
assert t2_name == "tracked 2"
+
+    from tests.profiling.collector import pprof_utils
+
+    output_filename = os.environ["DD_PROFILING_OUTPUT_PPROF"] + "." + str(os.getpid())
+    profile = pprof_utils.parse_newest_profile(output_filename)
+
+    samples = pprof_utils.get_samples_with_label_key(profile, "task name")
+    assert len(samples) > 0, "No task names found - existing loop was not tracked!"
+
+    if sys.version_info >= (3, 11):
+        EXPECTED_FUNCTION_NAME_BACKGROUND = f"{my_function.__name__}.<locals>.{background_task_def.__name__}"
+    else:
+        EXPECTED_FUNCTION_NAME_BACKGROUND = background_task_def.__name__
+    EXPECTED_FILENAME_BACKGROUND = os.path.basename(background_task_def.__code__.co_filename)
+    EXPECTED_LINE_NO_BACKGROUND = background_task_def.__code__.co_firstlineno + 2
+
+    pprof_utils.assert_profile_has_sample(
+        profile,
+        samples,
+        expected_sample=pprof_utils.StackEvent(
+            thread_name="MainThread",
+            task_name="background",
+            locations=[
+                pprof_utils.StackLocation(
+                    function_name=EXPECTED_FUNCTION_NAME_BACKGROUND,
+                    filename=EXPECTED_FILENAME_BACKGROUND,
+                    line_no=EXPECTED_LINE_NO_BACKGROUND,
+                ),
+            ],
+        ),
+    )
+
+    # Verify specific tasks are in the profile
+    if sys.version_info >= (3, 11):
+        EXPECTED_FUNCTION_NAME_TRACKED = f"{my_function.__name__}.<locals>.{tracked_task_def.__name__}"
+    else:
+        EXPECTED_FUNCTION_NAME_TRACKED = tracked_task_def.__name__
+    EXPECTED_FILENAME_TRACKED = os.path.basename(tracked_task_def.__code__.co_filename)
+    EXPECTED_LINE_NO_TRACKED = tracked_task_def.__code__.co_firstlineno + 3
+
+    pprof_utils.assert_profile_has_sample(
+        profile,
+        samples,
+        expected_sample=pprof_utils.StackEvent(
+            thread_name="MainThread",
+            task_name=t1_name,
+            locations=[
+                pprof_utils.StackLocation(
+                    function_name=EXPECTED_FUNCTION_NAME_TRACKED,
+                    filename=EXPECTED_FILENAME_TRACKED,
+                    line_no=EXPECTED_LINE_NO_TRACKED,
+                )
+            ],
+        ),
+    )
+
+
+@pytest.mark.subprocess(
+    env=dict(
+        
DD_PROFILING_OUTPUT_PPROF="/tmp/test_asyncio_start_profiler_from_process_before_starting_loop", + ), + err=None, +) +# For macOS: err=None ignores expected stderr from tracer failing to connect to agent (not relevant to this test) +def test_asyncio_start_profiler_from_process_before_starting_loop(): + import asyncio + import os + import sys + import time + + from ddtrace.internal.datadog.profiling import stack_v2 + from ddtrace.profiling import profiler + + assert stack_v2.is_available, stack_v2.failure_msg + + p = profiler.Profiler() + p.start() + + # Start an asyncio loop BEFORE importing profiler modules + # This simulates the bug scenario where a loop exists before profiling is enabled + loop = asyncio.new_event_loop() + asyncio.set_event_loop(loop) + + async def my_function(): + async def background_task_func() -> None: + """Background task that runs in the existing loop.""" + await asyncio.sleep(1.5) + + # Create and start a task in the existing loop + background_task = loop.create_task(background_task_func(), name="background") + assert background_task is not None + + # Run tasks that should be tracked + sleep_time = 0.2 + loop_run_time = 0.75 + + async def tracked_task() -> None: + start_time = time.time() + while time.time() < start_time + loop_run_time: + await asyncio.sleep(sleep_time) + + async def main_task(): + t1 = asyncio.create_task(tracked_task(), name="tracked 1") + t2 = asyncio.create_task(tracked_task(), name="tracked 2") + await tracked_task() + await asyncio.sleep(0.25) + return t1, t2 + + result = await main_task() + + await background_task + + return tracked_task, background_task_func, result + + main_task = loop.create_task(my_function(), name="main") + tracked_task_def, background_task_def, (t1, t2) = loop.run_until_complete(main_task) + + p.stop() + + t1_name = t1.get_name() + t2_name = t2.get_name() + + assert t1_name == "tracked 1" + assert t2_name == "tracked 2" + + from tests.profiling.collector import pprof_utils + + output_filename 
= os.environ["DD_PROFILING_OUTPUT_PPROF"] + "." + str(os.getpid())
+    profile = pprof_utils.parse_newest_profile(output_filename)
+
+    samples = pprof_utils.get_samples_with_label_key(profile, "task name")
+    assert len(samples) > 0, "No task names found - existing loop was not tracked!"
+
+    if sys.version_info >= (3, 11):
+        EXPECTED_FUNCTION_NAME_BACKGROUND = f"{my_function.__name__}.<locals>.{background_task_def.__name__}"
+    else:
+        EXPECTED_FUNCTION_NAME_BACKGROUND = background_task_def.__name__
+    EXPECTED_FILENAME_BACKGROUND = os.path.basename(background_task_def.__code__.co_filename)
+    EXPECTED_LINE_NO_BACKGROUND = background_task_def.__code__.co_firstlineno + 2
+
+    pprof_utils.assert_profile_has_sample(
+        profile,
+        samples,
+        expected_sample=pprof_utils.StackEvent(
+            thread_name="MainThread",
+            task_name="background",
+            locations=[
+                pprof_utils.StackLocation(
+                    function_name=EXPECTED_FUNCTION_NAME_BACKGROUND,
+                    filename=EXPECTED_FILENAME_BACKGROUND,
+                    line_no=EXPECTED_LINE_NO_BACKGROUND,
+                ),
+            ],
+        ),
+    )
+
+    # Verify specific tasks are in the profile
+    if sys.version_info >= (3, 11):
+        EXPECTED_FUNCTION_NAME_TRACKED = f"{my_function.__name__}.<locals>.{tracked_task_def.__name__}"
+    else:
+        EXPECTED_FUNCTION_NAME_TRACKED = tracked_task_def.__name__
+    EXPECTED_FILENAME_TRACKED = os.path.basename(tracked_task_def.__code__.co_filename)
+    EXPECTED_LINE_NO_TRACKED = tracked_task_def.__code__.co_firstlineno + 3
+
+    pprof_utils.assert_profile_has_sample(
+        profile,
+        samples,
+        expected_sample=pprof_utils.StackEvent(
+            thread_name="MainThread",
+            task_name=t1_name,
+            locations=[
+                pprof_utils.StackLocation(
+                    function_name=EXPECTED_FUNCTION_NAME_TRACKED,
+                    filename=EXPECTED_FILENAME_TRACKED,
+                    line_no=EXPECTED_LINE_NO_TRACKED,
+                )
+            ],
+        ),
+    )
+
+
+@pytest.mark.xfail(reason="This test fails because there's no way to get the current loop if it's not already running.")
+@pytest.mark.subprocess(
+    env=dict(
+        
DD_PROFILING_OUTPUT_PPROF="/tmp/test_asyncio_start_profiler_from_process_after_creating_loop", + ), + err=None, +) +# For macOS: err=None ignores expected stderr from tracer failing to connect to agent (not relevant to this test) +def test_asyncio_start_profiler_from_process_after_creating_loop(): + import asyncio + import os + import sys + import time + + from ddtrace.internal.datadog.profiling import stack_v2 + from ddtrace.profiling import profiler + + # Start an asyncio loop BEFORE importing profiler modules + # This simulates the bug scenario where a loop exists before profiling is enabled + loop = asyncio.new_event_loop() + asyncio.set_event_loop(loop) + + assert stack_v2.is_available, stack_v2.failure_msg + + p = profiler.Profiler() + p.start() + + async def my_function(): + async def background_task_func() -> None: + """Background task that runs in the existing loop.""" + await asyncio.sleep(1.5) + + # Create and start a task in the existing loop + background_task = loop.create_task(background_task_func(), name="background") + assert background_task is not None + + # Run tasks that should be tracked + sleep_time = 0.2 + loop_run_time = 0.75 + + async def tracked_task() -> None: + start_time = time.time() + while time.time() < start_time + loop_run_time: + await asyncio.sleep(sleep_time) + + async def main_task(): + t1 = asyncio.create_task(tracked_task(), name="tracked 1") + t2 = asyncio.create_task(tracked_task(), name="tracked 2") + await tracked_task() + await asyncio.sleep(0.25) + return t1, t2 + + result = await main_task() + + await background_task + + return tracked_task, background_task_func, result + + main_task = loop.create_task(my_function(), name="main") + tracked_task_def, background_task_def, (t1, t2) = loop.run_until_complete(main_task) + + p.stop() + + t1_name = t1.get_name() + t2_name = t2.get_name() + + assert t1_name == "tracked 1" + assert t2_name == "tracked 2" + + from tests.profiling.collector import pprof_utils + + output_filename = 
os.environ["DD_PROFILING_OUTPUT_PPROF"] + "." + str(os.getpid())
+    profile = pprof_utils.parse_newest_profile(output_filename)
+
+    samples = pprof_utils.get_samples_with_label_key(profile, "task name")
+    assert len(samples) > 0, "No task names found - existing loop was not tracked!"
+
+    EXPECTED_FILENAME_BACKGROUND = os.path.basename(background_task_def.__code__.co_filename)
+    EXPECTED_LINE_NO_BACKGROUND = background_task_def.__code__.co_firstlineno + 2
+    if sys.version_info >= (3, 11):
+        EXPECTED_FUNCTION_NAME_BACKGROUND = f"{my_function.__name__}.<locals>.{background_task_def.__name__}"
+    else:
+        EXPECTED_FUNCTION_NAME_BACKGROUND = background_task_def.__name__
+
+    pprof_utils.assert_profile_has_sample(
+        profile,
+        samples,
+        expected_sample=pprof_utils.StackEvent(
+            thread_name="MainThread",
+            task_name="background",
+            locations=[
+                pprof_utils.StackLocation(
+                    function_name=EXPECTED_FUNCTION_NAME_BACKGROUND,
+                    filename=EXPECTED_FILENAME_BACKGROUND,
+                    line_no=EXPECTED_LINE_NO_BACKGROUND,
+                ),
+            ],
+        ),
+    )
+
+    # Verify specific tasks are in the profile
+    EXPECTED_FILENAME_TRACKED = os.path.basename(tracked_task_def.__code__.co_filename)
+    EXPECTED_LINE_NO_TRACKED = tracked_task_def.__code__.co_firstlineno + 3
+    if sys.version_info >= (3, 11):
+        EXPECTED_FUNCTION_NAME_TRACKED = f"{my_function.__name__}.<locals>.{tracked_task_def.__name__}"
+    else:
+        EXPECTED_FUNCTION_NAME_TRACKED = tracked_task_def.__name__
+
+    pprof_utils.assert_profile_has_sample(
+        profile,
+        samples,
+        expected_sample=pprof_utils.StackEvent(
+            thread_name="MainThread",
+            task_name=t1_name,
+            locations=[
+                pprof_utils.StackLocation(
+                    function_name=EXPECTED_FUNCTION_NAME_TRACKED,
+                    filename=EXPECTED_FILENAME_TRACKED,
+                    line_no=EXPECTED_LINE_NO_TRACKED,
+                )
+            ],
+        ),
+    )
+
+
+@pytest.mark.xfail(reason="This test fails because there's no way to get the current loop if it's not already running.")
+@pytest.mark.subprocess(
+    env=dict(
+        
DD_PROFILING_OUTPUT_PPROF="/tmp/test_asyncio_import_profiler_from_process_after_starting_loop", + ), + err=None, +) +# For macOS: err=None ignores expected stderr from tracer failing to connect to agent (not relevant to this test) +def test_asyncio_import_profiler_from_process_after_starting_loop(): + import asyncio + import os + import sys + import time + + # Start an asyncio loop BEFORE importing profiler modules + # This simulates the bug scenario where a loop exists before profiling is enabled + loop = asyncio.new_event_loop() + asyncio.set_event_loop(loop) + + from ddtrace.internal.datadog.profiling import stack_v2 + from ddtrace.profiling import profiler + + assert stack_v2.is_available, stack_v2.failure_msg + + p = profiler.Profiler() + p.start() + + async def my_function(): + async def background_task_func() -> None: + """Background task that runs in the existing loop.""" + await asyncio.sleep(1.5) + + # Create and start a task in the existing loop + background_task = loop.create_task(background_task_func(), name="background") + assert background_task is not None + + # Run tasks that should be tracked + sleep_time = 0.2 + loop_run_time = 0.75 + + async def tracked_task() -> None: + start_time = time.time() + while time.time() < start_time + loop_run_time: + await asyncio.sleep(sleep_time) + + async def main_task(): + t1 = asyncio.create_task(tracked_task(), name="tracked 1") + t2 = asyncio.create_task(tracked_task(), name="tracked 2") + await tracked_task() + await asyncio.sleep(0.25) + return t1, t2 + + result = await main_task() + + await background_task + + return tracked_task, background_task_func, result + + main_task = loop.create_task(my_function(), name="main") + tracked_task_def, background_task_def, (t1, t2) = loop.run_until_complete(main_task) + + p.stop() + + t1_name = t1.get_name() + t2_name = t2.get_name() + + assert t1_name == "tracked 1" + assert t2_name == "tracked 2" + + from tests.profiling.collector import pprof_utils + + output_filename 
= os.environ["DD_PROFILING_OUTPUT_PPROF"] + "." + str(os.getpid())
+    profile = pprof_utils.parse_newest_profile(output_filename)
+
+    samples = pprof_utils.get_samples_with_label_key(profile, "task name")
+    assert len(samples) > 0, "No task names found - existing loop was not tracked!"
+
+    EXPECTED_FILENAME_BACKGROUND = os.path.basename(background_task_def.__code__.co_filename)
+    EXPECTED_LINE_NO_BACKGROUND = background_task_def.__code__.co_firstlineno
+    if sys.version_info >= (3, 11):
+        EXPECTED_FUNCTION_NAME_BACKGROUND = f"{my_function.__name__}.<locals>.{background_task_def.__name__}"
+    else:
+        EXPECTED_FUNCTION_NAME_BACKGROUND = background_task_def.__name__
+
+    pprof_utils.assert_profile_has_sample(
+        profile,
+        samples,
+        expected_sample=pprof_utils.StackEvent(
+            thread_name="MainThread",
+            task_name="background",
+            locations=[
+                pprof_utils.StackLocation(
+                    function_name=EXPECTED_FUNCTION_NAME_BACKGROUND,
+                    filename=EXPECTED_FILENAME_BACKGROUND,
+                    line_no=EXPECTED_LINE_NO_BACKGROUND,
+                ),
+            ],
+        ),
+    )
+
+    # Verify specific tasks are in the profile
+    EXPECTED_FILENAME_TRACKED = os.path.basename(tracked_task_def.__code__.co_filename)
+    EXPECTED_LINE_NO_TRACKED = tracked_task_def.__code__.co_firstlineno + 3
+    if sys.version_info >= (3, 11):
+        EXPECTED_FUNCTION_NAME_TRACKED = f"{my_function.__name__}.<locals>.{tracked_task_def.__name__}"
+    else:
+        EXPECTED_FUNCTION_NAME_TRACKED = tracked_task_def.__name__
+
+    pprof_utils.assert_profile_has_sample(
+        profile,
+        samples,
+        expected_sample=pprof_utils.StackEvent(
+            thread_name="MainThread",
+            task_name=t1_name,
+            locations=[
+                pprof_utils.StackLocation(
+                    function_name=EXPECTED_FUNCTION_NAME_TRACKED,
+                    filename=EXPECTED_FILENAME_TRACKED,
+                    line_no=EXPECTED_LINE_NO_TRACKED,
+                )
+            ],
+        ),
+    )
+
+
+@pytest.mark.subprocess(
+    env=dict(
+        DD_PROFILING_OUTPUT_PPROF="/tmp/test_asyncio_start_profiler_from_process_after_creating_loop_and_task",
+    ),
+    err=None,
+)
+def test_asyncio_start_profiler_from_process_after_task_start():
+    # NOW 
import profiling modules - this should track the existing loop + import asyncio + import os + import sys + import time + + from ddtrace.internal.datadog.profiling import stack_v2 + from ddtrace.profiling import profiler + + # Start an asyncio loop BEFORE importing profiler modules + # This simulates the bug scenario where a loop exists before profiling is enabled + loop = asyncio.new_event_loop() + asyncio.set_event_loop(loop) + + async def my_function(): + async def background_task_func() -> None: + """Background task that runs in the existing loop.""" + await asyncio.sleep(1.5) + + # Create and start a task in the existing loop + background_task = loop.create_task(background_task_func(), name="background") + assert background_task is not None + + # Start profiler after loop is already running + assert asyncio.get_running_loop() is loop + + assert stack_v2.is_available, stack_v2.failure_msg + + p = profiler.Profiler() + p.start() + + # Run tasks that should be tracked + sleep_time = 0.2 + loop_run_time = 0.75 + + async def tracked_task() -> None: + start_time = time.time() + while time.time() < start_time + loop_run_time: + await asyncio.sleep(sleep_time) + + async def main_task(): + t1 = asyncio.create_task(tracked_task(), name="tracked 1") + t2 = asyncio.create_task(tracked_task(), name="tracked 2") + await tracked_task() + await asyncio.sleep(0.25) + return t1, t2 + + result = await main_task() + + await background_task + + return tracked_task, background_task_func, p, result + + main_task = loop.create_task(my_function(), name="main") + tracked_task_def, background_task_def, p, (t1, t2) = loop.run_until_complete(main_task) + + p.stop() + + t1_name = t1.get_name() + t2_name = t2.get_name() + + assert t1_name == "tracked 1" + assert t2_name == "tracked 2" + + from tests.profiling.collector import pprof_utils + + output_filename = os.environ["DD_PROFILING_OUTPUT_PPROF"] + "." 
+ str(os.getpid())
+    profile = pprof_utils.parse_newest_profile(output_filename)
+
+    samples = pprof_utils.get_samples_with_label_key(profile, "task name")
+    assert len(samples) > 0, "No task names found - existing loop was not tracked!"
+
+    EXPECTED_FILENAME_BACKGROUND = os.path.basename(background_task_def.__code__.co_filename)
+    EXPECTED_LINE_NO_BACKGROUND = background_task_def.__code__.co_firstlineno
+    if sys.version_info >= (3, 11):
+        EXPECTED_FUNCTION_NAME_BACKGROUND = f"{my_function.__name__}.<locals>.{background_task_def.__name__}"
+    else:
+        EXPECTED_FUNCTION_NAME_BACKGROUND = background_task_def.__name__
+
+    pprof_utils.assert_profile_has_sample(
+        profile,
+        samples,
+        expected_sample=pprof_utils.StackEvent(
+            thread_name="MainThread",
+            task_name="background",
+            locations=[
+                pprof_utils.StackLocation(
+                    function_name=EXPECTED_FUNCTION_NAME_BACKGROUND,
+                    filename=EXPECTED_FILENAME_BACKGROUND,
+                    line_no=EXPECTED_LINE_NO_BACKGROUND,
+                ),
+            ],
+        ),
+    )
+
+    # Verify specific tasks are in the profile
+    EXPECTED_FILENAME_TRACKED = os.path.basename(tracked_task_def.__code__.co_filename)
+    EXPECTED_LINE_NO_TRACKED = tracked_task_def.__code__.co_firstlineno + 3
+    if sys.version_info >= (3, 11):
+        EXPECTED_FUNCTION_NAME_TRACKED = f"{my_function.__name__}.<locals>.{tracked_task_def.__name__}"
+    else:
+        EXPECTED_FUNCTION_NAME_TRACKED = tracked_task_def.__name__
+
+    pprof_utils.assert_profile_has_sample(
+        profile,
+        samples,
+        expected_sample=pprof_utils.StackEvent(
+            thread_name="MainThread",
+            task_name=t1_name,
+            locations=[
+                pprof_utils.StackLocation(
+                    function_name=EXPECTED_FUNCTION_NAME_TRACKED,
+                    filename=EXPECTED_FILENAME_TRACKED,
+                    line_no=EXPECTED_LINE_NO_TRACKED,
+                )
+            ],
+        ),
+    )
+
+
+@pytest.mark.subprocess(
+    env=dict(
+        DD_PROFILING_OUTPUT_PPROF="/tmp/test_asyncio_start_profiler_from_process_after_task_start",
+    ),
+    err=None,
+)
+def test_asyncio_import_and_start_profiler_from_process_after_task_start():
+    import asyncio
+    import os
+    import sys
+    import time
+
+    
# Start an asyncio loop BEFORE importing profiler modules + # This simulates the bug scenario where a loop exists before profiling is enabled + loop = asyncio.new_event_loop() + asyncio.set_event_loop(loop) + + async def my_function(): + async def background_task_func() -> None: + """Background task that runs in the existing loop.""" + await asyncio.sleep(1.5) + + # Create and start a task in the existing loop + background_task = loop.create_task(background_task_func(), name="background") + assert background_task is not None + + # Start profiler after loop is already running + assert asyncio.get_running_loop() is loop + + # NOW import profiling modules - this should track the existing loop + from ddtrace.internal.datadog.profiling import stack_v2 + from ddtrace.profiling import profiler + + assert stack_v2.is_available, stack_v2.failure_msg + + p = profiler.Profiler() + p.start() + + # Run tasks that should be tracked + sleep_time = 0.2 + loop_run_time = 0.75 + + async def tracked_task() -> None: + start_time = time.time() + while time.time() < start_time + loop_run_time: + await asyncio.sleep(sleep_time) + + async def main_task(): + t1 = asyncio.create_task(tracked_task(), name="tracked 1") + t2 = asyncio.create_task(tracked_task(), name="tracked 2") + await tracked_task() + await asyncio.sleep(0.25) + return t1, t2 + + result = await main_task() + + await background_task + + return tracked_task, background_task_func, p, result + + main_task = loop.create_task(my_function(), name="main") + tracked_task_def, background_task_def, p, (t1, t2) = loop.run_until_complete(main_task) + + p.stop() + + t1_name = t1.get_name() + t2_name = t2.get_name() + + assert t1_name == "tracked 1" + assert t2_name == "tracked 2" + + from tests.profiling.collector import pprof_utils + + output_filename = os.environ["DD_PROFILING_OUTPUT_PPROF"] + "." 
+ str(os.getpid()) + profile = pprof_utils.parse_newest_profile(output_filename) + + samples = pprof_utils.get_samples_with_label_key(profile, "task name") + assert len(samples) > 0, "No task names found - existing loop was not tracked!" + + if sys.version_info >= (3, 11): + EXPECTED_FUNCTION_NAME_BACKGROUND = f"{my_function.__name__}..{background_task_def.__name__}" + else: + EXPECTED_FUNCTION_NAME_BACKGROUND = background_task_def.__name__ + EXPECTED_FILENAME_BACKGROUND = os.path.basename(background_task_def.__code__.co_filename) + EXPECTED_LINE_NO_BACKGROUND = background_task_def.__code__.co_firstlineno + + pprof_utils.assert_profile_has_sample( + profile, + samples, + expected_sample=pprof_utils.StackEvent( + thread_name="MainThread", + task_name="background", + locations=[ + pprof_utils.StackLocation( + function_name=EXPECTED_FUNCTION_NAME_BACKGROUND, + filename=EXPECTED_FILENAME_BACKGROUND, + line_no=EXPECTED_LINE_NO_BACKGROUND, + ), + ], + ), + ) + + # Verify specific tasks are in the profile + if sys.version_info >= (3, 11): + EXPECTED_FUNCTION_NAME_TRACKED = f"{my_function.__name__}..{tracked_task_def.__name__}" + else: + EXPECTED_FUNCTION_NAME_TRACKED = tracked_task_def.__name__ + EXPECTED_FILENAME_TRACKED = os.path.basename(tracked_task_def.__code__.co_filename) + EXPECTED_LINE_NO_TRACKED = tracked_task_def.__code__.co_firstlineno + 3 + + pprof_utils.assert_profile_has_sample( + profile, + samples, + expected_sample=pprof_utils.StackEvent( + thread_name="MainThread", + task_name=t1_name, + locations=[ + pprof_utils.StackLocation( + function_name=EXPECTED_FUNCTION_NAME_TRACKED, + filename=EXPECTED_FILENAME_TRACKED, + line_no=EXPECTED_LINE_NO_TRACKED, + ) + ], + ), + ) diff --git a/tests/profiling/collector/test_task.py b/tests/profiling/collector/test_task.py index 7e42c757965..a1ee8b30fb2 100644 --- a/tests/profiling/collector/test_task.py +++ b/tests/profiling/collector/test_task.py @@ -1,74 +1,8 @@ -import os import threading -import pytest - from 
ddtrace.profiling.collector import _task -TESTING_GEVENT = os.getenv("DD_PROFILE_TEST_GEVENT", False) - - def test_get_task_main(): # type: (...) -> None assert _task.get_task(threading.main_thread().ident) == (None, None, None) - - -@pytest.mark.subprocess -def test_list_tasks_nogevent(): - import threading - - from ddtrace.profiling.collector import _task - - assert _task.list_tasks(threading.main_thread().ident) == [] - - -@pytest.mark.skipif(not TESTING_GEVENT, reason="only works with gevent") -@pytest.mark.subprocess(ddtrace_run=True) -def test_list_tasks_gevent(): - import gevent.monkey - - gevent.monkey.patch_all() - - import threading - - from ddtrace.profiling.collector import _task - - l1 = threading.Lock() - l1.acquire() - - def wait(): - l1.acquire() - l1.release() - - def nothing(): - pass - - t1 = threading.Thread(target=wait, name="t1") - t1.start() - - tasks = _task.list_tasks(threading.main_thread().ident) - # can't check == 2 because there are left over from other tests - assert len(tasks) >= 2 - - main_thread_found = False - t1_found = False - for task in tasks: - assert len(task) == 3 - # main thread - if task[0] == threading.main_thread().ident or task[1] == "MainThread": - assert task[1] == "MainThread" - assert task[2] is None - main_thread_found = True - # t1 - elif task[0] == t1.ident: - assert task[1] == "t1" - assert task[2] is not None - t1_found = True - - l1.release() - - t1.join() - - assert t1_found - assert main_thread_found diff --git a/tests/profiling_v2/collector/test_threading.py b/tests/profiling/collector/test_threading.py similarity index 99% rename from tests/profiling_v2/collector/test_threading.py rename to tests/profiling/collector/test_threading.py index 11bfc1acfce..bdab98a9d7c 100644 --- a/tests/profiling_v2/collector/test_threading.py +++ b/tests/profiling/collector/test_threading.py @@ -794,7 +794,7 @@ def test_lock_enter_exit_events(self) -> None: [True, False], ) def test_class_member_lock(self, 
inspect_dir_enabled: bool) -> None: - with mock.patch("ddtrace.settings.profiling.config.lock.name_inspect_dir", inspect_dir_enabled): + with mock.patch("ddtrace.internal.settings.profiling.config.lock.name_inspect_dir", inspect_dir_enabled): expected_lock_name: Optional[str] = "foo_lock" if inspect_dir_enabled else None with self.collector_class(capture_pct=100): diff --git a/tests/profiling_v2/collector/test_threading_asyncio.py b/tests/profiling/collector/test_threading_asyncio.py similarity index 100% rename from tests/profiling_v2/collector/test_threading_asyncio.py rename to tests/profiling/collector/test_threading_asyncio.py diff --git a/tests/profiling/collector/test_traceback.py b/tests/profiling/collector/test_traceback.py deleted file mode 100644 index 2caeaa90002..00000000000 --- a/tests/profiling/collector/test_traceback.py +++ /dev/null @@ -1,22 +0,0 @@ -import sys - -from ddtrace.profiling.collector import _traceback - - -def _x(): - raise ValueError("hey!") - - -def test_check_traceback_to_frames(): - try: - _x() - except Exception: - exc_type, exc_value, traceback = sys.exc_info() - frames, nframes = _traceback.traceback_to_frames(traceback, 10) - assert nframes == 2 - - this_file = __file__.replace(".pyc", ".py") - assert frames == [ - (this_file, 7, "_x", ""), - (this_file, 15, "test_check_traceback_to_frames", ""), - ] diff --git a/tests/profiling/conftest.py b/tests/profiling/conftest.py deleted file mode 100644 index 6b04983dd8f..00000000000 --- a/tests/profiling/conftest.py +++ /dev/null @@ -1,11 +0,0 @@ -import os - -import pytest - - -@pytest.fixture(scope="session", autouse=True) -def disable_coverage_for_subprocess(): - try: - del os.environ["COV_CORE_SOURCE"] - except KeyError: - pass diff --git a/tests/contrib/google_generativeai/__init__.py b/tests/profiling/exporter/__init__.py similarity index 100% rename from tests/contrib/google_generativeai/__init__.py rename to tests/profiling/exporter/__init__.py diff --git 
a/tests/profiling_v2/exporter/test_ddup.py b/tests/profiling/exporter/test_ddup.py similarity index 96% rename from tests/profiling_v2/exporter/test_ddup.py rename to tests/profiling/exporter/test_ddup.py index 6ec350abfe3..f799bfe0e28 100644 --- a/tests/profiling_v2/exporter/test_ddup.py +++ b/tests/profiling/exporter/test_ddup.py @@ -46,7 +46,7 @@ def test_tags_propagated(): from ddtrace.profiling.profiler import Profiler # noqa: I001 from ddtrace.internal.datadog.profiling import ddup - from ddtrace.settings.profiling import config + from ddtrace.internal.settings.profiling import config # DD_PROFILING_TAGS should override DD_TAGS assert config.tags["hello"] == "python" diff --git a/tests/profiling_v2/gunicorn.conf.py b/tests/profiling/gunicorn.conf.py similarity index 100% rename from tests/profiling_v2/gunicorn.conf.py rename to tests/profiling/gunicorn.conf.py diff --git a/tests/profiling_v2/native_tests b/tests/profiling/native_tests similarity index 100% rename from tests/profiling_v2/native_tests rename to tests/profiling/native_tests diff --git a/tests/profiling/simple_program.py b/tests/profiling/simple_program.py index b0d91cec73d..1ff264c7408 100755 --- a/tests/profiling/simple_program.py +++ b/tests/profiling/simple_program.py @@ -1,7 +1,6 @@ #!/usr/bin/env python import os import sys -import time from ddtrace.internal import service from ddtrace.profiling import bootstrap @@ -17,11 +16,6 @@ print("hello world") assert running_collector.status == service.ServiceStatus.RUNNING -print(running_collector.interval) - -t0 = time.time() -while time.time() - t0 < (running_collector.interval * 10): - pass # Do some serious memory allocations! 
for _ in range(5000000): diff --git a/tests/profiling_v2/simple_program_pytorch_gpu.py b/tests/profiling/simple_program_pytorch_gpu.py similarity index 100% rename from tests/profiling_v2/simple_program_pytorch_gpu.py rename to tests/profiling/simple_program_pytorch_gpu.py diff --git a/tests/profiling/suitespec.yml b/tests/profiling/suitespec.yml index d8c85bdc9ca..6d3e31b7544 100644 --- a/tests/profiling/suitespec.yml +++ b/tests/profiling/suitespec.yml @@ -29,7 +29,7 @@ components: - ddtrace/profiling/* - ddtrace/internal/datadog/profiling/* - ddtrace/internal/processor/endpoint_call_counter.py - - ddtrace/settings/profiling.py + - ddtrace/internal/settings/profiling.py core: - ddtrace/internal/__init__.py - ddtrace/internal/_exceptions.py @@ -71,7 +71,7 @@ components: - ddtrace/__init__.py - ddtrace/py.typed - ddtrace/version.py - - ddtrace/settings/_config.py + - ddtrace/internal/settings/_config.py - src/native/* bootstrap: - ddtrace/bootstrap/* @@ -81,27 +81,14 @@ suites: profile: env: DD_TRACE_AGENT_URL: '' - # `riot list --hash-only profile$ | wc -1` = 19 - parallelism: 19 + # `riot list --hash-only profile | wc -l` = 26 + parallelism: 26 paths: - '@bootstrap' - '@core' - '@profiling' - tests/profiling/* - pattern: profile$ - retry: 2 - runner: riot - profile_v2: - env: - DD_TRACE_AGENT_URL: '' - # `riot list --hash-only profile-v2$ | wc -1` = 19 - parallelism: 19 - paths: - - '@bootstrap' - - '@core' - - '@profiling' - tests/profiling/suitespec.yml - - tests/profiling_v2/* - pattern: profile-v2 + pattern: profile retry: 2 runner: riot diff --git a/tests/profiling_v2/test_accuracy.py b/tests/profiling/test_accuracy.py similarity index 95% rename from tests/profiling_v2/test_accuracy.py rename to tests/profiling/test_accuracy.py index 2333e429e55..6fd014af181 100644 --- a/tests/profiling_v2/test_accuracy.py +++ b/tests/profiling/test_accuracy.py @@ -68,8 +68,8 @@ def test_accuracy_stack_v2(): from ddtrace.profiling import profiler from 
tests.profiling.collector import pprof_utils - from tests.profiling_v2.test_accuracy import assert_almost_equal - from tests.profiling_v2.test_accuracy import spend_16 + from tests.profiling.test_accuracy import assert_almost_equal + from tests.profiling.test_accuracy import spend_16 # Set this to 100 so we don't sleep too often and mess with the precision. p = profiler.Profiler() diff --git a/tests/profiling_v2/test_code_provenance.py b/tests/profiling/test_code_provenance.py similarity index 100% rename from tests/profiling_v2/test_code_provenance.py rename to tests/profiling/test_code_provenance.py diff --git a/tests/profiling/test_gunicorn.py b/tests/profiling/test_gunicorn.py index a21acaa523f..78297c85e55 100644 --- a/tests/profiling/test_gunicorn.py +++ b/tests/profiling/test_gunicorn.py @@ -4,10 +4,21 @@ import subprocess import sys import time +import urllib.request import pytest -from . import utils +from tests.profiling.collector import pprof_utils + + +# DEV: gunicorn tests are hard to debug, so keeping these print statements for +# future debugging +DEBUG_PRINT = True + + +def debug_print(*args): + if DEBUG_PRINT: + print(*args) # gunicorn is not available on Windows @@ -19,17 +30,27 @@ def _run_gunicorn(*args): cmd = ( - ["ddtrace-run", "gunicorn", "--bind", "127.0.0.1:7643", "--chdir", os.path.dirname(__file__)] + [ + "ddtrace-run", + "gunicorn", + "--bind", + "127.0.0.1:7644", + "--worker-tmp-dir", + "/dev/shm", + "-c", + os.path.dirname(__file__) + "/gunicorn.conf.py", + "--chdir", + os.path.dirname(__file__), + ] + list(args) - + ["gunicorn-app:app"] + + ["tests.profiling.gunicorn-app:app"] ) return subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) @pytest.fixture def gunicorn(monkeypatch): - # Do not ignore profiler so we have samples in the output pprof - monkeypatch.setenv("DD_PROFILING_IGNORE_PROFILER", "0") + monkeypatch.setenv("DD_PROFILING_IGNORE_PROFILER", "1") monkeypatch.setenv("DD_PROFILING_ENABLED", "1") yield 
_run_gunicorn @@ -44,22 +65,75 @@ def _test_gunicorn(gunicorn, tmp_path, monkeypatch, *args): # type: (...) -> None filename = str(tmp_path / "gunicorn.pprof") monkeypatch.setenv("DD_PROFILING_OUTPUT_PPROF", filename) - - proc = gunicorn("-w", "3", *args) - time.sleep(3) - proc.terminate() + monkeypatch.setenv("_DD_PROFILING_STACK_V2_ADAPTIVE_SAMPLING_ENABLED", "0") + + debug_print("Creating gunicorn workers") + # DEV: We only start 1 worker to simplify the test + proc = gunicorn("-w", "1", *args) + # Wait for the workers to start + time.sleep(5) + + if proc.poll() is not None: + pytest.fail("Gunicorn failed to start") + + debug_print("Making request to gunicorn server") + try: + with urllib.request.urlopen("http://127.0.0.1:7644", timeout=5) as f: + status_code = f.getcode() + assert status_code == 200, status_code + response = f.read().decode() + debug_print(response) + except Exception as e: + proc.terminate() + output = proc.stdout.read().decode() + print(output) + pytest.fail("Failed to make request to gunicorn server %s" % e) + finally: + # Need to terminate the process to get the output and release the port + proc.terminate() + + debug_print("Reading gunicorn worker output to get PIDs") output = proc.stdout.read().decode() worker_pids = _get_worker_pids(output) + debug_print("Gunicorn worker PIDs: %s" % worker_pids) - assert len(worker_pids) == 3, output - assert proc.wait() == 0, output - assert "module 'threading' has no attribute '_active'" not in output, output + for line in output.splitlines(): + debug_print(line) - utils.check_pprof_file("%s.%d" % (filename, proc.pid)) - for pid in worker_pids: - utils.check_pprof_file("%s.%d" % (filename, pid)) + assert len(worker_pids) == 1, output + debug_print("Waiting for gunicorn process to terminate") + try: + assert proc.wait(timeout=5) == 0, output + except subprocess.TimeoutExpired: + pytest.fail("Failed to terminate gunicorn process ", output) + assert "module 'threading' has no attribute '_active'" not in 
output, output + for pid in worker_pids: + debug_print("Reading pprof file with prefix %s.%d" % (filename, pid)) + profile = pprof_utils.parse_newest_profile("%s.%d" % (filename, pid)) + # This returns a list of samples that have non-zero cpu-time + samples = pprof_utils.get_samples_with_value_type(profile, "cpu-time") + assert len(samples) > 0 + + # DEV: somehow the filename is reported as either __init__.py or gunicorn-app.py + # when run on GitLab CI. We need to match either of these two. + filename_regex = r"^(?:__init__\.py|gunicorn-app\.py)$" + + expected_location = pprof_utils.StackLocation(function_name="fib", filename=filename_regex, line_no=8) + + pprof_utils.assert_profile_has_sample( + profile, + samples=samples, + # DEV: we expect multiple locations as fibonacci is recursive + expected_sample=pprof_utils.StackEvent(locations=[expected_location, expected_location]), + ) + + +@pytest.mark.skipif( + sys.version_info[:2] == (3, 8) and os.environ.get("DD_PROFILE_TEST_GEVENT") == "1", + reason="Flaky and fails often on Python 3.8 with DD_PROFILE_TEST_GEVENT=1", +) def test_gunicorn(gunicorn, tmp_path, monkeypatch): # type: (...) -> None args = ("-k", "gevent") if TESTING_GEVENT else tuple() diff --git a/tests/profiling/test_main.py b/tests/profiling/test_main.py index 92d171a6f8b..9ffb615b860 100644 --- a/tests/profiling/test_main.py +++ b/tests/profiling/test_main.py @@ -5,76 +5,142 @@ import pytest +from tests.profiling.collector import lock_utils +from tests.profiling.collector import pprof_utils from tests.utils import call_program -from . 
import utils - -def test_call_script(monkeypatch): - # Set a very short timeout to exit fast - monkeypatch.setenv("DD_PROFILING_API_TIMEOUT_MS", "100") - monkeypatch.setenv("DD_PROFILING_ENABLED", "1") +def test_call_script(): + env = os.environ.copy() + env["DD_PROFILING_ENABLED"] = "1" stdout, stderr, exitcode, _ = call_program( - "ddtrace-run", sys.executable, os.path.join(os.path.dirname(__file__), "simple_program.py") + "ddtrace-run", sys.executable, os.path.join(os.path.dirname(__file__), "simple_program.py"), env=env ) if sys.platform == "win32": assert exitcode == 0, (stdout, stderr) else: assert exitcode == 42, (stdout, stderr) - hello, interval, _ = list(s.strip() for s in stdout.decode().strip().split("\n")) + hello, _ = list(s.strip() for s in stdout.decode().strip().split("\n")) assert hello == "hello world", stdout.decode().strip() - assert float(interval) >= 0.01, stdout.decode().strip() @pytest.mark.skipif(not os.getenv("DD_PROFILE_TEST_GEVENT", False), reason="Not testing gevent") -def test_call_script_gevent(monkeypatch): - monkeypatch.setenv("DD_PROFILING_API_TIMEOUT_MS", "100") +def test_call_script_gevent(): + if sys.version_info[:2] == (3, 8): + pytest.skip("this test is flaky on 3.8 with stack v2") + env = os.environ.copy() + env["DD_PROFILING_ENABLED"] = "1" stdout, stderr, exitcode, pid = call_program( - sys.executable, os.path.join(os.path.dirname(__file__), "simple_program_gevent.py") + sys.executable, os.path.join(os.path.dirname(__file__), "simple_program_gevent.py"), env=env ) assert exitcode == 0, (stdout, stderr) -def test_call_script_pprof_output(tmp_path, monkeypatch): +def test_call_script_pprof_output(tmp_path): """This checks if the pprof output and atexit register work correctly. The script does not run for one minute, so if the `stop_on_exit` flag is broken, this test will fail. 
""" filename = str(tmp_path / "pprof") - monkeypatch.setenv("DD_PROFILING_OUTPUT_PPROF", filename) - monkeypatch.setenv("DD_PROFILING_CAPTURE_PCT", "1") - monkeypatch.setenv("DD_PROFILING_ENABLED", "1") + env = os.environ.copy() + env["DD_PROFILING_OUTPUT_PPROF"] = filename + env["DD_PROFILING_CAPTURE_PCT"] = "1" + env["DD_PROFILING_ENABLED"] = "1" stdout, stderr, exitcode, _ = call_program( - "ddtrace-run", sys.executable, os.path.join(os.path.dirname(__file__), "simple_program.py") + "ddtrace-run", + sys.executable, + os.path.join(os.path.dirname(__file__), "../profiling", "simple_program.py"), + env=env, ) if sys.platform == "win32": assert exitcode == 0, (stdout, stderr) else: assert exitcode == 42, (stdout, stderr) - hello, interval, pid = list(s.strip() for s in stdout.decode().strip().split("\n")) - utils.check_pprof_file(filename + "." + str(pid)) + _, pid = list(s.strip() for s in stdout.decode().strip().split("\n")) + profile = pprof_utils.parse_newest_profile(filename + "." + str(pid)) + samples = pprof_utils.get_samples_with_value_type(profile, "cpu-time") + assert len(samples) > 0 @pytest.mark.skipif(sys.platform == "win32", reason="fork only available on Unix") -def test_fork(tmp_path, monkeypatch): +def test_fork(tmp_path): filename = str(tmp_path / "pprof") - monkeypatch.setenv("DD_PROFILING_API_TIMEOUT_MS", "100") - monkeypatch.setenv("DD_PROFILING_OUTPUT_PPROF", filename) - monkeypatch.setenv("DD_PROFILING_CAPTURE_PCT", "100") + env = os.environ.copy() + env["DD_PROFILING_OUTPUT_PPROF"] = filename + env["DD_PROFILING_CAPTURE_PCT"] = "100" stdout, stderr, exitcode, pid = call_program( - "python", os.path.join(os.path.dirname(__file__), "simple_program_fork.py") + "python", os.path.join(os.path.dirname(__file__), "simple_program_fork.py"), env=env ) assert exitcode == 0 child_pid = stdout.decode().strip() - utils.check_pprof_file(filename + "." + str(pid)) - utils.check_pprof_file(filename + "." 
+ str(child_pid), sample_type="lock-release") + profile = pprof_utils.parse_newest_profile(filename + "." + str(pid)) + parent_expected_acquire_events = [ + pprof_utils.LockAcquireEvent( + caller_name="", + filename="simple_program_fork.py", + linenos=lock_utils.LineNo(create=11, acquire=12, release=28), + lock_name="lock", + ), + ] + parent_expected_release_events = [ + pprof_utils.LockReleaseEvent( + caller_name="", + filename="simple_program_fork.py", + linenos=lock_utils.LineNo(create=11, acquire=12, release=28), + lock_name="lock", + ), + ] + pprof_utils.assert_lock_events( + profile, + expected_acquire_events=parent_expected_acquire_events, + expected_release_events=parent_expected_release_events, + ) + child_profile = pprof_utils.parse_newest_profile(filename + "." + str(child_pid)) + # We expect the child profile to not have lock events from the parent process + # Note that assert_lock_events function only checks that the given events + # exists, and doesn't assert that other events don't exist. 
+ with pytest.raises(AssertionError): + pprof_utils.assert_lock_events( + child_profile, + expected_acquire_events=parent_expected_acquire_events, + expected_release_events=parent_expected_release_events, + ) + pprof_utils.assert_lock_events( + child_profile, + expected_acquire_events=[ + # After fork(), we clear the samples in child, so we only have one + # lock acquire event + pprof_utils.LockAcquireEvent( + caller_name="", + filename="simple_program_fork.py", + linenos=lock_utils.LineNo(create=24, acquire=25, release=26), + lock_name="lock", + ), + ], + expected_release_events=[ + pprof_utils.LockReleaseEvent( + caller_name="", + filename="simple_program_fork.py", + linenos=lock_utils.LineNo(create=11, acquire=12, release=21), + lock_name="lock", + ), + pprof_utils.LockReleaseEvent( + caller_name="", + filename="simple_program_fork.py", + linenos=lock_utils.LineNo(create=24, acquire=25, release=26), + lock_name="lock", + ), + ], + ) @pytest.mark.skipif(sys.platform == "win32", reason="fork only available on Unix") @pytest.mark.skipif(not os.getenv("DD_PROFILE_TEST_GEVENT", False), reason="Not testing gevent") -def test_fork_gevent(monkeypatch): - monkeypatch.setenv("DD_PROFILING_API_TIMEOUT_MS", "100") - stdout, stderr, exitcode, pid = call_program("python", os.path.join(os.path.dirname(__file__), "gevent_fork.py")) +def test_fork_gevent(): + env = os.environ.copy() + stdout, stderr, exitcode, pid = call_program( + "python", os.path.join(os.path.dirname(__file__), "../profiling", "gevent_fork.py"), env=env + ) assert exitcode == 0 @@ -85,21 +151,27 @@ def test_fork_gevent(monkeypatch): "method", set(methods) - {"forkserver", "fork"}, ) -def test_multiprocessing(method, tmp_path, monkeypatch): +def test_multiprocessing(method, tmp_path): filename = str(tmp_path / "pprof") - monkeypatch.setenv("DD_PROFILING_OUTPUT_PPROF", filename) - monkeypatch.setenv("DD_PROFILING_ENABLED", "1") - monkeypatch.setenv("DD_PROFILING_CAPTURE_PCT", "1") + env = os.environ.copy() + 
env["DD_PROFILING_OUTPUT_PPROF"] = filename + env["DD_PROFILING_ENABLED"] = "1" + env["DD_PROFILING_CAPTURE_PCT"] = "1" stdout, stderr, exitcode, _ = call_program( "ddtrace-run", sys.executable, - os.path.join(os.path.dirname(__file__), "_test_multiprocessing.py"), + os.path.join(os.path.dirname(__file__), "../profiling", "_test_multiprocessing.py"), method, + env=env, ) assert exitcode == 0, (stdout, stderr) pid, child_pid = list(s.strip() for s in stdout.decode().strip().split("\n")) - utils.check_pprof_file(filename + "." + str(pid)) - utils.check_pprof_file(filename + "." + str(child_pid), sample_type="wall-time") + profile = pprof_utils.parse_newest_profile(filename + "." + str(pid)) + samples = pprof_utils.get_samples_with_value_type(profile, "cpu-time") + assert len(samples) > 0 + child_profile = pprof_utils.parse_newest_profile(filename + "." + str(child_pid)) + child_samples = pprof_utils.get_samples_with_value_type(child_profile, "cpu-time") + assert len(child_samples) > 0 @pytest.mark.subprocess( @@ -116,7 +188,6 @@ def test_memalloc_no_init_error_on_fork(): os.waitpid(pid, 0) -@pytest.mark.skipif(sys.version_info[:2] == (3, 9), reason="This test is flaky on Python 3.9") @pytest.mark.subprocess( ddtrace_run=True, env=dict( diff --git a/tests/profiling/test_profiler.py b/tests/profiling/test_profiler.py index e34b05f7319..fd2cd861d9a 100644 --- a/tests/profiling/test_profiler.py +++ b/tests/profiling/test_profiler.py @@ -1,10 +1,12 @@ import logging +import sys import time import mock import pytest import ddtrace +from ddtrace.internal.compat import PYTHON_VERSION_INFO from ddtrace.profiling import collector from ddtrace.profiling import profiler from ddtrace.profiling import scheduler @@ -145,3 +147,174 @@ def test_profiler_serverless(monkeypatch): p = profiler.Profiler() assert isinstance(p._scheduler, scheduler.ServerlessScheduler) assert p.tags["functionname"] == "foobar" + + +@pytest.mark.skipif(PYTHON_VERSION_INFO < (3, 10), reason="ddtrace under 
Python 3.9 is deprecated") +@pytest.mark.subprocess() +def test_profiler_ddtrace_deprecation(): + """ + ddtrace interfaces loaded by the profiler can be marked deprecated, and we should update + them when this happens. As reported by https://github.com/DataDog/dd-trace-py/issues/8881 + """ + import warnings + + with warnings.catch_warnings(): + warnings.simplefilter("error", DeprecationWarning) + from ddtrace.profiling import _threading # noqa:F401 + from ddtrace.profiling import event # noqa:F401 + from ddtrace.profiling import profiler # noqa:F401 + from ddtrace.profiling import scheduler # noqa:F401 + from ddtrace.profiling.collector import _lock # noqa:F401 + from ddtrace.profiling.collector import _task # noqa:F401 + from ddtrace.profiling.collector import _traceback # noqa:F401 + from ddtrace.profiling.collector import memalloc # noqa:F401 + from ddtrace.profiling.collector import stack # noqa:F401 + + +@pytest.mark.subprocess( + env=dict(DD_PROFILING_ENABLED="true"), + err="Failed to load ddup module (mock failure message), disabling profiling\n", +) +def test_libdd_failure_telemetry_logging(): + """Test that libdd initialization failures log to telemetry. This mimics + one of the two scenarios where profiling can be configured. 
+ 1) using ddtrace-run with DD_PROFILNG_ENABLED=true + 2) import ddtrace.profiling.auto + """ + + import mock + + with mock.patch.multiple( + "ddtrace.internal.datadog.profiling.ddup", + failure_msg="mock failure message", + is_available=False, + ), mock.patch("ddtrace.internal.telemetry.telemetry_writer.add_log") as mock_add_log: + from ddtrace.internal.settings.profiling import config # noqa:F401 + from ddtrace.internal.telemetry.constants import TELEMETRY_LOG_LEVEL + + mock_add_log.assert_called_once() + call_args = mock_add_log.call_args + assert call_args[0][0] == TELEMETRY_LOG_LEVEL.ERROR + message = call_args[0][1] + assert "Failed to load ddup module" in message + assert "mock failure message" in message + + +@pytest.mark.subprocess( + # We'd like to check the stderr, but it somehow leads to triggering the + # upload code path on macOS + err=None +) +def test_libdd_failure_telemetry_logging_with_auto(): + import mock + + with mock.patch.multiple( + "ddtrace.internal.datadog.profiling.ddup", + failure_msg="mock failure message", + is_available=False, + ), mock.patch("ddtrace.internal.telemetry.telemetry_writer.add_log") as mock_add_log: + from ddtrace.internal.telemetry.constants import TELEMETRY_LOG_LEVEL + import ddtrace.profiling.auto # noqa: F401 + + mock_add_log.assert_called_once() + call_args = mock_add_log.call_args + assert call_args[0][0] == TELEMETRY_LOG_LEVEL.ERROR + message = call_args[0][1] + assert "Failed to load ddup module" in message + assert "mock failure message" in message + + +@pytest.mark.subprocess( + env=dict(DD_PROFILING_ENABLED="true"), + err="Failed to load stack_v2 module (mock failure message), falling back to v1 stack sampler\n", +) +def test_stack_v2_failure_telemetry_logging(): + # Test that stack_v2 initialization failures log to telemetry. 
This is + # mimicking the behavior of ddtrace-run, where the config is imported to + # determine if profiling/stack_v2 is enabled + + import mock + + with mock.patch.multiple( + "ddtrace.internal.datadog.profiling.stack_v2", + failure_msg="mock failure message", + is_available=False, + ), mock.patch("ddtrace.internal.telemetry.telemetry_writer.add_log") as mock_add_log: + from ddtrace.internal.settings.profiling import config # noqa: F401 + from ddtrace.internal.telemetry.constants import TELEMETRY_LOG_LEVEL + + mock_add_log.assert_called_once() + call_args = mock_add_log.call_args + assert call_args[0][0] == TELEMETRY_LOG_LEVEL.ERROR + message = call_args[0][1] + assert "Failed to load stack_v2 module" in message + assert "mock failure message" in message + + +@pytest.mark.subprocess( + # We'd like to check the stderr, but it somehow leads to triggering the + # upload code path on macOS. + err=None, +) +def test_stack_v2_failure_telemetry_logging_with_auto(): + import mock + + with mock.patch.multiple( + "ddtrace.internal.datadog.profiling.stack_v2", + failure_msg="mock failure message", + is_available=False, + ), mock.patch("ddtrace.internal.telemetry.telemetry_writer.add_log") as mock_add_log: + from ddtrace.internal.telemetry.constants import TELEMETRY_LOG_LEVEL + import ddtrace.profiling.auto # noqa: F401 + + mock_add_log.assert_called_once() + call_args = mock_add_log.call_args + assert call_args[0][0] == TELEMETRY_LOG_LEVEL.ERROR + message = call_args[0][1] + assert "Failed to load stack_v2 module" in message + assert "mock failure message" in message + + +@pytest.mark.skipif(not sys.platform.startswith("linux"), reason="only works on linux") +@pytest.mark.subprocess(err=None) +# For macOS: Could print 'Error uploading' but okay to ignore since we are checking if native_id is set +def test_user_threads_have_native_id(): + from os import getpid + from threading import Thread + from threading import _MainThread # pyright: ignore[reportAttributeAccessIssue] + 
from threading import current_thread + from time import sleep + + from ddtrace.profiling import profiler + + # DEV: We used to run this test with ddtrace_run=True passed into the + # subprocess decorator, but that caused this to be flaky for Python 3.8.x + # with gevent. When it failed for that specific venv, current_thread() + # returned a DummyThread instead of a _MainThread. + p = profiler.Profiler() + p.start() + + main = current_thread() + assert isinstance(main, _MainThread) + # We expect the current thread to have the same ID as the PID + assert main.native_id == getpid(), (main.native_id, getpid()) + + t = Thread(target=lambda: None) + t.start() + + for _ in range(10): + try: + # The TID should be higher than the PID, but not too high + assert 0 < t.native_id - getpid() < 100, (t.native_id, getpid()) + except AttributeError: + # The native_id attribute is set by the thread so we might have to + # wait a bit for it to be set. + sleep(0.1) + else: + break + else: + raise AssertionError("Thread.native_id not set") + + t.join() + + p.stop() diff --git a/tests/profiling_v2/test_pytorch.py b/tests/profiling/test_pytorch.py similarity index 100% rename from tests/profiling_v2/test_pytorch.py rename to tests/profiling/test_pytorch.py diff --git a/tests/profiling/test_scheduler.py b/tests/profiling/test_scheduler.py index 0da58e0e922..f35479d431c 100644 --- a/tests/profiling/test_scheduler.py +++ b/tests/profiling/test_scheduler.py @@ -7,11 +7,6 @@ from ddtrace.profiling import scheduler -def test_exporter_failure(): - s = scheduler.Scheduler() - s.flush() - - def test_thread_name(): s = scheduler.Scheduler() s.start() @@ -37,7 +32,7 @@ def call_me(): s = scheduler.Scheduler(before_flush=call_me) s.flush() assert caplog.record_tuples == [ - ("ddtrace.profiling.scheduler", logging.ERROR, "Scheduler before_flush hook failed") + (("ddtrace.profiling.scheduler", logging.ERROR, "Scheduler before_flush hook failed")) ] diff --git a/tests/profiling/test_uwsgi.py 
b/tests/profiling/test_uwsgi.py index 79b20e917a8..6b5d4e7cf23 100644 --- a/tests/profiling/test_uwsgi.py +++ b/tests/profiling/test_uwsgi.py @@ -1,38 +1,47 @@ -# -*- encoding: utf-8 -*- +from importlib.metadata import version import os import re import signal from subprocess import TimeoutExpired import sys -import tempfile import time import pytest from tests.contrib.uwsgi import run_uwsgi - -from . import utils +from tests.profiling.collector import pprof_utils # uwsgi is not available on Windows if sys.platform == "win32": pytestmark = pytest.mark.skip +TESTING_GEVENT = os.getenv("DD_PROFILE_TEST_GEVENT", False) THREADS_MSG = ( b"ddtrace.internal.uwsgi.uWSGIConfigError: enable-threads option must be set to true, or a positive " b"number of threads must be set" ) -uwsgi_app = os.path.join(os.path.dirname(__file__), "uwsgi-app.py") +uwsgi_app = os.path.join(os.path.dirname(__file__), "..", "profiling", "uwsgi-app.py") @pytest.fixture -def uwsgi(monkeypatch): +def uwsgi(monkeypatch, tmp_path): # Do not ignore profiler so we have samples in the output pprof monkeypatch.setenv("DD_PROFILING_IGNORE_PROFILER", "0") # Do not use pytest tmpdir fixtures which generate directories longer than allowed for a socket file name - socket_name = tempfile.mktemp() - cmd = ["uwsgi", "--need-app", "--die-on-term", "--socket", socket_name, "--wsgi-file", uwsgi_app] + socket_name = str(tmp_path / "uwsgi.sock") + import os + + cmd = [ + "uwsgi", + "--need-app", + "--die-on-term", + "--socket", + socket_name, + "--wsgi-file", + uwsgi_app, + ] try: yield run_uwsgi(cmd) @@ -67,10 +76,12 @@ def test_uwsgi_threads_enabled(uwsgi, tmp_path, monkeypatch): proc.terminate() assert proc.wait() == 30 for pid in worker_pids: - utils.check_pprof_file("%s.%d" % (filename, pid)) + profile = pprof_utils.parse_newest_profile("%s.%d" % (filename, pid)) + samples = pprof_utils.get_samples_with_value_type(profile, "wall-time") + assert len(samples) > 0 -def test_uwsgi_threads_processes_no_master(uwsgi, 
monkeypatch): +def test_uwsgi_threads_processes_no_primary(uwsgi, monkeypatch): proc = uwsgi("--enable-threads", "--processes", "2") stdout, _ = proc.communicate() assert ( @@ -99,7 +110,7 @@ def _get_worker_pids(stdout, num_worker, num_app_started=1): return worker_pids -def test_uwsgi_threads_processes_master(uwsgi, tmp_path, monkeypatch): +def test_uwsgi_threads_processes_primary(uwsgi, tmp_path, monkeypatch): filename = str(tmp_path / "uwsgi.pprof") monkeypatch.setenv("DD_PROFILING_OUTPUT_PPROF", filename) proc = uwsgi("--enable-threads", "--master", "--py-call-uwsgi-fork-hooks", "--processes", "2") @@ -109,42 +120,109 @@ def test_uwsgi_threads_processes_master(uwsgi, tmp_path, monkeypatch): proc.terminate() assert proc.wait() == 0 for pid in worker_pids: - utils.check_pprof_file("%s.%d" % (filename, pid)) + profile = pprof_utils.parse_newest_profile("%s.%d" % (filename, pid)) + samples = pprof_utils.get_samples_with_value_type(profile, "wall-time") + assert len(samples) > 0 -def test_uwsgi_threads_processes_master_lazy_apps(uwsgi, tmp_path, monkeypatch): +def test_uwsgi_threads_processes_primary_lazy_apps(uwsgi, tmp_path, monkeypatch): filename = str(tmp_path / "uwsgi.pprof") monkeypatch.setenv("DD_PROFILING_OUTPUT_PPROF", filename) - proc = uwsgi("--enable-threads", "--master", "--processes", "2", "--lazy-apps") + monkeypatch.setenv("DD_PROFILING_UPLOAD_INTERVAL", "1") + # For uwsgi<2.0.30, --skip-atexit is required to avoid crashes when + # the child process exits. 
+ proc = uwsgi("--enable-threads", "--master", "--processes", "2", "--lazy-apps", "--skip-atexit") worker_pids = _get_worker_pids(proc.stdout, 2, 2) - # Give some time to child to actually startup + # Give some time to child to actually startup and output a profile time.sleep(3) proc.terminate() assert proc.wait() == 0 for pid in worker_pids: - utils.check_pprof_file("%s.%d" % (filename, pid)) + profile = pprof_utils.parse_newest_profile("%s.%d" % (filename, pid)) + samples = pprof_utils.get_samples_with_value_type(profile, "wall-time") + assert len(samples) > 0 -def test_uwsgi_threads_processes_no_master_lazy_apps(uwsgi, tmp_path, monkeypatch): +def test_uwsgi_threads_processes_no_primary_lazy_apps(uwsgi, tmp_path, monkeypatch): filename = str(tmp_path / "uwsgi.pprof") monkeypatch.setenv("DD_PROFILING_OUTPUT_PPROF", filename) - proc = uwsgi("--enable-threads", "--processes", "2", "--lazy-apps") + monkeypatch.setenv("DD_PROFILING_UPLOAD_INTERVAL", "1") + # For uwsgi<2.0.30, --skip-atexit is required to avoid crashes when + # the child process exits. + proc = uwsgi("--enable-threads", "--processes", "2", "--lazy-apps", "--skip-atexit") worker_pids = _get_worker_pids(proc.stdout, 2, 2) - # Give some time to child to actually startup + assert len(worker_pids) == 2 + + # Give some time to child to actually startup and output a profile time.sleep(3) - # The processes are started without a master/parent so killing one does not kill the other: - # Kill them all and wait until they die. 
+ + # Kill master process + parent_pid: int = worker_pids[0] + os.kill(parent_pid, signal.SIGTERM) + + # Wait for master to exit + res_pid, res_status = os.waitpid(parent_pid, 0) + print("") + print(f"INFO: Master process {parent_pid} exited with status {res_status} and pid {res_pid}") + + # Attempt to kill worker proc once + worker_pid: int = worker_pids[1] + print(f"DEBUG: Checking worker {worker_pid} status after master exit:") + try: + os.kill(worker_pid, 0) + print(f"WARNING: Worker {worker_pid} is a zombie (will be cleaned up by init).") + + os.kill(worker_pid, signal.SIGKILL) + print(f"WARNING: Worker {worker_pid} could not be killed with SIGKILL (will be cleaned up by init).") + except OSError: + print(f"INFO: Worker {worker_pid} was successfully killed.") + for pid in worker_pids: - os.kill(pid, signal.SIGTERM) - # The first worker is our child, we can wait for it "normally" - os.waitpid(worker_pids[0], 0) - # The other ones are grandchildren, we can't wait for it with `waitpid` - for pid in worker_pids[1:]: - # Wait for the uwsgi workers to all die - while True: - try: - os.kill(pid, 0) - except OSError: - break + profile = pprof_utils.parse_newest_profile("%s.%d" % (filename, pid)) + samples = pprof_utils.get_samples_with_value_type(profile, "wall-time") + assert len(samples) > 0 + + +@pytest.mark.parametrize("lazy_flag", ["--lazy-apps", "--lazy"]) +@pytest.mark.skipif( + tuple(int(x) for x in version("uwsgi").split(".")) >= (2, 0, 30), + reason="uwsgi>=2.0.30 does not require --skip-atexit", +) +def test_uwsgi_require_skip_atexit_when_lazy_with_master(uwsgi, lazy_flag): + expected_warning = b"ddtrace.internal.uwsgi.uWSGIConfigDeprecationWarning: skip-atexit option must be set" + + proc = uwsgi("--enable-threads", "--master", "--processes", "2", lazy_flag) + time.sleep(1) + proc.terminate() + stdout, _ = proc.communicate() + assert expected_warning in stdout + + +@pytest.mark.parametrize("lazy_flag", ["--lazy-apps", "--lazy"]) +@pytest.mark.skipif( + 
tuple(int(x) for x in version("uwsgi").split(".")) >= (2, 0, 30), + reason="uwsgi>=2.0.30 does not require --skip-atexit", +) +def test_uwsgi_require_skip_atexit_when_lazy_without_master(uwsgi, lazy_flag): + expected_warning = b"ddtrace.internal.uwsgi.uWSGIConfigDeprecationWarning: skip-atexit option must be set" + num_workers = 2 + proc = uwsgi("--enable-threads", "--processes", str(num_workers), lazy_flag) + + worker_pids = [] + logged_warning = 0 + while True: + line = proc.stdout.readline() + if line == b"": + break + if expected_warning in line: + logged_warning += 1 + else: + m = re.match(r"^spawned uWSGI worker \d+ .*\(pid: (\d+),", line.decode()) + if m: + worker_pids.append(int(m.group(1))) + + if logged_warning == num_workers: + break + for pid in worker_pids: - utils.check_pprof_file("%s.%d" % (filename, pid)) + os.kill(pid, signal.SIGTERM) diff --git a/tests/profiling/utils.py b/tests/profiling/utils.py deleted file mode 100644 index f9a74ea1943..00000000000 --- a/tests/profiling/utils.py +++ /dev/null @@ -1,11 +0,0 @@ -from tests.profiling.collector import pprof_utils - - -def check_pprof_file( - filename, # type: str - sample_type="cpu-samples", -): - profile = pprof_utils.parse_newest_profile(filename) - - samples = pprof_utils.get_samples_with_value_type(profile, sample_type) - assert len(samples) >= 1 diff --git a/tests/profiling_v2/__init__.py b/tests/profiling_v2/__init__.py deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/tests/profiling_v2/collector/conftest.py b/tests/profiling_v2/collector/conftest.py deleted file mode 100644 index 7dc1d816091..00000000000 --- a/tests/profiling_v2/collector/conftest.py +++ /dev/null @@ -1,8 +0,0 @@ -import pytest - -import ddtrace - - -@pytest.fixture -def tracer(): - return ddtrace.trace.tracer diff --git a/tests/profiling_v2/collector/test_memalloc.py b/tests/profiling_v2/collector/test_memalloc.py deleted file mode 100644 index 2022636f72f..00000000000 --- 
a/tests/profiling_v2/collector/test_memalloc.py +++ /dev/null @@ -1,799 +0,0 @@ -import inspect -import os -import sys -import threading - -import pytest - -from ddtrace.internal.datadog.profiling import ddup -from ddtrace.profiling.collector import memalloc -from tests.profiling.collector import pprof_utils - - -PY_313_OR_ABOVE = sys.version_info[:2] >= (3, 13) - - -def _allocate_1k(): - return [object() for _ in range(1000)] - - -_ALLOC_LINE_NUMBER = _allocate_1k.__code__.co_firstlineno + 1 - - -# This test is marked as subprocess as it changes default heap sample size -@pytest.mark.subprocess( - env=dict(DD_PROFILING_HEAP_SAMPLE_SIZE="1024", DD_PROFILING_OUTPUT_PPROF="/tmp/test_heap_samples_collected") -) -def test_heap_samples_collected(): - import os - - from ddtrace.profiling import Profiler - from tests.profiling.collector import pprof_utils - from tests.profiling_v2.collector.test_memalloc import _allocate_1k - - # Test for https://github.com/DataDog/dd-trace-py/issues/11069 - pprof_prefix = os.environ["DD_PROFILING_OUTPUT_PPROF"] - output_filename = pprof_prefix + "." + str(os.getpid()) - - p = Profiler() - p.start() - x = _allocate_1k() # noqa: F841 - p.stop() - - profile = pprof_utils.parse_newest_profile(output_filename) - samples = pprof_utils.get_samples_with_value_type(profile, "heap-space") - assert len(samples) > 0 - - -def test_memory_collector(tmp_path): - test_name = "test_memory_collector" - pprof_prefix = str(tmp_path / test_name) - output_filename = pprof_prefix + "." 
+ str(os.getpid()) - - ddup.config( - service=test_name, - version="test", - env="test", - output_filename=pprof_prefix, - ) - ddup.start() - - mc = memalloc.MemoryCollector(heap_sample_size=256) - with mc: - _allocate_1k() - mc.snapshot() - - ddup.upload() - - profile = pprof_utils.parse_newest_profile(output_filename) - # Gets samples with alloc-space > 0 - samples = pprof_utils.get_samples_with_value_type(profile, "alloc-space") - - assert len(samples) > 0 - - alloc_samples_idx = pprof_utils.get_sample_type_index(profile, "alloc-samples") - for sample in samples: - # We also want to check 'alloc-samples' is > 0. - assert sample.value[alloc_samples_idx] > 0 - - # We also want to assert that there's a sample that's coming from _allocate_1k() - # And also assert that it's actually coming from _allocate_1k() - pprof_utils.assert_profile_has_sample( - profile, - samples, - expected_sample=pprof_utils.StackEvent( - thread_name="MainThread", - thread_id=threading.main_thread().ident, - locations=[ - pprof_utils.StackLocation( - function_name="_allocate_1k", filename="test_memalloc.py", line_no=_ALLOC_LINE_NUMBER - ) - ], - ), - ) - - -def test_memory_collector_ignore_profiler(tmp_path): - test_name = "test_memory_collector_ignore_profiler" - pprof_prefix = str(tmp_path / test_name) - output_filename = pprof_prefix + "." + str(os.getpid()) - - ddup.config( - service=test_name, - version="test", - env="test", - output_filename=pprof_prefix, - ) - ddup.start() - - mc = memalloc.MemoryCollector(ignore_profiler=True) - quit_thread = threading.Event() - - with mc: - - def alloc(): - _allocate_1k() - quit_thread.wait() - - alloc_thread = threading.Thread(name="allocator", target=alloc) - alloc_thread._ddtrace_profiling_ignore = True - alloc_thread.start() - - mc.snapshot() - - # We need to wait for the data collection to happen so it gets the `_ddtrace_profiling_ignore` Thread attribute from - # the global thread list. 
- quit_thread.set() - alloc_thread.join() - - ddup.upload() - - try: - pprof_utils.parse_newest_profile(output_filename) - except AssertionError as e: - assert "No samples found" in str(e) - - -@pytest.mark.subprocess( - env=dict(DD_PROFILING_HEAP_SAMPLE_SIZE="8", DD_PROFILING_OUTPUT_PPROF="/tmp/test_heap_profiler_large_heap_overhead") -) -def test_heap_profiler_large_heap_overhead(): - # TODO(nick): this test case used to crash due to integer arithmetic bugs. - # Now it doesn't crash, but it takes far too long to run to be useful in CI. - # Un-skip this test if/when we improve the worst-case performance of the - # heap profiler for large heaps - from ddtrace.profiling import Profiler - from tests.profiling_v2.collector.test_memalloc import one - - p = Profiler() - p.start() - - count = 100_000 - thing_size = 32 - - junk = [] - for i in range(count): - b1 = one(thing_size) - b2 = one(2 * thing_size) - b3 = one(3 * thing_size) - b4 = one(4 * thing_size) - t = (b1, b2, b3, b4) - junk.append(t) - - del junk - - p.stop() - - -# one, two, three, and four exist to give us distinct things -# we can find in the profile without depending on something -# like the line number at which an allocation happens -# Python 3.13 changed bytearray to use an allocation domain that we don't -# currently profile, so we use None instead of bytearray to test. 
-def one(size): - return (None,) * size if PY_313_OR_ABOVE else bytearray(size) - - -def two(size): - return (None,) * size if PY_313_OR_ABOVE else bytearray(size) - - -def three(size): - return (None,) * size if PY_313_OR_ABOVE else bytearray(size) - - -def four(size): - return (None,) * size if PY_313_OR_ABOVE else bytearray(size) - - -def _create_allocation(size): - return (None,) * size if PY_313_OR_ABOVE else bytearray(size) - - -class HeapInfo: - def __init__(self, count, size): - self.count = count - self.size = size - - -def get_heap_info(heap, funcs): - got = {} - for event in heap: - (frames, _), in_use_size, alloc_size, count = event - - in_use = in_use_size > 0 - size = in_use_size if in_use_size > 0 else alloc_size - - if not in_use: - continue - func = frames[0].function_name - if func in funcs: - v = got.get(func, HeapInfo(0, 0)) - v.count += 1 - v.size += size - got[func] = v - return got - - -def has_function_in_traceback(frames, function_name): - return any(frame.function_name == function_name for frame in frames) - - -def get_tracemalloc_stats_per_func(stats, funcs): - source_to_func = {} - - for f in funcs: - file = inspect.getsourcefile(f) - line = inspect.getsourcelines(f)[1] + 1 - source_to_func[str(file) + str(line)] = f.__name__ - - actual_sizes = {} - actual_counts = {} - for stat in stats: - f = stat.traceback[0] - key = f.filename + str(f.lineno) - if key in source_to_func: - func_name = source_to_func[key] - actual_sizes[func_name] = stat.size - actual_counts[func_name] = stat.count - return actual_sizes, actual_counts - - -# TODO: higher sampling intervals have a lot more variance and are flaky -# but would be nice to test since our default is 1MiB -@pytest.mark.parametrize("sample_interval", (8, 512, 1024)) -def test_heap_profiler_sampling_accuracy(sample_interval): - # tracemalloc lets us get ground truth on how many allocations there were - import tracemalloc - - # TODO(nick): use Profiler instead of _memalloc - from 
ddtrace.profiling.collector import _memalloc - - # We seed the RNG to reduce flakiness. This doesn't actually diminish the - # quality of the test much. A broken sampling implementation is unlikely to - # pass for an arbitrary seed. - old = os.environ.get("_DD_MEMALLOC_DEBUG_RNG_SEED") - os.environ["_DD_MEMALLOC_DEBUG_RNG_SEED"] = "42" - _memalloc.start(32, sample_interval) - # Put the env var back in the state we found it - if old is not None: - os.environ["_DD_MEMALLOC_DEBUG_RNG_SEED"] = old - else: - del os.environ["_DD_MEMALLOC_DEBUG_RNG_SEED"] - - tracemalloc.start() - - junk = [] - for i in range(1000): - size = 256 - junk.append(one(size)) - junk.append(two(2 * size)) - junk.append(three(3 * size)) - junk.append(four(4 * size)) - - # TODO(nick): randomly remove things from junk to see if the profile is - # still accurate - - # Stop tracemalloc before collecting the heap sample, since tracemalloc - # is _really_ slow when the _memalloc.heap() call does lots of allocs for - # lower sample intervals (i.e. more sampled allocations) - stats = tracemalloc.take_snapshot().statistics("traceback") - tracemalloc.stop() - - heap = _memalloc.heap() - # Important: stop _memalloc _after_ tracemalloc. Need to remove allocator - # hooks in LIFO order. - _memalloc.stop() - - actual_sizes, _ = get_tracemalloc_stats_per_func(stats, (one, two, three, four)) - actual_total = sum(actual_sizes.values()) - - del junk - - sizes = get_heap_info(heap, {"one", "two", "three", "four"}) - - total = sum(v.size for v in sizes.values()) - print(f"observed total: {total} actual total: {actual_total} error: {abs(total - actual_total) / actual_total}") - # 20% error in actual size feels pretty generous - # TODO(nick): justify in terms of variance of sampling? 
- assert abs(1 - total / actual_total) <= 0.20 - - print("func\tcount\tsize\tactual\trel\tactual\tdiff") - for func in ("one", "two", "three", "four"): - got = sizes[func] - actual_size = actual_sizes[func] - - # Relative portion of the bytes in the profile for this function - # out of the functions we're interested in - rel = got.size / total - actual_rel = actual_size / actual_total - - print( - f"{func}\t{got.count}\t{got.size}\t{actual_size}\t{rel:.3f}\t{actual_rel:.3f}\t{abs(rel - actual_rel):.3f}" - ) - - # Assert that the reported portion of this function in the profile is - # pretty close to the actual portion. So, if it's actually ~20% of the - # profile then we'd accept anything between 10% and 30%, which is - # probably too generous for low sampling intervals but at least won't be - # flaky. - assert abs(rel - actual_rel) < 0.10 - - -@pytest.mark.skip(reason="too slow, indeterministic") -@pytest.mark.subprocess( - env=dict( - # Turn off other profilers so that we're just testing memalloc - DD_PROFILING_STACK_ENABLED="false", - DD_PROFILING_LOCK_ENABLED="false", - # Upload a lot, since rotating out memalloc profiler state can race with profiling - DD_PROFILING_UPLOAD_INTERVAL="1", - ), -) -def test_memealloc_data_race_regression(): - import gc - import threading - import time - - from ddtrace.profiling import Profiler - - gc.enable() - # This threshold is controls when garbage collection is triggered. The - # threshold is on the count of live allocations, which is checked when doing - # a new allocation. This test is ultimately trying to get the allocation of - # frame objects during the memory profiler's traceback function to trigger - # garbage collection. We want a lower threshold to improve the odds that - # this happens. 
- gc.set_threshold(100) - - class Thing: - def __init__(self): - # Self reference so this gets deallocated in GC vs via refcount - self.ref = self - - def __del__(self): - # Force GIL yield, so if/when memalloc triggers GC, this is - # deallocated, releasing GIL while memalloc is sampling and allowing - # something else to run and possibly modify memalloc's internal - # state concurrently - time.sleep(0) - - def do_alloc(): - def f(): - return Thing() - - return f - - def lotsa_allocs(ev): - while not ev.is_set(): - f = do_alloc() - f() - time.sleep(0.01) - - p = Profiler() - p.start() - - threads = [] - ev = threading.Event() - for i in range(4): - t = threading.Thread(target=lotsa_allocs, args=(ev,)) - t.start() - threads.append(t) - - # Arbitrary sleep. This typically crashes in about a minute. - # But for local development, either let it run way longer or - # figure out sanitizer instrumentation - time.sleep(120) - - p.stop() - - ev.set() - for t in threads: - t.join() - - -@pytest.mark.parametrize("sample_interval", (256, 512, 1024)) -def test_memory_collector_allocation_accuracy_with_tracemalloc(sample_interval): - import tracemalloc - - old = os.environ.get("_DD_MEMALLOC_DEBUG_RNG_SEED") - os.environ["_DD_MEMALLOC_DEBUG_RNG_SEED"] = "42" - - mc = memalloc.MemoryCollector(heap_sample_size=sample_interval) - - try: - with mc: - tracemalloc.start() - - junk = [] - for i in range(1000): - size = 256 - junk.append(one(size)) - junk.append(two(2 * size)) - junk.append(three(3 * size)) - junk.append(four(4 * size)) - - stats = tracemalloc.take_snapshot().statistics("traceback") - tracemalloc.stop() - - del junk - - samples = mc.test_snapshot() - - finally: - if old is not None: - os.environ["_DD_MEMALLOC_DEBUG_RNG_SEED"] = old - else: - if "_DD_MEMALLOC_DEBUG_RNG_SEED" in os.environ: - del os.environ["_DD_MEMALLOC_DEBUG_RNG_SEED"] - - allocation_samples = [s for s in samples if s.in_use_size == 0] - heap_samples = [s for s in samples if s.in_use_size > 0] - - 
print(f"Total samples: {len(samples)}") - print(f"Allocation samples (in_use_size=0): {len(allocation_samples)}") - print(f"Heap samples (in_use_size>0): {len(heap_samples)}") - - assert len(allocation_samples) > 0, "Should have captured allocation samples after deletion" - - total_allocation_count = 0 - for sample in allocation_samples: - assert sample.size > 0, f"Invalid allocation sample size: {sample.size}" - assert sample.count > 0, f"Invalid allocation sample count: {sample.count}" - assert sample.in_use_size == 0, f"Allocation sample should have in_use_size=0, got: {sample.in_use_size}" - assert sample.in_use_size >= 0, f"Invalid in_use_size: {sample.in_use_size}" - assert sample.alloc_size >= 0, f"Invalid alloc_size: {sample.alloc_size}" - total_allocation_count += sample.count - - print(f"Total allocation count: {total_allocation_count}") - assert total_allocation_count >= 1, "Should have captured at least 1 allocation sample" - - actual_sizes, actual_counts = get_tracemalloc_stats_per_func(stats, (one, two, three, four)) - actual_total = sum(actual_sizes.values()) - actual_count_total = sum(actual_counts.values()) - - def get_allocation_info(samples, funcs): - got = {} - for sample in samples: - if sample.in_use_size > 0: - continue - - for frame in sample.frames: - func = frame.function_name - if func in funcs: - v = got.get(func, HeapInfo(0, 0)) - v.count += sample.count - v.size += sample.alloc_size - got[func] = v - break - return got - - sizes = get_allocation_info(samples, {"one", "two", "three", "four"}) - - total = sum(v.size for v in sizes.values()) - total_count = sum(v.count for v in sizes.values()) - - print(f"observed total: {total} actual total: {actual_total} error: {abs(total - actual_total) / actual_total}") - assert abs(1 - total / actual_total) <= 0.20 - - count_error = abs(total_count - actual_count_total) / actual_count_total - print(f"observed count total: {total_count} actual count total: {actual_count_total} error: {count_error}") 
- # Commenting out the total count assertions because we still have more work to do on this. - # Our reported counts differed from the actual count by more than we expected, while the reported sizes - # are accurate. Our counts seem to be consistently lower than expected for the sample intervals we're testing. - # We'll need to double-check our count scaling before making assertions about the actual values - # assert abs(1 - total_count / actual_count_total) <= 0.30 - - print("func\tcount\tsize\tactual_size\tactual_count\trel_size\tactual_rel_size\trel_count\tactual_rel_count") - for func in ("one", "two", "three", "four"): - got = sizes[func] - actual_size = actual_sizes[func] - actual_count = actual_counts[func] - - rel_size = got.size / total - actual_rel_size = actual_size / actual_total - - rel_count = got.count / total_count - actual_rel_count = actual_count / actual_count_total - - print( - f"{func}\t{got.count}\t{got.size}\t{actual_size}\t{actual_count}\t{rel_size:.3f}\t{actual_rel_size:.3f}\t{rel_count:.3f}\t{actual_rel_count:.3f}" - ) - - assert abs(rel_size - actual_rel_size) < 0.10 - assert abs(rel_count - actual_rel_count) < 0.15 - - print(f"Successfully validated allocation sampling accuracy for sample_interval={sample_interval}") - print(f"Captured {len(allocation_samples)} allocation samples representing {total_allocation_count} allocations") - - -def test_memory_collector_allocation_tracking_across_snapshots(): - mc = memalloc.MemoryCollector(heap_sample_size=64) - - with mc: - data_to_free = [] - for i in range(10): - data_to_free.append(one(256)) - - data_to_keep = [] - for i in range(10): - data_to_keep.append(two(512)) - - del data_to_free - - samples = mc.test_snapshot() - - assert all( - sample.alloc_size > 0 for sample in samples - ), "Initial snapshot should have alloc_size>0 (new allocations)" - - freed_samples = [s for s in samples if s.in_use_size == 0] - live_samples = [s for s in samples if s.in_use_size > 0] - - assert 
len(freed_samples) > 0, "Should have some freed samples after deletion" - - assert len(live_samples) > 0, "Should have some live samples" - - for sample in samples: - assert sample.size > 0, f"Invalid size: {sample.size}" - assert sample.count > 0, f"Invalid count: {sample.count}" - assert sample.in_use_size >= 0, f"Invalid in_use_size: {sample.in_use_size}" - assert sample.alloc_size >= 0, f"Invalid alloc_size: {sample.alloc_size}" - - one_freed_samples = [sample for sample in samples if has_function_in_traceback(sample.frames, "one")] - - assert len(one_freed_samples) > 0, "Should have freed samples from function 'one'" - assert all(sample.in_use_size == 0 and sample.alloc_size > 0 for sample in one_freed_samples) - - two_live_samples = [sample for sample in samples if has_function_in_traceback(sample.frames, "two")] - - assert len(two_live_samples) > 0, "Should have live samples from function 'two'" - assert all(sample.in_use_size > 0 and sample.alloc_size > 0 for sample in two_live_samples) - - del data_to_keep - - -def test_memory_collector_python_interface_with_allocation_tracking(): - mc = memalloc.MemoryCollector(heap_sample_size=128) - - with mc: - first_batch = [] - for i in range(20): - first_batch.append(one(256)) - - # We're taking a snapshot here to ensure that in the next snapshot, we don't see any "one" allocations - mc.test_snapshot() - - second_batch = [] - for i in range(15): - second_batch.append(two(512)) - - del first_batch - - final_samples = mc.test_snapshot() - - assert len(final_samples) >= 0, "Final snapshot should be valid" - - for sample in final_samples: - assert sample.size > 0, f"Size should be positive int, got {sample.size}" - assert sample.count > 0, f"Count should be positive int, got {sample.count}" - assert sample.in_use_size >= 0, f"in_use_size should be non-negative int, got {sample.in_use_size}" - assert sample.alloc_size >= 0, f"alloc_size should be non-negative int, got {sample.alloc_size}" - - one_samples_in_final = 
[sample for sample in final_samples if has_function_in_traceback(sample.frames, "one")] - - assert ( - len(one_samples_in_final) == 0 - ), f"Should have no samples with 'one' in traceback in final_samples, got {len(one_samples_in_final)}" - - batch_two_live_samples = [ - sample - for sample in final_samples - if has_function_in_traceback(sample.frames, "two") and sample.in_use_size > 0 - ] - - assert ( - len(batch_two_live_samples) > 0 - ), f"Should have live samples from batch two, got {len(batch_two_live_samples)}" - assert all(sample.in_use_size > 0 and sample.alloc_size > 0 for sample in batch_two_live_samples) - - del second_batch - - -def test_memory_collector_python_interface_with_allocation_tracking_no_deletion(): - mc = memalloc.MemoryCollector(heap_sample_size=128) - - with mc: - initial_samples = mc.test_snapshot() - initial_count = len(initial_samples) - - first_batch = [] - for i in range(20): - first_batch.append(one(256)) - - after_first_batch = mc.test_snapshot() - - second_batch = [] - for i in range(15): - second_batch.append(two(512)) - - final_samples = mc.test_snapshot() - - assert len(after_first_batch) >= initial_count, "Should have at least as many samples after first batch" - assert len(final_samples) >= 0, "Final snapshot should be valid" - - for samples in [initial_samples, after_first_batch, final_samples]: - for sample in samples: - assert sample.size > 0, f"Size should be positive int, got {sample.size}" - assert sample.count > 0, f"Count should be positive int, got {sample.count}" - assert sample.in_use_size >= 0, f"in_use_size should be non-negative int, got {sample.in_use_size}" - assert sample.alloc_size >= 0, f"alloc_size should be non-negative int, got {sample.alloc_size}" - - batch_one_live_samples = [ - sample - for sample in final_samples - if has_function_in_traceback(sample.frames, "one") and sample.in_use_size > 0 - ] - - batch_two_live_samples = [ - sample - for sample in final_samples - if 
has_function_in_traceback(sample.frames, "two") and sample.in_use_size > 0 - ] - - assert ( - len(batch_one_live_samples) > 0 - ), f"Should have live samples from batch one, got {len(batch_one_live_samples)}" - assert ( - len(batch_two_live_samples) > 0 - ), f"Should have live samples from batch two, got {len(batch_two_live_samples)}" - - assert all(sample.in_use_size > 0 and sample.alloc_size == 0 for sample in batch_one_live_samples) - assert all(sample.in_use_size > 0 and sample.alloc_size > 0 for sample in batch_two_live_samples) - - del first_batch - del second_batch - - -def test_memory_collector_exception_handling(): - mc = memalloc.MemoryCollector(heap_sample_size=256) - - with pytest.raises(ValueError): - with mc: - _allocate_1k() - samples = mc.test_snapshot() - assert isinstance(samples, tuple) - raise ValueError("Test exception") - - with mc: - _allocate_1k() - samples = mc.test_snapshot() - assert isinstance(samples, tuple) - - -def test_memory_collector_allocation_during_shutdown(): - """Test that verifies that when _memalloc.stop() is called while allocations are still - happening in another thread, the shutdown process completes without deadlocks or crashes. - """ - import time - - from ddtrace.profiling.collector import _memalloc - - _memalloc.start(32, 512) - - shutdown_event = threading.Event() - allocation_thread = None - - def allocate_continuously(): - while not shutdown_event.is_set(): - data = [0] * 100 - del data - time.sleep(0.001) - - try: - allocation_thread = threading.Thread(target=allocate_continuously) - allocation_thread.start() - - time.sleep(0.1) - - _memalloc.stop() - - finally: - shutdown_event.set() - if allocation_thread: - allocation_thread.join(timeout=1) - - -def test_memory_collector_buffer_pool_exhaustion(): - """Test that the memory collector handles buffer pool exhaustion. 
- This test creates multiple threads that simultaneously allocate with very deep - stack traces, which could potentially exhaust internal buffer pools. - """ - mc = memalloc.MemoryCollector(heap_sample_size=64) - - with mc: - threads = [] - barrier = threading.Barrier(10) - - def allocate_with_traceback(): - barrier.wait() - - def deep_alloc(depth): - if depth == 0: - return _create_allocation(100) - return deep_alloc(depth - 1) - - data = deep_alloc(50) - del data - - for i in range(10): - t = threading.Thread(target=allocate_with_traceback) - threads.append(t) - t.start() - - for t in threads: - t.join() - - samples = mc.test_snapshot() - - deep_alloc_count = 0 - max_stack_depth = 0 - - for sample in samples: - assert sample.frames is not None, "Buffer pool test: All samples should have stack frames" - stack_depth = len(sample.frames) - max_stack_depth = max(max_stack_depth, stack_depth) - - for frame in sample.frames: - if frame.function_name == "deep_alloc": - deep_alloc_count += 1 - break - - assert ( - deep_alloc_count >= 10 - ), f"Buffer pool test: Expected many allocations from concurrent threads, got {deep_alloc_count}" - - assert max_stack_depth >= 50, ( - f"Buffer pool test: Stack traces should be preserved even under stress (expecting at least 50 frames), " - f"but max depth was only {max_stack_depth}" - ) - - -def test_memory_collector_thread_lifecycle(): - """Test that continuously creates and destroys threads while they perform allocations, - verifying that the collector can track allocations across changing thread contexts. 
- """ - mc = memalloc.MemoryCollector(heap_sample_size=8) - - with mc: - threads = [] - - def worker(): - for i in range(10): - data = [i] * 100 - del data - - for i in range(20): - t = threading.Thread(target=worker) - t.start() - threads.append(t) - - if i > 5: - old_thread = threads.pop(0) - old_thread.join() - - for t in threads: - t.join() - - samples = mc.test_snapshot() - - worker_samples = 0 - for sample in samples: - for frame in sample.frames: - if frame.function_name == "worker": - worker_samples += 1 - break - - assert ( - worker_samples > 0 - ), "Thread lifecycle test: Should capture allocations even as threads are created/destroyed" diff --git a/tests/profiling_v2/collector/test_stack.py b/tests/profiling_v2/collector/test_stack.py deleted file mode 100644 index f5aa2ec2692..00000000000 --- a/tests/profiling_v2/collector/test_stack.py +++ /dev/null @@ -1,785 +0,0 @@ -import _thread -import os -import sys -import threading -import time -from unittest.mock import patch -import uuid - -import pytest - -from ddtrace import ext -from ddtrace.internal.datadog.profiling import ddup -from ddtrace.profiling.collector import stack -from tests.profiling.collector import pprof_utils -from tests.profiling.collector import test_collector - - -# Python 3.11.9 is not compatible with gevent, https://github.com/gevent/gevent/issues/2040 -# https://github.com/python/cpython/issues/117983 -# The fix was not backported to 3.11. The fix was first released in 3.12.5 for -# Python 3.12. Tested with Python 3.11.8 and 3.12.5 to confirm the issue. -TESTING_GEVENT = os.getenv("DD_PROFILE_TEST_GEVENT", False) and ( - sys.version_info < (3, 11, 9) or sys.version_info >= (3, 12, 5) -) - - -# Use subprocess as ddup config persists across tests. 
-@pytest.mark.subprocess( - env=dict( - DD_PROFILING_MAX_FRAMES="5", - DD_PROFILING_OUTPUT_PPROF="/tmp/test_collect_truncate", - ) -) -def test_collect_truncate(): - import os - - from ddtrace.profiling import profiler - from tests.profiling.collector import pprof_utils - from tests.profiling.collector.test_stack import func1 - - pprof_prefix = os.environ["DD_PROFILING_OUTPUT_PPROF"] - output_filename = pprof_prefix + "." + str(os.getpid()) - - max_nframes = int(os.environ["DD_PROFILING_MAX_FRAMES"]) - - p = profiler.Profiler() - p.start() - - func1() - - p.stop() - - profile = pprof_utils.parse_newest_profile(output_filename) - samples = pprof_utils.get_samples_with_value_type(profile, "wall-time") - assert len(samples) > 0 - for sample in samples: - # stack v2 adds one extra frame for "%d frames omitted" message - # Also, it allows max_nframes + 1 frames, so we add 2 here. - assert len(sample.location_id) <= max_nframes + 2, len(sample.location_id) - - -def test_stack_locations(tmp_path): - test_name = "test_stack_locations" - pprof_prefix = str(tmp_path / test_name) - output_filename = pprof_prefix + "." 
+ str(os.getpid()) - - assert ddup.is_available - ddup.config(env="test", service=test_name, version="my_version", output_filename=pprof_prefix) - ddup.start() - - def baz(): - time.sleep(0.1) - - def bar(): - baz() - - def foo(): - bar() - - with stack.StackCollector(_stack_collector_v2_enabled=True): - for _ in range(10): - foo() - ddup.upload() - - profile = pprof_utils.parse_newest_profile(output_filename) - samples = pprof_utils.get_samples_with_value_type(profile, "wall-time") - assert len(samples) > 0 - - expected_sample = pprof_utils.StackEvent( - thread_id=_thread.get_ident(), - thread_name="MainThread", - locations=[ - pprof_utils.StackLocation( - function_name="baz", - filename="test_stack.py", - line_no=baz.__code__.co_firstlineno + 1, - ), - pprof_utils.StackLocation( - function_name="bar", - filename="test_stack.py", - line_no=bar.__code__.co_firstlineno + 1, - ), - pprof_utils.StackLocation( - function_name="foo", - filename="test_stack.py", - line_no=foo.__code__.co_firstlineno + 1, - ), - ], - ) - - pprof_utils.assert_profile_has_sample(profile, samples=samples, expected_sample=expected_sample) - - -def test_push_span(tmp_path, tracer): - test_name = "test_push_span" - pprof_prefix = str(tmp_path / test_name) - output_filename = pprof_prefix + "." 
+ str(os.getpid()) - - tracer._endpoint_call_counter_span_processor.enable() - - assert ddup.is_available - ddup.config(env="test", service=test_name, version="my_version", output_filename=pprof_prefix) - ddup.start() - - resource = str(uuid.uuid4()) - span_type = ext.SpanTypes.WEB - - with stack.StackCollector( - tracer=tracer, - endpoint_collection_enabled=True, - ignore_profiler=True, # this is not necessary, but it's here to trim samples - _stack_collector_v2_enabled=True, - ): - with tracer.trace("foobar", resource=resource, span_type=span_type) as span: - span_id = span.span_id - local_root_span_id = span._local_root.span_id - for _ in range(10): - time.sleep(0.1) - ddup.upload(tracer=tracer) - - profile = pprof_utils.parse_newest_profile(output_filename) - samples = pprof_utils.get_samples_with_label_key(profile, "span id") - assert len(samples) > 0 - for sample in samples: - pprof_utils.assert_stack_event( - profile, - sample, - expected_event=pprof_utils.StackEvent( - span_id=span_id, - local_root_span_id=local_root_span_id, - trace_type=span_type, - trace_endpoint=resource, - ), - ) - - -def test_push_span_unregister_thread(tmp_path, monkeypatch, tracer): - with patch("ddtrace.internal.datadog.profiling.stack_v2.unregister_thread") as unregister_thread: - tracer._endpoint_call_counter_span_processor.enable() - - test_name = "test_push_span_unregister_thread" - pprof_prefix = str(tmp_path / test_name) - output_filename = pprof_prefix + "." 
+ str(os.getpid()) - - assert ddup.is_available - ddup.config(env="test", service=test_name, version="my_version", output_filename=pprof_prefix) - ddup.start() - - resource = str(uuid.uuid4()) - span_type = ext.SpanTypes.WEB - - def target_fun(): - for _ in range(10): - time.sleep(0.1) - - with stack.StackCollector( - tracer=tracer, - endpoint_collection_enabled=True, - ignore_profiler=True, # this is not necessary, but it's here to trim samples - _stack_collector_v2_enabled=True, - ): - with tracer.trace("foobar", resource=resource, span_type=span_type) as span: - span_id = span.span_id - local_root_span_id = span._local_root.span_id - t = threading.Thread(target=target_fun) - t.start() - t.join() - thread_id = t.ident - ddup.upload(tracer=tracer) - - profile = pprof_utils.parse_newest_profile(output_filename) - samples = pprof_utils.get_samples_with_label_key(profile, "span id") - assert len(samples) > 0 - for sample in samples: - pprof_utils.assert_stack_event( - profile, - sample, - expected_event=pprof_utils.StackEvent( - span_id=span_id, - local_root_span_id=local_root_span_id, - trace_type=span_type, - trace_endpoint=resource, - ), - ) - - unregister_thread.assert_called_with(thread_id) - - -def test_push_non_web_span(tmp_path, tracer): - tracer._endpoint_call_counter_span_processor.enable() - - test_name = "test_push_non_web_span" - pprof_prefix = str(tmp_path / test_name) - output_filename = pprof_prefix + "." 
+ str(os.getpid()) - - assert ddup.is_available - ddup.config(env="test", service=test_name, version="my_version", output_filename=pprof_prefix) - ddup.start() - - resource = str(uuid.uuid4()) - span_type = ext.SpanTypes.SQL - - with stack.StackCollector( - tracer=tracer, - endpoint_collection_enabled=True, - ignore_profiler=True, # this is not necessary, but it's here to trim samples - _stack_collector_v2_enabled=True, - ): - with tracer.trace("foobar", resource=resource, span_type=span_type) as span: - span_id = span.span_id - local_root_span_id = span._local_root.span_id - for _ in range(10): - time.sleep(0.1) - ddup.upload(tracer=tracer) - - profile = pprof_utils.parse_newest_profile(output_filename) - samples = pprof_utils.get_samples_with_label_key(profile, "span id") - assert len(samples) > 0 - for sample in samples: - pprof_utils.assert_stack_event( - profile, - sample, - expected_event=pprof_utils.StackEvent( - span_id=span_id, - local_root_span_id=local_root_span_id, - trace_type=span_type, - # trace_endpoint is not set for non-web spans - ), - ) - - -def test_push_span_none_span_type(tmp_path, tracer): - # Test for https://github.com/DataDog/dd-trace-py/issues/11141 - test_name = "test_push_span_none_span_type" - pprof_prefix = str(tmp_path / test_name) - output_filename = pprof_prefix + "." + str(os.getpid()) - - assert ddup.is_available - ddup.config(env="test", service=test_name, version="my_version", output_filename=pprof_prefix) - ddup.start() - - tracer._endpoint_call_counter_span_processor.enable() - - resource = str(uuid.uuid4()) - - with stack.StackCollector( - tracer=tracer, - endpoint_collection_enabled=True, - ignore_profiler=True, # this is not necessary, but it's here to trim samples - _stack_collector_v2_enabled=True, - ): - # Explicitly set None span_type as the default could change in the - # future. 
- with tracer.trace("foobar", resource=resource, span_type=None) as span: - span_id = span.span_id - local_root_span_id = span._local_root.span_id - for _ in range(10): - time.sleep(0.1) - ddup.upload(tracer=tracer) - - profile = pprof_utils.parse_newest_profile(output_filename) - samples = pprof_utils.get_samples_with_label_key(profile, "span id") - assert len(samples) > 0 - for sample in samples: - pprof_utils.assert_stack_event( - profile, - sample, - expected_event=pprof_utils.StackEvent( - span_id=span_id, - local_root_span_id=local_root_span_id, - # span_type is None - # trace_endpoint is not set for non-web spans - ), - ) - - -@pytest.mark.skipif(not stack.FEATURES["stack-exceptions"], reason="Stack exceptions are not supported") -@pytest.mark.parametrize("stack_v2_enabled", [True, False]) -def test_exception_collection(stack_v2_enabled, tmp_path): - test_name = "test_exception_collection" - pprof_prefix = str(tmp_path / test_name) - output_filename = pprof_prefix + "." + str(os.getpid()) - - assert ddup.is_available - ddup.config(env="test", service=test_name, version="my_version", output_filename=pprof_prefix) - ddup.start() - - with stack.StackCollector(ignore_profiler=True, _stack_collector_v2_enabled=stack_v2_enabled): - try: - raise ValueError("hello") - except Exception: - time.sleep(1) - - ddup.upload() - - profile = pprof_utils.parse_newest_profile(output_filename) - samples = pprof_utils.get_samples_with_label_key(profile, "exception type") - - if stack_v2_enabled: - # DEV: update the test once we have exception profiling for stack v2 - # using echion - assert len(samples) == 0 - else: - assert len(samples) > 0 - for sample in samples: - pprof_utils.assert_stack_event( - profile, - sample, - expected_event=pprof_utils.StackEvent( - thread_id=_thread.get_ident(), - thread_name="MainThread", - exception_type="builtins.ValueError", - locations=[ - pprof_utils.StackLocation( - filename="test_stack.py", - function_name="test_exception_collection", - 
line_no=test_exception_collection.__code__.co_firstlineno + 15, - ), - ], - ), - ) - - -@pytest.mark.skipif(not stack.FEATURES["stack-exceptions"], reason="Stack exceptions are not supported") -@pytest.mark.parametrize("stack_v2_enabled", [True, False]) -def test_exception_collection_threads(stack_v2_enabled, tmp_path): - test_name = "test_exception_collection_threads" - pprof_prefix = str(tmp_path / test_name) - output_filename = pprof_prefix + "." + str(os.getpid()) - - assert ddup.is_available - ddup.config(env="test", service=test_name, version="my_version", output_filename=pprof_prefix) - ddup.start() - - with stack.StackCollector(ignore_profiler=True, _stack_collector_v2_enabled=stack_v2_enabled): - - def target_fun(): - try: - raise ValueError("hello") - except Exception: - time.sleep(1) - - threads = [] - for _ in range(10): - t = threading.Thread(target=target_fun) - threads.append(t) - t.start() - - for t in threads: - t.join() - - ddup.upload() - - profile = pprof_utils.parse_newest_profile(output_filename) - samples = pprof_utils.get_samples_with_label_key(profile, "exception type") - - if stack_v2_enabled: - assert len(samples) == 0 - else: - assert len(samples) > 0 - for sample in samples: - thread_id_label = pprof_utils.get_label_with_key(profile.string_table, sample, "thread id") - thread_id = int(thread_id_label.num) - assert thread_id in [t.ident for t in threads] - - pprof_utils.assert_stack_event( - profile, - sample, - expected_event=pprof_utils.StackEvent( - exception_type="builtins.ValueError", - thread_name=r"Thread-\d+ \(target_fun\)" if sys.version_info[:2] > (3, 9) else r"Thread-\d+", - locations=[ - pprof_utils.StackLocation( - filename="test_stack.py", - function_name="target_fun", - line_no=target_fun.__code__.co_firstlineno + 4, - ), - ], - ), - ) - - -@pytest.mark.skipif(not stack.FEATURES["stack-exceptions"], reason="Stack exceptions are not supported") -@pytest.mark.parametrize("stack_v2_enabled", [True, False]) -def 
test_exception_collection_trace(stack_v2_enabled, tmp_path, tracer): - test_name = "test_exception_collection_trace" - pprof_prefix = str(tmp_path / test_name) - output_filename = pprof_prefix + "." + str(os.getpid()) - - tracer._endpoint_call_counter_span_processor.enable() - - assert ddup.is_available - ddup.config(env="test", service=test_name, version="my_version", output_filename=pprof_prefix) - ddup.start() - - with stack.StackCollector(tracer=tracer, ignore_profiler=True, _stack_collector_v2_enabled=stack_v2_enabled): - with tracer.trace("foobar", resource="resource", span_type=ext.SpanTypes.WEB): - try: - raise ValueError("hello") - except Exception: - time.sleep(1) - - ddup.upload(tracer=tracer) - - profile = pprof_utils.parse_newest_profile(output_filename) - samples = pprof_utils.get_samples_with_label_key(profile, "exception type") - - if stack_v2_enabled: - assert len(samples) == 0 - else: - assert len(samples) > 0 - for sample in samples: - pprof_utils.assert_stack_event( - profile, - sample, - expected_event=pprof_utils.StackEvent( - thread_id=_thread.get_ident(), - thread_name="MainThread", - exception_type="builtins.ValueError", - trace_type=ext.SpanTypes.WEB, - trace_endpoint="resource", - locations=[ - pprof_utils.StackLocation( - filename="test_stack.py", - function_name="test_exception_collection_trace", - line_no=test_exception_collection_trace.__code__.co_firstlineno + 18, - ), - ], - ), - ) - - -def test_collect_once_with_class(tmp_path): - class SomeClass(object): - @classmethod - def sleep_class(cls): - return cls().sleep_instance() - - def sleep_instance(self): - for _ in range(10): - time.sleep(0.1) - - test_name = "test_collect_once_with_class" - pprof_prefix = str(tmp_path / test_name) - output_filename = pprof_prefix + "." 
+ str(os.getpid()) - - assert ddup.is_available - ddup.config(env="test", service=test_name, version="my_version", output_filename=pprof_prefix) - ddup.start() - - with stack.StackCollector(ignore_profiler=True, _stack_collector_v2_enabled=True): - SomeClass.sleep_class() - - ddup.upload() - - profile = pprof_utils.parse_newest_profile(output_filename) - samples = pprof_utils.get_samples_with_value_type(profile, "wall-time") - assert len(samples) > 0 - - pprof_utils.assert_profile_has_sample( - profile, - samples=samples, - expected_sample=pprof_utils.StackEvent( - thread_id=_thread.get_ident(), - thread_name="MainThread", - locations=[ - pprof_utils.StackLocation( - function_name="sleep_instance", - filename="test_stack.py", - line_no=SomeClass.sleep_instance.__code__.co_firstlineno + 2, - ), - pprof_utils.StackLocation( - function_name="sleep_class", - filename="test_stack.py", - line_no=SomeClass.sleep_class.__code__.co_firstlineno + 2, - ), - pprof_utils.StackLocation( - function_name="test_collect_once_with_class", - filename="test_stack.py", - line_no=test_collect_once_with_class.__code__.co_firstlineno + 19, - ), - ], - ), - ) - - -def test_collect_once_with_class_not_right_type(tmp_path): - class SomeClass(object): - @classmethod - def sleep_class(foobar, cls): - return foobar().sleep_instance(cls) - - def sleep_instance(foobar, self): - for _ in range(10): - time.sleep(0.1) - - test_name = "test_collect_once_with_class" - pprof_prefix = str(tmp_path / test_name) - output_filename = pprof_prefix + "." 
+ str(os.getpid()) - - assert ddup.is_available - ddup.config(env="test", service=test_name, version="my_version", output_filename=pprof_prefix) - ddup.start() - - with stack.StackCollector(ignore_profiler=True, _stack_collector_v2_enabled=True): - SomeClass.sleep_class(123) - - ddup.upload() - - profile = pprof_utils.parse_newest_profile(output_filename) - samples = pprof_utils.get_samples_with_value_type(profile, "wall-time") - assert len(samples) > 0 - - pprof_utils.assert_profile_has_sample( - profile, - samples=samples, - expected_sample=pprof_utils.StackEvent( - thread_id=_thread.get_ident(), - thread_name="MainThread", - locations=[ - pprof_utils.StackLocation( - function_name="sleep_instance", - filename="test_stack.py", - line_no=SomeClass.sleep_instance.__code__.co_firstlineno + 2, - ), - pprof_utils.StackLocation( - function_name="sleep_class", - filename="test_stack.py", - line_no=SomeClass.sleep_class.__code__.co_firstlineno + 2, - ), - pprof_utils.StackLocation( - function_name="test_collect_once_with_class_not_right_type", - filename="test_stack.py", - line_no=test_collect_once_with_class_not_right_type.__code__.co_firstlineno + 19, - ), - ], - ), - ) - - -def _fib(n): - if n == 1: - return 1 - elif n == 0: - return 0 - else: - return _fib(n - 1) + _fib(n - 2) - - -@pytest.mark.skipif(not TESTING_GEVENT, reason="Not testing gevent") -@pytest.mark.subprocess(ddtrace_run=True) -def test_collect_gevent_thread_task(): - # TODO(taegyunkim): update echion to support gevent and test with stack v2 - - from gevent import monkey - - monkey.patch_all() - - import os - import threading - import time - - from ddtrace.internal.datadog.profiling import ddup - from ddtrace.profiling.collector import stack - from tests.profiling.collector import pprof_utils - from tests.profiling_v2.collector.test_stack import _fib - - test_name = "test_collect_gevent_thread_task" - pprof_prefix = "/tmp/" + test_name - output_filename = pprof_prefix + "." 
+ str(os.getpid()) - - assert ddup.is_available - ddup.config(env="test", service=test_name, version="my_version", output_filename=pprof_prefix) - ddup.start() - - # Start some (green)threads - def _dofib(): - for _ in range(5): - # spend some time in CPU so the profiler can catch something - # On a Mac w/ Apple M3 MAX with Python 3.11 it takes about 200ms to calculate _fib(32) - # And _fib() is called 5 times so it should take about 1 second - # We use 5 threads below so it should take about 5 seconds - _fib(32) - # Just make sure gevent switches threads/greenlets - time.sleep(0) - - threads = [] - - with stack.StackCollector(): - for i in range(5): - t = threading.Thread(target=_dofib, name="TestThread %d" % i) - t.start() - threads.append(t) - for t in threads: - t.join() - - ddup.upload() - - profile = pprof_utils.parse_newest_profile(output_filename) - samples = pprof_utils.get_samples_with_label_key(profile, "task name") - assert len(samples) > 0 - - pprof_utils.assert_profile_has_sample( - profile, - samples, - expected_sample=pprof_utils.StackEvent( - thread_name="MainThread", - task_name=r"Greenlet-\d+$", - locations=[ - # Since we're using recursive function _fib(), we expect to have - # multiple locations for _fib(n) = _fib(n-1) + _fib(n-2) - pprof_utils.StackLocation( - filename="test_stack.py", - function_name="_fib", - line_no=_fib.__code__.co_firstlineno + 6, - ), - pprof_utils.StackLocation( - filename="test_stack.py", - function_name="_fib", - line_no=_fib.__code__.co_firstlineno + 6, - ), - pprof_utils.StackLocation( - filename="test_stack.py", - function_name="_fib", - line_no=_fib.__code__.co_firstlineno + 6, - ), - ], - ), - ) - - -def test_max_time_usage(): - with pytest.raises(ValueError): - stack.StackCollector(max_time_usage_pct=0) - - -def test_max_time_usage_over(): - with pytest.raises(ValueError): - stack.StackCollector(max_time_usage_pct=200) - - -@pytest.mark.parametrize( - "stack_v2_enabled", - [True, False], -) 
-@pytest.mark.parametrize( - "ignore_profiler", - [True, False], -) -def test_ignore_profiler(stack_v2_enabled, ignore_profiler, tmp_path): - if stack_v2_enabled: - pytest.xfail("Echion doesn't support ignore_profiler yet, and the test flakes") - - test_name = "test_ignore_profiler" - pprof_prefix = str(tmp_path / test_name) - output_filename = pprof_prefix + "." + str(os.getpid()) - - assert ddup.is_available - ddup.config(env="test", service=test_name, version="my_version", output_filename=pprof_prefix) - ddup.start() - - s = stack.StackCollector(ignore_profiler=ignore_profiler, _stack_collector_v2_enabled=stack_v2_enabled) - collector_worker_thread_id = None - - with s: - for _ in range(10): - time.sleep(0.1) - collector_worker_thread_id = s._worker.ident - - ddup.upload() - - profile = pprof_utils.parse_newest_profile(output_filename) - samples = pprof_utils.get_samples_with_label_key(profile, "thread id") - - thread_ids = set() - - for sample in samples: - thread_id_label = pprof_utils.get_label_with_key(profile.string_table, sample, "thread id") - thread_id = int(thread_id_label.num) - thread_ids.add(thread_id) - - # TODO(taegyunkim): update echion to support ignore_profiler and test with stack v2 - # Echion by default does not track native threads that are not registered - # after https://github.com/P403n1x87/echion/pull/83. 
- if stack_v2_enabled or ignore_profiler: - assert collector_worker_thread_id not in thread_ids - else: - assert collector_worker_thread_id in thread_ids - - -# TODO: support ignore profiler with stack_v2 and update this test -@pytest.mark.skipif(not TESTING_GEVENT, reason="Not testing gevent") -@pytest.mark.skip(reason="ignore_profiler is not supported with stack v2") -@pytest.mark.subprocess( - ddtrace_run=True, - env=dict(DD_PROFILING_IGNORE_PROFILER="1", DD_PROFILING_OUTPUT_PPROF="/tmp/test_ignore_profiler_gevent_task"), -) -def test_ignore_profiler_gevent_task(): - import gevent.monkey - - gevent.monkey.patch_all() - - import os - import time - import typing - - from ddtrace.profiling import collector - from ddtrace.profiling import event as event_mod - from ddtrace.profiling import profiler - from ddtrace.profiling.collector import stack - from tests.profiling.collector import pprof_utils - - def _fib(n): - if n == 1: - return 1 - elif n == 0: - return 0 - else: - return _fib(n - 1) + _fib(n - 2) - - class CollectorTest(collector.PeriodicCollector): - def collect(self) -> typing.Iterable[typing.Iterable[event_mod.Event]]: - _fib(22) - return [] - - output_filename = os.environ["DD_PROFILING_OUTPUT_PPROF"] - - p = profiler.Profiler() - - p.start() - - for c in p._profiler._collectors: - if isinstance(c, stack.StackCollector): - c.ignore_profiler - - c = CollectorTest(None, interval=0.00001) - c.start() - - time.sleep(3) - - worker_ident = c._worker.ident - - c.stop() - p.stop() - - profile = pprof_utils.parse_newest_profile(output_filename + "." 
+ str(os.getpid())) - - samples = pprof_utils.get_samples_with_value_type(profile, "cpu-time") - - thread_ids = set() - for sample in samples: - thread_id_label = pprof_utils.get_label_with_key(profile.string_table, sample, "thread id") - thread_id = int(thread_id_label.num) - thread_ids.add(thread_id) - - assert worker_ident not in thread_ids - - -def test_repr(): - test_collector._test_repr( - stack.StackCollector, - "StackCollector(status=, " - "min_interval_time=0.01, max_time_usage_pct=1.0, " - "nframes=64, ignore_profiler=False, endpoint_collection_enabled=None, tracer=None)", - ) diff --git a/tests/profiling_v2/collector/test_stack_asyncio.py b/tests/profiling_v2/collector/test_stack_asyncio.py deleted file mode 100644 index a6375569b9d..00000000000 --- a/tests/profiling_v2/collector/test_stack_asyncio.py +++ /dev/null @@ -1,879 +0,0 @@ -import pytest - - -@pytest.mark.subprocess( - env=dict( - DD_PROFILING_OUTPUT_PPROF="/tmp/test_stack_asyncio", - ), - err=None, -) -# For macOS: err=None ignores expected stderr from tracer failing to connect to agent (not relevant to this test) -def test_asyncio(): - import asyncio - import os - import time - import uuid - - from ddtrace import ext - from ddtrace.internal.datadog.profiling import stack_v2 - from ddtrace.profiling import profiler - from ddtrace.trace import tracer - from tests.profiling.collector import pprof_utils - - assert stack_v2.is_available, stack_v2.failure_msg - - sleep_time = 0.2 - loop_run_time = 3 - - async def stuff() -> None: - start_time = time.time() - while time.time() < start_time + loop_run_time: - await asyncio.sleep(sleep_time) - - async def hello(): - t1 = asyncio.create_task(stuff(), name="sleep 1") - t2 = asyncio.create_task(stuff(), name="sleep 2") - await stuff() - return (t1, t2) - - resource = str(uuid.uuid4()) - span_type = ext.SpanTypes.WEB - - p = profiler.Profiler(tracer=tracer) - assert p._profiler._stack_v2_enabled - p.start() - with tracer.trace("test_asyncio", 
resource=resource, span_type=span_type) as span: - span_id = span.span_id - local_root_span_id = span._local_root.span_id - - loop = asyncio.new_event_loop() - asyncio.set_event_loop(loop) - maintask = loop.create_task(hello(), name="main") - - t1, t2 = loop.run_until_complete(maintask) - p.stop() - - t1_name = t1.get_name() - t2_name = t2.get_name() - - assert t1_name == "sleep 1" - assert t2_name == "sleep 2" - - output_filename = os.environ["DD_PROFILING_OUTPUT_PPROF"] + "." + str(os.getpid()) - - profile = pprof_utils.parse_newest_profile(output_filename) - - samples_with_span_id = pprof_utils.get_samples_with_label_key(profile, "span id") - assert len(samples_with_span_id) > 0 - - # get samples with task_name - samples = pprof_utils.get_samples_with_label_key(profile, "task name") - # The next fails if stack_v2 is not properly configured with asyncio task - # tracking via ddtrace.profiling._asyncio - assert len(samples) > 0 - - pprof_utils.assert_profile_has_sample( - profile, - samples, - expected_sample=pprof_utils.StackEvent( - thread_name="MainThread", - task_name="main", - span_id=span_id, - local_root_span_id=local_root_span_id, - locations=[ - pprof_utils.StackLocation( - function_name="hello", filename="test_stack_asyncio.py", line_no=hello.__code__.co_firstlineno + 3 - ) - ], - ), - ) - - pprof_utils.assert_profile_has_sample( - profile, - samples, - expected_sample=pprof_utils.StackEvent( - thread_name="MainThread", - task_name=t1_name, - span_id=span_id, - local_root_span_id=local_root_span_id, - locations=[ - pprof_utils.StackLocation( - function_name="stuff", filename="test_stack_asyncio.py", line_no=stuff.__code__.co_firstlineno + 3 - ), - ], - ), - ) - - pprof_utils.assert_profile_has_sample( - profile, - samples, - expected_sample=pprof_utils.StackEvent( - thread_name="MainThread", - task_name=t2_name, - span_id=span_id, - local_root_span_id=local_root_span_id, - locations=[ - pprof_utils.StackLocation( - function_name="stuff", 
filename="test_stack_asyncio.py", line_no=stuff.__code__.co_firstlineno + 3 - ), - ], - ), - ) - - -@pytest.mark.subprocess( - env=dict( - DD_PROFILING_OUTPUT_PPROF="/tmp/test_asyncio_start_profiler_from_process_before_importing_asyncio", - ), - err=None, -) -# For macOS: err=None ignores expected stderr from tracer failing to connect to agent (not relevant to this test) -def test_asyncio_start_profiler_from_process_before_importing_asyncio(): - from ddtrace.internal.datadog.profiling import stack_v2 - from ddtrace.profiling import profiler - - assert stack_v2.is_available, stack_v2.failure_msg - - p = profiler.Profiler() - assert p._profiler._stack_v2_enabled - p.start() - - import asyncio - import os - import sys - import time - - # Start an asyncio loop BEFORE importing profiler modules - # This simulates the bug scenario where a loop exists before profiling is enabled - loop = asyncio.new_event_loop() - asyncio.set_event_loop(loop) - - async def my_function(): - async def background_task_func() -> None: - """Background task that runs in the existing loop.""" - await asyncio.sleep(1.5) - - # Create and start a task in the existing loop - background_task = loop.create_task(background_task_func(), name="background") - assert background_task is not None - - # Run tasks that should be tracked - sleep_time = 0.2 - loop_run_time = 0.75 - - async def tracked_task() -> None: - start_time = time.time() - while time.time() < start_time + loop_run_time: - await asyncio.sleep(sleep_time) - - async def main_task(): - t1 = asyncio.create_task(tracked_task(), name="tracked 1") - t2 = asyncio.create_task(tracked_task(), name="tracked 2") - await tracked_task() - await asyncio.sleep(0.25) - return t1, t2 - - result = await main_task() - - await background_task - - return tracked_task, background_task_func, result - - main_task = loop.create_task(my_function(), name="main") - tracked_task_def, background_task_def, (t1, t2) = loop.run_until_complete(main_task) - - p.stop() - - 
t1_name = t1.get_name() - t2_name = t2.get_name() - - assert t1_name == "tracked 1" - assert t2_name == "tracked 2" - - from tests.profiling.collector import pprof_utils - - output_filename = os.environ["DD_PROFILING_OUTPUT_PPROF"] + "." + str(os.getpid()) - profile = pprof_utils.parse_newest_profile(output_filename) - - samples = pprof_utils.get_samples_with_label_key(profile, "task name") - assert len(samples) > 0, "No task names found - existing loop was not tracked!" - - if sys.version_info >= (3, 11): - EXPECTED_FUNCTION_NAME_BACKGROUND = f"{my_function.__name__}..{background_task_def.__name__}" - else: - EXPECTED_FUNCTION_NAME_BACKGROUND = background_task_def.__name__ - EXPECTED_FILENAME_BACKGROUND = os.path.basename(background_task_def.__code__.co_filename) - EXPECTED_LINE_NO_BACKGROUND = background_task_def.__code__.co_firstlineno + 2 - - pprof_utils.assert_profile_has_sample( - profile, - samples, - expected_sample=pprof_utils.StackEvent( - thread_name="MainThread", - task_name="background", - locations=[ - pprof_utils.StackLocation( - function_name=EXPECTED_FUNCTION_NAME_BACKGROUND, - filename=EXPECTED_FILENAME_BACKGROUND, - line_no=EXPECTED_LINE_NO_BACKGROUND, - ), - ], - ), - ) - - # Verify specific tasks are in the profile - if sys.version_info >= (3, 11): - EXPECTED_FUNCTION_NAME_TRACKED = f"{my_function.__name__}..{tracked_task_def.__name__}" - else: - EXPECTED_FUNCTION_NAME_TRACKED = tracked_task_def.__name__ - EXPECTED_FILENAME_TRACKED = os.path.basename(tracked_task_def.__code__.co_filename) - EXPECTED_LINE_NO_TRACKED = tracked_task_def.__code__.co_firstlineno + 3 - - pprof_utils.assert_profile_has_sample( - profile, - samples, - expected_sample=pprof_utils.StackEvent( - thread_name="MainThread", - task_name=t1_name, - locations=[ - pprof_utils.StackLocation( - function_name=EXPECTED_FUNCTION_NAME_TRACKED, - filename=EXPECTED_FILENAME_TRACKED, - line_no=EXPECTED_LINE_NO_TRACKED, - ) - ], - ), - ) - - -@pytest.mark.subprocess( - env=dict( - 
DD_PROFILING_OUTPUT_PPROF="/tmp/test_asyncio_start_profiler_from_process_before_starting_loop", - ), - err=None, -) -# For macOS: err=None ignores expected stderr from tracer failing to connect to agent (not relevant to this test) -def test_asyncio_start_profiler_from_process_before_starting_loop(): - import asyncio - import os - import sys - import time - - from ddtrace.internal.datadog.profiling import stack_v2 - from ddtrace.profiling import profiler - - assert stack_v2.is_available, stack_v2.failure_msg - - p = profiler.Profiler() - assert p._profiler._stack_v2_enabled - p.start() - - # Start an asyncio loop BEFORE importing profiler modules - # This simulates the bug scenario where a loop exists before profiling is enabled - loop = asyncio.new_event_loop() - asyncio.set_event_loop(loop) - - async def my_function(): - async def background_task_func() -> None: - """Background task that runs in the existing loop.""" - await asyncio.sleep(1.5) - - # Create and start a task in the existing loop - background_task = loop.create_task(background_task_func(), name="background") - assert background_task is not None - - # Run tasks that should be tracked - sleep_time = 0.2 - loop_run_time = 0.75 - - async def tracked_task() -> None: - start_time = time.time() - while time.time() < start_time + loop_run_time: - await asyncio.sleep(sleep_time) - - async def main_task(): - t1 = asyncio.create_task(tracked_task(), name="tracked 1") - t2 = asyncio.create_task(tracked_task(), name="tracked 2") - await tracked_task() - await asyncio.sleep(0.25) - return t1, t2 - - result = await main_task() - - await background_task - - return tracked_task, background_task_func, result - - main_task = loop.create_task(my_function(), name="main") - tracked_task_def, background_task_def, (t1, t2) = loop.run_until_complete(main_task) - - p.stop() - - t1_name = t1.get_name() - t2_name = t2.get_name() - - assert t1_name == "tracked 1" - assert t2_name == "tracked 2" - - from tests.profiling.collector 
import pprof_utils - - output_filename = os.environ["DD_PROFILING_OUTPUT_PPROF"] + "." + str(os.getpid()) - profile = pprof_utils.parse_newest_profile(output_filename) - - samples = pprof_utils.get_samples_with_label_key(profile, "task name") - assert len(samples) > 0, "No task names found - existing loop was not tracked!" - - if sys.version_info >= (3, 11): - EXPECTED_FUNCTION_NAME_BACKGROUND = f"{my_function.__name__}..{background_task_def.__name__}" - else: - EXPECTED_FUNCTION_NAME_BACKGROUND = background_task_def.__name__ - EXPECTED_FILENAME_BACKGROUND = os.path.basename(background_task_def.__code__.co_filename) - EXPECTED_LINE_NO_BACKGROUND = background_task_def.__code__.co_firstlineno + 2 - - pprof_utils.assert_profile_has_sample( - profile, - samples, - expected_sample=pprof_utils.StackEvent( - thread_name="MainThread", - task_name="background", - locations=[ - pprof_utils.StackLocation( - function_name=EXPECTED_FUNCTION_NAME_BACKGROUND, - filename=EXPECTED_FILENAME_BACKGROUND, - line_no=EXPECTED_LINE_NO_BACKGROUND, - ), - ], - ), - ) - - # Verify specific tasks are in the profile - if sys.version_info >= (3, 11): - EXPECTED_FUNCTION_NAME_TRACKED = f"{my_function.__name__}..{tracked_task_def.__name__}" - else: - EXPECTED_FUNCTION_NAME_TRACKED = tracked_task_def.__name__ - EXPECTED_FILENAME_TRACKED = os.path.basename(tracked_task_def.__code__.co_filename) - EXPECTED_LINE_NO_TRACKED = tracked_task_def.__code__.co_firstlineno + 3 - - pprof_utils.assert_profile_has_sample( - profile, - samples, - expected_sample=pprof_utils.StackEvent( - thread_name="MainThread", - task_name=t1_name, - locations=[ - pprof_utils.StackLocation( - function_name=EXPECTED_FUNCTION_NAME_TRACKED, - filename=EXPECTED_FILENAME_TRACKED, - line_no=EXPECTED_LINE_NO_TRACKED, - ) - ], - ), - ) - - -@pytest.mark.xfail(reason="This test fails because there's no way to get the current loop if it's not already running.") -@pytest.mark.subprocess( - env=dict( - 
DD_PROFILING_OUTPUT_PPROF="/tmp/test_asyncio_start_profiler_from_process_after_creating_loop", - ), - err=None, -) -# For macOS: err=None ignores expected stderr from tracer failing to connect to agent (not relevant to this test) -def test_asyncio_start_profiler_from_process_after_creating_loop(): - import asyncio - import os - import sys - import time - - from ddtrace.internal.datadog.profiling import stack_v2 - from ddtrace.profiling import profiler - - # Start an asyncio loop BEFORE importing profiler modules - # This simulates the bug scenario where a loop exists before profiling is enabled - loop = asyncio.new_event_loop() - asyncio.set_event_loop(loop) - - assert stack_v2.is_available, stack_v2.failure_msg - - p = profiler.Profiler() - assert p._profiler._stack_v2_enabled - p.start() - - async def my_function(): - async def background_task_func() -> None: - """Background task that runs in the existing loop.""" - await asyncio.sleep(1.5) - - # Create and start a task in the existing loop - background_task = loop.create_task(background_task_func(), name="background") - assert background_task is not None - - # Run tasks that should be tracked - sleep_time = 0.2 - loop_run_time = 0.75 - - async def tracked_task() -> None: - start_time = time.time() - while time.time() < start_time + loop_run_time: - await asyncio.sleep(sleep_time) - - async def main_task(): - t1 = asyncio.create_task(tracked_task(), name="tracked 1") - t2 = asyncio.create_task(tracked_task(), name="tracked 2") - await tracked_task() - await asyncio.sleep(0.25) - return t1, t2 - - result = await main_task() - - await background_task - - return tracked_task, background_task_func, result - - main_task = loop.create_task(my_function(), name="main") - tracked_task_def, background_task_def, (t1, t2) = loop.run_until_complete(main_task) - - p.stop() - - t1_name = t1.get_name() - t2_name = t2.get_name() - - assert t1_name == "tracked 1" - assert t2_name == "tracked 2" - - from tests.profiling.collector 
import pprof_utils - - output_filename = os.environ["DD_PROFILING_OUTPUT_PPROF"] + "." + str(os.getpid()) - profile = pprof_utils.parse_newest_profile(output_filename) - - samples = pprof_utils.get_samples_with_label_key(profile, "task name") - assert len(samples) > 0, "No task names found - existing loop was not tracked!" - - EXPECTED_FILENAME_BACKGROUND = os.path.basename(background_task_def.__code__.co_filename) - EXPECTED_LINE_NO_BACKGROUND = background_task_def.__code__.co_firstlineno + 2 - if sys.version_info >= (3, 11): - EXPECTED_FUNCTION_NAME_BACKGROUND = f"{my_function.__name__}..{background_task_def.__name__}" - else: - EXPECTED_FUNCTION_NAME_BACKGROUND = background_task_def.__name__ - - pprof_utils.assert_profile_has_sample( - profile, - samples, - expected_sample=pprof_utils.StackEvent( - thread_name="MainThread", - task_name="background", - locations=[ - pprof_utils.StackLocation( - function_name=EXPECTED_FUNCTION_NAME_BACKGROUND, - filename=EXPECTED_FILENAME_BACKGROUND, - line_no=EXPECTED_LINE_NO_BACKGROUND, - ), - ], - ), - ) - - # Verify specific tasks are in the profile - EXPECTED_FILENAME_TRACKED = os.path.basename(tracked_task_def.__code__.co_filename) - EXPECTED_LINE_NO_TRACKED = tracked_task_def.__code__.co_firstlineno + 3 - if sys.version_info >= (3, 11): - EXPECTED_FUNCTION_NAME_TRACKED = f"{my_function.__name__}..{tracked_task_def.__name__}" - else: - EXPECTED_FUNCTION_NAME_TRACKED = tracked_task_def.__name__ - - pprof_utils.assert_profile_has_sample( - profile, - samples, - expected_sample=pprof_utils.StackEvent( - thread_name="MainThread", - task_name=t1_name, - locations=[ - pprof_utils.StackLocation( - function_name=EXPECTED_FUNCTION_NAME_TRACKED, - filename=EXPECTED_FILENAME_TRACKED, - line_no=EXPECTED_LINE_NO_TRACKED, - ) - ], - ), - ) - - -@pytest.mark.xfail(reason="This test fails because there's no way to get the current loop if it's not already running.") -@pytest.mark.subprocess( - env=dict( - 
DD_PROFILING_OUTPUT_PPROF="/tmp/test_asyncio_import_profiler_from_process_after_starting_loop", - ), - err=None, -) -# For macOS: err=None ignores expected stderr from tracer failing to connect to agent (not relevant to this test) -def test_asyncio_import_profiler_from_process_after_starting_loop(): - import asyncio - import os - import sys - import time - - # Start an asyncio loop BEFORE importing profiler modules - # This simulates the bug scenario where a loop exists before profiling is enabled - loop = asyncio.new_event_loop() - asyncio.set_event_loop(loop) - - from ddtrace.internal.datadog.profiling import stack_v2 - from ddtrace.profiling import profiler - - assert stack_v2.is_available, stack_v2.failure_msg - - p = profiler.Profiler() - assert p._profiler._stack_v2_enabled - p.start() - - async def my_function(): - async def background_task_func() -> None: - """Background task that runs in the existing loop.""" - await asyncio.sleep(1.5) - - # Create and start a task in the existing loop - background_task = loop.create_task(background_task_func(), name="background") - assert background_task is not None - - # Run tasks that should be tracked - sleep_time = 0.2 - loop_run_time = 0.75 - - async def tracked_task() -> None: - start_time = time.time() - while time.time() < start_time + loop_run_time: - await asyncio.sleep(sleep_time) - - async def main_task(): - t1 = asyncio.create_task(tracked_task(), name="tracked 1") - t2 = asyncio.create_task(tracked_task(), name="tracked 2") - await tracked_task() - await asyncio.sleep(0.25) - return t1, t2 - - result = await main_task() - - await background_task - - return tracked_task, background_task_func, result - - main_task = loop.create_task(my_function(), name="main") - tracked_task_def, background_task_def, (t1, t2) = loop.run_until_complete(main_task) - - p.stop() - - t1_name = t1.get_name() - t2_name = t2.get_name() - - assert t1_name == "tracked 1" - assert t2_name == "tracked 2" - - from tests.profiling.collector 
import pprof_utils - - output_filename = os.environ["DD_PROFILING_OUTPUT_PPROF"] + "." + str(os.getpid()) - profile = pprof_utils.parse_newest_profile(output_filename) - - samples = pprof_utils.get_samples_with_label_key(profile, "task name") - assert len(samples) > 0, "No task names found - existing loop was not tracked!" - - EXPECTED_FILENAME_BACKGROUND = os.path.basename(background_task_def.__code__.co_filename) - EXPECTED_LINE_NO_BACKGROUND = background_task_def.__code__.co_firstlineno - if sys.version_info >= (3, 11): - EXPECTED_FUNCTION_NAME_BACKGROUND = f"{my_function.__name__}..{background_task_def.__name__}" - else: - EXPECTED_FUNCTION_NAME_BACKGROUND = background_task_def.__name__ - - pprof_utils.assert_profile_has_sample( - profile, - samples, - expected_sample=pprof_utils.StackEvent( - thread_name="MainThread", - task_name="background", - locations=[ - pprof_utils.StackLocation( - function_name=EXPECTED_FUNCTION_NAME_BACKGROUND, - filename=EXPECTED_FILENAME_BACKGROUND, - line_no=EXPECTED_LINE_NO_BACKGROUND, - ), - ], - ), - ) - - # Verify specific tasks are in the profile - EXPECTED_FILENAME_TRACKED = os.path.basename(tracked_task_def.__code__.co_filename) - EXPECTED_LINE_NO_TRACKED = tracked_task_def.__code__.co_firstlineno + 3 - if sys.version_info >= (3, 11): - EXPECTED_FUNCTION_NAME_TRACKED = f"{my_function.__name__}..{tracked_task_def.__name__}" - else: - EXPECTED_FUNCTION_NAME_TRACKED = tracked_task_def.__name__ - - pprof_utils.assert_profile_has_sample( - profile, - samples, - expected_sample=pprof_utils.StackEvent( - thread_name="MainThread", - task_name=t1_name, - locations=[ - pprof_utils.StackLocation( - function_name=EXPECTED_FUNCTION_NAME_TRACKED, - filename=EXPECTED_FILENAME_TRACKED, - line_no=EXPECTED_LINE_NO_TRACKED, - ) - ], - ), - ) - - -@pytest.mark.subprocess( - env=dict( - DD_PROFILING_OUTPUT_PPROF="/tmp/test_asyncio_start_profiler_from_process_after_creating_loop_and_task", - ), - err=None, -) -def 
test_asyncio_start_profiler_from_process_after_task_start(): - # NOW import profiling modules - this should track the existing loop - import asyncio - import os - import sys - import time - - from ddtrace.internal.datadog.profiling import stack_v2 - from ddtrace.profiling import profiler - - # Start an asyncio loop BEFORE importing profiler modules - # This simulates the bug scenario where a loop exists before profiling is enabled - loop = asyncio.new_event_loop() - asyncio.set_event_loop(loop) - - async def my_function(): - async def background_task_func() -> None: - """Background task that runs in the existing loop.""" - await asyncio.sleep(1.5) - - # Create and start a task in the existing loop - background_task = loop.create_task(background_task_func(), name="background") - assert background_task is not None - - # Start profiler after loop is already running - assert asyncio.get_running_loop() is loop - - assert stack_v2.is_available, stack_v2.failure_msg - - p = profiler.Profiler() - assert p._profiler._stack_v2_enabled - p.start() - - # Run tasks that should be tracked - sleep_time = 0.2 - loop_run_time = 0.75 - - async def tracked_task() -> None: - start_time = time.time() - while time.time() < start_time + loop_run_time: - await asyncio.sleep(sleep_time) - - async def main_task(): - t1 = asyncio.create_task(tracked_task(), name="tracked 1") - t2 = asyncio.create_task(tracked_task(), name="tracked 2") - await tracked_task() - await asyncio.sleep(0.25) - return t1, t2 - - result = await main_task() - - await background_task - - return tracked_task, background_task_func, p, result - - main_task = loop.create_task(my_function(), name="main") - tracked_task_def, background_task_def, p, (t1, t2) = loop.run_until_complete(main_task) - - p.stop() - - t1_name = t1.get_name() - t2_name = t2.get_name() - - assert t1_name == "tracked 1" - assert t2_name == "tracked 2" - - from tests.profiling.collector import pprof_utils - - output_filename = 
os.environ["DD_PROFILING_OUTPUT_PPROF"] + "." + str(os.getpid()) - profile = pprof_utils.parse_newest_profile(output_filename) - - samples = pprof_utils.get_samples_with_label_key(profile, "task name") - assert len(samples) > 0, "No task names found - existing loop was not tracked!" - - EXPECTED_FILENAME_BACKGROUND = os.path.basename(background_task_def.__code__.co_filename) - EXPECTED_LINE_NO_BACKGROUND = background_task_def.__code__.co_firstlineno - if sys.version_info >= (3, 11): - EXPECTED_FUNCTION_NAME_BACKGROUND = f"{my_function.__name__}..{background_task_def.__name__}" - else: - EXPECTED_FUNCTION_NAME_BACKGROUND = background_task_def.__name__ - - pprof_utils.assert_profile_has_sample( - profile, - samples, - expected_sample=pprof_utils.StackEvent( - thread_name="MainThread", - task_name="background", - locations=[ - pprof_utils.StackLocation( - function_name=EXPECTED_FUNCTION_NAME_BACKGROUND, - filename=EXPECTED_FILENAME_BACKGROUND, - line_no=EXPECTED_LINE_NO_BACKGROUND, - ), - ], - ), - ) - - # Verify specific tasks are in the profile - EXPECTED_FILENAME_TRACKED = os.path.basename(tracked_task_def.__code__.co_filename) - EXPECTED_LINE_NO_TRACKED = tracked_task_def.__code__.co_firstlineno + 3 - if sys.version_info >= (3, 11): - EXPECTED_FUNCTION_NAME_TRACKED = f"{my_function.__name__}..{tracked_task_def.__name__}" - else: - EXPECTED_FUNCTION_NAME_TRACKED = tracked_task_def.__name__ - - pprof_utils.assert_profile_has_sample( - profile, - samples, - expected_sample=pprof_utils.StackEvent( - thread_name="MainThread", - task_name=t1_name, - locations=[ - pprof_utils.StackLocation( - function_name=EXPECTED_FUNCTION_NAME_TRACKED, - filename=EXPECTED_FILENAME_TRACKED, - line_no=EXPECTED_LINE_NO_TRACKED, - ) - ], - ), - ) - - -@pytest.mark.subprocess( - env=dict( - DD_PROFILING_OUTPUT_PPROF="/tmp/test_asyncio_start_profiler_from_process_after_task_start", - ), - err=None, -) -def test_asyncio_import_and_start_profiler_from_process_after_task_start(): - import 
asyncio - import os - import sys - import time - - # Start an asyncio loop BEFORE importing profiler modules - # This simulates the bug scenario where a loop exists before profiling is enabled - loop = asyncio.new_event_loop() - asyncio.set_event_loop(loop) - - async def my_function(): - async def background_task_func() -> None: - """Background task that runs in the existing loop.""" - await asyncio.sleep(1.5) - - # Create and start a task in the existing loop - background_task = loop.create_task(background_task_func(), name="background") - assert background_task is not None - - # Start profiler after loop is already running - assert asyncio.get_running_loop() is loop - - # NOW import profiling modules - this should track the existing loop - from ddtrace.internal.datadog.profiling import stack_v2 - from ddtrace.profiling import profiler - - assert stack_v2.is_available, stack_v2.failure_msg - - p = profiler.Profiler() - assert p._profiler._stack_v2_enabled - p.start() - - # Run tasks that should be tracked - sleep_time = 0.2 - loop_run_time = 0.75 - - async def tracked_task() -> None: - start_time = time.time() - while time.time() < start_time + loop_run_time: - await asyncio.sleep(sleep_time) - - async def main_task(): - t1 = asyncio.create_task(tracked_task(), name="tracked 1") - t2 = asyncio.create_task(tracked_task(), name="tracked 2") - await tracked_task() - await asyncio.sleep(0.25) - return t1, t2 - - result = await main_task() - - await background_task - - return tracked_task, background_task_func, p, result - - main_task = loop.create_task(my_function(), name="main") - tracked_task_def, background_task_def, p, (t1, t2) = loop.run_until_complete(main_task) - - p.stop() - - t1_name = t1.get_name() - t2_name = t2.get_name() - - assert t1_name == "tracked 1" - assert t2_name == "tracked 2" - - from tests.profiling.collector import pprof_utils - - output_filename = os.environ["DD_PROFILING_OUTPUT_PPROF"] + "." 
+ str(os.getpid()) - profile = pprof_utils.parse_newest_profile(output_filename) - - samples = pprof_utils.get_samples_with_label_key(profile, "task name") - assert len(samples) > 0, "No task names found - existing loop was not tracked!" - - if sys.version_info >= (3, 11): - EXPECTED_FUNCTION_NAME_BACKGROUND = f"{my_function.__name__}..{background_task_def.__name__}" - else: - EXPECTED_FUNCTION_NAME_BACKGROUND = background_task_def.__name__ - EXPECTED_FILENAME_BACKGROUND = os.path.basename(background_task_def.__code__.co_filename) - EXPECTED_LINE_NO_BACKGROUND = background_task_def.__code__.co_firstlineno - - pprof_utils.assert_profile_has_sample( - profile, - samples, - expected_sample=pprof_utils.StackEvent( - thread_name="MainThread", - task_name="background", - locations=[ - pprof_utils.StackLocation( - function_name=EXPECTED_FUNCTION_NAME_BACKGROUND, - filename=EXPECTED_FILENAME_BACKGROUND, - line_no=EXPECTED_LINE_NO_BACKGROUND, - ), - ], - ), - ) - - # Verify specific tasks are in the profile - if sys.version_info >= (3, 11): - EXPECTED_FUNCTION_NAME_TRACKED = f"{my_function.__name__}..{tracked_task_def.__name__}" - else: - EXPECTED_FUNCTION_NAME_TRACKED = tracked_task_def.__name__ - EXPECTED_FILENAME_TRACKED = os.path.basename(tracked_task_def.__code__.co_filename) - EXPECTED_LINE_NO_TRACKED = tracked_task_def.__code__.co_firstlineno + 3 - - pprof_utils.assert_profile_has_sample( - profile, - samples, - expected_sample=pprof_utils.StackEvent( - thread_name="MainThread", - task_name=t1_name, - locations=[ - pprof_utils.StackLocation( - function_name=EXPECTED_FUNCTION_NAME_TRACKED, - filename=EXPECTED_FILENAME_TRACKED, - line_no=EXPECTED_LINE_NO_TRACKED, - ) - ], - ), - ) diff --git a/tests/profiling_v2/exporter/__init__.py b/tests/profiling_v2/exporter/__init__.py deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/tests/profiling_v2/simple_program.py b/tests/profiling_v2/simple_program.py deleted file mode 100755 index 
ed07bc5a402..00000000000 --- a/tests/profiling_v2/simple_program.py +++ /dev/null @@ -1,32 +0,0 @@ -#!/usr/bin/env python -import os -import sys -import time - -from ddtrace.internal import service -from ddtrace.profiling import bootstrap -from ddtrace.profiling.collector import stack - - -for running_collector in bootstrap.profiler._profiler._collectors: - if isinstance(running_collector, stack.StackCollector): - break -else: - raise AssertionError("Unable to find stack collector") - - -print("hello world") -assert running_collector.status == service.ServiceStatus.RUNNING -print(running_collector.interval) - -t0 = time.time() -while time.time() - t0 < (running_collector.interval * 10): - pass - -# Do some serious memory allocations! -for _ in range(5000000): - object() - -print(os.getpid()) -print(bootstrap.profiler._profiler._stack_v2_enabled) -sys.exit(42) diff --git a/tests/profiling_v2/simple_program_fork.py b/tests/profiling_v2/simple_program_fork.py deleted file mode 100644 index ad8c0541ccd..00000000000 --- a/tests/profiling_v2/simple_program_fork.py +++ /dev/null @@ -1,32 +0,0 @@ -import os -import sys -import threading - -from ddtrace.internal import service -import ddtrace.profiling.auto -import ddtrace.profiling.bootstrap -import ddtrace.profiling.profiler - - -lock = threading.Lock() -lock.acquire() - - -assert ddtrace.profiling.bootstrap.profiler.status == service.ServiceStatus.RUNNING - - -child_pid = os.fork() -if child_pid == 0: - # Release it - lock.release() - - # We track this one though - lock = threading.Lock() - lock.acquire() - lock.release() -else: - lock.release() - assert ddtrace.profiling.bootstrap.profiler.status == service.ServiceStatus.RUNNING - print(child_pid) - pid, status = os.waitpid(child_pid, 0) - sys.exit(os.WEXITSTATUS(status)) diff --git a/tests/profiling_v2/simple_program_gevent.py b/tests/profiling_v2/simple_program_gevent.py deleted file mode 100644 index f50fa3aa2e0..00000000000 --- 
a/tests/profiling_v2/simple_program_gevent.py +++ /dev/null @@ -1,34 +0,0 @@ -# Import from ddtrace before monkey patching to ensure that we grab all the -# necessary references to the unpatched modules. -import ddtrace.auto # noqa: F401, I001 -import ddtrace.profiling.auto # noqa:F401 - - -import gevent.monkey # noqa:F402 - -gevent.monkey.patch_all() - -import threading # noqa: E402, F402, I001 -import time # noqa: E402, F402 - - -def fibonacci(n): - if n == 0: - return 0 - elif n == 1: - return 1 - else: - return fibonacci(n - 1) + fibonacci(n - 2) - - -i = 1 -for _ in range(20): - threads = [] - for _ in range(10): - t = threading.Thread(target=fibonacci, args=(i,)) - t.start() - threads.append(t) - i += 1 - for t in threads: - t.join() - time.sleep(0.1) diff --git a/tests/profiling_v2/test_gunicorn.py b/tests/profiling_v2/test_gunicorn.py deleted file mode 100644 index 78297c85e55..00000000000 --- a/tests/profiling_v2/test_gunicorn.py +++ /dev/null @@ -1,140 +0,0 @@ -# -*- encoding: utf-8 -*- -import os -import re -import subprocess -import sys -import time -import urllib.request - -import pytest - -from tests.profiling.collector import pprof_utils - - -# DEV: gunicorn tests are hard to debug, so keeping these print statements for -# future debugging -DEBUG_PRINT = True - - -def debug_print(*args): - if DEBUG_PRINT: - print(*args) - - -# gunicorn is not available on Windows -if sys.platform == "win32": - pytestmark = pytest.mark.skip - -TESTING_GEVENT = os.getenv("DD_PROFILE_TEST_GEVENT", False) - - -def _run_gunicorn(*args): - cmd = ( - [ - "ddtrace-run", - "gunicorn", - "--bind", - "127.0.0.1:7644", - "--worker-tmp-dir", - "/dev/shm", - "-c", - os.path.dirname(__file__) + "/gunicorn.conf.py", - "--chdir", - os.path.dirname(__file__), - ] - + list(args) - + ["tests.profiling.gunicorn-app:app"] - ) - return subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) - - -@pytest.fixture -def gunicorn(monkeypatch): - 
monkeypatch.setenv("DD_PROFILING_IGNORE_PROFILER", "1") - monkeypatch.setenv("DD_PROFILING_ENABLED", "1") - - yield _run_gunicorn - - -def _get_worker_pids(stdout): - # type: (str) -> list[int] - return [int(_) for _ in re.findall(r"Booting worker with pid: (\d+)", stdout)] - - -def _test_gunicorn(gunicorn, tmp_path, monkeypatch, *args): - # type: (...) -> None - filename = str(tmp_path / "gunicorn.pprof") - monkeypatch.setenv("DD_PROFILING_OUTPUT_PPROF", filename) - monkeypatch.setenv("_DD_PROFILING_STACK_V2_ADAPTIVE_SAMPLING_ENABLED", "0") - - debug_print("Creating gunicorn workers") - # DEV: We only start 1 worker to simplify the test - proc = gunicorn("-w", "1", *args) - # Wait for the workers to start - time.sleep(5) - - if proc.poll() is not None: - pytest.fail("Gunicorn failed to start") - - debug_print("Making request to gunicorn server") - try: - with urllib.request.urlopen("http://127.0.0.1:7644", timeout=5) as f: - status_code = f.getcode() - assert status_code == 200, status_code - response = f.read().decode() - debug_print(response) - except Exception as e: - proc.terminate() - output = proc.stdout.read().decode() - print(output) - pytest.fail("Failed to make request to gunicorn server %s" % e) - finally: - # Need to terminate the process to get the output and release the port - proc.terminate() - - debug_print("Reading gunicorn worker output to get PIDs") - output = proc.stdout.read().decode() - worker_pids = _get_worker_pids(output) - debug_print("Gunicorn worker PIDs: %s" % worker_pids) - - for line in output.splitlines(): - debug_print(line) - - assert len(worker_pids) == 1, output - - debug_print("Waiting for gunicorn process to terminate") - try: - assert proc.wait(timeout=5) == 0, output - except subprocess.TimeoutExpired: - pytest.fail("Failed to terminate gunicorn process ", output) - assert "module 'threading' has no attribute '_active'" not in output, output - - for pid in worker_pids: - debug_print("Reading pprof file with prefix %s.%d" % 
(filename, pid)) - profile = pprof_utils.parse_newest_profile("%s.%d" % (filename, pid)) - # This returns a list of samples that have non-zero cpu-time - samples = pprof_utils.get_samples_with_value_type(profile, "cpu-time") - assert len(samples) > 0 - - # DEV: somehow the filename is reported as either __init__.py or gunicorn-app.py - # when run on GitLab CI. We need to match either of these two. - filename_regex = r"^(?:__init__\.py|gunicorn-app\.py)$" - - expected_location = pprof_utils.StackLocation(function_name="fib", filename=filename_regex, line_no=8) - - pprof_utils.assert_profile_has_sample( - profile, - samples=samples, - # DEV: we expect multiple locations as fibonacci is recursive - expected_sample=pprof_utils.StackEvent(locations=[expected_location, expected_location]), - ) - - -@pytest.mark.skipif( - sys.version_info[:2] == (3, 8) and os.environ.get("DD_PROFILE_TEST_GEVENT") == "1", - reason="Flaky and fails often on Python 3.8 with DD_PROFILE_TEST_GEVENT=1", -) -def test_gunicorn(gunicorn, tmp_path, monkeypatch): - # type: (...) 
-> None - args = ("-k", "gevent") if TESTING_GEVENT else tuple() - _test_gunicorn(gunicorn, tmp_path, monkeypatch, *args) diff --git a/tests/profiling_v2/test_main.py b/tests/profiling_v2/test_main.py deleted file mode 100644 index cbd10b294a6..00000000000 --- a/tests/profiling_v2/test_main.py +++ /dev/null @@ -1,207 +0,0 @@ -# -*- encoding: utf-8 -*- -import multiprocessing -import os -import sys - -import pytest - -from tests.profiling.collector import lock_utils -from tests.profiling.collector import pprof_utils -from tests.utils import call_program - - -def test_call_script(): - env = os.environ.copy() - env["DD_PROFILING_ENABLED"] = "1" - stdout, stderr, exitcode, _ = call_program( - "ddtrace-run", sys.executable, os.path.join(os.path.dirname(__file__), "simple_program.py"), env=env - ) - if sys.platform == "win32": - assert exitcode == 0, (stdout, stderr) - else: - assert exitcode == 42, (stdout, stderr) - hello, interval, pid, stack_v2 = list(s.strip() for s in stdout.decode().strip().split("\n")) - assert hello == "hello world", stdout.decode().strip() - assert float(interval) >= 0.01, stdout.decode().strip() - assert stack_v2 == str(True) - - -@pytest.mark.skipif(not os.getenv("DD_PROFILE_TEST_GEVENT", False), reason="Not testing gevent") -def test_call_script_gevent(): - if sys.version_info[:2] == (3, 8): - pytest.skip("this test is flaky on 3.8 with stack v2") - env = os.environ.copy() - env["DD_PROFILING_ENABLED"] = "1" - stdout, stderr, exitcode, pid = call_program( - sys.executable, os.path.join(os.path.dirname(__file__), "simple_program_gevent.py"), env=env - ) - assert exitcode == 0, (stdout, stderr) - - -def test_call_script_pprof_output(tmp_path): - """This checks if the pprof output and atexit register work correctly. - - The script does not run for one minute, so if the `stop_on_exit` flag is broken, this test will fail. 
- """ - filename = str(tmp_path / "pprof") - env = os.environ.copy() - env["DD_PROFILING_OUTPUT_PPROF"] = filename - env["DD_PROFILING_CAPTURE_PCT"] = "1" - env["DD_PROFILING_ENABLED"] = "1" - stdout, stderr, exitcode, _ = call_program( - "ddtrace-run", - sys.executable, - os.path.join(os.path.dirname(__file__), "../profiling", "simple_program.py"), - env=env, - ) - if sys.platform == "win32": - assert exitcode == 0, (stdout, stderr) - else: - assert exitcode == 42, (stdout, stderr) - _, _, pid = list(s.strip() for s in stdout.decode().strip().split("\n")) - profile = pprof_utils.parse_newest_profile(filename + "." + str(pid)) - samples = pprof_utils.get_samples_with_value_type(profile, "cpu-time") - assert len(samples) > 0 - - -@pytest.mark.skipif(sys.platform == "win32", reason="fork only available on Unix") -def test_fork(tmp_path): - filename = str(tmp_path / "pprof") - env = os.environ.copy() - env["DD_PROFILING_OUTPUT_PPROF"] = filename - env["DD_PROFILING_CAPTURE_PCT"] = "100" - stdout, stderr, exitcode, pid = call_program( - "python", os.path.join(os.path.dirname(__file__), "simple_program_fork.py"), env=env - ) - assert exitcode == 0 - child_pid = stdout.decode().strip() - profile = pprof_utils.parse_newest_profile(filename + "." + str(pid)) - parent_expected_acquire_events = [ - pprof_utils.LockAcquireEvent( - caller_name="", - filename="simple_program_fork.py", - linenos=lock_utils.LineNo(create=11, acquire=12, release=28), - lock_name="lock", - ), - ] - parent_expected_release_events = [ - pprof_utils.LockReleaseEvent( - caller_name="", - filename="simple_program_fork.py", - linenos=lock_utils.LineNo(create=11, acquire=12, release=28), - lock_name="lock", - ), - ] - pprof_utils.assert_lock_events( - profile, - expected_acquire_events=parent_expected_acquire_events, - expected_release_events=parent_expected_release_events, - ) - child_profile = pprof_utils.parse_newest_profile(filename + "." 
+ str(child_pid)) - # We expect the child profile to not have lock events from the parent process - # Note that assert_lock_events function only checks that the given events - # exists, and doesn't assert that other events don't exist. - with pytest.raises(AssertionError): - pprof_utils.assert_lock_events( - child_profile, - expected_acquire_events=parent_expected_acquire_events, - expected_release_events=parent_expected_release_events, - ) - pprof_utils.assert_lock_events( - child_profile, - expected_acquire_events=[ - # After fork(), we clear the samples in child, so we only have one - # lock acquire event - pprof_utils.LockAcquireEvent( - caller_name="", - filename="simple_program_fork.py", - linenos=lock_utils.LineNo(create=24, acquire=25, release=26), - lock_name="lock", - ), - ], - expected_release_events=[ - pprof_utils.LockReleaseEvent( - caller_name="", - filename="simple_program_fork.py", - linenos=lock_utils.LineNo(create=11, acquire=12, release=21), - lock_name="lock", - ), - pprof_utils.LockReleaseEvent( - caller_name="", - filename="simple_program_fork.py", - linenos=lock_utils.LineNo(create=24, acquire=25, release=26), - lock_name="lock", - ), - ], - ) - - -@pytest.mark.skipif(sys.platform == "win32", reason="fork only available on Unix") -@pytest.mark.skipif(not os.getenv("DD_PROFILE_TEST_GEVENT", False), reason="Not testing gevent") -def test_fork_gevent(): - env = os.environ.copy() - stdout, stderr, exitcode, pid = call_program( - "python", os.path.join(os.path.dirname(__file__), "../profiling", "gevent_fork.py"), env=env - ) - assert exitcode == 0 - - -methods = multiprocessing.get_all_start_methods() - - -@pytest.mark.parametrize( - "method", - set(methods) - {"forkserver", "fork"}, -) -def test_multiprocessing(method, tmp_path): - filename = str(tmp_path / "pprof") - env = os.environ.copy() - env["DD_PROFILING_OUTPUT_PPROF"] = filename - env["DD_PROFILING_ENABLED"] = "1" - env["DD_PROFILING_CAPTURE_PCT"] = "1" - stdout, stderr, exitcode, _ = 
call_program( - "ddtrace-run", - sys.executable, - os.path.join(os.path.dirname(__file__), "../profiling", "_test_multiprocessing.py"), - method, - env=env, - ) - assert exitcode == 0, (stdout, stderr) - pid, child_pid = list(s.strip() for s in stdout.decode().strip().split("\n")) - profile = pprof_utils.parse_newest_profile(filename + "." + str(pid)) - samples = pprof_utils.get_samples_with_value_type(profile, "cpu-time") - assert len(samples) > 0 - child_profile = pprof_utils.parse_newest_profile(filename + "." + str(child_pid)) - child_samples = pprof_utils.get_samples_with_value_type(child_profile, "cpu-time") - assert len(child_samples) > 0 - - -@pytest.mark.subprocess( - ddtrace_run=True, - env=dict(DD_PROFILING_ENABLED="1"), - err=lambda _: "RuntimeError: the memalloc module is already started" not in _, -) -def test_memalloc_no_init_error_on_fork(): - import os - - pid = os.fork() - if not pid: - exit(0) - os.waitpid(pid, 0) - - -@pytest.mark.subprocess( - ddtrace_run=True, - env=dict( - DD_PROFILING_ENABLED="1", - DD_UNLOAD_MODULES_FROM_SITECUSTOMIZE="1", - ), - out="OK\n", - err=None, -) -def test_profiler_start_up_with_module_clean_up_in_protobuf_app(): - # This can cause segfaults if we do module clean up with later versions of - # protobuf. This is a regression test. 
- from google.protobuf import empty_pb2 # noqa:F401 - - print("OK") diff --git a/tests/profiling_v2/test_profiler.py b/tests/profiling_v2/test_profiler.py deleted file mode 100644 index 85dd02c83c7..00000000000 --- a/tests/profiling_v2/test_profiler.py +++ /dev/null @@ -1,320 +0,0 @@ -import logging -import sys -import time - -import mock -import pytest - -import ddtrace -from ddtrace.internal.compat import PYTHON_VERSION_INFO -from ddtrace.profiling import collector -from ddtrace.profiling import profiler -from ddtrace.profiling import scheduler -from ddtrace.profiling.collector import asyncio -from ddtrace.profiling.collector import stack -from ddtrace.profiling.collector import threading - - -def test_status(): - p = profiler.Profiler() - assert repr(p.status) == "" - p.start() - assert repr(p.status) == "" - p.stop(flush=False) - assert repr(p.status) == "" - - -def test_restart(): - p = profiler.Profiler() - p.start() - p.stop(flush=False) - p.start() - p.stop(flush=False) - - -def test_multiple_stop(): - """Check that the profiler can be stopped twice.""" - p = profiler.Profiler() - p.start() - p.stop(flush=False) - p.stop(flush=False) - - -def test_tracer_api(monkeypatch): - monkeypatch.setenv("DD_API_KEY", "foobar") - prof = profiler.Profiler(tracer=ddtrace.tracer) - assert prof.tracer == ddtrace.tracer - for col in prof._profiler._collectors: - if isinstance(col, stack.StackCollector): - assert col.tracer == ddtrace.tracer - break - else: - pytest.fail("Unable to find stack collector") - - -@pytest.mark.subprocess() -def test_default_memory(): - from ddtrace.profiling import profiler - from ddtrace.profiling.collector import memalloc - - assert any(isinstance(col, memalloc.MemoryCollector) for col in profiler.Profiler()._profiler._collectors) - - -@pytest.mark.subprocess(env=dict(DD_PROFILING_MEMORY_ENABLED="true")) -def test_enable_memory(): - from ddtrace.profiling import profiler - from ddtrace.profiling.collector import memalloc - - assert 
any(isinstance(col, memalloc.MemoryCollector) for col in profiler.Profiler()._profiler._collectors) - - -@pytest.mark.subprocess(env=dict(DD_PROFILING_MEMORY_ENABLED="false")) -def test_disable_memory(): - from ddtrace.profiling import profiler - from ddtrace.profiling.collector import memalloc - - assert all(not isinstance(col, memalloc.MemoryCollector) for col in profiler.Profiler()._profiler._collectors) - - -def test_copy(): - p = profiler._ProfilerInstance(env="123", version="dwq", service="foobar") - c = p.copy() - assert c == p - assert p.env == c.env - assert p.version == c.version - assert p.service == c.service - assert p.tracer == c.tracer - assert p.tags == c.tags - - -def test_failed_start_collector(caplog, monkeypatch): - class ErrCollect(collector.Collector): - def _start_service(self): - raise RuntimeError("could not import required module") - - def _stop_service(self): - pass - - @staticmethod - def collect(): - pass - - @staticmethod - def snapshot(): - raise Exception("error!") - - monkeypatch.setenv("DD_PROFILING_UPLOAD_INTERVAL", "1") - - class TestProfiler(profiler._ProfilerInstance): - def _build_default_exporters(self, *args, **kargs): - return [] - - p = TestProfiler() - err_collector = mock.MagicMock(wraps=ErrCollect()) - p._collectors = [err_collector] - p.start() - - def profiling_tuples(tuples): - return [t for t in tuples if t[0].startswith("ddtrace.profiling")] - - assert profiling_tuples(caplog.record_tuples) == [ - ("ddtrace.profiling.profiler", logging.ERROR, "Failed to start collector %r, disabling." % err_collector) - ] - time.sleep(2) - p.stop() - assert err_collector.snapshot.call_count == 0 - assert profiling_tuples(caplog.record_tuples) == [ - ("ddtrace.profiling.profiler", logging.ERROR, "Failed to start collector %r, disabling." 
% err_collector) - ] - - -def test_default_collectors(): - p = profiler.Profiler() - assert any(isinstance(c, stack.StackCollector) for c in p._profiler._collectors) - assert any(isinstance(c, threading.ThreadingLockCollector) for c in p._profiler._collectors) - try: - import asyncio as _ # noqa: F401 - except ImportError: - pass - else: - assert any(isinstance(c, asyncio.AsyncioLockCollector) for c in p._profiler._collectors) - p.stop(flush=False) - - -def test_profiler_serverless(monkeypatch): - # type: (...) -> None - monkeypatch.setenv("AWS_LAMBDA_FUNCTION_NAME", "foobar") - p = profiler.Profiler() - assert isinstance(p._scheduler, scheduler.ServerlessScheduler) - assert p.tags["functionname"] == "foobar" - - -@pytest.mark.skipif(PYTHON_VERSION_INFO < (3, 9), reason="Python 3.8 throws a deprecation warning") -@pytest.mark.subprocess() -def test_profiler_ddtrace_deprecation(): - """ - ddtrace interfaces loaded by the profiler can be marked deprecated, and we should update - them when this happens. As reported by https://github.com/DataDog/dd-trace-py/issues/8881 - """ - import warnings - - with warnings.catch_warnings(): - warnings.simplefilter("error", DeprecationWarning) - from ddtrace.profiling import _threading # noqa:F401 - from ddtrace.profiling import event # noqa:F401 - from ddtrace.profiling import profiler # noqa:F401 - from ddtrace.profiling import scheduler # noqa:F401 - from ddtrace.profiling.collector import _lock # noqa:F401 - from ddtrace.profiling.collector import _task # noqa:F401 - from ddtrace.profiling.collector import _traceback # noqa:F401 - from ddtrace.profiling.collector import memalloc # noqa:F401 - from ddtrace.profiling.collector import stack # noqa:F401 - - -@pytest.mark.subprocess( - env=dict(DD_PROFILING_ENABLED="true"), - err="Failed to load ddup module (mock failure message), disabling profiling\n", -) -def test_libdd_failure_telemetry_logging(): - """Test that libdd initialization failures log to telemetry. 
This mimics - one of the two scenarios where profiling can be configured. - 1) using ddtrace-run with DD_PROFILNG_ENABLED=true - 2) import ddtrace.profiling.auto - """ - - import mock - - with mock.patch.multiple( - "ddtrace.internal.datadog.profiling.ddup", - failure_msg="mock failure message", - is_available=False, - ), mock.patch("ddtrace.internal.telemetry.telemetry_writer.add_log") as mock_add_log: - from ddtrace.internal.telemetry.constants import TELEMETRY_LOG_LEVEL - from ddtrace.settings.profiling import config # noqa:F401 - - mock_add_log.assert_called_once() - call_args = mock_add_log.call_args - assert call_args[0][0] == TELEMETRY_LOG_LEVEL.ERROR - message = call_args[0][1] - assert "Failed to load ddup module" in message - assert "mock failure message" in message - - -@pytest.mark.subprocess( - # We'd like to check the stderr, but it somehow leads to triggering the - # upload code path on macOS - err=None -) -def test_libdd_failure_telemetry_logging_with_auto(): - import mock - - with mock.patch.multiple( - "ddtrace.internal.datadog.profiling.ddup", - failure_msg="mock failure message", - is_available=False, - ), mock.patch("ddtrace.internal.telemetry.telemetry_writer.add_log") as mock_add_log: - from ddtrace.internal.telemetry.constants import TELEMETRY_LOG_LEVEL - import ddtrace.profiling.auto # noqa: F401 - - mock_add_log.assert_called_once() - call_args = mock_add_log.call_args - assert call_args[0][0] == TELEMETRY_LOG_LEVEL.ERROR - message = call_args[0][1] - assert "Failed to load ddup module" in message - assert "mock failure message" in message - - -@pytest.mark.subprocess( - env=dict(DD_PROFILING_ENABLED="true"), - err="Failed to load stack_v2 module (mock failure message), falling back to v1 stack sampler\n", -) -def test_stack_v2_failure_telemetry_logging(): - # Test that stack_v2 initialization failures log to telemetry. 
This is - # mimicking the behavior of ddtrace-run, where the config is imported to - # determine if profiling/stack_v2 is enabled - - import mock - - with mock.patch.multiple( - "ddtrace.internal.datadog.profiling.stack_v2", - failure_msg="mock failure message", - is_available=False, - ), mock.patch("ddtrace.internal.telemetry.telemetry_writer.add_log") as mock_add_log: - from ddtrace.internal.telemetry.constants import TELEMETRY_LOG_LEVEL - from ddtrace.settings.profiling import config # noqa: F401 - - mock_add_log.assert_called_once() - call_args = mock_add_log.call_args - assert call_args[0][0] == TELEMETRY_LOG_LEVEL.ERROR - message = call_args[0][1] - assert "Failed to load stack_v2 module" in message - assert "mock failure message" in message - - -@pytest.mark.subprocess( - # We'd like to check the stderr, but it somehow leads to triggering the - # upload code path on macOS. - err=None, -) -def test_stack_v2_failure_telemetry_logging_with_auto(): - import mock - - with mock.patch.multiple( - "ddtrace.internal.datadog.profiling.stack_v2", - failure_msg="mock failure message", - is_available=False, - ), mock.patch("ddtrace.internal.telemetry.telemetry_writer.add_log") as mock_add_log: - from ddtrace.internal.telemetry.constants import TELEMETRY_LOG_LEVEL - import ddtrace.profiling.auto # noqa: F401 - - mock_add_log.assert_called_once() - call_args = mock_add_log.call_args - assert call_args[0][0] == TELEMETRY_LOG_LEVEL.ERROR - message = call_args[0][1] - assert "Failed to load stack_v2 module" in message - assert "mock failure message" in message - - -@pytest.mark.skipif(not sys.platform.startswith("linux"), reason="only works on linux") -@pytest.mark.subprocess(err=None) -# For macOS: Could print 'Error uploading' but okay to ignore since we are checking if native_id is set -def test_user_threads_have_native_id(): - from os import getpid - from threading import Thread - from threading import _MainThread # pyright: ignore[reportAttributeAccessIssue] - from 
threading import current_thread - from time import sleep - - from ddtrace.profiling import profiler - - # DEV: We used to run this test with ddtrace_run=True passed into the - # subprocess decorator, but that caused this to be flaky for Python 3.8.x - # with gevent. When it failed for that specific venv, current_thread() - # returned a DummyThread instead of a _MainThread. - p = profiler.Profiler() - p.start() - - main = current_thread() - assert isinstance(main, _MainThread) - # We expect the current thread to have the same ID as the PID - assert main.native_id == getpid(), (main.native_id, getpid()) - - t = Thread(target=lambda: None) - t.start() - - for _ in range(10): - try: - # The TID should be higher than the PID, but not too high - assert 0 < t.native_id - getpid() < 100, (t.native_id, getpid()) - except AttributeError: - # The native_id attribute is set by the thread so we might have to - # wait a bit for it to be set. - sleep(0.1) - else: - break - else: - raise AssertionError("Thread.native_id not set") - - t.join() - - p.stop() diff --git a/tests/profiling_v2/test_scheduler.py b/tests/profiling_v2/test_scheduler.py deleted file mode 100644 index f35479d431c..00000000000 --- a/tests/profiling_v2/test_scheduler.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- encoding: utf-8 -*- -import logging -import time - -import mock - -from ddtrace.profiling import scheduler - - -def test_thread_name(): - s = scheduler.Scheduler() - s.start() - assert s._worker.name == "ddtrace.profiling.scheduler:Scheduler" - s.stop() - - -def test_before_flush(): - x = {} - - def call_me(): - x["OK"] = True - - s = scheduler.Scheduler(before_flush=call_me) - s.flush() - assert x["OK"] - - -def test_before_flush_failure(caplog): - def call_me(): - raise Exception("LOL") - - s = scheduler.Scheduler(before_flush=call_me) - s.flush() - assert caplog.record_tuples == [ - (("ddtrace.profiling.scheduler", logging.ERROR, "Scheduler before_flush hook failed")) - ] - - 
-@mock.patch("ddtrace.profiling.scheduler.Scheduler.periodic") -def test_serverless_periodic(mock_periodic): - s = scheduler.ServerlessScheduler() - # Fake start() - s._last_export = time.time_ns() - s.periodic() - assert s._profiled_intervals == 1 - mock_periodic.assert_not_called() - s._last_export = time.time_ns() - 65 - s._profiled_intervals = 65 - s.periodic() - assert s._profiled_intervals == 0 - assert s.interval == 1 - mock_periodic.assert_called() diff --git a/tests/profiling_v2/test_uwsgi.py b/tests/profiling_v2/test_uwsgi.py deleted file mode 100644 index 6b5d4e7cf23..00000000000 --- a/tests/profiling_v2/test_uwsgi.py +++ /dev/null @@ -1,228 +0,0 @@ -from importlib.metadata import version -import os -import re -import signal -from subprocess import TimeoutExpired -import sys -import time - -import pytest - -from tests.contrib.uwsgi import run_uwsgi -from tests.profiling.collector import pprof_utils - - -# uwsgi is not available on Windows -if sys.platform == "win32": - pytestmark = pytest.mark.skip - -TESTING_GEVENT = os.getenv("DD_PROFILE_TEST_GEVENT", False) -THREADS_MSG = ( - b"ddtrace.internal.uwsgi.uWSGIConfigError: enable-threads option must be set to true, or a positive " - b"number of threads must be set" -) - -uwsgi_app = os.path.join(os.path.dirname(__file__), "..", "profiling", "uwsgi-app.py") - - -@pytest.fixture -def uwsgi(monkeypatch, tmp_path): - # Do not ignore profiler so we have samples in the output pprof - monkeypatch.setenv("DD_PROFILING_IGNORE_PROFILER", "0") - # Do not use pytest tmpdir fixtures which generate directories longer than allowed for a socket file name - socket_name = str(tmp_path / "uwsgi.sock") - import os - - cmd = [ - "uwsgi", - "--need-app", - "--die-on-term", - "--socket", - socket_name, - "--wsgi-file", - uwsgi_app, - ] - - try: - yield run_uwsgi(cmd) - finally: - os.unlink(socket_name) - - -def test_uwsgi_threads_disabled(uwsgi): - proc = uwsgi() - stdout, _ = proc.communicate() - assert proc.wait() != 0 - 
assert THREADS_MSG in stdout - - -def test_uwsgi_threads_number_set(uwsgi): - proc = uwsgi("--threads", "1") - try: - stdout, _ = proc.communicate(timeout=1) - except TimeoutExpired: - proc.terminate() - stdout, _ = proc.communicate() - assert THREADS_MSG not in stdout - - -def test_uwsgi_threads_enabled(uwsgi, tmp_path, monkeypatch): - filename = str(tmp_path / "uwsgi.pprof") - monkeypatch.setenv("DD_PROFILING_OUTPUT_PPROF", filename) - proc = uwsgi("--enable-threads") - worker_pids = _get_worker_pids(proc.stdout, 1) - # Give some time to the process to actually startup - time.sleep(3) - proc.terminate() - assert proc.wait() == 30 - for pid in worker_pids: - profile = pprof_utils.parse_newest_profile("%s.%d" % (filename, pid)) - samples = pprof_utils.get_samples_with_value_type(profile, "wall-time") - assert len(samples) > 0 - - -def test_uwsgi_threads_processes_no_primary(uwsgi, monkeypatch): - proc = uwsgi("--enable-threads", "--processes", "2") - stdout, _ = proc.communicate() - assert ( - b"ddtrace.internal.uwsgi.uWSGIConfigError: master option must be enabled when multiple processes are used" - in stdout - ) - - -def _get_worker_pids(stdout, num_worker, num_app_started=1): - worker_pids = [] - started = 0 - while True: - line = stdout.readline() - if line == b"": - break - elif b"WSGI app 0 (mountpoint='') ready" in line: - started += 1 - else: - m = re.match(r"^spawned uWSGI worker \d+ .*\(pid: (\d+),", line.decode()) - if m: - worker_pids.append(int(m.group(1))) - - if len(worker_pids) == num_worker and num_app_started == started: - break - - return worker_pids - - -def test_uwsgi_threads_processes_primary(uwsgi, tmp_path, monkeypatch): - filename = str(tmp_path / "uwsgi.pprof") - monkeypatch.setenv("DD_PROFILING_OUTPUT_PPROF", filename) - proc = uwsgi("--enable-threads", "--master", "--py-call-uwsgi-fork-hooks", "--processes", "2") - worker_pids = _get_worker_pids(proc.stdout, 2) - # Give some time to child to actually startup - time.sleep(3) - 
proc.terminate() - assert proc.wait() == 0 - for pid in worker_pids: - profile = pprof_utils.parse_newest_profile("%s.%d" % (filename, pid)) - samples = pprof_utils.get_samples_with_value_type(profile, "wall-time") - assert len(samples) > 0 - - -def test_uwsgi_threads_processes_primary_lazy_apps(uwsgi, tmp_path, monkeypatch): - filename = str(tmp_path / "uwsgi.pprof") - monkeypatch.setenv("DD_PROFILING_OUTPUT_PPROF", filename) - monkeypatch.setenv("DD_PROFILING_UPLOAD_INTERVAL", "1") - # For uwsgi<2.0.30, --skip-atexit is required to avoid crashes when - # the child process exits. - proc = uwsgi("--enable-threads", "--master", "--processes", "2", "--lazy-apps", "--skip-atexit") - worker_pids = _get_worker_pids(proc.stdout, 2, 2) - # Give some time to child to actually startup and output a profile - time.sleep(3) - proc.terminate() - assert proc.wait() == 0 - for pid in worker_pids: - profile = pprof_utils.parse_newest_profile("%s.%d" % (filename, pid)) - samples = pprof_utils.get_samples_with_value_type(profile, "wall-time") - assert len(samples) > 0 - - -def test_uwsgi_threads_processes_no_primary_lazy_apps(uwsgi, tmp_path, monkeypatch): - filename = str(tmp_path / "uwsgi.pprof") - monkeypatch.setenv("DD_PROFILING_OUTPUT_PPROF", filename) - monkeypatch.setenv("DD_PROFILING_UPLOAD_INTERVAL", "1") - # For uwsgi<2.0.30, --skip-atexit is required to avoid crashes when - # the child process exits. 
- proc = uwsgi("--enable-threads", "--processes", "2", "--lazy-apps", "--skip-atexit") - worker_pids = _get_worker_pids(proc.stdout, 2, 2) - assert len(worker_pids) == 2 - - # Give some time to child to actually startup and output a profile - time.sleep(3) - - # Kill master process - parent_pid: int = worker_pids[0] - os.kill(parent_pid, signal.SIGTERM) - - # Wait for master to exit - res_pid, res_status = os.waitpid(parent_pid, 0) - print("") - print(f"INFO: Master process {parent_pid} exited with status {res_status} and pid {res_pid}") - - # Attempt to kill worker proc once - worker_pid: int = worker_pids[1] - print(f"DEBUG: Checking worker {worker_pid} status after master exit:") - try: - os.kill(worker_pid, 0) - print(f"WARNING: Worker {worker_pid} is a zombie (will be cleaned up by init).") - - os.kill(worker_pid, signal.SIGKILL) - print(f"WARNING: Worker {worker_pid} could not be killed with SIGKILL (will be cleaned up by init).") - except OSError: - print(f"INFO: Worker {worker_pid} was successfully killed.") - - for pid in worker_pids: - profile = pprof_utils.parse_newest_profile("%s.%d" % (filename, pid)) - samples = pprof_utils.get_samples_with_value_type(profile, "wall-time") - assert len(samples) > 0 - - -@pytest.mark.parametrize("lazy_flag", ["--lazy-apps", "--lazy"]) -@pytest.mark.skipif( - tuple(int(x) for x in version("uwsgi").split(".")) >= (2, 0, 30), - reason="uwsgi>=2.0.30 does not require --skip-atexit", -) -def test_uwsgi_require_skip_atexit_when_lazy_with_master(uwsgi, lazy_flag): - expected_warning = b"ddtrace.internal.uwsgi.uWSGIConfigDeprecationWarning: skip-atexit option must be set" - - proc = uwsgi("--enable-threads", "--master", "--processes", "2", lazy_flag) - time.sleep(1) - proc.terminate() - stdout, _ = proc.communicate() - assert expected_warning in stdout - - -@pytest.mark.parametrize("lazy_flag", ["--lazy-apps", "--lazy"]) -@pytest.mark.skipif( - tuple(int(x) for x in version("uwsgi").split(".")) >= (2, 0, 30), - 
reason="uwsgi>=2.0.30 does not require --skip-atexit", -) -def test_uwsgi_require_skip_atexit_when_lazy_without_master(uwsgi, lazy_flag): - expected_warning = b"ddtrace.internal.uwsgi.uWSGIConfigDeprecationWarning: skip-atexit option must be set" - num_workers = 2 - proc = uwsgi("--enable-threads", "--processes", str(num_workers), lazy_flag) - - worker_pids = [] - logged_warning = 0 - while True: - line = proc.stdout.readline() - if line == b"": - break - if expected_warning in line: - logged_warning += 1 - else: - m = re.match(r"^spawned uWSGI worker \d+ .*\(pid: (\d+),", line.decode()) - if m: - worker_pids.append(int(m.group(1))) - - if logged_warning == num_workers: - break - - for pid in worker_pids: - os.kill(pid, signal.SIGTERM) diff --git a/tests/snapshots/tests.contrib.google_generativeai.test_google_generativeai.test_gemini_completion.json b/tests/snapshots/tests.contrib.google_generativeai.test_google_generativeai.test_gemini_completion.json deleted file mode 100644 index 67597a9ef4d..00000000000 --- a/tests/snapshots/tests.contrib.google_generativeai.test_google_generativeai.test_gemini_completion.json +++ /dev/null @@ -1,28 +0,0 @@ -[[ - { - "name": "gemini.request", - "service": "tests.contrib.google_generativeai", - "resource": "GenerativeModel.generate_content", - "trace_id": 0, - "span_id": 1, - "parent_id": 0, - "type": "", - "error": 0, - "meta": { - "_dd.p.dm": "-0", - "_dd.p.tid": "68715e7800000000", - "google_generativeai.request.model": "gemini-1.5-flash", - "google_generativeai.request.provider": "google", - "language": "python", - "runtime-id": "e72fd406a9a04657a973cf959e2935f5" - }, - "metrics": { - "_dd.measured": 1, - "_dd.top_level": 1, - "_dd.tracer_kr": 1.0, - "_sampling_priority_v1": 1, - "process_id": 66831 - }, - "duration": 176000, - "start": 1752260216102575000 - }]] diff --git a/tests/snapshots/tests.contrib.google_generativeai.test_google_generativeai.test_gemini_completion_error.json 
b/tests/snapshots/tests.contrib.google_generativeai.test_google_generativeai.test_gemini_completion_error.json deleted file mode 100644 index d40ce8ad422..00000000000 --- a/tests/snapshots/tests.contrib.google_generativeai.test_google_generativeai.test_gemini_completion_error.json +++ /dev/null @@ -1,31 +0,0 @@ -[[ - { - "name": "gemini.request", - "service": "tests.contrib.google_generativeai", - "resource": "GenerativeModel.generate_content", - "trace_id": 0, - "span_id": 1, - "parent_id": 0, - "type": "", - "error": 1, - "meta": { - "_dd.p.dm": "-0", - "_dd.p.tid": "68715e7800000000", - "error.message": "400 Invalid API key. Please pass a valid API key.", - "error.stack": "Traceback (most recent call last):\n File \"/Users/jacob.simpher/go/src/github.com/DataDog/dd-trace-py/ddtrace/contrib/internal/google_generativeai/patch.py\", line 51, in traced_generate\n generations = func(*args, **kwargs)\n File \"/Users/jacob.simpher/go/src/github.com/DataDog/dd-trace-py/.riot/venv_py31013_mock_pytest_pytest-mock_coverage_pytest-cov_opentracing_hypothesis6451_pytest-asyncio_google-generativeai~070_pillow_google-ai-generativelanguage_vertexai/lib/python3.10/site-packages/google/generativeai/generative_models.py\", line 331, in generate_content\n response = self._client.generate_content(\n File \"/Users/jacob.simpher/go/src/github.com/DataDog/dd-trace-py/.riot/venv_py31013_mock_pytest_pytest-mock_coverage_pytest-cov_opentracing_hypothesis6451_pytest-asyncio_google-generativeai~070_pillow_google-ai-generativelanguage_vertexai/lib/python3.10/site-packages/mock/mock.py\", line 1190, in __call__\n return _mock_self._mock_call(*args, **kwargs)\n File \"/Users/jacob.simpher/go/src/github.com/DataDog/dd-trace-py/.riot/venv_py31013_mock_pytest_pytest-mock_coverage_pytest-cov_opentracing_hypothesis6451_pytest-asyncio_google-generativeai~070_pillow_google-ai-generativelanguage_vertexai/lib/python3.10/site-packages/mock/mock.py\", line 1194, in _mock_call\n return 
_mock_self._execute_mock_call(*args, **kwargs)\n File \"/Users/jacob.simpher/go/src/github.com/DataDog/dd-trace-py/.riot/venv_py31013_mock_pytest_pytest-mock_coverage_pytest-cov_opentracing_hypothesis6451_pytest-asyncio_google-generativeai~070_pillow_google-ai-generativelanguage_vertexai/lib/python3.10/site-packages/mock/mock.py\", line 1251, in _execute_mock_call\n raise effect\ngoogle.api_core.exceptions.InvalidArgument: 400 Invalid API key. Please pass a valid API key.\n", - "error.type": "google.api_core.exceptions.InvalidArgument", - "google_generativeai.request.model": "gemini-1.5-flash", - "google_generativeai.request.provider": "google", - "language": "python", - "runtime-id": "e72fd406a9a04657a973cf959e2935f5" - }, - "metrics": { - "_dd.measured": 1, - "_dd.top_level": 1, - "_dd.tracer_kr": 1.0, - "_sampling_priority_v1": 1, - "process_id": 66831 - }, - "duration": 1902000, - "start": 1752260216124732000 - }]] diff --git a/tests/snapshots/tests.contrib.google_generativeai.test_google_generativeai.test_gemini_completion_image.json b/tests/snapshots/tests.contrib.google_generativeai.test_google_generativeai.test_gemini_completion_image.json deleted file mode 100644 index 4ca0cc7ed59..00000000000 --- a/tests/snapshots/tests.contrib.google_generativeai.test_google_generativeai.test_gemini_completion_image.json +++ /dev/null @@ -1,28 +0,0 @@ -[[ - { - "name": "gemini.request", - "service": "tests.contrib.google_generativeai", - "resource": "GenerativeModel.generate_content", - "trace_id": 0, - "span_id": 1, - "parent_id": 0, - "type": "", - "error": 0, - "meta": { - "_dd.p.dm": "-0", - "_dd.p.tid": "68715e7800000000", - "google_generativeai.request.model": "gemini-1.5-flash", - "google_generativeai.request.provider": "google", - "language": "python", - "runtime-id": "e72fd406a9a04657a973cf959e2935f5" - }, - "metrics": { - "_dd.measured": 1, - "_dd.top_level": 1, - "_dd.tracer_kr": 1.0, - "_sampling_priority_v1": 1, - "process_id": 66831 - }, - "duration": 
3668000, - "start": 1752260216359632000 - }]] diff --git a/tests/snapshots/tests.contrib.google_generativeai.test_google_generativeai.test_gemini_completion_multiple_messages.json b/tests/snapshots/tests.contrib.google_generativeai.test_google_generativeai.test_gemini_completion_multiple_messages.json deleted file mode 100644 index 170138a02ef..00000000000 --- a/tests/snapshots/tests.contrib.google_generativeai.test_google_generativeai.test_gemini_completion_multiple_messages.json +++ /dev/null @@ -1,28 +0,0 @@ -[[ - { - "name": "gemini.request", - "service": "tests.contrib.google_generativeai", - "resource": "GenerativeModel.generate_content", - "trace_id": 0, - "span_id": 1, - "parent_id": 0, - "type": "", - "error": 0, - "meta": { - "_dd.p.dm": "-0", - "_dd.p.tid": "68715e7800000000", - "google_generativeai.request.model": "gemini-1.5-flash", - "google_generativeai.request.provider": "google", - "language": "python", - "runtime-id": "e72fd406a9a04657a973cf959e2935f5" - }, - "metrics": { - "_dd.measured": 1, - "_dd.top_level": 1, - "_dd.tracer_kr": 1.0, - "_sampling_priority_v1": 1, - "process_id": 66831 - }, - "duration": 358000, - "start": 1752260216149341000 - }]] diff --git a/tests/snapshots/tests.contrib.google_generativeai.test_google_generativeai.test_gemini_completion_stream.json b/tests/snapshots/tests.contrib.google_generativeai.test_google_generativeai.test_gemini_completion_stream.json deleted file mode 100644 index f53f69772b0..00000000000 --- a/tests/snapshots/tests.contrib.google_generativeai.test_google_generativeai.test_gemini_completion_stream.json +++ /dev/null @@ -1,28 +0,0 @@ -[[ - { - "name": "gemini.request", - "service": "tests.contrib.google_generativeai", - "resource": "GenerativeModel.generate_content", - "trace_id": 0, - "span_id": 1, - "parent_id": 0, - "type": "", - "error": 0, - "meta": { - "_dd.p.dm": "-0", - "_dd.p.tid": "68715e7800000000", - "google_generativeai.request.model": "gemini-1.5-flash", - 
"google_generativeai.request.provider": "google", - "language": "python", - "runtime-id": "e72fd406a9a04657a973cf959e2935f5" - }, - "metrics": { - "_dd.measured": 1, - "_dd.top_level": 1, - "_dd.tracer_kr": 1.0, - "_sampling_priority_v1": 1, - "process_id": 66831 - }, - "duration": 1104000, - "start": 1752260216208097000 - }]] diff --git a/tests/snapshots/tests.contrib.google_generativeai.test_google_generativeai.test_gemini_completion_system_prompt.json b/tests/snapshots/tests.contrib.google_generativeai.test_google_generativeai.test_gemini_completion_system_prompt.json deleted file mode 100644 index f75e740b8c4..00000000000 --- a/tests/snapshots/tests.contrib.google_generativeai.test_google_generativeai.test_gemini_completion_system_prompt.json +++ /dev/null @@ -1,28 +0,0 @@ -[[ - { - "name": "gemini.request", - "service": "tests.contrib.google_generativeai", - "resource": "GenerativeModel.generate_content", - "trace_id": 0, - "span_id": 1, - "parent_id": 0, - "type": "", - "error": 0, - "meta": { - "_dd.p.dm": "-0", - "_dd.p.tid": "68715e7800000000", - "google_generativeai.request.model": "gemini-1.5-flash", - "google_generativeai.request.provider": "google", - "language": "python", - "runtime-id": "e72fd406a9a04657a973cf959e2935f5" - }, - "metrics": { - "_dd.measured": 1, - "_dd.top_level": 1, - "_dd.tracer_kr": 1.0, - "_sampling_priority_v1": 1, - "process_id": 66831 - }, - "duration": 176000, - "start": 1752260216187574000 - }]] diff --git a/tests/snapshots/tests.contrib.google_generativeai.test_google_generativeai.test_gemini_completion_tool_stream.json b/tests/snapshots/tests.contrib.google_generativeai.test_google_generativeai.test_gemini_completion_tool_stream.json deleted file mode 100644 index 6a32e85f576..00000000000 --- a/tests/snapshots/tests.contrib.google_generativeai.test_google_generativeai.test_gemini_completion_tool_stream.json +++ /dev/null @@ -1,28 +0,0 @@ -[[ - { - "name": "gemini.request", - "service": "tests.contrib.google_generativeai", - 
"resource": "GenerativeModel.generate_content", - "trace_id": 0, - "span_id": 1, - "parent_id": 0, - "type": "", - "error": 0, - "meta": { - "_dd.p.dm": "-0", - "_dd.p.tid": "68715e7800000000", - "google_generativeai.request.model": "gemini-1.5-flash", - "google_generativeai.request.provider": "google", - "language": "python", - "runtime-id": "e72fd406a9a04657a973cf959e2935f5" - }, - "metrics": { - "_dd.measured": 1, - "_dd.top_level": 1, - "_dd.tracer_kr": 1.0, - "_sampling_priority_v1": 1, - "process_id": 66831 - }, - "duration": 335000, - "start": 1752260216313639000 - }]] diff --git a/tests/snapshots/tests.contrib.google_generativeai.test_google_generativeai.test_gemini_tool_chat_completion.json b/tests/snapshots/tests.contrib.google_generativeai.test_google_generativeai.test_gemini_tool_chat_completion.json deleted file mode 100644 index f43670eb5a2..00000000000 --- a/tests/snapshots/tests.contrib.google_generativeai.test_google_generativeai.test_gemini_tool_chat_completion.json +++ /dev/null @@ -1,56 +0,0 @@ -[[ - { - "name": "gemini.request", - "service": "tests.contrib.google_generativeai", - "resource": "GenerativeModel.generate_content", - "trace_id": 0, - "span_id": 1, - "parent_id": 0, - "type": "", - "error": 0, - "meta": { - "_dd.p.dm": "-0", - "_dd.p.tid": "68715e7800000000", - "google_generativeai.request.model": "gemini-1.5-flash", - "google_generativeai.request.provider": "google", - "language": "python", - "runtime-id": "e72fd406a9a04657a973cf959e2935f5" - }, - "metrics": { - "_dd.measured": 1, - "_dd.top_level": 1, - "_dd.tracer_kr": 1.0, - "_sampling_priority_v1": 1, - "process_id": 66831 - }, - "duration": 125000, - "start": 1752260216291315000 - }], -[ - { - "name": "gemini.request", - "service": "tests.contrib.google_generativeai", - "resource": "GenerativeModel.generate_content", - "trace_id": 1, - "span_id": 1, - "parent_id": 0, - "type": "", - "error": 0, - "meta": { - "_dd.p.dm": "-0", - "_dd.p.tid": "68715e7800000000", - 
"google_generativeai.request.model": "gemini-1.5-flash", - "google_generativeai.request.provider": "google", - "language": "python", - "runtime-id": "e72fd406a9a04657a973cf959e2935f5" - }, - "metrics": { - "_dd.measured": 1, - "_dd.top_level": 1, - "_dd.tracer_kr": 1.0, - "_sampling_priority_v1": 1, - "process_id": 66831 - }, - "duration": 117000, - "start": 1752260216291724000 - }]] diff --git a/tests/snapshots/tests.contrib.google_generativeai.test_google_generativeai.test_gemini_tool_completion.json b/tests/snapshots/tests.contrib.google_generativeai.test_google_generativeai.test_gemini_tool_completion.json deleted file mode 100644 index 58c850ee3f9..00000000000 --- a/tests/snapshots/tests.contrib.google_generativeai.test_google_generativeai.test_gemini_tool_completion.json +++ /dev/null @@ -1,28 +0,0 @@ -[[ - { - "name": "gemini.request", - "service": "tests.contrib.google_generativeai", - "resource": "GenerativeModel.generate_content", - "trace_id": 0, - "span_id": 1, - "parent_id": 0, - "type": "", - "error": 0, - "meta": { - "_dd.p.dm": "-0", - "_dd.p.tid": "68715e7800000000", - "google_generativeai.request.model": "gemini-1.5-flash", - "google_generativeai.request.provider": "google", - "language": "python", - "runtime-id": "e72fd406a9a04657a973cf959e2935f5" - }, - "metrics": { - "_dd.measured": 1, - "_dd.top_level": 1, - "_dd.tracer_kr": 1.0, - "_sampling_priority_v1": 1, - "process_id": 66831 - }, - "duration": 201000, - "start": 1752260216268538000 - }]] diff --git a/tests/snapshots/tests.integration.test_integration_snapshots.test_encode_span_with_large_bytes_attributes[v0.4].json b/tests/snapshots/tests.integration.test_integration_snapshots.test_encode_span_with_large_bytes_attributes[v0.4].json deleted file mode 100644 index 72421845bff..00000000000 --- a/tests/snapshots/tests.integration.test_integration_snapshots.test_encode_span_with_large_bytes_attributes[v0.4].json +++ /dev/null @@ -1,25 +0,0 @@ -[[ - { - "name": 
"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", - "service": "tests.integration", - "resource": 
"bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb
bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb...", - "trace_id": 0, - "span_id": 1, - "parent_id": 0, - "type": "", - "error": 0, - "meta": { - "_dd.p.dm": "-0", - "_dd.p.tid": "6827828300000000", - "language": "python", - "runtime-id": "b9add865029f4a57a1e5f2f108dcae5b" - }, - "metrics": { - "_dd.top_level": 1, - "_dd.tracer_kr": 0.19999999999999996, - "_sampling_priority_v1": 1, - "process_id": 36277 - }, - "duration": 109334, - "start": 1747419779274312637 - }]] diff --git a/tests/snapshots/tests.integration.test_integration_snapshots.test_encode_span_with_large_bytes_attributes[v0.5].json b/tests/snapshots/tests.integration.test_integration_snapshots.test_encode_span_with_large_bytes_attributes[v0.5].json deleted file mode 100644 index 8d95383c7aa..00000000000 --- a/tests/snapshots/tests.integration.test_integration_snapshots.test_encode_span_with_large_bytes_attributes[v0.5].json +++ /dev/null @@ -1,25 +0,0 @@ -[[ - { - "name": 
"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", - "service": "tests.integration", - "resource": 
"bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb
bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb...", - "trace_id": 0, - "span_id": 1, - "parent_id": 0, - "type": "", - "error": 0, - "meta": { - "_dd.p.dm": "-0", - "_dd.p.tid": "6827828100000000", - "language": "python", - "runtime-id": "b9add865029f4a57a1e5f2f108dcae5b" - }, - "metrics": { - "_dd.top_level": 1, - "_dd.tracer_kr": 1.0, - "_sampling_priority_v1": 1, - "process_id": 36277 - }, - "duration": 116833, - "start": 1747419777808787387 - }]] diff --git a/tests/snapshots/tests.opentelemetry.test_span.test_otel_span_attributes.json b/tests/snapshots/tests.opentelemetry.test_span.test_otel_span_attributes.json index 0fff2ecd8c9..8f1f87b1c17 100644 --- a/tests/snapshots/tests.opentelemetry.test_span.test_otel_span_attributes.json +++ b/tests/snapshots/tests.opentelemetry.test_span.test_otel_span_attributes.json @@ -12,6 +12,7 @@ "_dd.base_service": "tests.opentelemetry", "_dd.p.dm": "-0", "_dd.p.tid": "655529ab00000000", + "bytes_tag": "bstr", "language": "python", "real_string_tag": "rstr", "runtime-id": "e4724609efa84cf58424a8b1ef44b17d", diff --git a/tests/suitespec.yml b/tests/suitespec.yml index 1688622f893..245c21d1a95 100644 --- a/tests/suitespec.yml +++ b/tests/suitespec.yml @@ -76,7 +76,8 @@ components: - ddtrace/__init__.py - ddtrace/py.typed - ddtrace/version.py - - ddtrace/settings/_config.py + - ddtrace/_version.py + - ddtrace/internal/settings/_config.py - src/native/* datastreams: - ddtrace/internal/datastreams/* @@ -93,13 +94,11 @@ components: opentelemetry: - ddtrace/opentelemetry/* - 
ddtrace/internal/opentelemetry/* - opentracer: - - ddtrace/opentracer/* profiling: - ddtrace/profiling/* - ddtrace/internal/datadog/profiling/* - ddtrace/internal/processor/endpoint_call_counter.py - - ddtrace/settings/profiling.py + - ddtrace/internal/settings/profiling.py remoteconfig: - ddtrace/internal/remoteconfig/* runtime: @@ -107,12 +106,12 @@ components: serverless: - ddtrace/internal/serverless/* settings: - - ddtrace/settings/* + - ddtrace/internal/settings/* sourcecode: - ddtrace/sourcecode/* symbol_db: - ddtrace/internal/symbol_db/* - - ddtrace/settings/symbol_db.py + - ddtrace/internal/settings/symbol_db.py telemetry: - ddtrace/internal/telemetry/* tracing: @@ -122,11 +121,10 @@ components: - ddtrace/_trace/* - ddtrace/trace/* - ddtrace/constants.py - - ddtrace/settings/__init__.py - - ddtrace/settings/_config.py - - ddtrace/settings/http.py - - ddtrace/settings/exceptions.py - - ddtrace/settings/integration.py + - ddtrace/internal/settings/__init__.py + - ddtrace/internal/settings/_config.py + - ddtrace/internal/settings/http.py + - ddtrace/internal/settings/integration.py - ddtrace/internal/_encoding.py* - ddtrace/internal/_tagset.py* - ddtrace/internal/_utils.* @@ -136,7 +134,7 @@ components: - ddtrace/internal/pack.h - ddtrace/internal/pack_template.h - ddtrace/internal/peer_service/* - - ddtrace/settings/peer_service.py + - ddtrace/internal/settings/peer_service.py - ddtrace/internal/processor/__init__.py - ddtrace/internal/processor/stats.py - ddtrace/internal/runtime/* diff --git a/tests/telemetry/test_telemetry_metrics_e2e.py b/tests/telemetry/test_telemetry_metrics_e2e.py index 8eed0b55426..03bf27b9682 100644 --- a/tests/telemetry/test_telemetry_metrics_e2e.py +++ b/tests/telemetry/test_telemetry_metrics_e2e.py @@ -141,69 +141,3 @@ def test_span_creation_and_finished_metrics_otel(test_agent_session, ddtrace_run assert metrics_sf[0]["metric"] == "spans_finished" assert metrics_sf[0]["tags"] == ["integration_name:otel"] assert 
metrics_sf[0]["points"][0][1] == 9 - - -def test_span_creation_and_finished_metrics_opentracing(test_agent_session, ddtrace_run_python_code_in_subprocess): - code = """ -from ddtrace.opentracer import Tracer - -ot = Tracer() -for _ in range(2): - with ot.start_span('span'): - pass -""" - env = os.environ.copy() - env["DD_TRACE_OTEL_ENABLED"] = "true" - env["_DD_INSTRUMENTATION_TELEMETRY_TESTS_FORCE_APP_STARTED"] = "true" - _, stderr, status, _ = ddtrace_run_python_code_in_subprocess(code, env=env) - assert status == 0, stderr - - metrics_sc = test_agent_session.get_metrics("spans_created") - assert len(metrics_sc) == 1 - assert metrics_sc[0]["metric"] == "spans_created" - assert metrics_sc[0]["tags"] == ["integration_name:opentracing"] - assert metrics_sc[0]["points"][0][1] == 2 - - metrics_sf = test_agent_session.get_metrics("spans_finished") - assert len(metrics_sf) == 1 - assert metrics_sf[0]["metric"] == "spans_finished" - assert metrics_sf[0]["tags"] == ["integration_name:opentracing"] - assert metrics_sf[0]["points"][0][1] == 2 - - -def test_span_creation_no_finish(test_agent_session, ddtrace_run_python_code_in_subprocess): - code = """ -import ddtrace -import opentelemetry.trace -from ddtrace import opentracer - -ddtracer = ddtrace.tracer -otel = opentelemetry.trace.get_tracer(__name__) -ot = opentracer.Tracer() - -# we must finish at least one span to enable sending telemetry to the agent -ddtracer.trace("first_span").finish() - -for _ in range(4): - ot.start_span('ot_span') - otel.start_span('otel_span') - ddtracer.trace("ddspan") -""" - env = os.environ.copy() - env["DD_TRACE_OTEL_ENABLED"] = "true" - env["_DD_INSTRUMENTATION_TELEMETRY_TESTS_FORCE_APP_STARTED"] = "true" - _, stderr, status, _ = ddtrace_run_python_code_in_subprocess(code, env=env) - assert status == 0, stderr - - metrics = test_agent_session.get_metrics("spans_created") - assert len(metrics) == 3 - - assert metrics[0]["metric"] == "spans_created" - assert metrics[0]["tags"] == 
["integration_name:datadog"] - assert metrics[0]["points"][0][1] == 5 - assert metrics[1]["metric"] == "spans_created" - assert metrics[1]["tags"] == ["integration_name:opentracing"] - assert metrics[1]["points"][0][1] == 4 - assert metrics[2]["metric"] == "spans_created" - assert metrics[2]["tags"] == ["integration_name:otel"] - assert metrics[2]["points"][0][1] == 4 diff --git a/tests/telemetry/test_writer.py b/tests/telemetry/test_writer.py index f1518f2ede5..a096a4fdf8f 100644 --- a/tests/telemetry/test_writer.py +++ b/tests/telemetry/test_writer.py @@ -12,6 +12,8 @@ from ddtrace import config from ddtrace.internal.compat import PYTHON_VERSION_INFO +from ddtrace.internal.settings._agent import get_agent_hostname +from ddtrace.internal.settings._telemetry import config as telemetry_config import ddtrace.internal.telemetry from ddtrace.internal.telemetry.constants import TELEMETRY_APM_PRODUCT from ddtrace.internal.telemetry.constants import TELEMETRY_LOG_LEVEL @@ -20,8 +22,6 @@ from ddtrace.internal.telemetry.writer import TelemetryWriter from ddtrace.internal.telemetry.writer import get_runtime_id from ddtrace.internal.utils.version import _pep440_to_semver -from ddtrace.settings._agent import get_agent_hostname -from ddtrace.settings._telemetry import config as telemetry_config from tests.conftest import DEFAULT_DDTRACE_SUBPROCESS_TEST_SERVICE_NAME from tests.utils import call_program from tests.utils import override_global_config @@ -80,9 +80,9 @@ def test_app_started_event_configuration_override(test_agent_session, run_python # most configurations are reported when ddtrace.auto is imported import ddtrace.auto # report configurations not used by ddtrace.auto -import ddtrace.settings.symbol_db -import ddtrace.settings.dynamic_instrumentation -import ddtrace.settings.exception_replay +import ddtrace.internal.settings.symbol_db +import ddtrace.internal.settings.dynamic_instrumentation +import ddtrace.internal.settings.exception_replay import opentelemetry """ @@ 
-307,7 +307,6 @@ def test_app_started_event_configuration_override(test_agent_session, run_python {"name": "DD_PROFILING_PYTORCH_EVENTS_LIMIT", "origin": "default", "value": 1000000}, {"name": "DD_PROFILING_SAMPLE_POOL_CAPACITY", "origin": "default", "value": 4}, {"name": "DD_PROFILING_STACK_ENABLED", "origin": "env_var", "value": False}, - {"name": "DD_PROFILING_STACK_V2_ENABLED", "origin": "default", "value": PYTHON_VERSION_INFO < (3, 14)}, {"name": "DD_PROFILING_TAGS", "origin": "default", "value": ""}, {"name": "DD_PROFILING_TIMELINE_ENABLED", "origin": "default", "value": True}, {"name": "DD_PROFILING_UPLOAD_INTERVAL", "origin": "env_var", "value": 10.0}, diff --git a/tests/tracer/runtime/test_runtime_metrics.py b/tests/tracer/runtime/test_runtime_metrics.py index 984142380e1..118e85df126 100644 --- a/tests/tracer/runtime/test_runtime_metrics.py +++ b/tests/tracer/runtime/test_runtime_metrics.py @@ -20,8 +20,8 @@ @contextlib.contextmanager -def runtime_metrics_service(tracer=None, flush_interval=None): - RuntimeWorker.enable(tracer=tracer, flush_interval=flush_interval) +def runtime_metrics_service(tracer=None): + RuntimeWorker.enable(tracer=tracer) assert RuntimeWorker._instance is not None assert RuntimeWorker._instance.status == ServiceStatus.RUNNING @@ -151,9 +151,7 @@ def test_tracer_metrics(self): # Mock socket.socket to hijack the dogstatsd socket with mock.patch("socket.socket") as sock: sock.return_value.getsockopt.return_value = 0 - # configure tracer for runtime metrics - interval = 1.0 / 4 - with runtime_metrics_service(tracer=self.tracer, flush_interval=interval): + with runtime_metrics_service(tracer=self.tracer): self.tracer.set_tags({"env": "tests.dog"}) with self.override_global_tracer(self.tracer): @@ -167,7 +165,7 @@ def test_tracer_metrics(self): with self.start_span( "query", service="db", span_type=SpanTypes.SQL, child_of=child.context ): - time.sleep(interval * 4) + time.sleep(4) # Get the mocked socket for inspection later statsd_socket 
= RuntimeWorker._instance._dogstatsd_client.socket received = [s.args[0].decode("utf-8") for s in statsd_socket.send.mock_calls] diff --git a/tests/tracer/test_agent.py b/tests/tracer/test_agent.py index 451ec47c08f..6f9bf95b07f 100644 --- a/tests/tracer/test_agent.py +++ b/tests/tracer/test_agent.py @@ -3,8 +3,8 @@ from ddtrace.internal import agent from ddtrace.internal.agent import info +from ddtrace.internal.settings._agent import is_ipv6_hostname from ddtrace.internal.utils.http import verify_url -from ddtrace.settings._agent import is_ipv6_hostname @pytest.mark.parametrize( @@ -32,7 +32,7 @@ def test_hostname(): import os from urllib.parse import urlparse - from ddtrace.settings._agent import config + from ddtrace.internal.settings._agent import config assert urlparse(config.trace_agent_url).hostname == os.environ.get("DD_AGENT_HOST") assert urlparse(config.dogstatsd_url).hostname == os.environ.get("DD_AGENT_HOST"), urlparse(config.dogstatsd_url) @@ -44,7 +44,7 @@ def test_hostname(): def test_trace_hostname(): from urllib.parse import urlparse - from ddtrace.settings._agent import config + from ddtrace.internal.settings._agent import config assert urlparse(config.trace_agent_url).hostname == "monkey" @@ -53,7 +53,7 @@ def test_trace_hostname(): def test_hostname_not_set(): from urllib.parse import urlparse - from ddtrace.settings._agent import config + from ddtrace.internal.settings._agent import config assert urlparse(config.trace_agent_url).hostname == "localhost" @@ -62,7 +62,7 @@ def test_hostname_not_set(): def test_trace_port(): from urllib.parse import urlparse - from ddtrace.settings._agent import config + from ddtrace.internal.settings._agent import config assert urlparse(config.trace_agent_url).port == 9999 @@ -71,7 +71,7 @@ def test_trace_port(): def test_agent_port(): from urllib.parse import urlparse - from ddtrace.settings._agent import config + from ddtrace.internal.settings._agent import config assert urlparse(config.trace_agent_url).port == 
1235 @@ -80,7 +80,7 @@ def test_agent_port(): def test_trace_port_not_set(): from urllib.parse import urlparse - from ddtrace.settings._agent import config + from ddtrace.internal.settings._agent import config assert urlparse(config.trace_agent_url).port == 8126 @@ -89,7 +89,7 @@ def test_trace_port_not_set(): def test_stats_port(): from urllib.parse import urlparse - from ddtrace.settings._agent import config + from ddtrace.internal.settings._agent import config assert urlparse(config.dogstatsd_url).port == 1235 @@ -98,7 +98,7 @@ def test_stats_port(): def test_stats_port_not_set(): from urllib.parse import urlparse - from ddtrace.settings._agent import config + from ddtrace.internal.settings._agent import config assert urlparse(config.dogstatsd_url).port == 8125 @@ -117,7 +117,7 @@ def test_trace_url_uds(): import mock with mock.patch("os.path.exists", return_value=True): - from ddtrace.settings._agent import config + from ddtrace.internal.settings._agent import config assert config.trace_agent_url == "unix:///var/run/datadog/apm.socket" @@ -135,7 +135,7 @@ def test_trace_url_default(): # with nothing set by user, and the default UDS unavailable, we choose default http address import mock - from ddtrace.settings._agent import config + from ddtrace.internal.settings._agent import config with mock.patch("os.path.exists", return_value=False): assert config.trace_agent_url == "http://localhost:8126" @@ -148,7 +148,7 @@ def test_trace_url_with_port(): # with port set by user, and default UDS unavailable, we choose user settings import mock - from ddtrace.settings._agent import config + from ddtrace.internal.settings._agent import config with mock.patch("os.path.exists", return_value=False): url = config.trace_agent_url @@ -168,7 +168,7 @@ def test_trace_url_with_host(): # with host set by user, and default UDS unavailable, we choose user settings import mock - from ddtrace.settings._agent import config + from ddtrace.internal.settings._agent import config with 
mock.patch("os.path.exists", return_value=False): assert config.trace_agent_url == "http://mars:8126", config.trace_agent_url @@ -186,7 +186,7 @@ def test_trace_url_with_host_and_port(): # with host and port set by user, and default UDS unavailable, we choose user settings import mock - from ddtrace.settings._agent import config + from ddtrace.internal.settings._agent import config with mock.patch("os.path.exists", return_value=False): assert config.trace_agent_url == "http://mars:1235" @@ -199,7 +199,7 @@ def test_trace_url_with_uds_and_port(): # with port set by user, and default UDS available, we choose user settings import mock - from ddtrace.settings._agent import config + from ddtrace.internal.settings._agent import config with mock.patch("os.path.exists", return_value=True): assert config.trace_agent_url == "http://localhost:1235" @@ -218,7 +218,7 @@ def test_trace_url_with_uds_and_host(): # with host set by user, and default UDS available, we choose user settings import mock - from ddtrace.settings._agent import config + from ddtrace.internal.settings._agent import config with mock.patch("os.path.exists", return_value=True): assert config.trace_agent_url == "http://mars:8126" @@ -236,7 +236,7 @@ def test_trace_url_with_uds_host_and_port(): # with host and port set by user, and default UDS available, we choose user settings import mock - from ddtrace.settings._agent import config + from ddtrace.internal.settings._agent import config with mock.patch("os.path.exists", return_value=True): assert config.trace_agent_url == "http://mars:1235" @@ -249,7 +249,7 @@ def test_trace_url_with_uds_url_host_and_port(): # with port, host, and url set by user, and default UDS available, we choose url import mock - from ddtrace.settings._agent import config + from ddtrace.internal.settings._agent import config with mock.patch("os.path.exists", return_value=True): assert config.trace_agent_url == "http://saturn:1111" @@ -262,7 +262,7 @@ def 
test_trace_url_with_url_host_and_port(): # with port, host, and url set by user, and default UDS unavailable, we choose url import mock - from ddtrace.settings._agent import config + from ddtrace.internal.settings._agent import config with mock.patch("os.path.exists", return_value=False): assert config.trace_agent_url == "http://saturn:1111" @@ -281,7 +281,7 @@ def test_stats_url_default(): # with nothing set by user, and the default UDS unavailable, we choose default http address import mock - from ddtrace.settings._agent import config + from ddtrace.internal.settings._agent import config with mock.patch("os.path.exists", return_value=False): assert config.dogstatsd_url == "udp://localhost:8125" @@ -300,7 +300,7 @@ def test_stats_url_with_port(): # with port set by user, and default UDS unavailable, we choose user settings import mock - from ddtrace.settings._agent import config + from ddtrace.internal.settings._agent import config with mock.patch("os.path.exists", return_value=False): assert config.dogstatsd_url == "udp://localhost:1235" @@ -319,7 +319,7 @@ def test_stats_url_with_host(): # with host set by user, and default UDS unavailable, we choose user settings import mock - from ddtrace.settings._agent import config + from ddtrace.internal.settings._agent import config with mock.patch("os.path.exists", return_value=False): assert config.dogstatsd_url == "udp://mars:8125" @@ -332,7 +332,7 @@ def test_stats_url_with_host_and_port(): # with host and port set by user, and default UDS unavailable, we choose user settings import mock - from ddtrace.settings._agent import config + from ddtrace.internal.settings._agent import config with mock.patch("os.path.exists", return_value=False): assert config.dogstatsd_url == "udp://mars:1235" @@ -351,7 +351,7 @@ def test_stats_url_with_uds_and_port(): # with port set by user, and default UDS available, we choose user settings import mock - from ddtrace.settings._agent import config + from ddtrace.internal.settings._agent 
import config with mock.patch("os.path.exists", return_value=True): assert config.dogstatsd_url == "udp://localhost:1235" @@ -370,7 +370,7 @@ def test_stats_url_with_uds_and_host(): # with host set by user, and default UDS available, we choose user settings import mock - from ddtrace.settings._agent import config + from ddtrace.internal.settings._agent import config with mock.patch("os.path.exists", return_value=True): assert config.dogstatsd_url == "udp://mars:8125" @@ -383,7 +383,7 @@ def test_stats_url_with_uds_host_and_port(): # with host and port set by user, and default UDS available, we choose user settings import mock - from ddtrace.settings._agent import config + from ddtrace.internal.settings._agent import config with mock.patch("os.path.exists", return_value=True): assert config.dogstatsd_url == "udp://mars:1235" @@ -396,7 +396,7 @@ def test_stats_url_with_uds_url_host_and_port(): # with port, host, and url set by user, and default UDS available, we choose url import mock - from ddtrace.settings._agent import config + from ddtrace.internal.settings._agent import config with mock.patch("os.path.exists", return_value=True): assert config.dogstatsd_url == "udp://saturn:1111" @@ -409,7 +409,7 @@ def test_stats_url_with_url_host_and_port(): # with port, host, and url set by user, and default UDS unavailable, we choose url import mock - from ddtrace.settings._agent import config + from ddtrace.internal.settings._agent import config with mock.patch("os.path.exists", return_value=False): assert config.dogstatsd_url == "udp://saturn:1111" diff --git a/tests/tracer/test_correlation_log_context.py b/tests/tracer/test_correlation_log_context.py index abd82ad91a7..fa5f8b045f6 100644 --- a/tests/tracer/test_correlation_log_context.py +++ b/tests/tracer/test_correlation_log_context.py @@ -58,27 +58,6 @@ def test_get_log_correlation_trace_context(): }, dd_log_record -@pytest.mark.subprocess( - ddtrace_run=True, env={"DD_VERSION": "test-version", "DD_ENV": "test-env", 
"DD_SERVICE": "test-service"} -) -def test_get_log_correlation_context_opentracer(): - """Ensure expected DDLogRecord generated via get_correlation_log_record with an opentracing Tracer.""" - from ddtrace.internal.utils.formats import format_trace_id - from ddtrace.opentracer.tracer import Tracer as OT_Tracer - - ot_tracer = OT_Tracer(service_name="test-service") - with ot_tracer.start_active_span("operation") as scope: - dd_span = scope._span._dd_span - dd_log_record = ot_tracer.get_log_correlation_context() - assert dd_log_record == { - "dd.span_id": str(dd_span.span_id), - "dd.trace_id": format_trace_id(dd_span.trace_id), - "dd.service": "test-service", - "dd.env": "test-env", - "dd.version": "test-version", - }, dd_log_record - - @pytest.mark.subprocess() def test_get_log_correlation_context_no_active_span(): """Ensure empty DDLogRecord generated if no active span.""" diff --git a/tests/tracer/test_encoders.py b/tests/tracer/test_encoders.py index 079b3260c32..42d620069e3 100644 --- a/tests/tracer/test_encoders.py +++ b/tests/tracer/test_encoders.py @@ -4,6 +4,8 @@ import random import string import threading +from typing import Any +from typing import Dict from unittest import TestCase from hypothesis import given @@ -937,13 +939,9 @@ def _value(): {"start_ns": []}, {"duration_ns": {}}, {"span_type": 100}, - {"_meta": {"num": 100}}, - # Validating behavior with a context manager is a customer regression - {"_meta": {"key": _value()}}, - {"_metrics": {"key": "value"}}, ], ) -def test_encoding_invalid_data(data): +def test_encoding_invalid_data_raises(data): encoder = MsgpackEncoderV04(1 << 20, 1 << 20) span = Span(name="test") @@ -959,6 +957,41 @@ def test_encoding_invalid_data(data): assert (not encoded_traces) or (encoded_traces[0][0] is None) +@pytest.mark.parametrize( + "meta,metrics", + [ + ({"num": 100}, {}), + # Validating behavior with a context manager is a customer regression + ({"key": _value()}, {}), + ({}, {"key": "value"}), + ], +) +def 
test_encoding_invalid_data_ok(meta: Dict[str, Any], metrics: Dict[str, Any]): + """Encoding invalid meta/metrics data should not raise an exception""" + encoder = MsgpackEncoderV04(1 << 20, 1 << 20) + + span = Span(name="test") + span._meta = meta # type: ignore + span._metrics = metrics # type: ignore + + trace = [span] + encoder.put(trace) + + encoded_payloads = encoder.encode() + assert len(encoded_payloads) == 1 + + # Ensure it can be decoded properly + traces = msgpack.unpackb(encoded_payloads[0][0], raw=False) + assert len(traces) == 1 + assert len(traces[0]) == 1 + + # We didn't encode the invalid meta/metrics + for key in meta.keys(): + assert key not in traces[0][0]["meta"] + for key in metrics.keys(): + assert key not in traces[0][0]["metrics"] + + @allencodings def test_custom_msgpack_encode_thread_safe(encoding): class TracingThread(threading.Thread): diff --git a/tests/tracer/test_endpoint_config.py b/tests/tracer/test_endpoint_config.py index df35d43e243..a2eb6061bd4 100644 --- a/tests/tracer/test_endpoint_config.py +++ b/tests/tracer/test_endpoint_config.py @@ -6,7 +6,7 @@ from unittest import mock from ddtrace.internal.http import HTTPConnection -from ddtrace.settings.endpoint_config import fetch_config_from_endpoint +from ddtrace.internal.settings.endpoint_config import fetch_config_from_endpoint from tests.utils import override_env @@ -179,6 +179,6 @@ def test_set_config_endpoint_retries(caplog): ), mock.patch.object( HTTPConnection, "getresponse", new=mock_getresponse_enabled_after_4_retries ), mock.patch( - "ddtrace.settings.endpoint_config._get_retries", return_value=5 + "ddtrace.internal.settings.endpoint_config._get_retries", return_value=5 ): assert fetch_config_from_endpoint() == {"dd_iast_enabled": True} diff --git a/tests/tracer/test_env_vars.py b/tests/tracer/test_env_vars.py index 16b92cc49f2..eb9445571e1 100644 --- a/tests/tracer/test_env_vars.py +++ b/tests/tracer/test_env_vars.py @@ -50,7 +50,7 @@ def 
test_obfuscation_querystring_pattern_env_var( "-c", ( """import re;from ddtrace import config; -from ddtrace.settings._config import DD_TRACE_OBFUSCATION_QUERY_STRING_REGEXP_DEFAULT; +from ddtrace.internal.settings._config import DD_TRACE_OBFUSCATION_QUERY_STRING_REGEXP_DEFAULT; assert config._obfuscation_query_string_pattern == %s; assert config._global_query_string_obfuscation_disabled == %s; assert config._http_tag_query_string == %s diff --git a/tests/tracer/test_global_config.py b/tests/tracer/test_global_config.py index 761115c49d4..0d60f2319e7 100644 --- a/tests/tracer/test_global_config.py +++ b/tests/tracer/test_global_config.py @@ -4,8 +4,8 @@ import pytest from ddtrace import config as global_config -from ddtrace.settings._config import Config -from ddtrace.settings.integration import IntegrationConfig +from ddtrace.internal.settings._config import Config +from ddtrace.internal.settings.integration import IntegrationConfig from ..utils import DummyTracer from ..utils import override_env @@ -59,7 +59,7 @@ def test_missing_integration(self): assert isinstance(e.value, AttributeError) assert e.value.args[0] == ( - " object has no attribute " + " object has no attribute " "integration_that_does_not_exist, integration_that_does_not_exist is not a valid configuration" ) diff --git a/tests/tracer/test_instance_config.py b/tests/tracer/test_instance_config.py index 615e439789c..d2539be8d34 100644 --- a/tests/tracer/test_instance_config.py +++ b/tests/tracer/test_instance_config.py @@ -5,7 +5,7 @@ from ddtrace import config from ddtrace._trace.pin import Pin -from ddtrace.settings.integration import IntegrationConfig +from ddtrace.internal.settings.integration import IntegrationConfig class InstanceConfigTestCase(TestCase): diff --git a/tests/tracer/test_propagation.py b/tests/tracer/test_propagation.py index 9232d4c2f20..d9f93c4d490 100644 --- a/tests/tracer/test_propagation.py +++ b/tests/tracer/test_propagation.py @@ -194,27 +194,6 @@ def 
test_inject_tags_unicode(tracer): # noqa: F811 assert tags == set(["_dd.p.test=unicode"]) -def test_inject_tags_bytes(tracer): # noqa: F811 - """We properly encode when the meta key as long as it is just ascii characters""" - # Context._meta allows str and bytes for keys - # FIXME: W3C does not support byte headers - overrides = { - "_propagation_style_extract": [PROPAGATION_STYLE_DATADOG], - "_propagation_style_inject": [PROPAGATION_STYLE_DATADOG], - } - with override_global_config(overrides): - meta = {"_dd.p.test": b"bytes"} - ctx = Context(trace_id=1234, sampling_priority=2, dd_origin="synthetics", meta=meta) - tracer.context_provider.activate(ctx) - with tracer.trace("global_root_span") as span: - headers = {} - HTTPPropagator.inject(span.context, headers) - - # The ordering is non-deterministic, so compare as a list of tags - tags = set(headers[_HTTP_HEADER_TAGS].split(",")) - assert tags == set(["_dd.p.test=bytes"]) - - def test_inject_tags_unicode_error(tracer): # noqa: F811 """Unicode characters are not allowed""" meta = {"_dd.p.test": "unicode value ☺️"} @@ -3515,22 +3494,6 @@ def test_http_propagator_baggage_extract(headers): assert context._baggage == {"key1": "val1", "key2": "val2", "foo": "bar", "x": "y"} -@pytest.mark.subprocess( - env=dict(DD_TRACE_PROPAGATION_HTTP_BAGGAGE_ENABLED="True"), - parametrize=dict(DD_TRACE_PROPAGATION_EXTRACT_FIRST=["True", "False"]), -) -def test_opentracer_propagator_baggage_extract(): - from ddtrace.propagation.http import HTTPPropagator - - headers = { - "x-datadog-trace-id": "1234", - "x-datadog-parent-id": "5678", - "http_ot_baggage_key1": "value1", - } - context = HTTPPropagator.extract(headers) - assert context._baggage == {"key1": "value1"} - - def test_baggage_span_tags_default(): headers = {"baggage": "user.id=123,correlation_id=abc,region=us-east"} context = HTTPPropagator.extract(headers) @@ -3595,84 +3558,6 @@ def test_baggage_span_tags_wildcard(): assert "baggage.session.id" not in context._meta -def 
test_inject_non_active_span_parameter_deprecated(): - """Test that the non_active_span parameter triggers a deprecation warning.""" - headers = {} - with ddtracer.start_span("non_active_span") as span: - assert span.context.sampling_priority is None # No sampling decision yet - with pytest.warns() as warnings_list: - HTTPPropagator.inject(context=Context(), headers=headers, non_active_span=span) - assert span.context.sampling_priority is not None # Sampling should be triggered - assert not headers, f"No headers should be injected, Context is empty: {headers}" - - # Should capture exactly one deprecation warning - assert len(warnings_list) == 1 - assert "non_active_span parameter is deprecated" in str(warnings_list[0].message) - - -def test_inject_context_and_span_same_trace_deprecated(): - """Test injecting Context + non_active_span from the same trace (parent-child).""" - headers = {} - with ddtracer.trace("parent") as parent: - with ddtracer.start_span("child", child_of=parent) as non_active_child: - assert non_active_child.context.sampling_priority is None # No sampling yet - assert ddtracer.current_span() is not non_active_child # Child is not active - with mock.patch("ddtrace.propagation.http.log.debug") as mock_debug, pytest.warns() as warnings_list: - HTTPPropagator.inject( - context=non_active_child.context, headers=headers, non_active_span=non_active_child - ) - # Sampling decision should be set on root span even when child is used for propagation - assert parent.context.sampling_priority is not None - assert non_active_child.context.sampling_priority is not None - - mock_debug.assert_has_calls( - [ - mock.call( - "%s sampled before propagating trace: span_context=%s", - non_active_child._local_root, - non_active_child.context, - ) - ] - ) - assert headers.get("x-datadog-sampling-priority") == str(parent.context.sampling_priority) - # Parent span info propagated (context takes precedence over non_active_span) - # Non_active_span is only used to make a 
sampling decision, not to inject headers. - assert headers.get("x-datadog-parent-id") == str(non_active_child.span_id) - - # Should capture deprecation warning - assert len(warnings_list) == 1 - assert "non_active_span parameter is deprecated" in str(warnings_list[0].message) - - -def test_inject_context_and_span_different_trace_deprecated(): - """Test injecting Context + non_active_span from completely different traces.""" - headers = {} - with ddtracer.start_span("span1", child_of=None) as span1: - with ddtracer.start_span("span2", child_of=None) as span2: - with mock.patch("ddtrace.propagation.http.log.debug") as mock_debug, pytest.warns() as warnings_list: - HTTPPropagator.inject(context=span1.context, headers=headers, non_active_span=span2) - - mock_debug.assert_has_calls( - [ - mock.call( - "Sampling decision not available. Downstream spans will not inherit a sampling priority" - ": args=(context=%s, ..., non_active_span=%s) detected span context=%s", - span1.context, - span2, - span1.context, - ) - ] - ) - - # Span1 span info propagated (context takes precedence over Span2) - # non_active_span parameter is only used to make a sampling decision, not to inject headers. - assert headers.get("x-datadog-parent-id") == str(span1.span_id) - - # Should capture deprecation warning - assert len(warnings_list) == 1 - assert "non_active_span parameter is deprecated" in str(warnings_list[0].message) - - def test_inject_context_without_sampling_priority_active_trace(): """Test injecting a Context without sampling priority when there's an active trace.""" headers = {} @@ -3709,9 +3594,8 @@ def test_inject_context_without_sampling_priority_inactive_trace(): [ mock.call( "Sampling decision not available. Downstream spans will not inherit a sampling priority" - ": args=(context=%s, ..., non_active_span=%s) detected span context=%s", + ": args=(context=%s, ...) 
detected span context=%s", span.context, - None, span.context, ) ] diff --git a/tests/tracer/test_settings.py b/tests/tracer/test_settings.py index 1a241f46fd7..a63fee52574 100644 --- a/tests/tracer/test_settings.py +++ b/tests/tracer/test_settings.py @@ -1,11 +1,8 @@ -import warnings - import pytest -from ddtrace.internal.compat import PYTHON_VERSION_INFO -from ddtrace.settings._config import Config -from ddtrace.settings.http import HttpConfig -from ddtrace.settings.integration import IntegrationConfig +from ddtrace.internal.settings._config import Config +from ddtrace.internal.settings.http import HttpConfig +from ddtrace.internal.settings.integration import IntegrationConfig from tests.utils import BaseTestCase from tests.utils import override_env @@ -178,47 +175,6 @@ def test_app_analytics_property(self): assert self.integration_config.get_analytics_sample_rate() == 1 - def test_app_analytics_deprecation(self): - warnings.simplefilter("always") - with warnings.catch_warnings(record=True) as warns: - IntegrationConfig(self.config, "test") - assert len(warns) == 0 - - with warnings.catch_warnings(record=True) as warns: - self.integration_config.analytics_enabled - assert ( - "analytics_enabled is deprecated and will be removed in version '4.0.0': Controlling ingestion via analytics is no longer supported. See https://docs.datadoghq.com/tracing/legacy_app_analytics/?code-lang=python#migrate-to-the-new-configuration-options" # noqa:E501 - in str(warns[0].message) - ) - - with warnings.catch_warnings(record=True) as warns: - self.integration_config.analytics_enabled = True - assert ( - "analytics_enabled is deprecated and will be removed in version '4.0.0': Controlling ingestion via analytics is no longer supported. 
See https://docs.datadoghq.com/tracing/legacy_app_analytics/?code-lang=python#migrate-to-the-new-configuration-options" # noqa:E501 - in str(warns[0].message) - ) - - with warnings.catch_warnings(record=True) as warns: - self.integration_config.analytics_sample_rate - assert ( - "analytics_sample_rate is deprecated and will be removed in version '4.0.0': Controlling ingestion via analytics is no longer supported. See https://docs.datadoghq.com/tracing/legacy_app_analytics/?code-lang=python#migrate-to-the-new-configuration-options" # noqa:E501 - in str(warns[0].message) - ) - - with warnings.catch_warnings(record=True) as warns: - self.integration_config.analytics_sample_rate = 0.5 - assert ( - "analytics_sample_rate is deprecated and will be removed in version '4.0.0': Controlling ingestion via analytics is no longer supported. See https://docs.datadoghq.com/tracing/legacy_app_analytics/?code-lang=python#migrate-to-the-new-configuration-options" # noqa:E501 - in str(warns[0].message) - ) - - with warnings.catch_warnings(record=True) as warns: - self.integration_config.get_analytics_sample_rate() - assert ( - "get_analytics_sample_rate is deprecated and will be removed in version '4.0.0': Controlling ingestion via analytics is no longer supported. 
See https://docs.datadoghq.com/tracing/legacy_app_analytics/?code-lang=python#migrate-to-the-new-configuration-options" # noqa:E501 - in str(warns[0].message) - ) - def test_environment_header_tags(): with override_env(dict(DD_TRACE_HEADER_TAGS="Host:http.host,User-agent:http.user_agent")): @@ -244,72 +200,3 @@ def test_x_datadog_tags(env, expected): with override_env(env): _ = Config() assert expected == (_._x_datadog_tags_max_length, _._x_datadog_tags_enabled) - - -@pytest.mark.skipif(PYTHON_VERSION_INFO < (3, 9), reason="Additional deprecation warning under Python 3.8") -@pytest.mark.subprocess() -def test_config_exception_deprecation(): - import warnings - - with warnings.catch_warnings(record=True) as warns: - warnings.simplefilter("default") - - from ddtrace.settings import ConfigException # noqa: F401 - - assert len(warns) == 1 - warn = warns[0] - - assert issubclass(warn.category, DeprecationWarning) - assert "ddtrace.settings.ConfigException is deprecated" in str(warn.message) - assert "4.0.0" in str(warn.message) # TODO: update the version - - -@pytest.mark.skipif(PYTHON_VERSION_INFO < (3, 9), reason="Additional deprecation warning under Python 3.8") -@pytest.mark.subprocess() -def test_http_config_deprecation(): - import warnings - - with warnings.catch_warnings(record=True) as warns: - warnings.simplefilter("default") - - from ddtrace.settings import HttpConfig # noqa: F401 - - assert len(warns) == 1 - warn = warns[0] - assert issubclass(warn.category, DeprecationWarning) - assert "ddtrace.settings.HttpConfig is deprecated" in str(warn.message) - assert "4.0.0" in str(warn.message) # TODO: update the version - - -@pytest.mark.skipif(PYTHON_VERSION_INFO < (3, 9), reason="Additional deprecation warning under Python 3.8") -@pytest.mark.subprocess() -def test_hooks_deprecation(): - import warnings - - with warnings.catch_warnings(record=True) as warns: - warnings.simplefilter("default") - - from ddtrace.settings import Hooks # noqa: F401 - - assert 
len(warns) == 1 - warn = warns[0] - assert issubclass(warn.category, DeprecationWarning) - assert "ddtrace.settings.Hooks is deprecated" in str(warn.message) - assert "4.0.0" in str(warn.message) # TODO: update the version - - -@pytest.mark.skipif(PYTHON_VERSION_INFO < (3, 9), reason="Additional deprecation warning under Python 3.8") -@pytest.mark.subprocess() -def test_integration_config_deprecation(): - import warnings - - with warnings.catch_warnings(record=True) as warns: - warnings.simplefilter("default") - - from ddtrace.settings import IntegrationConfig # noqa: F401 - - assert len(warns) == 1 - warn = warns[0] - assert issubclass(warn.category, DeprecationWarning) - assert "ddtrace.settings.IntegrationConfig is deprecated" in str(warn.message) - assert "4.0.0" in str(warn.message) # TODO: update the version diff --git a/tests/tracer/test_span.py b/tests/tracer/test_span.py index a47391ef3da..fa3221a6912 100644 --- a/tests/tracer/test_span.py +++ b/tests/tracer/test_span.py @@ -10,7 +10,6 @@ from ddtrace._trace._span_link import SpanLink from ddtrace._trace._span_pointer import _SpanPointerDirection -from ddtrace._trace.context import Context from ddtrace.constants import _SPAN_MEASURED_KEY from ddtrace.constants import ENV_KEY from ddtrace.constants import ERROR_MSG @@ -678,23 +677,6 @@ def test_set_tag_measured_change_value(): assert_is_measured(s) -@mock.patch("ddtrace._trace.span.log") -def test_span_key(span_log): - # Span tag keys must be strings - s = Span(name="test.span") - - s.set_tag(123, True) - span_log.warning.assert_called_once_with("Ignoring tag pair %s:%s. Key must be a string.", 123, True) - assert s.get_tag(123) is None - assert s.get_tag("123") is None - - span_log.reset_mock() - - s.set_tag(None, "val") - span_log.warning.assert_called_once_with("Ignoring tag pair %s:%s. 
Key must be a string.", None, "val") - assert s.get_tag(123.32) is None - - def test_spans_finished(): span = Span(None) assert span.finished is False @@ -866,52 +848,6 @@ def test_span_preconditions(arg): Span("test", **{arg: "foo"}) -def test_span_pprint(): - root = Span("test.span", service="s", resource="r", span_type=SpanTypes.WEB, context=Context(trace_id=1, span_id=2)) - root.set_tag("t", "v") - root.set_metric("m", 1.0) - root._add_event("message", {"importance": 10}, 16789898242) - root.set_link(trace_id=99, span_id=10, attributes={"link.name": "s1_to_s2", "link.kind": "scheduled_by"}) - root._add_span_pointer("test_kind", _SpanPointerDirection.DOWNSTREAM, "test_hash_123", {"extra": "attr"}) - - root.finish() - actual = root._pprint() - assert "name='test.span'" in actual - assert "service='s'" in actual - assert "resource='r'" in actual - assert "type='web'" in actual - assert "error=0" in actual - assert "tags={'t': 'v'}" in actual - assert "metrics={'m': 1.0}" in actual - assert "events=[SpanEvent(name='message', time=16789898242, attributes={'importance': 10})]" in actual - assert ( - "SpanLink(trace_id=99, span_id=10, attributes={'link.name': 's1_to_s2', 'link.kind': 'scheduled_by'}, " - "tracestate=None, flags=None, dropped_attributes=0)" - ) in actual - assert "SpanPointer(trace_id=0, span_id=0, kind=span-pointer" in actual - assert "direction=d, hash=test_hash_123" in actual - assert ( - f"context=Context(trace_id={root.trace_id}, span_id={root.span_id}, _meta={{}}, " - "_metrics={}, _span_links=[], _baggage={}, _is_remote=False)" - ) in actual - assert f"span_id={root.span_id}" in actual - assert f"trace_id={root.trace_id}" in actual - assert f"parent_id={root.parent_id}" in actual - assert f"start={root.start_ns}" in actual - assert f"duration={root.duration_ns}" in actual - assert f"end={root.start_ns + root.duration_ns}" in actual - - root = Span("test.span", service="s", resource="r", span_type=SpanTypes.WEB) - root.error = 1 - kv = {f"😌{i}": 
"😌" for i in range(100)} - root.set_tags(kv) - actual = root._pprint() - assert "duration=None" in actual - assert "end=None" in actual - assert "error=1" in actual - assert f"tags={kv}" in actual - - def test_manual_context_usage(): span1 = Span("span1") span2 = Span("span2", context=span1.context) diff --git a/tests/tracer/test_trace_utils.py b/tests/tracer/test_trace_utils.py index 65b79767706..b56dc4e3eca 100644 --- a/tests/tracer/test_trace_utils.py +++ b/tests/tracer/test_trace_utils.py @@ -21,11 +21,11 @@ from ddtrace.ext import http from ddtrace.ext import net from ddtrace.internal.compat import ensure_text +from ddtrace.internal.settings._config import Config +from ddtrace.internal.settings.integration import IntegrationConfig from ddtrace.propagation.http import HTTP_HEADER_PARENT_ID from ddtrace.propagation.http import HTTP_HEADER_TRACE_ID from ddtrace.propagation.http import HTTPPropagator -from ddtrace.settings._config import Config -from ddtrace.settings.integration import IntegrationConfig from ddtrace.trace import Context from ddtrace.trace import Span from tests.appsec.utils import asm_context @@ -511,7 +511,7 @@ def test_set_http_meta( assert span.get_tag(tag) == value -@mock.patch("ddtrace.settings._config.log") +@mock.patch("ddtrace.internal.settings._config.log") @pytest.mark.parametrize( "error_codes,status_code,error,log_call", [ @@ -540,7 +540,7 @@ def test_set_http_meta_custom_errors(mock_log, span, int_config, error_codes, st def test_set_http_meta_custom_errors_via_env(): from ddtrace import config from ddtrace.contrib.internal.trace_utils import set_http_meta - from ddtrace.settings.integration import IntegrationConfig + from ddtrace.internal.settings.integration import IntegrationConfig from ddtrace.trace import tracer config.myint = IntegrationConfig(config, "myint") @@ -1118,7 +1118,7 @@ def test_url_in_http_with_empty_obfuscation_regex(): from ddtrace import config from ddtrace.contrib.internal.trace_utils import set_http_meta from 
ddtrace.ext import http - from ddtrace.settings.integration import IntegrationConfig + from ddtrace.internal.settings.integration import IntegrationConfig from ddtrace.trace import tracer assert config._obfuscation_query_string_pattern.pattern == b"", config._obfuscation_query_string_pattern @@ -1144,7 +1144,7 @@ def test_url_in_http_with_obfuscation_enabled_and_empty_regex(): from ddtrace import config from ddtrace.contrib.internal.trace_utils import set_http_meta from ddtrace.ext import http - from ddtrace.settings.integration import IntegrationConfig + from ddtrace.internal.settings.integration import IntegrationConfig from ddtrace.trace import tracer # assert obfuscation is disabled when the regex is an empty string diff --git a/tests/tracer/test_tracer.py b/tests/tracer/test_tracer.py index bd5b7909f87..f251a72ea4c 100644 --- a/tests/tracer/test_tracer.py +++ b/tests/tracer/test_tracer.py @@ -34,10 +34,9 @@ from ddtrace.internal.rate_limiter import RateLimiter from ddtrace.internal.serverless import has_aws_lambda_agent_extension from ddtrace.internal.serverless import in_aws_lambda -from ddtrace.internal.utils.deprecations import DDTraceDeprecationWarning +from ddtrace.internal.settings._config import Config from ddtrace.internal.writer import AgentWriterInterface from ddtrace.internal.writer import LogWriter -from ddtrace.settings._config import Config from ddtrace.trace import Context from ddtrace.trace import tracer as global_tracer from tests.subprocesstest import run_in_subprocess @@ -1032,50 +1031,6 @@ def test_tracer_runtime_tags_cross_execution(tracer): assert span.get_metric(PID) is not None -def test_start_span_hooks(): - t = DummyTracer() - - result = {} - - with pytest.warns(DDTraceDeprecationWarning): - - @t.on_start_span - def store_span(span): - result["span"] = span - - try: - span = t.start_span("hello") - - assert span == result["span"] - span.finish() - finally: - # Cleanup after the test is done - # DEV: Since we use the core API for these 
hooks, - # they are not isolated to a single tracer instance - with pytest.warns(DDTraceDeprecationWarning): - t.deregister_on_start_span(store_span) - - -def test_deregister_start_span_hooks(): - t = DummyTracer() - - result = {} - - with pytest.warns(DDTraceDeprecationWarning): - - @t.on_start_span - def store_span(span): - result["span"] = span - - with pytest.warns(DDTraceDeprecationWarning): - t.deregister_on_start_span(store_span) - - with t.start_span("hello"): - pass - - assert result == {} - - @pytest.mark.subprocess(parametrize={"DD_TRACE_ENABLED": ["true", "false"]}) def test_enable(): import os diff --git a/tests/utils.py b/tests/utils.py index edd499c739a..70193d61293 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -40,6 +40,10 @@ from ddtrace.internal.packages import is_third_party from ddtrace.internal.remoteconfig import Payload from ddtrace.internal.schema import SCHEMA_VERSION +from ddtrace.internal.settings._agent import config as agent_config +from ddtrace.internal.settings._database_monitoring import dbm_config +from ddtrace.internal.settings.asm import config as asm_config +from ddtrace.internal.settings.openfeature import config as ffe_config from ddtrace.internal.utils.formats import asbool from ddtrace.internal.utils.formats import parse_tags_str from ddtrace.internal.writer import AgentWriter @@ -48,10 +52,6 @@ from ddtrace.propagation._database_monitoring import listen as dbm_config_listen from ddtrace.propagation._database_monitoring import unlisten as dbm_config_unlisten from ddtrace.propagation.http import _DatadogMultiHeader -from ddtrace.settings._agent import config as agent_config -from ddtrace.settings._database_monitoring import dbm_config -from ddtrace.settings.asm import config as asm_config -from ddtrace.settings.openfeature import config as ffe_config from ddtrace.trace import Span from ddtrace.trace import Tracer from tests.subprocesstest import SubprocessTestCase