From f99da493986f24a588af50e197bbe62f5ad6332a Mon Sep 17 00:00:00 2001
From: Timothy Pansino <11214426+TimPansino@users.noreply.github.com>
Date: Tue, 12 Oct 2021 14:23:52 -0700
Subject: [PATCH] Python 3.10 Support (#405)

* Add initial py310 tox tests
Co-authored-by: lrafeei
Co-authored-by: Uma Annamalai
* Fix deprecation warnings
* Add python 3.10 test setup. (#392)
* Sanic Testing Fix (#395)
* Sanic updates
* [Mega-Linter] Apply linters fixes
Co-authored-by: TimPansino
* Add initial py310 tox tests
Co-authored-by: lrafeei
Co-authored-by: Uma Annamalai
* Fix deprecation warnings
* Add python 3.10 setup to tests.yml.
* Update Python version to 3.10 rc2.
* Temporarily disable graphql master tests.
* Remove loop argument in agent feature tests.
* Add try except.
Co-authored-by: Timothy Pansino <11214426+TimPansino@users.noreply.github.com>
Co-authored-by: TimPansino
Co-authored-by: Tim Pansino
Co-authored-by: lrafeei
Co-authored-by: Uma Annamalai
* Update urllib3 to v1.26.7 (#393)
* Update urllib3 to 1.26.7
* Exclude packages from linting
* Fix SSL context deprecation warning (#394)
* Fix imp Deprecation (#398)
* Sanic Testing Fix (#395)
* Sanic updates
* [Mega-Linter] Apply linters fixes
Co-authored-by: TimPansino
* Remove deprecated imp module
Co-authored-by: Lalleh Rafeei
* Formatting
* Remove unused import
* Fix sitecustomize issue
* Fix missing attribute
* Fix incorrect type
Co-authored-by: TimPansino
Co-authored-by: Lalleh Rafeei
* Add Python 3.10 tox Tests (Part 1) (#397)
* Add py310 tox tests
* Remove unsupported frameworks from tox
* Remove failing library test
* Revert uvicorn py36 upgrade
* Add Python 3.10 support for web frameworks. (#391)
* Add py310 to web framework test matrix.
* Remove unsupported library
* Remove unsupported libraries
* Fix aiohttp errors
* Remove comments causing errors
Co-authored-by: Tim Pansino
* Add py310 builds (#402)
* Tox changes for python3.10 (#403)
* Fix asyncio Warnings in Python 3.10 (#396)
* Sanic Testing Fix (#395)
* Sanic updates
* [Mega-Linter] Apply linters fixes
Co-authored-by: TimPansino
* Clean up asyncio event_loops
* Linting
* Remove space broken strings
* Add init file
* Fix random failures
* Fix notice_error Rules Matching Settings Handling (#383)
* Remove active traces from tests to expose bug
* Handle settings passing in error matching rules
* Better edge case handling
* Apply linter fixes
* Correct settings origin to transaction
* Change logs in context expected errors
* Apply linters fixes
* Fix cross event loop failures
* Formatting
* Upgrade py310 rc to release
* Quote python versions in GHA
* Update Testing Infra (#401)
* Add aggregate test check
* Formatting
* Add job timeouts
* Fix uvicorn hang.
Co-authored-by: Lalleh Rafeei
* Fix error typing for uvicorn
* Revert uvicorn changes
Co-authored-by: TimPansino
Co-authored-by: Lalleh Rafeei
* Bump timeout to 30 minutes
Co-authored-by: lrafeei
Co-authored-by: Uma Annamalai
Co-authored-by: Uma Annamalai
Co-authored-by: TimPansino
Co-authored-by: Lalleh Rafeei <84813886+lrafeei@users.noreply.github.com>
---
 .github/workflows/deploy-python.yml | 4 +-
 .github/workflows/tests.yml | 273 ++++++++-----
 .mega-linter.yml | 36 +-
 newrelic/api/application.py | 2 +-
 newrelic/api/import_hook.py | 77 ++--
 newrelic/api/transaction.py | 62 +--
 newrelic/bootstrap/sitecustomize.py | 94 +++--
 newrelic/common/log_file.py | 4 +-
 newrelic/console.py | 195 +++++-----
 newrelic/core/agent.py | 42 +-
 newrelic/core/application.py | 141 +++----
 newrelic/core/profile_sessions.py | 108 +++---
 newrelic/core/stats_engine.py | 8 +-
 newrelic/packages/urllib3/__init__.py | 20 +-
 newrelic/packages/urllib3/_collections.py | 9 +-
 newrelic/packages/urllib3/_version.py | 2 +-
 newrelic/packages/urllib3/connection.py | 227 +++++++++--
 newrelic/packages/urllib3/connectionpool.py | 159 +++++---
 .../contrib/_securetransport/bindings.py | 25 +-
 .../contrib/_securetransport/low_level.py | 75 +++-
 .../packages/urllib3/contrib/appengine.py | 12 +-
 newrelic/packages/urllib3/contrib/ntlmpool.py | 9 +
 .../packages/urllib3/contrib/pyopenssl.py | 36 +-
 .../urllib3/contrib/securetransport.py | 82 +++-
 newrelic/packages/urllib3/contrib/socks.py | 22 +-
 newrelic/packages/urllib3/exceptions.py | 153 +++++---
 newrelic/packages/urllib3/fields.py | 5 +-
 newrelic/packages/urllib3/filepost.py | 4 +-
 .../urllib3/packages/backports/makefile.py | 1 -
 newrelic/packages/urllib3/packages/six.py | 124 ++++--
 .../packages/ssl_match_hostname/__init__.py | 13 +-
 newrelic/packages/urllib3/poolmanager.py | 112 ++++--
 newrelic/packages/urllib3/request.py | 9 +-
 newrelic/packages/urllib3/response.py | 63 +--
 newrelic/packages/urllib3/util/__init__.py | 17 +-
 newrelic/packages/urllib3/util/connection.py | 20 +-
 newrelic/packages/urllib3/util/proxy.py | 57 +++
 newrelic/packages/urllib3/util/queue.py | 1 +
 newrelic/packages/urllib3/util/request.py | 10 +-
 newrelic/packages/urllib3/util/response.py | 31 +-
 newrelic/packages/urllib3/util/retry.py | 219 +++++++++--
 newrelic/packages/urllib3/util/ssl_.py | 148 +++++--
 .../packages/urllib3/util/ssltransport.py | 221 +++++++++++
 newrelic/packages/urllib3/util/timeout.py | 49 ++-
 newrelic/packages/urllib3/util/url.py | 10 +-
 newrelic/packages/urllib3/util/wait.py | 6 +-
 pyproject.toml | 2 +-
 tests/adapter_gunicorn/worker.py | 11 +-
 tests/adapter_uvicorn/test_uvicorn.py | 30 +-
 .../_test_async_coroutine_trace.py | 106 +++---
 .../_test_async_coroutine_transaction.py | 9 +-
 tests/agent_features/conftest.py | 3 +
 tests/agent_features/test_asgi_transaction.py | 10 +-
 .../test_async_context_propagation.py | 198 +++++-----
 tests/agent_features/test_async_timing.py | 29 +-
 tests/agent_features/test_coroutine_trace.py | 306 ++++++++-------
 .../test_coroutine_transaction.py | 360 ++++++++++--------
 .../test_event_loop_wait_time.py | 95 ++---
 tests/agent_unittests/conftest.py | 2 +-
 tests/agent_unittests/test_http_client.py | 131 +++----
 tests/application_gearman/test_gearman.py | 38 +-
 tests/base_requirements.txt | 3 -
 tests/coroutines_asyncio/conftest.py | 25 +-
 .../test_context_propagation.py | 202 +++++-----
 tests/datastore_asyncpg/conftest.py | 24 +-
 tests/datastore_asyncpg/test_multiple_dbs.py | 32 +-
 tests/datastore_asyncpg/test_query.py | 86 ++---
 tests/external_httpx/conftest.py | 6 +-
 .../framework_aiohttp/_target_application.py | 11 +-
 tests/framework_aiohttp/conftest.py | 72 ++--
 tests/framework_aiohttp/test_client.py | 113 +++---
 .../test_client_async_await.py | 47 +--
 tests/framework_aiohttp/test_client_cat.py | 185 +++++----
 tests/framework_aiohttp/test_server.py | 7 +-
 .../test_application_async.py | 2 +-
 .../test_application_async.py | 19 +-
 tests/framework_sanic/conftest.py | 51 ++-
 .../test_application_async.py | 5 +-
 tests/testing_support/asgi_testing.py | 6 +-
 tests/testing_support/fixture/__init__.py | 13 +
 tests/testing_support/fixture/event_loop.py | 16 +
 tox.ini | 130 ++++---
 82 files changed, 3305 insertions(+), 2077 deletions(-)
 create mode 100644 newrelic/packages/urllib3/util/proxy.py
 create mode 100644 newrelic/packages/urllib3/util/ssltransport.py
 delete mode 100644 tests/base_requirements.txt
 create mode 100644 tests/testing_support/fixture/__init__.py
 create mode 100644 tests/testing_support/fixture/event_loop.py
diff --git a/.github/workflows/deploy-python.yml b/.github/workflows/deploy-python.yml index 4404b0d01b..159876cc18 100644 --- a/.github/workflows/deploy-python.yml +++ b/.github/workflows/deploy-python.yml @@ -52,16 +52,14 @@ jobs: CIBW_BUILD: cp27-manylinux_x86_64 CIBW_ARCHS: x86_64 CIBW_ENVIRONMENT: "LD_LIBRARY_PATH=/opt/rh/devtoolset-8/root/usr/lib64:/opt/rh/devtoolset-8/root/usr/lib:/opt/rh/devtoolset-8/root/usr/lib64/dyninst:/opt/rh/devtoolset-8/root/usr/lib/dyninst:/usr/local/lib64:/usr/local/lib" - CIBW_TEST_REQUIRES: beautifulsoup4 - name: Build Manylinux Wheels (Python 3) uses: pypa/cibuildwheel@v2.0.1 env: CIBW_PLATFORM: linux - CIBW_BUILD: cp36-manylinux_aarch64 cp37-manylinux_aarch64 cp38-manylinux_aarch64 cp39-manylinux_aarch64 cp36-manylinux_x86_64 cp37-manylinux_x86_64 cp38-manylinux_x86_64 cp39-manylinux_x86_64 + CIBW_BUILD: cp36-manylinux_aarch64 cp37-manylinux_aarch64 cp38-manylinux_aarch64 cp39-manylinux_aarch64 cp310-manylinux_aarch64 cp36-manylinux_x86_64 cp37-manylinux_x86_64 cp38-manylinux_x86_64 cp39-manylinux_x86_64 cp310-manylinux_x86_64 CIBW_ARCHS: x86_64 aarch64 CIBW_ENVIRONMENT: "LD_LIBRARY_PATH=/opt/rh/devtoolset-8/root/usr/lib64:/opt/rh/devtoolset-8/root/usr/lib:/opt/rh/devtoolset-8/root/usr/lib64/dyninst:/opt/rh/devtoolset-8/root/usr/lib/dyninst:/usr/local/lib64:/usr/local/lib" - CIBW_TEST_REQUIRES: beautifulsoup4 - name: Upload Package to S3 run: | diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index fb8687dbec..990f4cd4ce 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -75,7 +75,7 @@ jobs: ] runs-on: ubuntu-latest - timeout-minutes: 15 + timeout-minutes: 30 steps: - uses: actions/checkout@v2 @@ -83,38 +83,43 @@ jobs: # Set up all versions of python - uses: actions/setup-python@v2 with: - python-version: 2.7 + python-version: "2.7" architecture: x64 - uses: actions/setup-python@v2 with: - python-version: 3.6 + python-version: "3.6" architecture: x64 - uses: actions/setup-python@v2 with: - python-version: 3.7 + python-version: "3.7" architecture: x64 - uses: actions/setup-python@v2 with: - python-version: 3.8 + python-version: "3.8" architecture: x64 - uses: actions/setup-python@v2 with: - python-version: 3.9 + python-version: "3.9" architecture: x64 - uses: actions/setup-python@v2 with: - python-version: pypy3 + python-version: "3.10" + architecture: x64 + + - uses: actions/setup-python@v2 + with: + python-version: "pypy3" architecture: x64 # Setup pypy2 after pypy3 to ensure pypy isn't aliased to pypy3 - uses:
actions/setup-python@v2 with: - python-version: pypy2 + python-version: "pypy2" architecture: x64 - name: Install Dependencies @@ -145,7 +150,7 @@ jobs: group-number: [1] runs-on: ubuntu-latest - timeout-minutes: 15 + timeout-minutes: 30 steps: - uses: actions/checkout@v2 @@ -153,38 +158,43 @@ jobs: # Set up all versions of python - uses: actions/setup-python@v2 with: - python-version: 2.7 + python-version: "2.7" architecture: x64 - uses: actions/setup-python@v2 with: - python-version: 3.6 + python-version: "3.6" architecture: x64 - uses: actions/setup-python@v2 with: - python-version: 3.7 + python-version: "3.7" architecture: x64 - uses: actions/setup-python@v2 with: - python-version: 3.8 + python-version: "3.8" architecture: x64 - uses: actions/setup-python@v2 with: - python-version: 3.9 + python-version: "3.9" architecture: x64 - uses: actions/setup-python@v2 with: - python-version: pypy3 + python-version: "3.10" + architecture: x64 + + - uses: actions/setup-python@v2 + with: + python-version: "pypy3" architecture: x64 # Setup pypy2 after pypy3 to ensure pypy isn't aliased to pypy3 - uses: actions/setup-python@v2 with: - python-version: pypy2 + python-version: "pypy2" architecture: x64 - name: Install Dependencies @@ -215,7 +225,7 @@ jobs: group-number: [1] runs-on: ubuntu-latest - timeout-minutes: 15 + timeout-minutes: 30 steps: - uses: actions/checkout@v2 @@ -223,39 +233,44 @@ jobs: # Set up all versions of python - uses: actions/setup-python@v2 with: - python-version: pypy3 + python-version: "pypy3" architecture: x64 # Set up pypy2 after pypy3 to ensure pypy isn't aliased to pypy3 - uses: actions/setup-python@v2 with: - python-version: pypy2 + python-version: "pypy2" architecture: x64 - uses: actions/setup-python@v2 with: - python-version: 2.7 + python-version: "2.7" architecture: x64 - uses: actions/setup-python@v2 with: - python-version: 3.6 + python-version: "3.6" architecture: x64 - uses: actions/setup-python@v2 with: - python-version: 3.7 + python-version: "3.7" architecture: x64 - uses: actions/setup-python@v2 with: - python-version: 3.8 + python-version: "3.8" architecture: x64 # Set up python 3.9 last to ensure tox runs CPython - uses: actions/setup-python@v2 with: - python-version: 3.9 + python-version: "3.9" + architecture: x64 + + - uses: actions/setup-python@v2 + with: + python-version: "3.10" architecture: x64 - name: Install Dependencies @@ -292,7 +307,7 @@ jobs: group-number: [1, 2] runs-on: ubuntu-latest - timeout-minutes: 15 + timeout-minutes: 30 services: postgres: @@ -315,39 +330,44 @@ jobs: # Set up all versions of python - uses: actions/setup-python@v2 with: - python-version: pypy3 + python-version: "pypy3" architecture: x64 # Set up pypy2 after pypy3 to ensure pypy isn't aliased to pypy3 - uses: actions/setup-python@v2 with: - python-version: pypy2 + python-version: "pypy2" architecture: x64 - uses: actions/setup-python@v2 with: - python-version: 2.7 + python-version: "2.7" architecture: x64 - uses: actions/setup-python@v2 with: - python-version: 3.6 + python-version: "3.6" architecture: x64 - uses: actions/setup-python@v2 with: - python-version: 3.7 + python-version: "3.7" architecture: x64 - uses: actions/setup-python@v2 with: - python-version: 3.8 + python-version: "3.8" architecture: x64 # Set up python 3.9 last to ensure tox runs CPython - uses: actions/setup-python@v2 with: - python-version: 3.9 + python-version: "3.9" + architecture: x64 + + - uses: actions/setup-python@v2 + with: + python-version: "3.10" architecture: x64 - name: Install Dependencies @@ 
-378,7 +398,7 @@ jobs: group-number: [1, 2] runs-on: ubuntu-latest - timeout-minutes: 15 + timeout-minutes: 30 services: mysql: @@ -404,39 +424,44 @@ jobs: # Set up all versions of python - uses: actions/setup-python@v2 with: - python-version: pypy3 + python-version: "pypy3" architecture: x64 # Set up pypy2 after pypy3 to ensure pypy isn't aliased to pypy3 - uses: actions/setup-python@v2 with: - python-version: pypy2 + python-version: "pypy2" architecture: x64 - uses: actions/setup-python@v2 with: - python-version: 2.7 + python-version: "2.7" architecture: x64 - uses: actions/setup-python@v2 with: - python-version: 3.6 + python-version: "3.6" architecture: x64 - uses: actions/setup-python@v2 with: - python-version: 3.7 + python-version: "3.7" architecture: x64 - uses: actions/setup-python@v2 with: - python-version: 3.8 + python-version: "3.8" architecture: x64 # Set up python 3.9 last to ensure tox runs CPython - uses: actions/setup-python@v2 with: - python-version: 3.9 + python-version: "3.9" + architecture: x64 + + - uses: actions/setup-python@v2 + with: + python-version: "3.10" architecture: x64 - name: Install Dependencies @@ -467,7 +492,7 @@ jobs: group-number: [1] runs-on: ubuntu-latest - timeout-minutes: 15 + timeout-minutes: 30 services: redis: @@ -488,39 +513,44 @@ jobs: # Set up all versions of python - uses: actions/setup-python@v2 with: - python-version: pypy3 + python-version: "pypy3" architecture: x64 # Set up pypy2 after pypy3 to ensure pypy isn't aliased to pypy3 - uses: actions/setup-python@v2 with: - python-version: pypy2 + python-version: "pypy2" architecture: x64 - uses: actions/setup-python@v2 with: - python-version: 2.7 + python-version: "2.7" architecture: x64 - uses: actions/setup-python@v2 with: - python-version: 3.6 + python-version: "3.6" architecture: x64 - uses: actions/setup-python@v2 with: - python-version: 3.7 + python-version: "3.7" architecture: x64 - uses: actions/setup-python@v2 with: - python-version: 3.8 + python-version: "3.8" architecture: x64 # Set up python 3.9 last to ensure tox runs CPython - uses: actions/setup-python@v2 with: - python-version: 3.9 + python-version: "3.9" + architecture: x64 + + - uses: actions/setup-python@v2 + with: + python-version: "3.10" architecture: x64 - name: Install Dependencies @@ -551,7 +581,7 @@ jobs: group-number: [1] runs-on: ubuntu-latest - timeout-minutes: 15 + timeout-minutes: 30 services: solr: @@ -574,39 +604,44 @@ jobs: # Set up all versions of python - uses: actions/setup-python@v2 with: - python-version: pypy3 + python-version: "pypy3" architecture: x64 # Set up pypy2 after pypy3 to ensure pypy isn't aliased to pypy3 - uses: actions/setup-python@v2 with: - python-version: pypy2 + python-version: "pypy2" architecture: x64 - uses: actions/setup-python@v2 with: - python-version: 2.7 + python-version: "2.7" architecture: x64 - uses: actions/setup-python@v2 with: - python-version: 3.6 + python-version: "3.6" architecture: x64 - uses: actions/setup-python@v2 with: - python-version: 3.7 + python-version: "3.7" architecture: x64 - uses: actions/setup-python@v2 with: - python-version: 3.8 + python-version: "3.8" architecture: x64 # Set up python 3.9 last to ensure tox runs CPython - uses: actions/setup-python@v2 with: - python-version: 3.9 + python-version: "3.9" + architecture: x64 + + - uses: actions/setup-python@v2 + with: + python-version: "3.10" architecture: x64 - name: Install Dependencies @@ -637,7 +672,7 @@ jobs: group-number: [1, 2] runs-on: ubuntu-latest - timeout-minutes: 15 + timeout-minutes: 30 
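The quoting added around every python-version above (the "Quote python versions in GHA" bullet in the commit message) works around a YAML parsing quirk: an unquoted 3.10 is a YAML 1.1 float and collapses to 3.1, so setup-python would resolve the wrong interpreter. A minimal demonstration of the quirk, assuming PyYAML as a stand-in for the workflow parser:

    import yaml  # assumes PyYAML is installed

    # Unquoted, YAML reads 3.10 as a float and the trailing zero is lost.
    print(yaml.safe_load("python-version: 3.10"))    # {'python-version': 3.1}

    # Quoted, the value survives as the string the action expects.
    print(yaml.safe_load('python-version: "3.10"'))  # {'python-version': '3.10'}
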
services: memcached: @@ -658,39 +693,44 @@ jobs: # Set up all versions of python - uses: actions/setup-python@v2 with: - python-version: pypy3 + python-version: "pypy3" architecture: x64 # Set up pypy2 after pypy3 to ensure pypy isn't aliased to pypy3 - uses: actions/setup-python@v2 with: - python-version: pypy2 + python-version: "pypy2" architecture: x64 - uses: actions/setup-python@v2 with: - python-version: 2.7 + python-version: "2.7" architecture: x64 - uses: actions/setup-python@v2 with: - python-version: 3.6 + python-version: "3.6" architecture: x64 - uses: actions/setup-python@v2 with: - python-version: 3.7 + python-version: "3.7" architecture: x64 - uses: actions/setup-python@v2 with: - python-version: 3.8 + python-version: "3.8" architecture: x64 # Set up python 3.9 last to ensure tox runs CPython - uses: actions/setup-python@v2 with: - python-version: 3.9 + python-version: "3.9" + architecture: x64 + + - uses: actions/setup-python@v2 + with: + python-version: "3.10" architecture: x64 - name: Install Dependencies @@ -721,7 +761,7 @@ jobs: group-number: [1] runs-on: ubuntu-latest - timeout-minutes: 15 + timeout-minutes: 30 services: rabbitmq: @@ -743,39 +783,44 @@ jobs: # Set up all versions of python - uses: actions/setup-python@v2 with: - python-version: pypy3 + python-version: "pypy3" architecture: x64 # Set up pypy2 after pypy3 to ensure pypy isn't aliased to pypy3 - uses: actions/setup-python@v2 with: - python-version: pypy2 + python-version: "pypy2" architecture: x64 - uses: actions/setup-python@v2 with: - python-version: 2.7 + python-version: "2.7" architecture: x64 - uses: actions/setup-python@v2 with: - python-version: 3.6 + python-version: "3.6" architecture: x64 - uses: actions/setup-python@v2 with: - python-version: 3.7 + python-version: "3.7" architecture: x64 - uses: actions/setup-python@v2 with: - python-version: 3.8 + python-version: "3.8" architecture: x64 # Set up python 3.9 last to ensure tox runs CPython - uses: actions/setup-python@v2 with: - python-version: 3.9 + python-version: "3.9" + architecture: x64 + + - uses: actions/setup-python@v2 + with: + python-version: "3.10" architecture: x64 - name: Install Dependencies @@ -806,7 +851,7 @@ jobs: group-number: [1] runs-on: ubuntu-latest - timeout-minutes: 15 + timeout-minutes: 30 services: mongodb: @@ -827,39 +872,44 @@ jobs: # Set up all versions of python - uses: actions/setup-python@v2 with: - python-version: pypy3 + python-version: "pypy3" architecture: x64 # Set up pypy2 after pypy3 to ensure pypy isn't aliased to pypy3 - uses: actions/setup-python@v2 with: - python-version: pypy2 + python-version: "pypy2" architecture: x64 - uses: actions/setup-python@v2 with: - python-version: 2.7 + python-version: "2.7" architecture: x64 - uses: actions/setup-python@v2 with: - python-version: 3.6 + python-version: "3.6" architecture: x64 - uses: actions/setup-python@v2 with: - python-version: 3.7 + python-version: "3.7" architecture: x64 - uses: actions/setup-python@v2 with: - python-version: 3.8 + python-version: "3.8" architecture: x64 # Set up python 3.9 last to ensure tox runs CPython - uses: actions/setup-python@v2 with: - python-version: 3.9 + python-version: "3.9" + architecture: x64 + + - uses: actions/setup-python@v2 + with: + python-version: "3.10" architecture: x64 - name: Install Dependencies @@ -890,7 +940,7 @@ jobs: group-number: [1] runs-on: ubuntu-latest - timeout-minutes: 15 + timeout-minutes: 30 services: es01: @@ -913,39 +963,44 @@ jobs: # Set up all versions of python - uses: actions/setup-python@v2 
with: - python-version: pypy3 + python-version: "pypy3" architecture: x64 # Set up pypy2 after pypy3 to ensure pypy isn't aliased to pypy3 - uses: actions/setup-python@v2 with: - python-version: pypy2 + python-version: "pypy2" architecture: x64 - uses: actions/setup-python@v2 with: - python-version: 2.7 + python-version: "2.7" architecture: x64 - uses: actions/setup-python@v2 with: - python-version: 3.6 + python-version: "3.6" architecture: x64 - uses: actions/setup-python@v2 with: - python-version: 3.7 + python-version: "3.7" architecture: x64 - uses: actions/setup-python@v2 with: - python-version: 3.8 + python-version: "3.8" architecture: x64 # Set up python 3.9 last to ensure tox runs CPython - uses: actions/setup-python@v2 with: - python-version: 3.9 + python-version: "3.9" + architecture: x64 + + - uses: actions/setup-python@v2 + with: + python-version: "3.10" architecture: x64 - name: Install Dependencies @@ -976,7 +1031,7 @@ jobs: group-number: [1] runs-on: ubuntu-latest - timeout-minutes: 15 + timeout-minutes: 30 services: es01: @@ -999,39 +1054,44 @@ jobs: # Set up all versions of python - uses: actions/setup-python@v2 with: - python-version: pypy3 + python-version: "pypy3" architecture: x64 # Set up pypy2 after pypy3 to ensure pypy isn't aliased to pypy3 - uses: actions/setup-python@v2 with: - python-version: pypy2 + python-version: "pypy2" architecture: x64 - uses: actions/setup-python@v2 with: - python-version: 2.7 + python-version: "2.7" architecture: x64 - uses: actions/setup-python@v2 with: - python-version: 3.6 + python-version: "3.6" architecture: x64 - uses: actions/setup-python@v2 with: - python-version: 3.7 + python-version: "3.7" architecture: x64 - uses: actions/setup-python@v2 with: - python-version: 3.8 + python-version: "3.8" architecture: x64 # Set up python 3.9 last to ensure tox runs CPython - uses: actions/setup-python@v2 with: - python-version: 3.9 + python-version: "3.9" + architecture: x64 + + - uses: actions/setup-python@v2 + with: + python-version: "3.10" architecture: x64 - name: Install Dependencies @@ -1062,7 +1122,7 @@ jobs: group-number: [1] runs-on: ubuntu-latest - timeout-minutes: 15 + timeout-minutes: 30 services: gearman: @@ -1082,39 +1142,44 @@ jobs: # Set up all versions of python - uses: actions/setup-python@v2 with: - python-version: pypy3 + python-version: "pypy3" architecture: x64 # Set up pypy2 after pypy3 to ensure pypy isn't aliased to pypy3 - uses: actions/setup-python@v2 with: - python-version: pypy2 + python-version: "pypy2" architecture: x64 - uses: actions/setup-python@v2 with: - python-version: 2.7 + python-version: "2.7" architecture: x64 - uses: actions/setup-python@v2 with: - python-version: 3.6 + python-version: "3.6" architecture: x64 - uses: actions/setup-python@v2 with: - python-version: 3.7 + python-version: "3.7" architecture: x64 - uses: actions/setup-python@v2 with: - python-version: 3.8 + python-version: "3.8" architecture: x64 # Set up python 3.9 last to ensure tox runs CPython - uses: actions/setup-python@v2 with: - python-version: 3.9 + python-version: "3.9" + architecture: x64 + + - uses: actions/setup-python@v2 + with: + python-version: "3.10" architecture: x64 - name: Install Dependencies diff --git a/.mega-linter.yml b/.mega-linter.yml index b5d4d709e1..05156ad49e 100644 --- a/.mega-linter.yml +++ b/.mega-linter.yml @@ -6,7 +6,10 @@ DEFAULT_BRANCH: main # Usually master or main SHOW_ELAPSED_TIME: true FILEIO_REPORTER: false PRINT_ALPACA: false -VALIDATE_ALL_CODEBASE: false +VALIDATE_ALL_CODEBASE: false # only lint 
new and edited files +IGNORE_GITIGNORED_FILES: true +FILTER_REGEX_EXCLUDE: "(.*/?packages/.*)" # Ignore packages directories + ENABLE_LINTERS: # If you use ENABLE_LINTERS variable, all other linters will be disabled by default - MARKDOWN_MARKDOWN_LINK_CHECK - MARKDOWN_MARKDOWNLINT @@ -30,34 +33,3 @@ PYTHON_ISORT_CONFIG_FILE: pyproject.toml # Bandit's next release supports pyproject.toml. Until then, add config by cli option PYTHON_BANDIT_ARGUMENTS: --skip=B110,B101,B404 PYTHON_PYLINT_ARGUMENTS: "--fail-under=0 --fail-on=E" - -# IGNORE_GITIGNORED_FILES: true # Currently broken -EXCLUDED_DIRECTORIES: - - "__pycache__" - - ".eggs" - - ".env" - - ".mypy_cache" - - ".nox" - - ".pytest_cache" - - ".pytype" - - ".tox" - - ".venv" - - "build" - - "cover" - - "cython_debug" - - "develop-eggs" - - "dist" - - "downloads" - - "eggs" - - "env" - - "htmlcov" - - "lib" - - "lib64" - - "parts" - - "report" - - "sdist" - - "python-wheels" - - "target" - - "var" - - "venv" - - "wheels" diff --git a/newrelic/api/application.py b/newrelic/api/application.py index 2f3d17e9ea..3928dd4cf6 100644 --- a/newrelic/api/application.py +++ b/newrelic/api/application.py @@ -115,7 +115,7 @@ def link_to_application(self, name): def record_exception(self, exc=None, value=None, tb=None, params=None, ignore_errors=None): # Deprecation Warning warnings.warn( - ("The record_exception function is deprecated. Please use the " "new api named notice_error instead."), + ("The record_exception function is deprecated. Please use the new api named notice_error instead."), DeprecationWarning, ) diff --git a/newrelic/api/import_hook.py b/newrelic/api/import_hook.py index 73c5dd5469..b0d0b914d9 100644 --- a/newrelic/api/import_hook.py +++ b/newrelic/api/import_hook.py @@ -12,10 +12,11 @@ # See the License for the specific language governing permissions and # limitations under the License. -import imp import logging import sys +from newrelic.packages import six + _logger = logging.getLogger(__name__) try: @@ -26,26 +27,44 @@ _import_hooks = {} _ok_modules = ( - # These modules are imported by the newrelic package and/or do not do - # nested imports, so they're ok to import before newrelic. - 'urllib', 'urllib2', 'httplib', 'http.client', 'urllib.request', - 'newrelic.agent', 'asyncio','asyncio.events', - - # These modules should not be added to the _uninstrumented_modules set - # because they have been deemed okay to import before initialization by - # the customer. - 'gunicorn.app.base', 'wsgiref.simple_server', 'gevent.wsgi', - 'gevent.pywsgi', 'cheroot.wsgi', 'cherrypy.wsgiserver', - 'flup.server.cgi', 'flup.server.ajp_base', 'flup.server.fcgi_base', - 'flup.server.scgi_base', 'meinheld.server', 'paste.httpserver', - 'waitress.server', 'gevent.monkey', 'asyncio.base_events', + # These modules are imported by the newrelic package and/or do not do + # nested imports, so they're ok to import before newrelic. + "urllib", + "urllib2", + "httplib", + "http.client", + "urllib.request", + "newrelic.agent", + "asyncio", + "asyncio.events", + # These modules should not be added to the _uninstrumented_modules set + # because they have been deemed okay to import before initialization by + # the customer. 
+ "gunicorn.app.base", + "wsgiref.simple_server", + "gevent.wsgi", + "gevent.pywsgi", + "cheroot.wsgi", + "cherrypy.wsgiserver", + "flup.server.cgi", + "flup.server.ajp_base", + "flup.server.fcgi_base", + "flup.server.scgi_base", + "meinheld.server", + "paste.httpserver", + "waitress.server", + "gevent.monkey", + "asyncio.base_events", ) _uninstrumented_modules = set() -def register_import_hook(name, callable): - imp.acquire_lock() +def register_import_hook(name, callable): # pylint: disable=redefined-builtin + if six.PY2: + import imp + + imp.acquire_lock() try: hooks = _import_hooks.get(name, None) @@ -66,11 +85,14 @@ def register_import_hook(name, callable): # immediately. if module.__name__ not in _ok_modules: - _logger.debug('Module %s has been imported before the ' - 'newrelic.agent.initialize call. Import and ' - 'initialize the New Relic agent before all ' - 'other modules for best monitoring ' - 'results.' % module) + _logger.debug( + "Module %s has been imported before the " + "newrelic.agent.initialize call. Import and " + "initialize the New Relic agent before all " + "other modules for best monitoring " + "results.", + module, + ) # Add the module name to the set of uninstrumented modules. # During harvest, this set will be used to produce metrics. @@ -102,7 +124,8 @@ def register_import_hook(name, callable): _import_hooks[name].append(callable) finally: - imp.release_lock() + if six.PY2: + imp.release_lock() def _notify_import_hooks(name, module): @@ -116,12 +139,11 @@ def _notify_import_hooks(name, module): if hooks is not None: _import_hooks[name] = None - for callable in hooks: - callable(module) + for hook in hooks: + hook(module) class _ImportHookLoader: - def load_module(self, fullname): # Call the import hooks on the module being handled. @@ -133,7 +155,6 @@ def load_module(self, fullname): class _ImportHookChainedLoader: - def __init__(self, loader): self.loader = loader @@ -148,7 +169,6 @@ def load_module(self, fullname): class ImportHookFinder: - def __init__(self): self._skip = {} @@ -178,7 +198,7 @@ def find_module(self, fullname, path=None): if find_spec: spec = find_spec(fullname) - loader = getattr(spec, 'loader', None) + loader = getattr(spec, "loader", None) if loader: return _ImportHookChainedLoader(loader) @@ -201,6 +221,7 @@ def import_hook(name): def decorator(wrapped): register_import_hook(name, wrapped) return wrapped + return decorator diff --git a/newrelic/api/transaction.py b/newrelic/api/transaction.py index 05ba3f2ae5..84d2ee2db3 100644 --- a/newrelic/api/transaction.py +++ b/newrelic/api/transaction.py @@ -364,7 +364,11 @@ def __enter__(self): # actual thread and not a greenlet. 
if not hasattr(sys, "_current_frames") or self.thread_id in sys._current_frames(): - thread_instance = threading.currentThread() + try: + thread_instance = threading.current_thread() + except TypeError: + thread_instance = threading.currentThread() + self._utilization_tracker = utilization_tracker(self.application.name) if self._utilization_tracker: self._utilization_tracker.enter_transaction(thread_instance) @@ -1038,9 +1042,9 @@ def _create_distributed_trace_payload(self): d=data, ) except: - self._record_supportability("Supportability/DistributedTrace/" "CreatePayload/Exception") + self._record_supportability("Supportability/DistributedTrace/CreatePayload/Exception") else: - self._record_supportability("Supportability/DistributedTrace/" "CreatePayload/Success") + self._record_supportability("Supportability/DistributedTrace/CreatePayload/Success") return payload def create_distributed_trace_payload(self): @@ -1066,7 +1070,7 @@ def _generate_distributed_trace_headers(self, data=None): tracestate += "," + self.tracestate yield ("tracestate", tracestate) - self._record_supportability("Supportability/TraceContext/" "Create/Success") + self._record_supportability("Supportability/TraceContext/Create/Success") if not self._settings.distributed_tracing.exclude_newrelic_header: # Insert New Relic dt headers for backwards compatibility @@ -1075,13 +1079,13 @@ def _generate_distributed_trace_headers(self, data=None): d=data, ) yield ("newrelic", payload.http_safe()) - self._record_supportability("Supportability/DistributedTrace/" "CreatePayload/Success") + self._record_supportability("Supportability/DistributedTrace/CreatePayload/Success") except: - self._record_supportability("Supportability/TraceContext/" "Create/Exception") + self._record_supportability("Supportability/TraceContext/Create/Exception") if not self._settings.distributed_tracing.exclude_newrelic_header: - self._record_supportability("Supportability/DistributedTrace/" "CreatePayload/Exception") + self._record_supportability("Supportability/DistributedTrace/CreatePayload/Exception") def insert_distributed_trace_headers(self, headers): headers.extend(self._generate_distributed_trace_headers()) @@ -1096,23 +1100,21 @@ def _can_accept_distributed_trace_headers(self): if self._distributed_trace_state: if self._distributed_trace_state & ACCEPTED_DISTRIBUTED_TRACE: - self._record_supportability("Supportability/DistributedTrace/" "AcceptPayload/Ignored/Multiple") + self._record_supportability("Supportability/DistributedTrace/AcceptPayload/Ignored/Multiple") else: - self._record_supportability( - "Supportability/DistributedTrace/" "AcceptPayload/Ignored/CreateBeforeAccept" - ) + self._record_supportability("Supportability/DistributedTrace/AcceptPayload/Ignored/CreateBeforeAccept") return False return True def _accept_distributed_trace_payload(self, payload, transport_type="HTTP"): if not payload: - self._record_supportability("Supportability/DistributedTrace/" "AcceptPayload/Ignored/Null") + self._record_supportability("Supportability/DistributedTrace/AcceptPayload/Ignored/Null") return False payload = DistributedTracePayload.decode(payload) if not payload: - self._record_supportability("Supportability/DistributedTrace/" "AcceptPayload/ParseException") + self._record_supportability("Supportability/DistributedTrace/AcceptPayload/ParseException") return False try: @@ -1120,21 +1122,21 @@ def _accept_distributed_trace_payload(self, payload, transport_type="HTTP"): major_version = version and int(version[0]) if major_version is None: - 
self._record_supportability("Supportability/DistributedTrace/" "AcceptPayload/ParseException") + self._record_supportability("Supportability/DistributedTrace/AcceptPayload/ParseException") return False if major_version > DistributedTracePayload.version[0]: - self._record_supportability("Supportability/DistributedTrace/" "AcceptPayload/Ignored/MajorVersion") + self._record_supportability("Supportability/DistributedTrace/AcceptPayload/Ignored/MajorVersion") return False data = payload.get("d", {}) if not all(k in data for k in DISTRIBUTED_TRACE_KEYS_REQUIRED): - self._record_supportability("Supportability/DistributedTrace/" "AcceptPayload/ParseException") + self._record_supportability("Supportability/DistributedTrace/AcceptPayload/ParseException") return False # Must have either id or tx if not any(k in data for k in ("id", "tx")): - self._record_supportability("Supportability/DistributedTrace/" "AcceptPayload/ParseException") + self._record_supportability("Supportability/DistributedTrace/AcceptPayload/ParseException") return False settings = self._settings @@ -1143,10 +1145,10 @@ def _accept_distributed_trace_payload(self, payload, transport_type="HTTP"): # If trust key doesn't exist in the payload, use account_id received_trust_key = data.get("tk", account_id) if settings.trusted_account_key != received_trust_key: - self._record_supportability("Supportability/DistributedTrace/" "AcceptPayload/Ignored/UntrustedAccount") + self._record_supportability("Supportability/DistributedTrace/AcceptPayload/Ignored/UntrustedAccount") if settings.debug.log_untrusted_distributed_trace_keys: _logger.debug( - "Received untrusted key in distributed " "trace payload. received_trust_key=%r", + "Received untrusted key in distributed trace payload. received_trust_key=%r", received_trust_key, ) return False @@ -1163,11 +1165,11 @@ def _accept_distributed_trace_payload(self, payload, transport_type="HTTP"): data["pr"] = None self._accept_distributed_trace_data(data, transport_type) - self._record_supportability("Supportability/DistributedTrace/" "AcceptPayload/Success") + self._record_supportability("Supportability/DistributedTrace/AcceptPayload/Success") return True except: - self._record_supportability("Supportability/DistributedTrace/" "AcceptPayload/Exception") + self._record_supportability("Supportability/DistributedTrace/AcceptPayload/Exception") return False def accept_distributed_trace_payload(self, *args, **kwargs): @@ -1244,10 +1246,10 @@ def accept_distributed_trace_headers(self, headers, transport_type="HTTP"): data = None if not data: - self._record_supportability("Supportability/TraceContext/" "TraceParent/Parse/Exception") + self._record_supportability("Supportability/TraceContext/TraceParent/Parse/Exception") return False - self._record_supportability("Supportability/TraceContext/" "TraceParent/Accept/Success") + self._record_supportability("Supportability/TraceContext/TraceParent/Accept/Success") if tracestate: tracestate = ensure_str(tracestate) try: @@ -1257,7 +1259,7 @@ def accept_distributed_trace_headers(self, headers, transport_type="HTTP"): self.tracing_vendors = ",".join(vendors.keys()) self.tracestate = vendors.text(limit=31) except: - self._record_supportability("Supportability/TraceContext/" "TraceState/Parse/Exception") + self._record_supportability("Supportability/TraceContext/TraceState/Parse/Exception") else: # Remove trusted new relic header if available and parse if payload: @@ -1269,12 +1271,12 @@ def accept_distributed_trace_headers(self, headers, transport_type="HTTP"): 
self.trusted_parent_span = tracestate_data.pop("id", None) data.update(tracestate_data) else: - self._record_supportability("Supportability/TraceContext/" "TraceState/InvalidNrEntry") + self._record_supportability("Supportability/TraceContext/TraceState/InvalidNrEntry") else: - self._record_supportability("Supportability/TraceContext/" "TraceState/NoNrEntry") + self._record_supportability("Supportability/TraceContext/TraceState/NoNrEntry") self._accept_distributed_trace_data(data, transport_type) - self._record_supportability("Supportability/TraceContext/" "Accept/Success") + self._record_supportability("Supportability/TraceContext/Accept/Success") return True elif distributed_header: distributed_header = ensure_str(distributed_header) @@ -1455,7 +1457,7 @@ def set_transaction_name(self, name, group=None, priority=None): def record_exception(self, exc=None, value=None, tb=None, params=None, ignore_errors=None): # Deprecation Warning warnings.warn( - ("The record_exception function is deprecated. Please use the " "new api named notice_error instead."), + ("The record_exception function is deprecated. Please use the new api named notice_error instead."), DeprecationWarning, ) @@ -1596,9 +1598,7 @@ def add_custom_parameter(self, name, value): return False if len(self._custom_params) >= MAX_NUM_USER_ATTRIBUTES: - _logger.debug( - "Maximum number of custom attributes already " "added. Dropping attribute: %r=%r", name, value - ) + _logger.debug("Maximum number of custom attributes already added. Dropping attribute: %r=%r", name, value) return False key, val = process_user_attribute(name, value) diff --git a/newrelic/bootstrap/sitecustomize.py b/newrelic/bootstrap/sitecustomize.py index 7f4811d765..6640af0e99 100644 --- a/newrelic/bootstrap/sitecustomize.py +++ b/newrelic/bootstrap/sitecustomize.py @@ -14,50 +14,51 @@ import os import sys +import time # Define some debug logging routines to help sort out things when this # all doesn't work as expected. 
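For reference on the transaction.py hunks above: the traceparent header they validate follows the four-field W3C Trace Context format. An illustrative value (the spec's own sample), split the way a receiver sees it:

    # version-traceid-parentid-flags, all lowercase hex
    traceparent = "00-0af7651916cd43dd8448eb211c80319c-00f067aa0ba902b7-01"
    version, trace_id, parent_id, trace_flags = traceparent.split("-")
    assert len(trace_id) == 32 and len(parent_id) == 16
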
-import time -startup_debug = os.environ.get('NEW_RELIC_STARTUP_DEBUG', - 'off').lower() in ('on', 'true', '1') +# Avoiding additional imports by defining PY2 manually +PY2 = sys.version_info[0] == 2 + +startup_debug = os.environ.get("NEW_RELIC_STARTUP_DEBUG", "off").lower() in ("on", "true", "1") def log_message(text, *args, **kwargs): - critical = kwargs.get('critical', False) + critical = kwargs.get("critical", False) if startup_debug or critical: text = text % args - timestamp = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime()) - sys.stdout.write('NEWRELIC: %s (%d) - %s\n' % (timestamp, - os.getpid(), text)) + timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + sys.stdout.write("NEWRELIC: %s (%d) - %s\n" % (timestamp, os.getpid(), text)) sys.stdout.flush() -log_message('New Relic Bootstrap (%s)', __file__) +log_message("New Relic Bootstrap (%s)", __file__) -log_message('working_directory = %r', os.getcwd()) +log_message("working_directory = %r", os.getcwd()) -log_message('sys.prefix = %r', os.path.normpath(sys.prefix)) +log_message("sys.prefix = %r", os.path.normpath(sys.prefix)) try: - log_message('sys.real_prefix = %r', sys.real_prefix) + log_message("sys.real_prefix = %r", sys.real_prefix) except AttributeError: pass -log_message('sys.version_info = %r', sys.version_info) -log_message('sys.executable = %r', sys.executable) +log_message("sys.version_info = %r", sys.version_info) +log_message("sys.executable = %r", sys.executable) -if hasattr(sys, 'flags'): - log_message('sys.flags = %r', sys.flags) +if hasattr(sys, "flags"): + log_message("sys.flags = %r", sys.flags) -log_message('sys.path = %r', sys.path) +log_message("sys.path = %r", sys.path) for name in sorted(os.environ.keys()): - if name.startswith('NEW_RELIC_') or name.startswith('PYTHON'): - if name == 'NEW_RELIC_LICENSE_KEY': + if name.startswith("NEW_RELIC_") or name.startswith("PYTHON"): + if name == "NEW_RELIC_LICENSE_KEY": continue - log_message('%s = %r', name, os.environ.get(name)) + log_message("%s = %r", name, os.environ.get(name)) # We need to import the original sitecustomize.py file if it exists. We # can't just try and import the existing one as we will pick up @@ -68,13 +69,11 @@ def log_message(text, *args, **kwargs): # imp module to find the module, excluding the bootstrap directory from # the search, and then load what was found. 
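The hunk below replaces that imp-based lookup with importlib on Python 3. A standalone sketch of the same idea, where the excluded directory is a placeholder:

    import sys
    from importlib.machinery import PathFinder

    # Find a module on a restricted path list without importing it first.
    search_path = [p for p in sys.path if p != "/path/to/bootstrap"]  # placeholder
    spec = PathFinder.find_spec("sitecustomize", path=search_path)
    if spec is not None:
        # The patch loads through the spec's loader; importlib.util's
        # module_from_spec/exec_module pair is the more modern route.
        module = spec.loader.load_module("sitecustomize")
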
-import imp - boot_directory = os.path.dirname(__file__) root_directory = os.path.dirname(os.path.dirname(boot_directory)) -log_message('root_directory = %r', root_directory) -log_message('boot_directory = %r', boot_directory) +log_message("root_directory = %r", root_directory) +log_message("boot_directory = %r", boot_directory) path = list(sys.path) @@ -82,13 +81,25 @@ def log_message(text, *args, **kwargs): del path[path.index(boot_directory)] try: - (file, pathname, description) = imp.find_module('sitecustomize', path) + if PY2: + import imp + + module_spec = imp.find_module("sitecustomize", path) + else: + from importlib.machinery import PathFinder + + module_spec = PathFinder.find_spec("sitecustomize", path=path) + except ImportError: pass else: - log_message('sitecustomize = %r', (file, pathname, description)) + if module_spec is not None: # Import error not raised in importlib + log_message("sitecustomize = %r", module_spec) - imp.load_module('sitecustomize', file, pathname, description) + if PY2: + imp.load_module("sitecustomize", *module_spec) + else: + module_spec.loader.load_module("sitecustomize") # Because the PYTHONPATH environment variable has been amended and the # bootstrap directory added, if a Python application creates a sub @@ -99,17 +110,17 @@ def log_message(text, *args, **kwargs): # in the same Python installation as the original newrelic-admin script # which was run and only continue if we are. -expected_python_prefix = os.environ.get('NEW_RELIC_PYTHON_PREFIX') +expected_python_prefix = os.environ.get("NEW_RELIC_PYTHON_PREFIX") actual_python_prefix = os.path.realpath(os.path.normpath(sys.prefix)) -expected_python_version = os.environ.get('NEW_RELIC_PYTHON_VERSION') -actual_python_version = '.'.join(map(str, sys.version_info[:2])) +expected_python_version = os.environ.get("NEW_RELIC_PYTHON_VERSION") +actual_python_version = ".".join(map(str, sys.version_info[:2])) python_prefix_matches = expected_python_prefix == actual_python_prefix python_version_matches = expected_python_version == actual_python_version -log_message('python_prefix_matches = %r', python_prefix_matches) -log_message('python_version_matches = %r', python_version_matches) +log_message("python_prefix_matches = %r", python_prefix_matches) +log_message("python_version_matches = %r", python_version_matches) if python_prefix_matches and python_version_matches: # We also need to skip agent initialisation if neither the license @@ -118,12 +129,12 @@ def log_message(text, *args, **kwargs): # the wrapper script, and which controls whether the agent is # actually run based on the presence of the environment variables. 
- license_key = os.environ.get('NEW_RELIC_LICENSE_KEY', None) + license_key = os.environ.get("NEW_RELIC_LICENSE_KEY", None) - config_file = os.environ.get('NEW_RELIC_CONFIG_FILE', None) - environment = os.environ.get('NEW_RELIC_ENVIRONMENT', None) + config_file = os.environ.get("NEW_RELIC_CONFIG_FILE", None) + environment = os.environ.get("NEW_RELIC_ENVIRONMENT", None) - log_message('initialize_agent = %r', bool(license_key or config_file)) + log_message("initialize_agent = %r", bool(license_key or config_file)) if license_key or config_file: # When installed as an egg with buildout, the root directory for @@ -142,7 +153,7 @@ def log_message(text, *args, **kwargs): import newrelic.config - log_message('agent_version = %r', newrelic.version) + log_message("agent_version = %r", newrelic.version) if do_insert_path: try: @@ -154,9 +165,16 @@ def log_message(text, *args, **kwargs): newrelic.config.initialize(config_file, environment) else: - log_message("""New Relic could not start because the newrelic-admin script was called from a Python installation that is different from the Python installation that is currently running. To fix this problem, call the newrelic-admin script from the Python installation that is currently running (details below). + log_message( + """New Relic could not start because the newrelic-admin script was called from a Python installation that is different from the Python installation that is currently running. To fix this problem, call the newrelic-admin script from the Python installation that is currently running (details below). newrelic-admin Python directory: %r current Python directory: %r newrelic-admin Python version: %r -current Python version: %r""", expected_python_prefix, actual_python_prefix, expected_python_version, actual_python_version, critical=True) +current Python version: %r""", + expected_python_prefix, + actual_python_prefix, + expected_python_version, + actual_python_version, + critical=True, + ) diff --git a/newrelic/common/log_file.py b/newrelic/common/log_file.py index ae126a9a0e..583e24e5ce 100644 --- a/newrelic/common/log_file.py +++ b/newrelic/common/log_file.py @@ -37,7 +37,7 @@ def emit(self, record): _agent_logger.addHandler(_NullHandler()) _agent_logger.propagate = False -_LOG_FORMAT = "%(asctime)s (%(process)d/%(threadName)s) " "%(name)s %(levelname)s - %(message)s" +_LOG_FORMAT = "%(asctime)s (%(process)d/%(threadName)s) %(name)s %(levelname)s - %(message)s" _initialized = False @@ -101,7 +101,7 @@ def initialize_logging(log_file, log_level): except Exception: _initialize_stderr_logging(log_level) - _agent_logger.exception("Falling back to stderr logging as " "unable to create log file %r." % log_file) + _agent_logger.exception("Falling back to stderr logging as unable to create log file %r." 
% log_file) _initialized = True diff --git a/newrelic/console.py b/newrelic/console.py index f4d8846158..48cda6e7cc 100644 --- a/newrelic/console.py +++ b/newrelic/console.py @@ -26,9 +26,8 @@ import socket import sys import threading -import traceback -import os import time +import traceback try: import ConfigParser @@ -40,45 +39,54 @@ except ImportError: import builtins as __builtin__ + def _argspec_py2(func): return inspect.getargspec(func) + def _argspec_py3(func): a = inspect.getfullargspec(func) return (a.args, a.varargs, a.varkw, a.defaults) -if hasattr(inspect, 'getfullargspec'): + +if hasattr(inspect, "getfullargspec"): _argspec = _argspec_py3 else: _argspec = _argspec_py2 try: - from inspect import signature from collections import OrderedDict + from inspect import signature + def doc_signature(func): sig = signature(func) sig._parameters = OrderedDict(list(sig._parameters.items())[1:]) return str(sig) + + except ImportError: from inspect import formatargspec + def doc_signature(func): args, varargs, keywords, defaults = _argspec(func) return formatargspec(args[1:], varargs, keywords, defaults) -from newrelic.core.agent import agent_instance -from newrelic.core.config import global_settings, flatten_settings -from newrelic.api.transaction import Transaction + from newrelic.api.object_wrapper import ObjectWrapper +from newrelic.api.transaction import Transaction +from newrelic.core.agent import agent_instance +from newrelic.core.config import flatten_settings, global_settings from newrelic.core.trace_cache import trace_cache _trace_cache = trace_cache() + def shell_command(wrapped): args, varargs, keywords, defaults = _argspec(wrapped) parser = optparse.OptionParser() for name in args[1:]: - parser.add_option('--%s' % name, dest=name) + parser.add_option("--%s" % name, dest=name) @functools.wraps(wrapped) def wrapper(self, line): @@ -93,48 +101,51 @@ def wrapper(self, line): return wrapped(self, *args, **kwargs) - if wrapper.__name__.startswith('do_'): - prototype = wrapper.__name__[3:] + ' ' + doc_signature(wrapped) + if wrapper.__name__.startswith("do_"): + prototype = wrapper.__name__[3:] + " " + doc_signature(wrapped) - if hasattr(wrapper, '__doc__') and wrapper.__doc__ is not None: - wrapper.__doc__ = '\n'.join((prototype, - wrapper.__doc__.lstrip('\n'))) + if hasattr(wrapper, "__doc__") and wrapper.__doc__ is not None: + wrapper.__doc__ = "\n".join((prototype, wrapper.__doc__.lstrip("\n"))) return wrapper + _consoles = threading.local() + def acquire_console(shell): _consoles.active = shell + def release_console(): del _consoles.active + def setquit(): """Define new built-ins 'quit' and 'exit'. These are simply strings that display a hint on how to exit. """ - if os.sep == ':': - eof = 'Cmd-Q' - elif os.sep == '\\': - eof = 'Ctrl-Z plus Return' + if os.sep == ":": + eof = "Cmd-Q" + elif os.sep == "\\": + eof = "Ctrl-Z plus Return" else: - eof = 'Ctrl-D (i.e. EOF)' + eof = "Ctrl-D (i.e. EOF)" class Quitter(object): def __init__(self, name): self.name = name def __repr__(self): - return 'Use %s() or %s to exit' % (self.name, eof) + return "Use %s() or %s to exit" % (self.name, eof) def __call__(self, code=None): # If executed with our interactive console, only raise the # SystemExit exception but don't close sys.stdout as we are # not the owner of it. 
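Earlier in this console.py diff, the doc_signature helper trims the leading self parameter by mutating the private _parameters attribute. Signature.replace is a public route to the same display; a sketch, not the patch's code:

    import inspect

    def do_prompt(self, flag=None):
        """Enable or disable the console prompt."""

    sig = inspect.signature(do_prompt)
    # Rebuild the signature without the first (self) parameter.
    trimmed = sig.replace(parameters=list(sig.parameters.values())[1:])
    print(trimmed)  # (flag=None)
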
- if hasattr(_consoles, 'active'): + if hasattr(_consoles, "active"): raise SystemExit(code) # Shells like IDLE catch the SystemExit, but listen when their @@ -146,11 +157,11 @@ def __call__(self, code=None): pass raise SystemExit(code) - __builtin__.quit = Quitter('quit') - __builtin__.exit = Quitter('exit') + __builtin__.quit = Quitter("quit") + __builtin__.exit = Quitter("exit") -class OutputWrapper(ObjectWrapper): +class OutputWrapper(ObjectWrapper): def flush(self): try: shell = _consoles.active @@ -172,14 +183,15 @@ def writelines(self, data): except Exception: return self._nr_next_object.writelines(data) + def intercept_console(): setquit() sys.stdout = OutputWrapper(sys.stdout, None, None) sys.stderr = OutputWrapper(sys.stderr, None, None) -class EmbeddedConsole(code.InteractiveConsole): +class EmbeddedConsole(code.InteractiveConsole): def write(self, data): self.stdout.write(data) self.stdout.flush() @@ -188,34 +200,37 @@ def raw_input(self, prompt): self.stdout.write(prompt) self.stdout.flush() line = self.stdin.readline() - line = line.rstrip('\r\n') + line = line.rstrip("\r\n") return line + class ConsoleShell(cmd.Cmd): use_rawinput = 0 def __init__(self): cmd.Cmd.__init__(self) - self.do_prompt('on') + self.do_prompt("on") def emptyline(self): pass def help_help(self): - print("""help (command) + print( + """help (command) Output list of commands or help details for named command.""", - file=self.stdout) + file=self.stdout, + ) @shell_command def do_prompt(self, flag=None): """ Enable or disable the console prompt.""" - if flag == 'on': - self.prompt = '(newrelic:%d) ' % os.getpid() - elif flag == 'off': - self.prompt = '' + if flag == "on": + self.prompt = "(newrelic:%d) " % os.getpid() + elif flag == "off": + self.prompt = "" @shell_command def do_exit(self): @@ -252,7 +267,7 @@ def do_sys_modules(self): for name, module in sorted(sys.modules.items()): if module is not None: - file = getattr(module, '__file__', None) + file = getattr(module, "__file__", None) print("%s - %s" % (name, file), file=self.stdout) @shell_command @@ -282,7 +297,7 @@ def do_config_args(self): """ Displays the configure arguments used to build Python.""" - args = '' + args = "" try: # This may fail if using package Python and the @@ -290,7 +305,7 @@ def do_config_args(self): import distutils.sysconfig - args = distutils.sysconfig.get_config_var('CONFIG_ARGS') + args = distutils.sysconfig.get_config_var("CONFIG_ARGS") except Exception: pass @@ -312,7 +327,7 @@ def do_dump_config(self, name=None): config = flatten_settings(config) keys = sorted(config.keys()) for key in keys: - print('%s = %r' % (key, config[key]), file=self.stdout) + print("%s = %r" % (key, config[key]), file=self.stdout) @shell_command def do_agent_status(self): @@ -329,8 +344,7 @@ def do_applications(self): Displays a list of the applications. 
""" - print(repr(sorted( - agent_instance().applications.keys())), file=self.stdout) + print(repr(sorted(agent_instance().applications.keys())), file=self.stdout) @shell_command def do_application_status(self, name=None): @@ -363,24 +377,23 @@ def do_import_hooks(self): result = results[key] if result is None: if key[0] not in sys.modules: - print('%s: PENDING' % (key,), file=self.stdout) + print("%s: PENDING" % (key,), file=self.stdout) else: - print('%s: IMPORTED' % (key,), file=self.stdout) + print("%s: IMPORTED" % (key,), file=self.stdout) elif not result: - print('%s: INSTRUMENTED' % (key,), file=self.stdout) + print("%s: INSTRUMENTED" % (key,), file=self.stdout) else: - print('%s: FAILED' % (key,), file=self.stdout) + print("%s: FAILED" % (key,), file=self.stdout) for line in result: - print(line, end='', file=self.stdout) + print(line, end="", file=self.stdout) @shell_command def do_transactions(self): - """ - """ + """ """ for item in _trace_cache.active_threads(): transaction, thread_id, thread_type, frame = item - print('THREAD', item, file=self.stdout) + print("THREAD", item, file=self.stdout) if transaction is not None: transaction.dump(self.stdout) print(file=self.stdout) @@ -397,14 +410,13 @@ def do_interpreter(self): _settings = global_settings() if not _settings.console.allow_interpreter_cmd: - print('Sorry, the embedded Python ' \ - 'interpreter is disabled.', file=self.stdout) + print("Sorry, the embedded Python interpreter is disabled.", file=self.stdout) return locals = {} - locals['stdin'] = self.stdin - locals['stdout'] = self.stdout + locals["stdin"] = self.stdin + locals["stdout"] = self.stdout console = EmbeddedConsole(locals) @@ -433,36 +445,33 @@ def do_threads(self): all = [] for threadId, stack in sys._current_frames().items(): block = [] - block.append('# ThreadID: %s' % threadId) + block.append("# ThreadID: %s" % threadId) thr = threading._active.get(threadId) if thr: - block.append('# Type: %s' % type(thr).__name__) - block.append('# Name: %s' % thr.name) - for filename, lineno, name, line in traceback.extract_stack( - stack): - block.append('File: \'%s\', line %d, in %s' % (filename, - lineno, name)) + block.append("# Type: %s" % type(thr).__name__) + block.append("# Name: %s" % thr.name) + for filename, lineno, name, line in traceback.extract_stack(stack): + block.append("File: '%s', line %d, in %s" % (filename, lineno, name)) if line: - block.append(' %s' % (line.strip())) - all.append('\n'.join(block)) + block.append(" %s" % (line.strip())) + all.append("\n".join(block)) - print('\n\n'.join(all), file=self.stdout) + print("\n\n".join(all), file=self.stdout) -class ConnectionManager(object): +class ConnectionManager(object): def __init__(self, listener_socket): self.__listener_socket = listener_socket self.__console_initialized = False if not os.path.isabs(self.__listener_socket): - host, port = self.__listener_socket.split(':') + host, port = self.__listener_socket.split(":") port = int(port) self.__listener_socket = (host, port) - self.__thread = threading.Thread(target=self.__thread_run, - name='NR-Console-Manager') + self.__thread = threading.Thread(target=self.__thread_run, name="NR-Console-Manager") - self.__thread.setDaemon(True) + self.__thread.daemon = True self.__thread.start() def __socket_cleanup(self, path): @@ -499,8 +508,8 @@ def __thread_run(self): shell = ConsoleShell() - shell.stdin = client.makefile('r') - shell.stdout = client.makefile('w') + shell.stdin = client.makefile("r") + shell.stdout = client.makefile("w") while True: try: @@ 
-508,10 +517,9 @@ def __thread_run(self): except Exception: shell.stdout.flush() - print('Unexpected exception.', file=shell.stdout) + print("Unexpected exception.", file=shell.stdout) exc_info = sys.exc_info() - traceback.print_exception(exc_info[0], exc_info[1], - exc_info[2], file=shell.stdout) + traceback.print_exception(exc_info[0], exc_info[1], exc_info[2], file=shell.stdout) exc_info = None else: @@ -524,9 +532,10 @@ def __thread_run(self): client.close() + class ClientShell(cmd.Cmd): - prompt = '(newrelic) ' + prompt = "(newrelic) " def __init__(self, config_file, stdin=None, stdout=None, log=None): cmd.Cmd.__init__(self, stdin=stdin, stdout=stdout) @@ -536,17 +545,14 @@ def __init__(self, config_file, stdin=None, stdout=None, log=None): self.__log_object = log if not self.__config_object.read([config_file]): - raise RuntimeError('Unable to open configuration file %s.' % - config_file) + raise RuntimeError("Unable to open configuration file %s." % config_file) - listener_socket = self.__config_object.get('newrelic', - 'console.listener_socket') % {'pid': '*'} + listener_socket = self.__config_object.get("newrelic", "console.listener_socket") % {"pid": "*"} if os.path.isabs(listener_socket): - self.__servers = [(socket.AF_UNIX, path) for path in - sorted(glob.glob(listener_socket))] + self.__servers = [(socket.AF_UNIX, path) for path in sorted(glob.glob(listener_socket))] else: - host, port = listener_socket.split(':') + host, port = listener_socket.split(":") port = int(port) self.__servers = [(socket.AF_INET, (host, port))] @@ -555,9 +561,11 @@ def emptyline(self): pass def help_help(self): - print("""help (command) + print( + """help (command) Output list of commands or help details for named command.""", - file=self.stdout) + file=self.stdout, + ) def do_exit(self, line): """exit @@ -570,7 +578,7 @@ def do_servers(self, line): Display a list of the servers which can be connected to.""" for i in range(len(self.__servers)): - print('%s: %s' % (i+1, self.__servers[i]), file=self.stdout) + print("%s: %s" % (i + 1, self.__servers[i]), file=self.stdout) def do_connect(self, line): """connect [index] @@ -579,16 +587,15 @@ def do_connect(self, line): be supplied.""" if len(self.__servers) == 0: - print('No servers to connect to.', file=self.stdout) + print("No servers to connect to.", file=self.stdout) return if not line: if len(self.__servers) != 1: - print('Multiple servers, which should be used?', - file=self.stdout) + print("Multiple servers, which should be used?", file=self.stdout) return else: - line = '1' + line = "1" try: selection = int(line) @@ -596,14 +603,14 @@ def do_connect(self, line): selection = None if selection is None: - print('Server selection not an integer.', file=self.stdout) + print("Server selection not an integer.", file=self.stdout) return if selection <= 0 or selection > len(self.__servers): - print('Invalid server selected.', file=self.stdout) + print("Invalid server selected.", file=self.stdout) return - server = self.__servers[selection-1] + server = self.__servers[selection - 1] client = socket.socket(server[0], socket.SOCK_STREAM) client.connect(server[1]) @@ -620,14 +627,14 @@ def write(): if self.__log_object: self.__log_object.write(c) - client.sendall(c.encode('utf-8')) + client.sendall(c.encode("utf-8")) except Exception: break def read(): while 1: try: - c = client.recv(1).decode('utf-8') + c = client.recv(1).decode("utf-8") if not c: break @@ -641,10 +648,10 @@ def read(): break thread1 = threading.Thread(target=write) - thread1.setDaemon(True) 
+ thread1.daemon = True thread2 = threading.Thread(target=read) - thread2.setDaemon(True) + thread2.daemon = True thread1.start() thread2.start() @@ -653,6 +660,7 @@ def read(): return True + def main(): if len(sys.argv) == 1: print("Usage: newrelic-console config_file") @@ -661,5 +669,6 @@ def main(): shell = ClientShell(sys.argv[1]) shell.cmdloop() -if __name__ == '__main__': + +if __name__ == "__main__": main() diff --git a/newrelic/core/agent.py b/newrelic/core/agent.py index 2c97ed0810..fba8ca50be 100644 --- a/newrelic/core/agent.py +++ b/newrelic/core/agent.py @@ -162,22 +162,22 @@ def agent_singleton(): if "NEW_RELIC_ADMIN_COMMAND" in os.environ: if settings.debug.log_agent_initialization: _logger.info( - "Monitored application started using the " "newrelic-admin command with command line of %s.", + "Monitored application started using the newrelic-admin command with command line of %s.", os.environ["NEW_RELIC_ADMIN_COMMAND"], ) else: _logger.debug( - "Monitored application started using the " "newrelic-admin command with command line of %s.", + "Monitored application started using the newrelic-admin command with command line of %s.", os.environ["NEW_RELIC_ADMIN_COMMAND"], ) with Agent._instance_lock: if not Agent._instance: if settings.debug.log_agent_initialization: - _logger.info("Creating instance of Python agent in " "process %d.", os.getpid()) + _logger.info("Creating instance of Python agent in process %d.", os.getpid()) _logger.info("Agent was initialized from: %r", "".join(traceback.format_stack()[:-1])) else: - _logger.debug("Creating instance of Python agent in " "process %d.", os.getpid()) + _logger.debug("Creating instance of Python agent in process %d.", os.getpid()) _logger.debug("Agent was initialized from: %r", "".join(traceback.format_stack()[:-1])) instance = Agent(settings) @@ -208,7 +208,7 @@ def __init__(self, config): self._config = config self._harvest_thread = threading.Thread(target=self._harvest_loop, name="NR-Harvest-Thread") - self._harvest_thread.setDaemon(True) + self._harvest_thread.daemon = True self._harvest_shutdown = threading.Event() self._default_harvest_count = 0 @@ -362,10 +362,10 @@ def activate_application(self, app_name, linked_applications=None, timeout=None, ) if settings.debug.log_agent_initialization: - _logger.info("Creating application instance for %r " "in process %d.", app_name, os.getpid()) + _logger.info("Creating application instance for %r in process %d.", app_name, os.getpid()) _logger.info("Application was activated from: %r", "".join(traceback.format_stack()[:-1])) else: - _logger.debug("Creating application instance for %r " "in process %d.", app_name, os.getpid()) + _logger.debug("Creating application instance for %r in process %d.", app_name, os.getpid()) _logger.debug("Application was activated from: %r", "".join(traceback.format_stack()[:-1])) linked_applications = sorted(set(linked_applications)) @@ -450,7 +450,7 @@ def register_data_source(self, source, application=None, name=None, settings=Non def remove_thread_utilization(self): - _logger.debug("Removing thread utilization data source from all " "applications") + _logger.debug("Removing thread utilization data source from all applications") source_name = thread_utilization_data_source.__name__ factory_name = "Thread Utilization" @@ -478,7 +478,7 @@ def remove_thread_utilization(self): def record_exception(self, app_name, exc=None, value=None, tb=None, params=None, ignore_errors=None): # Deprecation Warning warnings.warn( - ("The record_exception function is 
deprecated. Please use the " "new api named notice_error instead."), + ("The record_exception function is deprecated. Please use the new api named notice_error instead."), DeprecationWarning, ) @@ -560,8 +560,14 @@ def compute_sampled(self, app_name): application = self._applications.get(app_name, None) return application.compute_sampled() + def _harvest_shutdown_is_set(self): + try: + return self._harvest_shutdown.is_set() + except TypeError: + return self._harvest_shutdown.isSet() + def _harvest_flexible(self, shutdown=False): - if not self._harvest_shutdown.isSet(): + if not self._harvest_shutdown_is_set(): event_harvest_config = self.global_settings().event_harvest_config self._scheduler.enter(event_harvest_config.report_period_ms / 1000.0, 1, self._harvest_flexible, ()) @@ -578,16 +584,16 @@ def _harvest_flexible(self, shutdown=False): try: application.harvest(shutdown=False, flexible=True) except Exception: - _logger.exception("Failed to harvest data " "for %s." % application.name) + _logger.exception("Failed to harvest data for %s." % application.name) self._flexible_harvest_duration = time.time() - self._last_flexible_harvest _logger.debug( - "Completed harvest[flexible] of application data in %.2f " "seconds.", self._flexible_harvest_duration + "Completed harvest[flexible] of application data in %.2f seconds.", self._flexible_harvest_duration ) def _harvest_default(self, shutdown=False): - if not self._harvest_shutdown.isSet(): + if not self._harvest_shutdown_is_set(): self._scheduler.enter(60.0, 2, self._harvest_default, ()) _logger.debug("Commencing harvest[default] of application data.") elif not shutdown: @@ -602,16 +608,14 @@ def _harvest_default(self, shutdown=False): try: application.harvest(shutdown, flexible=False) except Exception: - _logger.exception("Failed to harvest data " "for %s." % application.name) + _logger.exception("Failed to harvest data for %s." 
% application.name) self._default_harvest_duration = time.time() - self._last_default_harvest - _logger.debug( - "Completed harvest[default] of application data in %.2f " "seconds.", self._default_harvest_duration - ) + _logger.debug("Completed harvest[default] of application data in %.2f seconds.", self._default_harvest_duration) def _harvest_timer(self): - if self._harvest_shutdown.isSet(): + if self._harvest_shutdown_is_set(): return float("inf") return time.time() @@ -691,7 +695,7 @@ def _atexit_shutdown(self): self.shutdown_agent() def shutdown_agent(self, timeout=None): - if self._harvest_shutdown.isSet(): + if self._harvest_shutdown_is_set(): return if timeout is None: diff --git a/newrelic/core/application.py b/newrelic/core/application.py index 646d03c7a8..6c7d5fcd28 100644 --- a/newrelic/core/application.py +++ b/newrelic/core/application.py @@ -18,7 +18,6 @@ from __future__ import print_function -import imp import logging import os import sys @@ -51,6 +50,7 @@ NetworkInterfaceException, RetryDataForRequest, ) +from newrelic.packages import six from newrelic.samplers.data_sampler import DataSampler _logger = logging.getLogger(__name__) @@ -60,15 +60,18 @@ class Application(object): """Class which maintains recorded data for a single application.""" - def __init__(self, app_name, linked_applications=[]): + def __init__(self, app_name, linked_applications=None): _logger.debug( - "Initializing application with name %r and " "linked applications of %r.", app_name, linked_applications + "Initializing application with name %r and linked applications of %r.", app_name, linked_applications ) self._creation_time = time.time() self._app_name = app_name - self._linked_applications = sorted(set(linked_applications)) + if linked_applications is not None: + self._linked_applications = sorted(set(linked_applications)) + else: + self._linked_applications = [] self._process_id = None @@ -129,7 +132,7 @@ def __init__(self, app_name, linked_applications=[]): self.profile_manager = profile_session_manager() - self._uninstrumented = None + self._uninstrumented = [] @property def name(self): @@ -166,7 +169,10 @@ def dump(self, file): active_session = self._active_session if active_session: - print("Collector URL: %s" % (active_session.collector_url), file=file) + try: + print("Collector URL: %s" % (active_session._protocol.client._host), file=file) + except AttributeError: + pass print("Agent Run ID: %s" % (active_session.agent_run_id), file=file) print("URL Normalization Rules: %r" % (self._rules_engine["url"].rules), file=file) print("Metric Normalization Rules: %r" % (self._rules_engine["metric"].rules), file=file) @@ -223,7 +229,7 @@ def activate_session(self, activate_agent=None, timeout=0.0): thread = threading.Thread( target=self.connect_to_data_collector, name="NR-Activate-Session/%s" % self.name, args=(activate_agent,) ) - thread.setDaemon(True) + thread.daemon = True thread.start() if not timeout: @@ -232,7 +238,12 @@ def activate_session(self, activate_agent=None, timeout=0.0): if self._detect_deadlock: self._deadlock_event.wait(deadlock_timeout) - if not self._deadlock_event.isSet(): + try: + deadlock_event_set = self._deadlock_event.is_set() + except TypeError: + deadlock_event_set = self._deadlock_event.isSet() + + if not deadlock_event_set: _logger.warning( "Detected potential deadlock while waiting " "for activation of session for application %r. 
" @@ -249,9 +260,14 @@ def activate_session(self, activate_agent=None, timeout=0.0): self._connected_event.wait(timeout) - if not self._connected_event.isSet(): + try: + connected_event_set = self._connected_event.is_set() + except TypeError: + connected_event_set = self._connected_event.isSet() + + if not connected_event_set: _logger.debug( - "Timeout waiting for activation of session for " "application %r where timeout was %.02f seconds.", + "Timeout waiting for activation of session for application %r where timeout was %.02f seconds.", self._app_name, timeout, ) @@ -303,9 +319,14 @@ def connect_to_data_collector(self, activate_agent): # code run from this thread performs a deferred module import. if self._detect_deadlock: - imp.acquire_lock() - self._deadlock_event.set() - imp.release_lock() + if six.PY2: + import imp + + imp.acquire_lock() + self._deadlock_event.set() + imp.release_lock() + else: + self._deadlock_event.set() # Register the application with the data collector. Any errors # that occur will be dealt with by create_session(). The result @@ -385,15 +406,15 @@ def connect_to_data_collector(self, activate_agent): try: if settings.debug.log_normalization_rules: _logger.info( - "The URL normalization rules for " "%r are %r.", self._app_name, configuration.url_rules + "The URL normalization rules for %r are %r.", self._app_name, configuration.url_rules ) _logger.info( - "The metric normalization rules " "for %r are %r.", + "The metric normalization rules for %r are %r.", self._app_name, configuration.metric_name_rules, ) _logger.info( - "The transaction normalization " "rules for %r are %r.", + "The transaction normalization rules for %r are %r.", self._app_name, configuration.transaction_name_rules, ) @@ -512,9 +533,9 @@ def connect_to_data_collector(self, activate_agent): with InternalTraceContext(internal_metrics): internal_metric( - "Supportability/Python/Application/" "Registration/Duration", self._period_start - connect_start + "Supportability/Python/Application/Registration/Duration", self._period_start - connect_start ) - internal_metric("Supportability/Python/Application/" "Registration/Attempts", connect_attempts) + internal_metric("Supportability/Python/Application/Registration/Attempts", connect_attempts) self._stats_engine.merge_custom_metrics(internal_metrics.metrics()) @@ -581,13 +602,9 @@ def validate_process(self): settings = global_settings() if settings.debug.log_agent_initialization: - _logger.info( - "Process validation check was triggered " "from: %r", "".join(traceback.format_stack()[:-1]) - ) + _logger.info("Process validation check was triggered from: %r", "".join(traceback.format_stack()[:-1])) else: - _logger.debug( - "Process validation check was triggered " "from: %r", "".join(traceback.format_stack()[:-1]) - ) + _logger.debug("Process validation check was triggered from: %r", "".join(traceback.format_stack()[:-1])) # We now zero out the process ID so we know we have already # generated a warning message. 
@@ -657,9 +674,7 @@ def start_data_samplers(self): for data_sampler in self._data_samplers: try: - _logger.debug( - "Starting data sampler for %r in " "application %r.", data_sampler.name, self._app_name - ) + _logger.debug("Starting data sampler for %r in application %r.", data_sampler.name, self._app_name) data_sampler.start() except Exception: @@ -686,9 +701,7 @@ def stop_data_samplers(self): for data_sampler in self._data_samplers: try: - _logger.debug( - "Stopping data sampler for %r in " "application %r.", data_sampler.name, self._app_name - ) + _logger.debug("Stopping data sampler for %r in application %r.", data_sampler.name, self._app_name) data_sampler.stop() except Exception: @@ -714,7 +727,7 @@ def remove_data_source(self, name): try: _logger.debug( - "Removing/Stopping data sampler for %r in " "application %r.", data_sampler.name, self._app_name + "Removing/Stopping data sampler for %r in application %r.", data_sampler.name, self._app_name ) data_sampler.stop() @@ -724,7 +737,7 @@ def remove_data_source(self, name): # If sampler has not started yet, it may throw an error. _logger.debug( - "Exception when stopping " "data source %r when attempting to remove it.", data_sampler.name + "Exception when stopping data source %r when attempting to remove it.", data_sampler.name ) self._data_samplers.remove(data_sampler) @@ -736,7 +749,7 @@ def record_exception(self, exc=None, value=None, tb=None, params=None, ignore_er """ # Deprecation Warning warnings.warn( - ("The record_exception function is deprecated. Please use the " "new api named notice_error instead."), + ("The record_exception function is deprecated. Please use the new api named notice_error instead."), DeprecationWarning, ) @@ -1025,7 +1038,7 @@ def harvest(self, shutdown=False, flexible=False): self._pending_shutdown = True if not self._active_session or not self._harvest_enabled: - _logger.debug("Cannot perform a data harvest for %r as " "there is no active session.", self._app_name) + _logger.debug("Cannot perform a data harvest for %r as there is no active session.", self._app_name) return @@ -1083,7 +1096,7 @@ def harvest(self, shutdown=False, flexible=False): # data sampler, then should perhaps deregister it if it # keeps having problems. - _logger.debug("Fetching metrics from data sources for " "harvest of %r.", self._app_name) + _logger.debug("Fetching metrics from data sources for harvest of %r.", self._app_name) for data_sampler in self._data_samplers: try: @@ -1131,7 +1144,7 @@ def harvest(self, shutdown=False, flexible=False): if self._uninstrumented: for uninstrumented in self._uninstrumented: internal_count_metric("Supportability/Python/Uninstrumented", 1) - internal_count_metric("Supportability/Uninstrumented/" "%s" % uninstrumented, 1) + internal_count_metric("Supportability/Uninstrumented/%s" % uninstrumented, 1) # Create our time stamp as to when this reporting period # ends and start reporting the data. 
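The `warnings.warn` call above marks `record_exception` as superseded by `notice_error`. A hedged sketch of the replacement call at the public API level, assuming an installed and configured agent; the task name is illustrative:

    import newrelic.agent

    @newrelic.agent.background_task(name="example-task")
    def risky_division():
        try:
            1 / 0
        except ZeroDivisionError:
            # Called with no arguments, notice_error() picks up the active
            # exception from sys.exc_info(), as record_exception() did.
            newrelic.agent.notice_error()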
@@ -1151,7 +1164,7 @@ def harvest(self, shutdown=False, flexible=False): if shutdown and (transaction_count or global_events_account): if period_end - self._period_start < 1.0: - _logger.debug("Stretching harvest duration for " "forced harvest on shutdown.") + _logger.debug("Stretching harvest duration for forced harvest on shutdown.") period_end = self._period_start + 1.001 try: @@ -1164,7 +1177,7 @@ def harvest(self, shutdown=False, flexible=False): synthetics_events = stats.synthetics_events if synthetics_events: if synthetics_events.num_samples: - _logger.debug("Sending synthetics event data for " "harvest of %r.", self._app_name) + _logger.debug("Sending synthetics event data for harvest of %r.", self._app_name) self._active_session.send_transaction_events( synthetics_events.sampling_info, synthetics_events @@ -1179,14 +1192,14 @@ def harvest(self, shutdown=False, flexible=False): if transaction_events: # As per spec internal_metric( - "Supportability/Python/" "RequestSampler/requests", transaction_events.num_seen + "Supportability/Python/RequestSampler/requests", transaction_events.num_seen ) internal_metric( - "Supportability/Python/" "RequestSampler/samples", transaction_events.num_samples + "Supportability/Python/RequestSampler/samples", transaction_events.num_samples ) if transaction_events.num_samples: - _logger.debug("Sending analytics event data " "for harvest of %r.", self._app_name) + _logger.debug("Sending analytics event data for harvest of %r.", self._app_name) self._active_session.send_transaction_events( transaction_events.sampling_info, transaction_events @@ -1216,7 +1229,7 @@ def harvest(self, shutdown=False, flexible=False): if spans.num_samples > 0: span_samples = list(spans) - _logger.debug("Sending span event data " "for harvest of %r.", self._app_name) + _logger.debug("Sending span event data for harvest of %r.", self._app_name) self._active_session.send_span_events(spans.sampling_info, span_samples) span_samples = None @@ -1224,8 +1237,8 @@ def harvest(self, shutdown=False, flexible=False): # As per spec spans_seen = spans.num_seen spans_sampled = spans.num_samples - internal_count_metric("Supportability/SpanEvent/" "TotalEventsSeen", spans_seen) - internal_count_metric("Supportability/SpanEvent/" "TotalEventsSent", spans_sampled) + internal_count_metric("Supportability/SpanEvent/TotalEventsSeen", spans_seen) + internal_count_metric("Supportability/SpanEvent/TotalEventsSent", spans_sampled) stats.reset_span_events() @@ -1243,17 +1256,15 @@ def harvest(self, shutdown=False, flexible=False): if num_error_samples > 0: error_event_samples = list(error_events) - _logger.debug("Sending error event data " "for harvest of %r.", self._app_name) + _logger.debug("Sending error event data for harvest of %r.", self._app_name) samp_info = error_events.sampling_info self._active_session.send_error_events(samp_info, error_event_samples) error_event_samples = None # As per spec - internal_count_metric( - "Supportability/Events/" "TransactionError/Seen", error_events.num_seen - ) - internal_count_metric("Supportability/Events/" "TransactionError/Sent", num_error_samples) + internal_count_metric("Supportability/Events/TransactionError/Seen", error_events.num_seen) + internal_count_metric("Supportability/Events/TransactionError/Sent", num_error_samples) stats.reset_error_events() @@ -1267,14 +1278,14 @@ def harvest(self, shutdown=False, flexible=False): if customs.num_samples > 0: custom_samples = list(customs) - _logger.debug("Sending custom event data " "for harvest of %r.", 
self._app_name) + _logger.debug("Sending custom event data for harvest of %r.", self._app_name) self._active_session.send_custom_events(customs.sampling_info, custom_samples) custom_samples = None # As per spec - internal_count_metric("Supportability/Events/" "Customer/Seen", customs.num_seen) - internal_count_metric("Supportability/Events/" "Customer/Sent", customs.num_samples) + internal_count_metric("Supportability/Events/Customer/Seen", customs.num_seen) + internal_count_metric("Supportability/Events/Customer/Sent", customs.num_samples) stats.reset_custom_events() @@ -1284,7 +1295,7 @@ def harvest(self, shutdown=False, flexible=False): error_data = stats.error_data() if error_data: - _logger.debug("Sending error data for harvest " "of %r.", self._app_name) + _logger.debug("Sending error data for harvest of %r.", self._app_name) self._active_session.send_errors(error_data) @@ -1294,19 +1305,19 @@ def harvest(self, shutdown=False, flexible=False): with connections: if configuration.slow_sql.enabled: - _logger.debug("Processing slow SQL data " "for harvest of %r.", self._app_name) + _logger.debug("Processing slow SQL data for harvest of %r.", self._app_name) slow_sql_data = stats.slow_sql_data(connections) if slow_sql_data: - _logger.debug("Sending slow SQL data for " "harvest of %r.", self._app_name) + _logger.debug("Sending slow SQL data for harvest of %r.", self._app_name) self._active_session.send_sql_traces(slow_sql_data) slow_transaction_data = stats.transaction_trace_data(connections) if slow_transaction_data: - _logger.debug("Sending slow transaction " "data for harvest of %r.", self._app_name) + _logger.debug("Sending slow transaction data for harvest of %r.", self._app_name) self._active_session.send_transaction_traces(slow_transaction_data) @@ -1338,7 +1349,7 @@ def harvest(self, shutdown=False, flexible=False): # Send metrics self._active_session.send_metric_data(self._period_start, period_end, metric_data) - _logger.debug("Done sending data for harvest of " "%r.", self._app_name) + _logger.debug("Done sending data for harvest of %r.", self._app_name) stats.reset_metric_stats() @@ -1355,7 +1366,7 @@ def harvest(self, shutdown=False, flexible=False): # Fetch agent commands sent from the data collector # and process them. - _logger.debug("Process agent commands during " "harvest of %r.", self._app_name) + _logger.debug("Process agent commands during harvest of %r.", self._app_name) self.process_agent_commands() # Send the accumulated profile data back to the data @@ -1366,7 +1377,7 @@ def harvest(self, shutdown=False, flexible=False): # results last ensures we send back that data from # the stopped profiling session immediately. 
- _logger.debug("Send profiling data for harvest of " "%r.", self._app_name) + _logger.debug("Send profiling data for harvest of %r.", self._app_name) self.report_profile_data() @@ -1422,7 +1433,7 @@ def harvest(self, shutdown=False, flexible=False): exc_type = sys.exc_info()[0] - internal_metric("Supportability/Python/Harvest/" "Exception/%s" % callable_name(exc_type), 1) + internal_metric("Supportability/Python/Harvest/Exception/%s" % callable_name(exc_type), 1) if self._period_start != period_end: self._stats_engine.rollback(stats) @@ -1435,7 +1446,7 @@ def harvest(self, shutdown=False, flexible=False): exc_type = sys.exc_info()[0] - internal_metric("Supportability/Python/Harvest/" "Exception/%s" % callable_name(exc_type), 1) + internal_metric("Supportability/Python/Harvest/Exception/%s" % callable_name(exc_type), 1) self._discard_count += 1 @@ -1445,7 +1456,7 @@ def harvest(self, shutdown=False, flexible=False): exc_type = sys.exc_info()[0] - internal_metric("Supportability/Python/Harvest/" "Exception/%s" % callable_name(exc_type), 1) + internal_metric("Supportability/Python/Harvest/Exception/%s" % callable_name(exc_type), 1) _logger.exception( "Unexpected exception when attempting " @@ -1478,7 +1489,7 @@ def report_profile_data(self): for profile_data in self.profile_manager.profile_data(self._app_name): if profile_data: - _logger.debug("Reporting thread profiling session data " "for %r.", self._app_name) + _logger.debug("Reporting thread profiling session data for %r.", self._app_name) self._active_session.send_profile_data(profile_data) def internal_agent_shutdown(self, restart=False): @@ -1571,11 +1582,11 @@ def process_agent_commands(self): if cmd_handler is None: _logger.debug( - "Received unknown agent command " "%r from the data collector for %r.", cmd_name, self._app_name + "Received unknown agent command %r from the data collector for %r.", cmd_name, self._app_name ) continue - _logger.debug("Process agent command %r from the data " "collector for %r.", cmd_name, self._app_name) + _logger.debug("Process agent command %r from the data collector for %r.", cmd_name, self._app_name) cmd_res = cmd_handler(cmd_id, **cmd_args) diff --git a/newrelic/core/profile_sessions.py b/newrelic/core/profile_sessions.py index 1603ebc33a..663e90fe4e 100644 --- a/newrelic/core/profile_sessions.py +++ b/newrelic/core/profile_sessions.py @@ -12,23 +12,20 @@ # See the License for the specific language governing permissions and # limitations under the License. -import os +import base64 import logging -import time +import os import threading +import time import zlib -import base64 - -from collections import deque, defaultdict +from collections import defaultdict, deque -import newrelic.packages.six as six import newrelic - +import newrelic.packages.six as six +from newrelic.common.encoding_utils import json_encode from newrelic.core.config import global_settings from newrelic.core.trace_cache import trace_cache -from newrelic.common.encoding_utils import json_encode - try: from sys import intern except ImportError: @@ -36,7 +33,7 @@ _logger = logging.getLogger(__name__) -AGENT_PACKAGE_DIRECTORY = os.path.dirname(newrelic.__file__) + '/' +AGENT_PACKAGE_DIRECTORY = os.path.dirname(newrelic.__file__) + "/" class SessionState(object): @@ -45,8 +42,7 @@ class SessionState(object): def format_stack_trace(frame, thread_category): - """Formats the frame obj into a list of stack trace tuples. 
- """ + """Formats the frame obj into a list of stack trace tuples.""" stack_trace = deque() @@ -74,8 +70,7 @@ def format_stack_trace(frame, thread_category): # though as we still need to seem them in that case so can # debug what the agent itself is doing. - if (thread_category != 'AGENT' and - filename.startswith(AGENT_PACKAGE_DIRECTORY)): + if thread_category != "AGENT" and filename.startswith(AGENT_PACKAGE_DIRECTORY): continue if not stack_trace: @@ -102,12 +97,11 @@ def collect_stack_traces(include_nr_threads=False): python threads. """ - for (txn, thread_id, thread_category, frame) in \ - trace_cache().active_threads(): + for (txn, thread_id, thread_category, frame) in trace_cache().active_threads(): # Skip NR Threads unless explicitly requested. - if (thread_category == 'AGENT') and (not include_nr_threads): + if (thread_category == "AGENT") and (not include_nr_threads): continue stack_trace = format_stack_trace(frame, thread_category) @@ -128,6 +122,7 @@ class ProfileSessionManager(object): instantiate directly from this class. Instead use profile_session_manager() """ + _lock = threading.Lock() _instance = None @@ -155,8 +150,7 @@ def __init__(self): self.profile_agent_code = False self.sample_period_s = 0.1 - def start_profile_session(self, app_name, profile_id, stop_time, - sample_period_s=0.1, profile_agent_code=False): + def start_profile_session(self, app_name, profile_id, stop_time, sample_period_s=0.1, profile_agent_code=False): """Start a new profiler session. If a full_profiler is already running, do nothing and return false. @@ -187,9 +181,8 @@ def start_profile_session(self, app_name, profile_id, stop_time, # if a background thread doesn't already exist. if not self._profiler_thread_running: - self._profiler_thread = threading.Thread( - target=self._profiler_loop, name='NR-Profiler-Thread') - self._profiler_thread.setDaemon(True) + self._profiler_thread = threading.Thread(target=self._profiler_loop, name="NR-Profiler-Thread") + self._profiler_thread.daemon = True self._profiler_thread.start() self._profiler_thread_running = True @@ -208,12 +201,10 @@ def stop_profile_session(self, app_name): # harvest thread is starting/stopping new sessions. 
with self._lock: - if ((self.full_profile_session is not None) and (app_name == - self.full_profile_app)): + if (self.full_profile_session is not None) and (app_name == self.full_profile_app): self.full_profile_session.state = SessionState.FINISHED self.full_profile_session.actual_stop_time_s = time.time() - self.finished_sessions[app_name].append( - self.full_profile_session) + self.finished_sessions[app_name].append(self.full_profile_session) self.full_profile_session = None self.full_profile_app = None @@ -229,11 +220,14 @@ def profile_data(self, app_name): """ with self._lock: for session in self.finished_sessions[app_name]: - _logger.debug('Reporting final thread profiling data for ' - '%d transactions over a period of %.2f seconds ' - 'and %d samples.', session.transaction_count, - time.time() - session.start_time_s, - session.sample_count) + _logger.debug( + "Reporting final thread profiling data for " + "%d transactions over a period of %.2f seconds " + "and %d samples.", + session.transaction_count, + time.time() - session.start_time_s, + session.sample_count, + ) yield session.profile_data() @@ -251,15 +245,13 @@ def _profiler_loop(self): while True: - for category, stack in collect_stack_traces( - self.profile_agent_code): + for category, stack in collect_stack_traces(self.profile_agent_code): # Merge the stack_trace to the call tree only for # full_profile_session. if self.full_profile_session: - self.full_profile_session.update_call_tree(category, - stack) + self.full_profile_session.update_call_tree(category, stack) self.update_profile_sessions() @@ -281,12 +273,10 @@ def update_profile_sessions(self): self.full_profile_session.sample_count += 1 if time.time() >= self.full_profile_session.stop_time_s: self.stop_profile_session(self.full_profile_app) - _logger.info('Finished thread profiling session.') + _logger.info("Finished thread profiling session.") def shutdown(self, app_name): - """Stop all profile sessions running on the given app_name. - - """ + """Stop all profile sessions running on the given app_name.""" # Check if we need to stop the full profiler. @@ -306,8 +296,7 @@ def __init__(self, profile_id, stop_time): self.reset_profile_data() def reset_profile_data(self): - self.call_buckets = {'REQUEST': {}, 'AGENT': {}, 'BACKGROUND': {}, - 'OTHER': {}} + self.call_buckets = {"REQUEST": {}, "AGENT": {}, "BACKGROUND": {}, "OTHER": {}} self._node_list = [] self.start_time_s = time.time() self.sample_count = 0 @@ -381,8 +370,7 @@ def _prune_call_trees(self, limit): # categories in UI, the duplicates only appear as one after the # UI merges them. - self._node_list.sort(key=lambda x: (x.call_count, -x.depth), - reverse=True) + self._node_list.sort(key=lambda x: (x.call_count, -x.depth), reverse=True) for node in self._node_list[limit:]: node.ignore = True @@ -419,23 +407,30 @@ def profile_data(self): # this point to cut its size. 
if settings.debug.log_thread_profile_payload: - _logger.debug('Encoding thread profile data where ' - 'payload=%r.', flat_tree) + _logger.debug("Encoding thread profile data where payload=%r.", flat_tree) json_call_tree = json_encode(flat_tree) level = settings.agent_limits.data_compression_level level = level or zlib.Z_DEFAULT_COMPRESSION - encoded_tree = base64.standard_b64encode( - zlib.compress(six.b(json_call_tree), level)) + encoded_tree = base64.standard_b64encode(zlib.compress(six.b(json_call_tree), level)) if six.PY3: - encoded_tree = encoded_tree.decode('Latin-1') - - profile = [[self.profile_id, self.start_time_s * 1000, - (self.actual_stop_time_s or time.time()) * 1000, self.sample_count, - encoded_tree, thread_count, 0, None]] + encoded_tree = encoded_tree.decode("Latin-1") + + profile = [ + [ + self.profile_id, + self.start_time_s * 1000, + (self.actual_stop_time_s or time.time()) * 1000, + self.sample_count, + encoded_tree, + thread_count, + 0, + None, + ] + ] # Reset the data structures to default. @@ -461,14 +456,11 @@ def flatten(self): # are labeled with an @ sign in the second element of the tuple. if func_line == exec_line: - method_data = (filename, '@%s#%s' % (func_name, func_line), - exec_line) + method_data = (filename, "@%s#%s" % (func_name, func_line), exec_line) else: - method_data = (filename, '%s#%s' % (func_name, func_line), - exec_line) + method_data = (filename, "%s#%s" % (func_name, func_line), exec_line) - return [method_data, self.call_count, 0, - [x.flatten() for x in self.children.values() if not x.ignore]] + return [method_data, self.call_count, 0, [x.flatten() for x in self.children.values() if not x.ignore]] def profile_session_manager(): diff --git a/newrelic/core/stats_engine.py b/newrelic/core/stats_engine.py index 05984d364b..3806d41af4 100644 --- a/newrelic/core/stats_engine.py +++ b/newrelic/core/stats_engine.py @@ -559,7 +559,7 @@ def record_time_metrics(self, metrics): def record_exception(self, exc=None, value=None, tb=None, params=None, ignore_errors=None): # Deprecation Warning warnings.warn( - ("The record_exception function is deprecated. Please use the " "new api named notice_error instead."), + ("The record_exception function is deprecated. 
Please use the new api named notice_error instead."), DeprecationWarning, ) @@ -666,7 +666,7 @@ def notice_error(self, error=None, attributes=None, expected=None, ignore=None, if settings.high_security: if attributes: - _logger.debug("Cannot add custom parameters in " "High Security Mode.") + _logger.debug("Cannot add custom parameters in High Security Mode.") user_attributes = [] else: custom_attributes = {} @@ -1218,7 +1218,7 @@ def transaction_trace_data(self, connections): data = [transaction_trace, list(trace.string_table.values())] if self.__settings.debug.log_transaction_trace_payload: - _logger.debug("Encoding slow transaction data where " "payload=%r.", data) + _logger.debug("Encoding slow transaction data where payload=%r.", data) json_data = json_encode(data) @@ -1283,7 +1283,7 @@ def slow_transaction_data(self): data = [transaction_trace, list(self.__slow_transaction.string_table.values())] if self.__settings.debug.log_transaction_trace_payload: - _logger.debug("Encoding slow transaction data where " "payload=%r.", data) + _logger.debug("Encoding slow transaction data where payload=%r.", data) json_data = json_encode(data) diff --git a/newrelic/packages/urllib3/__init__.py b/newrelic/packages/urllib3/__init__.py index 09024d4798..fe86b59d78 100644 --- a/newrelic/packages/urllib3/__init__.py +++ b/newrelic/packages/urllib3/__init__.py @@ -1,25 +1,23 @@ """ -urllib3 - Thread-safe connection pooling and re-using. +Python HTTP library with thread-safe connection pooling, file post support, user friendly, and more """ from __future__ import absolute_import -import warnings -from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool, connection_from_url +# Set default logging handler to avoid "No handler found" warnings. +import logging +import warnings +from logging import NullHandler from . import exceptions +from ._version import __version__ +from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool, connection_from_url from .filepost import encode_multipart_formdata from .poolmanager import PoolManager, ProxyManager, proxy_from_url from .response import HTTPResponse from .util.request import make_headers -from .util.url import get_host -from .util.timeout import Timeout from .util.retry import Retry -from ._version import __version__ - - -# Set default logging handler to avoid "No handler found" warnings. -import logging -from logging import NullHandler +from .util.timeout import Timeout +from .util.url import get_host __author__ = "Andrey Petrov (andrey.petrov@shazow.net)" __license__ = "MIT" diff --git a/newrelic/packages/urllib3/_collections.py b/newrelic/packages/urllib3/_collections.py index 019d1511d5..da9857e986 100644 --- a/newrelic/packages/urllib3/_collections.py +++ b/newrelic/packages/urllib3/_collections.py @@ -17,9 +17,10 @@ def __exit__(self, exc_type, exc_value, traceback): from collections import OrderedDict -from .exceptions import InvalidHeader -from .packages.six import iterkeys, itervalues, PY3 +from .exceptions import InvalidHeader +from .packages import six +from .packages.six import iterkeys, itervalues __all__ = ["RecentlyUsedContainer", "HTTPHeaderDict"] @@ -174,7 +175,7 @@ def __eq__(self, other): def __ne__(self, other): return not self.__eq__(other) - if not PY3: # Python 2 + if six.PY2: # Python 2 iterkeys = MutableMapping.iterkeys itervalues = MutableMapping.itervalues @@ -190,7 +191,7 @@ def __iter__(self): def pop(self, key, default=__marker): """D.pop(k[,d]) -> v, remove specified key and return the corresponding value. 
- If key is not found, d is returned if given, otherwise KeyError is raised. + If key is not found, d is returned if given, otherwise KeyError is raised. """ # Using the MutableMapping function directly fails due to the private marker. # Using ordinary dict.pop would expose the internal structures. diff --git a/newrelic/packages/urllib3/_version.py b/newrelic/packages/urllib3/_version.py index 8697ad85d5..5141d980bb 100644 --- a/newrelic/packages/urllib3/_version.py +++ b/newrelic/packages/urllib3/_version.py @@ -1,2 +1,2 @@ # This file is protected via CODEOWNERS -__version__ = "1.25.10" +__version__ = "1.26.7" diff --git a/newrelic/packages/urllib3/connection.py b/newrelic/packages/urllib3/connection.py index d67c0a70b3..60f70f794a 100644 --- a/newrelic/packages/urllib3/connection.py +++ b/newrelic/packages/urllib3/connection.py @@ -1,14 +1,18 @@ from __future__ import absolute_import -import re + import datetime import logging import os +import re import socket -from socket import error as SocketError, timeout as SocketTimeout import warnings +from socket import error as SocketError +from socket import timeout as SocketTimeout + from .packages import six from .packages.six.moves.http_client import HTTPConnection as _HTTPConnection from .packages.six.moves.http_client import HTTPException # noqa: F401 +from .util.proxy import create_proxy_ssl_context try: # Compiled with SSL? import ssl @@ -30,66 +34,69 @@ class ConnectionError(Exception): pass +try: # Python 3: + # Not a no-op, we're adding this to the namespace so it can be imported. + BrokenPipeError = BrokenPipeError +except NameError: # Python 2: + + class BrokenPipeError(Exception): + pass + + +from ._collections import HTTPHeaderDict # noqa (historical, removed in v2) +from ._version import __version__ from .exceptions import ( - NewConnectionError, ConnectTimeoutError, + NewConnectionError, SubjectAltNameWarning, SystemTimeWarning, ) -from .packages.ssl_match_hostname import match_hostname, CertificateError - +from .packages.ssl_match_hostname import CertificateError, match_hostname +from .util import SKIP_HEADER, SKIPPABLE_HEADERS, connection from .util.ssl_ import ( - resolve_cert_reqs, - resolve_ssl_version, assert_fingerprint, create_urllib3_context, + is_ipaddress, + resolve_cert_reqs, + resolve_ssl_version, ssl_wrap_socket, ) - -from .util import connection - -from ._collections import HTTPHeaderDict - log = logging.getLogger(__name__) port_by_scheme = {"http": 80, "https": 443} # When it comes time to update this value as a part of regular maintenance # (ie test_recent_date is failing) update it to ~6 months before the current date. -RECENT_DATE = datetime.date(2019, 1, 1) +RECENT_DATE = datetime.date(2020, 7, 1) _CONTAINS_CONTROL_CHAR_RE = re.compile(r"[^-!#$%&'*+.^_`|~0-9a-zA-Z]") -class DummyConnection(object): - """Used to detect a failed ConnectionCls import.""" - - pass - - class HTTPConnection(_HTTPConnection, object): """ - Based on httplib.HTTPConnection but provides an extra constructor + Based on :class:`http.client.HTTPConnection` but provides an extra constructor backwards-compatibility layer between older and newer Pythons. Additional keyword parameters are used to configure attributes of the connection. Accepted parameters include: - - ``strict``: See the documentation on :class:`urllib3.connectionpool.HTTPConnectionPool` - - ``source_address``: Set the source address for the current connection. - - ``socket_options``: Set specific options on the underlying socket. 
If not specified, then - defaults are loaded from ``HTTPConnection.default_socket_options`` which includes disabling - Nagle's algorithm (sets TCP_NODELAY to 1) unless the connection is behind a proxy. + - ``strict``: See the documentation on :class:`urllib3.connectionpool.HTTPConnectionPool` + - ``source_address``: Set the source address for the current connection. + - ``socket_options``: Set specific options on the underlying socket. If not specified, then + defaults are loaded from ``HTTPConnection.default_socket_options`` which includes disabling + Nagle's algorithm (sets TCP_NODELAY to 1) unless the connection is behind a proxy. - For example, if you wish to enable TCP Keep Alive in addition to the defaults, - you might pass:: + For example, if you wish to enable TCP Keep Alive in addition to the defaults, + you might pass: - HTTPConnection.default_socket_options + [ - (socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1), - ] + .. code-block:: python - Or you may want to disable the defaults by passing an empty list (e.g., ``[]``). + HTTPConnection.default_socket_options + [ + (socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1), + ] + + Or you may want to disable the defaults by passing an empty list (e.g., ``[]``). """ default_port = port_by_scheme["http"] @@ -101,6 +108,10 @@ class HTTPConnection(_HTTPConnection, object): #: Whether this connection verifies the host's certificate. is_verified = False + #: Whether this proxy connection (if used) verifies the proxy host's + #: certificate. + proxy_is_verified = None + def __init__(self, *args, **kw): if not six.PY2: kw.pop("strict", None) @@ -112,6 +123,10 @@ def __init__(self, *args, **kw): #: provided, we use the default options. self.socket_options = kw.pop("socket_options", self.default_socket_options) + # Proxy options provided by the user. + self.proxy = kw.pop("proxy", None) + self.proxy_config = kw.pop("proxy_config", None) + _HTTPConnection.__init__(self, *args, **kw) @property @@ -144,7 +159,7 @@ def host(self, value): self._dns_host = value def _new_conn(self): - """ Establish a socket connection and set nodelay settings on it. + """Establish a socket connection and set nodelay settings on it. :return: New socket connection. """ @@ -174,10 +189,13 @@ def _new_conn(self): return conn + def _is_using_tunnel(self): + # Google App Engine's httplib does not define _tunnel_host + return getattr(self, "_tunnel_host", None) + def _prepare_conn(self, conn): self.sock = conn - # Google App Engine's httplib does not define _tunnel_host - if getattr(self, "_tunnel_host", None): + if self._is_using_tunnel(): # TODO: Fix tunnel so it doesn't depend on self.sock state. self._tunnel() # Mark this connection as not reusable @@ -188,7 +206,9 @@ def connect(self): self._prepare_conn(conn) def putrequest(self, method, url, *args, **kwargs): - """Send a request to the server""" + """ """ + # Empty docstring because the indentation of CPython's implementation + # is broken but we don't want this method in our documentation. 
match = _CONTAINS_CONTROL_CHAR_RE.search(method) if match: raise ValueError( @@ -198,20 +218,43 @@ def putrequest(self, method, url, *args, **kwargs): return _HTTPConnection.putrequest(self, method, url, *args, **kwargs) + def putheader(self, header, *values): + """ """ + if not any(isinstance(v, str) and v == SKIP_HEADER for v in values): + _HTTPConnection.putheader(self, header, *values) + elif six.ensure_str(header.lower()) not in SKIPPABLE_HEADERS: + raise ValueError( + "urllib3.util.SKIP_HEADER only supports '%s'" + % ("', '".join(map(str.title, sorted(SKIPPABLE_HEADERS))),) + ) + + def request(self, method, url, body=None, headers=None): + if headers is None: + headers = {} + else: + # Avoid modifying the headers passed into .request() + headers = headers.copy() + if "user-agent" not in (six.ensure_str(k.lower()) for k in headers): + headers["User-Agent"] = _get_default_user_agent() + super(HTTPConnection, self).request(method, url, body=body, headers=headers) + def request_chunked(self, method, url, body=None, headers=None): """ Alternative to the common request method, which sends the body with chunked encoding and not as one block """ - headers = HTTPHeaderDict(headers if headers is not None else {}) - skip_accept_encoding = "accept-encoding" in headers - skip_host = "host" in headers + headers = headers or {} + header_keys = set([six.ensure_str(k.lower()) for k in headers]) + skip_accept_encoding = "accept-encoding" in header_keys + skip_host = "host" in header_keys self.putrequest( method, url, skip_accept_encoding=skip_accept_encoding, skip_host=skip_host ) + if "user-agent" not in header_keys: + self.putheader("User-Agent", _get_default_user_agent()) for header, value in headers.items(): self.putheader(header, value) - if "transfer-encoding" not in headers: + if "transfer-encoding" not in header_keys: self.putheader("Transfer-Encoding", "chunked") self.endheaders() @@ -236,6 +279,11 @@ def request_chunked(self, method, url, body=None, headers=None): class HTTPSConnection(HTTPConnection): + """ + Many of the parameters to this constructor are passed to the underlying SSL + socket by means of :py:func:`urllib3.util.ssl_wrap_socket`. + """ + default_port = port_by_scheme["https"] cert_reqs = None @@ -244,6 +292,7 @@ class HTTPSConnection(HTTPConnection): ca_cert_data = None ssl_version = None assert_fingerprint = None + tls_in_tls_required = False def __init__( self, @@ -308,10 +357,15 @@ def connect(self): # Add certificate verification conn = self._new_conn() hostname = self.host + tls_in_tls = False + + if self._is_using_tunnel(): + if self.tls_in_tls_required: + conn = self._connect_tls_proxy(hostname, conn) + tls_in_tls = True - # Google App Engine's httplib does not define _tunnel_host - if getattr(self, "_tunnel_host", None): self.sock = conn + # Calls self._set_hostport(), so self.host is # self._tunnel_host below. self._tunnel() @@ -369,8 +423,26 @@ def connect(self): ca_cert_data=self.ca_cert_data, server_hostname=server_hostname, ssl_context=context, + tls_in_tls=tls_in_tls, ) + # If we're using all defaults and the connection + # is TLSv1 or TLSv1.1 we throw a DeprecationWarning + # for the host. + if ( + default_ssl_context + and self.ssl_version is None + and hasattr(self.sock, "version") + and self.sock.version() in {"TLSv1", "TLSv1.1"} + ): + warnings.warn( + "Negotiating TLSv1/TLSv1.1 by default is deprecated " + "and will be disabled in urllib3 v2.0.0. 
Connecting to " + "'%s' with '%s' can be enabled by explicitly opting-in " + "with 'ssl_version'" % (self.host, self.sock.version()), + DeprecationWarning, + ) + if self.assert_fingerprint: assert_fingerprint( self.sock.getpeercert(binary_form=True), self.assert_fingerprint @@ -401,8 +473,71 @@ def connect(self): or self.assert_fingerprint is not None ) + def _connect_tls_proxy(self, hostname, conn): + """ + Establish a TLS connection to the proxy using the provided SSL context. + """ + proxy_config = self.proxy_config + ssl_context = proxy_config.ssl_context + if ssl_context: + # If the user provided a proxy context, we assume CA and client + # certificates have already been set + return ssl_wrap_socket( + sock=conn, + server_hostname=hostname, + ssl_context=ssl_context, + ) + + ssl_context = create_proxy_ssl_context( + self.ssl_version, + self.cert_reqs, + self.ca_certs, + self.ca_cert_dir, + self.ca_cert_data, + ) + + # If no cert was provided, use only the default options for server + # certificate validation + socket = ssl_wrap_socket( + sock=conn, + ca_certs=self.ca_certs, + ca_cert_dir=self.ca_cert_dir, + ca_cert_data=self.ca_cert_data, + server_hostname=hostname, + ssl_context=ssl_context, + ) + + if ssl_context.verify_mode != ssl.CERT_NONE and not getattr( + ssl_context, "check_hostname", False + ): + # While urllib3 attempts to always turn off hostname matching from + # the TLS library, this cannot always be done. So we check whether + # the TLS Library still thinks it's matching hostnames. + cert = socket.getpeercert() + if not cert.get("subjectAltName", ()): + warnings.warn( + ( + "Certificate for {0} has no `subjectAltName`, falling back to check for a " + "`commonName` for now. This feature is being removed by major browsers and " + "deprecated by RFC 2818. (See https://github.com/urllib3/urllib3/issues/497 " + "for details.)".format(hostname) + ), + SubjectAltNameWarning, + ) + _match_hostname(cert, hostname) + + self.proxy_is_verified = ssl_context.verify_mode == ssl.CERT_REQUIRED + return socket + def _match_hostname(cert, asserted_hostname): + # Our upstream implementation of ssl.match_hostname() + # only applies this normalization to IP addresses so it doesn't + # match DNS SANs so we do the same thing! 
+ stripped_hostname = asserted_hostname.strip("u[]") + if is_ipaddress(stripped_hostname): + asserted_hostname = stripped_hostname + try: match_hostname(cert, asserted_hostname) except CertificateError as e: @@ -417,6 +552,16 @@ def _match_hostname(cert, asserted_hostname): raise +def _get_default_user_agent(): + return "python-urllib3/%s" % __version__ + + +class DummyConnection(object): + """Used to detect a failed ConnectionCls import.""" + + pass + + if not ssl: HTTPSConnection = DummyConnection # noqa: F811 diff --git a/newrelic/packages/urllib3/connectionpool.py b/newrelic/packages/urllib3/connectionpool.py index 174fe6c2e1..8dccf4bc2a 100644 --- a/newrelic/packages/urllib3/connectionpool.py +++ b/newrelic/packages/urllib3/connectionpool.py @@ -1,57 +1,53 @@ from __future__ import absolute_import + import errno import logging +import socket import sys import warnings +from socket import error as SocketError +from socket import timeout as SocketTimeout -from socket import error as SocketError, timeout as SocketTimeout -import socket - - +from .connection import ( + BaseSSLError, + BrokenPipeError, + DummyConnection, + HTTPConnection, + HTTPException, + HTTPSConnection, + VerifiedHTTPSConnection, + port_by_scheme, +) from .exceptions import ( ClosedPoolError, - ProtocolError, EmptyPoolError, HeaderParsingError, HostChangedError, + InsecureRequestWarning, LocationValueError, MaxRetryError, + NewConnectionError, + ProtocolError, ProxyError, ReadTimeoutError, SSLError, TimeoutError, - InsecureRequestWarning, - NewConnectionError, ) -from .packages.ssl_match_hostname import CertificateError from .packages import six from .packages.six.moves import queue -from .connection import ( - port_by_scheme, - DummyConnection, - HTTPConnection, - HTTPSConnection, - VerifiedHTTPSConnection, - HTTPException, - BaseSSLError, -) +from .packages.ssl_match_hostname import CertificateError from .request import RequestMethods from .response import HTTPResponse - from .util.connection import is_connection_dropped +from .util.proxy import connection_requires_http_tunnel +from .util.queue import LifoQueue from .util.request import set_file_position from .util.response import assert_header_parsing from .util.retry import Retry from .util.timeout import Timeout -from .util.url import ( - get_host, - parse_url, - Url, - _normalize_host as normalize_host, - _encode_target, -) -from .util.queue import LifoQueue - +from .util.url import Url, _encode_target +from .util.url import _normalize_host as normalize_host +from .util.url import get_host, parse_url xrange = six.moves.xrange @@ -111,16 +107,16 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods): :param host: Host used for this HTTP Connection (e.g. "localhost"), passed into - :class:`httplib.HTTPConnection`. + :class:`http.client.HTTPConnection`. :param port: Port used for this HTTP Connection (None is equivalent to 80), passed - into :class:`httplib.HTTPConnection`. + into :class:`http.client.HTTPConnection`. :param strict: Causes BadStatusLine to be raised if the status line can't be parsed as a valid HTTP/1.0 or 1.1 status line, passed into - :class:`httplib.HTTPConnection`. + :class:`http.client.HTTPConnection`. .. note:: Only works in Python 2. This parameter is ignored in Python 3. 
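Upstream urllib3 1.26 threads `proxy`, `proxy_config`, and `tls_in_tls_required` through the connection pool so it can negotiate TLS with an HTTPS proxy and then tunnel a second TLS session through it. A brief usage sketch against the vendored copy; the proxy and target URLs are placeholders and network access is assumed:

    from newrelic.packages.urllib3 import ProxyManager

    # An https:// proxy URL makes urllib3 1.26+ speak TLS to the proxy
    # itself before issuing CONNECT, then run the end-to-end TLS session
    # inside that tunnel ("TLS in TLS").
    proxy = ProxyManager("https://proxy.example.com:8443/")
    response = proxy.request("GET", "https://service.example.com/")
    print(response.status)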
@@ -154,11 +150,11 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods): :param _proxy: Parsed proxy URL, should not be used directly, instead, see - :class:`urllib3.connectionpool.ProxyManager`" + :class:`urllib3.ProxyManager` :param _proxy_headers: A dictionary with proxy headers, should not be used directly, - instead, see :class:`urllib3.connectionpool.ProxyManager`" + instead, see :class:`urllib3.ProxyManager` :param \\**conn_kw: Additional parameters are used to create fresh :class:`urllib3.connection.HTTPConnection`, @@ -181,6 +177,7 @@ def __init__( retries=None, _proxy=None, _proxy_headers=None, + _proxy_config=None, **conn_kw ): ConnectionPool.__init__(self, host, port) @@ -202,6 +199,7 @@ def __init__( self.proxy = _proxy self.proxy_headers = _proxy_headers or {} + self.proxy_config = _proxy_config # Fill the queue up so that doing get() on it will block properly for _ in xrange(maxsize): @@ -218,6 +216,9 @@ def __init__( # list. self.conn_kw.setdefault("socket_options", []) + self.conn_kw["proxy"] = self.proxy + self.conn_kw["proxy_config"] = self.proxy_config + def _new_conn(self): """ Return a fresh :class:`HTTPConnection`. @@ -272,7 +273,7 @@ def _get_conn(self, timeout=None): conn.close() if getattr(conn, "auto_open", 1) == 0: # This is a proxied connection that has been mutated by - # httplib._tunnel() and cannot be reused (since it would + # http.client._tunnel() and cannot be reused (since it would # attempt to bypass the proxy) conn = None @@ -317,7 +318,7 @@ def _prepare_proxy(self, conn): pass def _get_timeout(self, timeout): - """ Helper that always returns a :class:`urllib3.util.Timeout` """ + """Helper that always returns a :class:`urllib3.util.Timeout`""" if timeout is _Default: return self.timeout.clone() @@ -384,12 +385,30 @@ def _make_request( self._raise_timeout(err=e, url=url, timeout_value=conn.timeout) raise - # conn.request() calls httplib.*.request, not the method in + # conn.request() calls http.client.*.request, not the method in # urllib3.request. It also calls makefile (recv) on the socket. - if chunked: - conn.request_chunked(method, url, **httplib_request_kw) - else: - conn.request(method, url, **httplib_request_kw) + try: + if chunked: + conn.request_chunked(method, url, **httplib_request_kw) + else: + conn.request(method, url, **httplib_request_kw) + + # We are swallowing BrokenPipeError (errno.EPIPE) since the server is + # legitimately able to close the connection after sending a valid response. + # With this behaviour, the received response is still readable. + except BrokenPipeError: + # Python 3 + pass + except IOError as e: + # Python 2 and macOS/Linux + # EPIPE and ESHUTDOWN are BrokenPipeError on Python 2, and EPROTOTYPE is needed on macOS + # https://erickt.github.io/blog/2014/11/19/adventures-in-debugging-a-potential-osx-kernel-bug/ + if e.errno not in { + errno.EPIPE, + errno.ESHUTDOWN, + errno.EPROTOTYPE, + }: + raise # Reset the timeout for the recv() on the socket read_timeout = timeout_obj.read_timeout @@ -532,10 +551,12 @@ def urlopen( :param method: HTTP request method (such as GET, POST, PUT, etc.) + :param url: + The URL to perform the request on. + :param body: - Data to send in the request body (useful for creating - POST requests, see HTTPConnectionPool.post_url for - more convenience). + Data to send in the request body, either :class:`str`, :class:`bytes`, + an iterable of :class:`str`/:class:`bytes`, or a file-like object. 
:param headers: Dictionary of custom headers to send, such as User-Agent, @@ -565,7 +586,7 @@ def urlopen( :param assert_same_host: If ``True``, will make sure that the host of the pool requests is - consistent else will raise HostChangedError. When False, you can + consistent else will raise HostChangedError. When ``False``, you can use the pool on an HTTP proxy and request foreign hosts. :param timeout: @@ -602,6 +623,10 @@ def urlopen( Additional parameters are passed to :meth:`urllib3.response.HTTPResponse.from_httplib` """ + + parsed_url = parse_url(url) + destination_scheme = parsed_url.scheme + if headers is None: headers = self.headers @@ -619,7 +644,7 @@ def urlopen( if url.startswith("/"): url = six.ensure_str(_encode_target(url)) else: - url = six.ensure_str(parse_url(url).url) + url = six.ensure_str(parsed_url.url) conn = None @@ -634,10 +659,14 @@ def urlopen( # [1] release_this_conn = release_conn - # Merge the proxy headers. Only do this in HTTP. We have to copy the - # headers dict so we can safely change it without those changes being - # reflected in anyone else's copy. - if self.scheme == "http": + http_tunnel_required = connection_requires_http_tunnel( + self.proxy, self.proxy_config, destination_scheme + ) + + # Merge the proxy headers. Only done when not using HTTP CONNECT. We + # have to copy the headers dict so we can safely change it without those + # changes being reflected in anyone else's copy. + if not http_tunnel_required: headers = headers.copy() headers.update(self.proxy_headers) @@ -663,7 +692,7 @@ def urlopen( is_new_proxy_conn = self.proxy is not None and not getattr( conn, "sock", None ) - if is_new_proxy_conn: + if is_new_proxy_conn and http_tunnel_required: self._prepare_proxy(conn) # Make the request on the httplib connection object. @@ -837,11 +866,7 @@ class HTTPSConnectionPool(HTTPConnectionPool): """ Same as :class:`.HTTPConnectionPool`, but HTTPS. - When Python is compiled with the :mod:`ssl` module, then - :class:`.VerifiedHTTPSConnection` is used, which *can* verify certificates, - instead of :class:`.HTTPSConnection`. - - :class:`.VerifiedHTTPSConnection` uses one of ``assert_fingerprint``, + :class:`.HTTPSConnection` uses one of ``assert_fingerprint``, ``assert_hostname`` and ``host`` in this order to verify connections. If ``assert_hostname`` is False, no verification is done. @@ -925,15 +950,22 @@ def _prepare_conn(self, conn): def _prepare_proxy(self, conn): """ - Establish tunnel connection early, because otherwise httplib - would improperly set Host: header to proxy's IP:port. + Establishes a tunnel connection through HTTP CONNECT. + + Tunnel connection is established early because otherwise httplib would + improperly set Host: header to proxy's IP:port. """ + conn.set_tunnel(self._proxy_host, self.port, self.proxy_headers) + + if self.proxy.scheme == "https": + conn.tls_in_tls_required = True + conn.connect() def _new_conn(self): """ - Return a fresh :class:`httplib.HTTPSConnection`. + Return a fresh :class:`http.client.HTTPSConnection`. """ self.num_connections += 1 log.debug( @@ -982,12 +1014,23 @@ def _validate_conn(self, conn): ( "Unverified HTTPS request is being made to host '%s'. " "Adding certificate verification is strongly advised. 
See: " - "https://urllib3.readthedocs.io/en/latest/advanced-usage.html" + "https://urllib3.readthedocs.io/en/1.26.x/advanced-usage.html" "#ssl-warnings" % conn.host ), InsecureRequestWarning, ) + if getattr(conn, "proxy_is_verified", None) is False: + warnings.warn( + ( + "Unverified HTTPS connection done to an HTTPS proxy. " + "Adding certificate verification is strongly advised. See: " + "https://urllib3.readthedocs.io/en/1.26.x/advanced-usage.html" + "#ssl-warnings" + ), + InsecureRequestWarning, + ) + def connection_from_url(url, **kw): """ diff --git a/newrelic/packages/urllib3/contrib/_securetransport/bindings.py b/newrelic/packages/urllib3/contrib/_securetransport/bindings.py index 453bed7deb..11524d400b 100644 --- a/newrelic/packages/urllib3/contrib/_securetransport/bindings.py +++ b/newrelic/packages/urllib3/contrib/_securetransport/bindings.py @@ -32,21 +32,23 @@ from __future__ import absolute_import import platform -from ctypes.util import find_library from ctypes import ( - c_void_p, - c_int32, + CDLL, + CFUNCTYPE, + POINTER, + c_bool, + c_byte, c_char_p, + c_int32, + c_long, c_size_t, - c_byte, c_uint32, c_ulong, - c_long, - c_bool, + c_void_p, ) -from ctypes import CDLL, POINTER, CFUNCTYPE -from urllib3.packages.six import raise_from +from ctypes.util import find_library +from urllib3.packages.six import raise_from if platform.system() != "Darwin": raise ImportError("Only macOS is supported") @@ -293,6 +295,13 @@ def load_cdll(name, macos10_16_path): Security.SSLSetProtocolVersionMax.argtypes = [SSLContextRef, SSLProtocol] Security.SSLSetProtocolVersionMax.restype = OSStatus + try: + Security.SSLSetALPNProtocols.argtypes = [SSLContextRef, CFArrayRef] + Security.SSLSetALPNProtocols.restype = OSStatus + except AttributeError: + # Supported only in 10.12+ + pass + Security.SecCopyErrorMessageString.argtypes = [OSStatus, c_void_p] Security.SecCopyErrorMessageString.restype = CFStringRef diff --git a/newrelic/packages/urllib3/contrib/_securetransport/low_level.py b/newrelic/packages/urllib3/contrib/_securetransport/low_level.py index e60168cac1..fa0b245d27 100644 --- a/newrelic/packages/urllib3/contrib/_securetransport/low_level.py +++ b/newrelic/packages/urllib3/contrib/_securetransport/low_level.py @@ -10,13 +10,13 @@ import base64 import ctypes import itertools -import re import os +import re import ssl +import struct import tempfile -from .bindings import Security, CoreFoundation, CFConst - +from .bindings import CFConst, CoreFoundation, Security # This regular expression is used to grab PEM data out of a PEM bundle. _PEM_CERTS_RE = re.compile( @@ -56,6 +56,51 @@ def _cf_dictionary_from_tuples(tuples): ) +def _cfstr(py_bstr): + """ + Given a Python binary data, create a CFString. + The string must be CFReleased by the caller. + """ + c_str = ctypes.c_char_p(py_bstr) + cf_str = CoreFoundation.CFStringCreateWithCString( + CoreFoundation.kCFAllocatorDefault, + c_str, + CFConst.kCFStringEncodingUTF8, + ) + return cf_str + + +def _create_cfstring_array(lst): + """ + Given a list of Python binary data, create an associated CFMutableArray. + The array must be CFReleased by the caller. + + Raises an ssl.SSLError on failure. 
+ """ + cf_arr = None + try: + cf_arr = CoreFoundation.CFArrayCreateMutable( + CoreFoundation.kCFAllocatorDefault, + 0, + ctypes.byref(CoreFoundation.kCFTypeArrayCallBacks), + ) + if not cf_arr: + raise MemoryError("Unable to allocate memory!") + for item in lst: + cf_str = _cfstr(item) + if not cf_str: + raise MemoryError("Unable to allocate memory!") + try: + CoreFoundation.CFArrayAppendValue(cf_arr, cf_str) + finally: + CoreFoundation.CFRelease(cf_str) + except BaseException as e: + if cf_arr: + CoreFoundation.CFRelease(cf_arr) + raise ssl.SSLError("Unable to allocate array: %s" % (e,)) + return cf_arr + + def _cf_string_to_unicode(value): """ Creates a Unicode string from a CFString object. Used entirely for error @@ -143,6 +188,7 @@ def _cert_array_from_pem(pem_bundle): # We only want to do that if an error occurs: otherwise, the caller # should free. CoreFoundation.CFRelease(cert_array) + raise return cert_array @@ -326,3 +372,26 @@ def _load_client_cert_chain(keychain, *paths): finally: for obj in itertools.chain(identities, certificates): CoreFoundation.CFRelease(obj) + + +TLS_PROTOCOL_VERSIONS = { + "SSLv2": (0, 2), + "SSLv3": (3, 0), + "TLSv1": (3, 1), + "TLSv1.1": (3, 2), + "TLSv1.2": (3, 3), +} + + +def _build_tls_unknown_ca_alert(version): + """ + Builds a TLS alert record for an unknown CA. + """ + ver_maj, ver_min = TLS_PROTOCOL_VERSIONS[version] + severity_fatal = 0x02 + description_unknown_ca = 0x30 + msg = struct.pack(">BB", severity_fatal, description_unknown_ca) + msg_len = len(msg) + record_type_alert = 0x15 + record = struct.pack(">BBBH", record_type_alert, ver_maj, ver_min, msg_len) + msg + return record diff --git a/newrelic/packages/urllib3/contrib/appengine.py b/newrelic/packages/urllib3/contrib/appengine.py index 9b7044ffb0..f91bdd6e77 100644 --- a/newrelic/packages/urllib3/contrib/appengine.py +++ b/newrelic/packages/urllib3/contrib/appengine.py @@ -39,24 +39,24 @@ """ from __future__ import absolute_import + import io import logging import warnings -from ..packages.six.moves.urllib.parse import urljoin from ..exceptions import ( HTTPError, HTTPWarning, MaxRetryError, ProtocolError, - TimeoutError, SSLError, + TimeoutError, ) - +from ..packages.six.moves.urllib.parse import urljoin from ..request import RequestMethods from ..response import HTTPResponse -from ..util.timeout import Timeout from ..util.retry import Retry +from ..util.timeout import Timeout from . import _appengine_environ try: @@ -90,7 +90,7 @@ class AppEngineManager(RequestMethods): * If you attempt to use this on App Engine Flexible, as full socket support is available. * If a request size is more than 10 megabytes. - * If a response size is more than 32 megabtyes. + * If a response size is more than 32 megabytes. * If you use an unsupported request method such as OPTIONS. Beyond those cases, it will raise normal urllib3 errors. @@ -111,7 +111,7 @@ def __init__( warnings.warn( "urllib3 is using URLFetch on Google App Engine sandbox instead " "of sockets. 
To use sockets directly instead of URLFetch see " - "https://urllib3.readthedocs.io/en/latest/reference/urllib3.contrib.html.", + "https://urllib3.readthedocs.io/en/1.26.x/reference/urllib3.contrib.html.", AppEnginePlatformWarning, ) diff --git a/newrelic/packages/urllib3/contrib/ntlmpool.py b/newrelic/packages/urllib3/contrib/ntlmpool.py index 1fd242a6e0..41a8fd174c 100644 --- a/newrelic/packages/urllib3/contrib/ntlmpool.py +++ b/newrelic/packages/urllib3/contrib/ntlmpool.py @@ -5,12 +5,21 @@ """ from __future__ import absolute_import +import warnings from logging import getLogger + from ntlm import ntlm from .. import HTTPSConnectionPool from ..packages.six.moves.http_client import HTTPSConnection +warnings.warn( + "The 'urllib3.contrib.ntlmpool' module is deprecated and will be removed " + "in urllib3 v2.0 release, urllib3 is not able to support it properly due " + "to reasons listed in issue: https://github.com/urllib3/urllib3/issues/2282. " + "If you are a user of this module please comment in the mentioned issue.", + DeprecationWarning, +) log = getLogger(__name__) diff --git a/newrelic/packages/urllib3/contrib/pyopenssl.py b/newrelic/packages/urllib3/contrib/pyopenssl.py index 81a80651d4..def83afdb2 100644 --- a/newrelic/packages/urllib3/contrib/pyopenssl.py +++ b/newrelic/packages/urllib3/contrib/pyopenssl.py @@ -1,27 +1,31 @@ """ -SSL with SNI_-support for Python 2. Follow these instructions if you would -like to verify SSL certificates in Python 2. Note, the default libraries do +TLS with SNI_-support for Python 2. Follow these instructions if you would +like to verify TLS certificates in Python 2. Note, the default libraries do *not* do certificate checking; you need to do additional work to validate certificates yourself. This needs the following packages installed: -* pyOpenSSL (tested with 16.0.0) -* cryptography (minimum 1.3.4, from pyopenssl) -* idna (minimum 2.0, from cryptography) +* `pyOpenSSL`_ (tested with 16.0.0) +* `cryptography`_ (minimum 1.3.4, from pyopenssl) +* `idna`_ (minimum 2.0, from cryptography) However, pyopenssl depends on cryptography, which depends on idna, so while we use all three directly here we end up having relatively few packages required. You can install them with the following command: - pip install pyopenssl cryptography idna +.. code-block:: bash + + $ python -m pip install pyopenssl cryptography idna To activate certificate checking, call :func:`~urllib3.contrib.pyopenssl.inject_into_urllib3` from your Python code before you begin making HTTP requests. This can be done in a ``sitecustomize`` module, or at any other time before your application begins using ``urllib3``, -like this:: +like this: + +.. code-block:: python try: import urllib3.contrib.pyopenssl @@ -35,11 +39,11 @@ Activating this module also has the positive side effect of disabling SSL/TLS compression in Python 2 (see `CRIME attack`_). -If you want to configure the default list of supported cipher suites, you can -set the ``urllib3.contrib.pyopenssl.DEFAULT_SSL_CIPHER_LIST`` variable. - .. _sni: https://en.wikipedia.org/wiki/Server_Name_Indication .. _crime attack: https://en.wikipedia.org/wiki/CRIME_(security_exploit) +.. _pyopenssl: https://www.pyopenssl.org +.. _cryptography: https://cryptography.io +.. 
_idna: https://github.com/kjd/idna """ from __future__ import absolute_import @@ -56,8 +60,9 @@ class UnsupportedExtension(Exception): pass -from socket import timeout, error as SocketError from io import BytesIO +from socket import error as SocketError +from socket import timeout try: # Platform-specific: Python 2 from socket import _fileobject @@ -67,11 +72,11 @@ class UnsupportedExtension(Exception): import logging import ssl -from ..packages import six import sys from .. import util - +from ..packages import six +from ..util.ssl_ import PROTOCOL_TLS_CLIENT __all__ = ["inject_into_urllib3", "extract_from_urllib3"] @@ -81,6 +86,7 @@ class UnsupportedExtension(Exception): # Map from urllib3 to PyOpenSSL compatible parameter-values. _openssl_versions = { util.PROTOCOL_TLS: OpenSSL.SSL.SSLv23_METHOD, + PROTOCOL_TLS_CLIENT: OpenSSL.SSL.SSLv23_METHOD, ssl.PROTOCOL_TLSv1: OpenSSL.SSL.TLSv1_METHOD, } @@ -465,6 +471,10 @@ def load_cert_chain(self, certfile, keyfile=None, password=None): self._ctx.set_passwd_cb(lambda *_: password) self._ctx.use_privatekey_file(keyfile or certfile) + def set_alpn_protocols(self, protocols): + protocols = [six.ensure_binary(p) for p in protocols] + return self._ctx.set_alpn_protos(protocols) + def wrap_socket( self, sock, diff --git a/newrelic/packages/urllib3/contrib/securetransport.py b/newrelic/packages/urllib3/contrib/securetransport.py index a6b7e94ade..554c015fed 100644 --- a/newrelic/packages/urllib3/contrib/securetransport.py +++ b/newrelic/packages/urllib3/contrib/securetransport.py @@ -29,6 +29,8 @@ that reason, this code should be considered to be covered both by urllib3's license and by oscrypto's: +.. code-block:: + Copyright (c) 2015-2016 Will Bond Permission is hereby granted, free of charge, to any person obtaining a @@ -58,16 +60,22 @@ import shutil import socket import ssl +import struct import threading import weakref +import six + from .. import util -from ._securetransport.bindings import Security, SecurityConst, CoreFoundation +from ..util.ssl_ import PROTOCOL_TLS_CLIENT +from ._securetransport.bindings import CoreFoundation, Security, SecurityConst from ._securetransport.low_level import ( _assert_no_error, + _build_tls_unknown_ca_alert, _cert_array_from_pem, - _temporary_keychain, + _create_cfstring_array, _load_client_cert_chain, + _temporary_keychain, ) try: # Platform-specific: Python 2 @@ -147,7 +155,8 @@ # TLSv1 and a high of TLSv1.2. For everything else, we pin to that version. # TLSv1 to 1.2 are supported on macOS 10.8+ _protocol_to_min_max = { - util.PROTOCOL_TLS: (SecurityConst.kTLSProtocol1, SecurityConst.kTLSProtocol12) + util.PROTOCOL_TLS: (SecurityConst.kTLSProtocol1, SecurityConst.kTLSProtocol12), + PROTOCOL_TLS_CLIENT: (SecurityConst.kTLSProtocol1, SecurityConst.kTLSProtocol12), } if hasattr(ssl, "PROTOCOL_SSLv2"): @@ -374,16 +383,55 @@ def _set_ciphers(self): ) _assert_no_error(result) + def _set_alpn_protocols(self, protocols): + """ + Sets up the ALPN protocols on the context. + """ + if not protocols: + return + protocols_arr = _create_cfstring_array(protocols) + try: + result = Security.SSLSetALPNProtocols(self.context, protocols_arr) + _assert_no_error(result) + finally: + CoreFoundation.CFRelease(protocols_arr) + def _custom_validate(self, verify, trust_bundle): """ Called when we have set custom validation. We do this in two cases: first, when cert validation is entirely disabled; and second, when using a custom trust DB. + Raises an SSLError if the connection is not trusted. 
""" # If we disabled cert validation, just say: cool. if not verify: return + successes = ( + SecurityConst.kSecTrustResultUnspecified, + SecurityConst.kSecTrustResultProceed, + ) + try: + trust_result = self._evaluate_trust(trust_bundle) + if trust_result in successes: + return + reason = "error code: %d" % (trust_result,) + except Exception as e: + # Do not trust on error + reason = "exception: %r" % (e,) + + # SecureTransport does not send an alert nor shuts down the connection. + rec = _build_tls_unknown_ca_alert(self.version()) + self.socket.sendall(rec) + # close the connection immediately + # l_onoff = 1, activate linger + # l_linger = 0, linger for 0 seoncds + opts = struct.pack("ii", 1, 0) + self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_LINGER, opts) + self.close() + raise ssl.SSLError("certificate verify failed, %s" % reason) + + def _evaluate_trust(self, trust_bundle): # We want data in memory, so load it up. if os.path.isfile(trust_bundle): with open(trust_bundle, "rb") as f: @@ -421,15 +469,7 @@ def _custom_validate(self, verify, trust_bundle): if cert_array is not None: CoreFoundation.CFRelease(cert_array) - # Ok, now we can look at what the result was. - successes = ( - SecurityConst.kSecTrustResultUnspecified, - SecurityConst.kSecTrustResultProceed, - ) - if trust_result.value not in successes: - raise ssl.SSLError( - "certificate verify failed, error code: %d" % trust_result.value - ) + return trust_result.value def handshake( self, @@ -441,6 +481,7 @@ def handshake( client_cert, client_key, client_key_passphrase, + alpn_protocols, ): """ Actually performs the TLS handshake. This is run automatically by @@ -481,6 +522,9 @@ def handshake( # Setup the ciphers. self._set_ciphers() + # Setup the ALPN protocols. + self._set_alpn_protocols(alpn_protocols) + # Set the minimum and maximum TLS versions. result = Security.SSLSetProtocolVersionMin(self.context, min_version) _assert_no_error(result) @@ -754,6 +798,7 @@ def __init__(self, protocol): self._client_cert = None self._client_key = None self._client_key_passphrase = None + self._alpn_protocols = None @property def check_hostname(self): @@ -831,6 +876,18 @@ def load_cert_chain(self, certfile, keyfile=None, password=None): self._client_key = keyfile self._client_cert_passphrase = password + def set_alpn_protocols(self, protocols): + """ + Sets the ALPN protocols that will later be set on the context. + + Raises a NotImplementedError if ALPN is not supported. + """ + if not hasattr(Security, "SSLSetALPNProtocols"): + raise NotImplementedError( + "SecureTransport supports ALPN only in macOS 10.12+" + ) + self._alpn_protocols = [six.ensure_binary(p) for p in protocols] + def wrap_socket( self, sock, @@ -860,5 +917,6 @@ def wrap_socket( self._client_cert, self._client_key, self._client_key_passphrase, + self._alpn_protocols, ) return wrapped_socket diff --git a/newrelic/packages/urllib3/contrib/socks.py b/newrelic/packages/urllib3/contrib/socks.py index 9e97f7aa98..c326e80dd1 100644 --- a/newrelic/packages/urllib3/contrib/socks.py +++ b/newrelic/packages/urllib3/contrib/socks.py @@ -14,22 +14,26 @@ - SOCKS5 with local DNS (``proxy_url='socks5://...``) - Usernames and passwords for the SOCKS proxy - .. note:: - It is recommended to use ``socks5h://`` or ``socks4a://`` schemes in - your ``proxy_url`` to ensure that DNS resolution is done from the remote - server instead of client-side when connecting to a domain name. +.. 
note:: + It is recommended to use ``socks5h://`` or ``socks4a://`` schemes in + your ``proxy_url`` to ensure that DNS resolution is done from the remote + server instead of client-side when connecting to a domain name. SOCKS4 supports IPv4 and domain names with the SOCKS4A extension. SOCKS5 supports IPv4, IPv6, and domain names. When connecting to a SOCKS4 proxy the ``username`` portion of the ``proxy_url`` -will be sent as the ``userid`` section of the SOCKS request:: +will be sent as the ``userid`` section of the SOCKS request: + +.. code-block:: python proxy_url="socks4a://@proxy-host" When connecting to a SOCKS5 proxy the ``username`` and ``password`` portion of the ``proxy_url`` will be sent as the username/password to authenticate -with the proxy:: +with the proxy: + +.. code-block:: python proxy_url="socks5h://:@proxy-host" @@ -40,19 +44,21 @@ import socks except ImportError: import warnings + from ..exceptions import DependencyWarning warnings.warn( ( "SOCKS support in urllib3 requires the installation of optional " "dependencies: specifically, PySocks. For more information, see " - "https://urllib3.readthedocs.io/en/latest/contrib.html#socks-proxies" + "https://urllib3.readthedocs.io/en/1.26.x/contrib.html#socks-proxies" ), DependencyWarning, ) raise -from socket import error as SocketError, timeout as SocketTimeout +from socket import error as SocketError +from socket import timeout as SocketTimeout from ..connection import HTTPConnection, HTTPSConnection from ..connectionpool import HTTPConnectionPool, HTTPSConnectionPool diff --git a/newrelic/packages/urllib3/exceptions.py b/newrelic/packages/urllib3/exceptions.py index 5cc4d8a4f1..cba6f3f560 100644 --- a/newrelic/packages/urllib3/exceptions.py +++ b/newrelic/packages/urllib3/exceptions.py @@ -1,21 +1,24 @@ from __future__ import absolute_import + from .packages.six.moves.http_client import IncompleteRead as httplib_IncompleteRead # Base Exceptions class HTTPError(Exception): - "Base exception used by this module." + """Base exception used by this module.""" + pass class HTTPWarning(Warning): - "Base warning used by this module." + """Base warning used by this module.""" + pass class PoolError(HTTPError): - "Base exception for errors caused within a pool." + """Base exception for errors caused within a pool.""" def __init__(self, pool, message): self.pool = pool @@ -27,7 +30,7 @@ def __reduce__(self): class RequestError(PoolError): - "Base exception for PoolErrors that have associated URLs." + """Base exception for PoolErrors that have associated URLs.""" def __init__(self, pool, url, message): self.url = url @@ -39,12 +42,13 @@ def __reduce__(self): class SSLError(HTTPError): - "Raised when SSL certificate fails in an HTTPS connection." + """Raised when SSL certificate fails in an HTTPS connection.""" + pass class ProxyError(HTTPError): - "Raised when the connection to a proxy fails." + """Raised when the connection to a proxy fails.""" def __init__(self, message, error, *args): super(ProxyError, self).__init__(message, error, *args) @@ -52,12 +56,14 @@ def __init__(self, message, error, *args): class DecodeError(HTTPError): - "Raised when automatic decoding based on Content-Type fails." + """Raised when automatic decoding based on Content-Type fails.""" + pass class ProtocolError(HTTPError): - "Raised when something unexpected happens mid-request/response." 
+ """Raised when something unexpected happens mid-request/response.""" + pass @@ -87,7 +93,7 @@ def __init__(self, pool, url, reason=None): class HostChangedError(RequestError): - "Raised when an existing pool gets a request for a foreign host." + """Raised when an existing pool gets a request for a foreign host.""" def __init__(self, pool, url, retries=3): message = "Tried to open a foreign host with url: %s" % url @@ -96,13 +102,13 @@ def __init__(self, pool, url, retries=3): class TimeoutStateError(HTTPError): - """ Raised when passing an invalid state to a timeout """ + """Raised when passing an invalid state to a timeout""" pass class TimeoutError(HTTPError): - """ Raised when a socket timeout error occurs. + """Raised when a socket timeout error occurs. Catching this error will catch both :exc:`ReadTimeoutErrors ` and :exc:`ConnectTimeoutErrors `. @@ -112,39 +118,45 @@ class TimeoutError(HTTPError): class ReadTimeoutError(TimeoutError, RequestError): - "Raised when a socket timeout occurs while receiving data from a server" + """Raised when a socket timeout occurs while receiving data from a server""" + pass # This timeout error does not have a URL attached and needs to inherit from the # base HTTPError class ConnectTimeoutError(TimeoutError): - "Raised when a socket timeout occurs while connecting to a server" + """Raised when a socket timeout occurs while connecting to a server""" + pass class NewConnectionError(ConnectTimeoutError, PoolError): - "Raised when we fail to establish a new connection. Usually ECONNREFUSED." + """Raised when we fail to establish a new connection. Usually ECONNREFUSED.""" + pass class EmptyPoolError(PoolError): - "Raised when a pool runs out of connections and no more are allowed." + """Raised when a pool runs out of connections and no more are allowed.""" + pass class ClosedPoolError(PoolError): - "Raised when a request enters a pool after the pool has been closed." + """Raised when a request enters a pool after the pool has been closed.""" + pass class LocationValueError(ValueError, HTTPError): - "Raised when there is something wrong with a given URL input." + """Raised when there is something wrong with a given URL input.""" + pass class LocationParseError(LocationValueError): - "Raised when get_host or similar fails to parse the URL input." + """Raised when get_host or similar fails to parse the URL input.""" def __init__(self, location): message = "Failed to parse: %s" % location @@ -153,39 +165,56 @@ def __init__(self, location): self.location = location +class URLSchemeUnknown(LocationValueError): + """Raised when a URL input has an unsupported scheme.""" + + def __init__(self, scheme): + message = "Not supported URL scheme %s" % scheme + super(URLSchemeUnknown, self).__init__(message) + + self.scheme = scheme + + class ResponseError(HTTPError): - "Used as a container for an error reason supplied in a MaxRetryError." + """Used as a container for an error reason supplied in a MaxRetryError.""" + GENERIC_ERROR = "too many error responses" SPECIFIC_ERROR = "too many {status_code} error responses" class SecurityWarning(HTTPWarning): - "Warned when performing security reducing actions" + """Warned when performing security reducing actions""" + pass class SubjectAltNameWarning(SecurityWarning): - "Warned when connecting to a host with a certificate missing a SAN." + """Warned when connecting to a host with a certificate missing a SAN.""" + pass class InsecureRequestWarning(SecurityWarning): - "Warned when making an unverified HTTPS request." 
+ """Warned when making an unverified HTTPS request.""" + pass class SystemTimeWarning(SecurityWarning): - "Warned when system time is suspected to be wrong" + """Warned when system time is suspected to be wrong""" + pass class InsecurePlatformWarning(SecurityWarning): - "Warned when certain SSL configuration is not available on a platform." + """Warned when certain TLS/SSL configuration is not available on a platform.""" + pass class SNIMissingWarning(HTTPWarning): - "Warned when making a HTTPS request without SNI available." + """Warned when making a HTTPS request without SNI available.""" + pass @@ -198,29 +227,16 @@ class DependencyWarning(HTTPWarning): pass -class InvalidProxyConfigurationWarning(HTTPWarning): - """ - Warned when using an HTTPS proxy and an HTTPS URL. Currently - urllib3 doesn't support HTTPS proxies and the proxy will be - contacted via HTTP instead. This warning can be fixed by - changing your HTTPS proxy URL into an HTTP proxy URL. - - If you encounter this warning read this: - https://github.com/urllib3/urllib3/issues/1850 - """ - - pass - - class ResponseNotChunked(ProtocolError, ValueError): - "Response needs to be chunked in order to read it as chunks." + """Response needs to be chunked in order to read it as chunks.""" + pass class BodyNotHttplibCompatible(HTTPError): """ - Body should be httplib.HTTPResponse like (have an fp attribute which - returns raw chunks) for read_chunked(). + Body should be :class:`http.client.HTTPResponse` like + (have an fp attribute which returns raw chunks) for read_chunked(). """ pass @@ -230,9 +246,8 @@ class IncompleteRead(HTTPError, httplib_IncompleteRead): """ Response length doesn't match expected Content-Length - Subclass of http_client.IncompleteRead to allow int value - for `partial` to avoid creating large objects on streamed - reads. + Subclass of :class:`http.client.IncompleteRead` to allow int value + for ``partial`` to avoid creating large objects on streamed reads. """ def __init__(self, partial, expected): @@ -245,22 +260,57 @@ def __repr__(self): ) +class InvalidChunkLength(HTTPError, httplib_IncompleteRead): + """Invalid chunk length in a chunked response.""" + + def __init__(self, response, length): + super(InvalidChunkLength, self).__init__( + response.tell(), response.length_remaining + ) + self.response = response + self.length = length + + def __repr__(self): + return "InvalidChunkLength(got length %r, %i bytes read)" % ( + self.length, + self.partial, + ) + + class InvalidHeader(HTTPError): - "The header provided was somehow invalid." + """The header provided was somehow invalid.""" + pass -class ProxySchemeUnknown(AssertionError, ValueError): - "ProxyManager does not support the supplied scheme" +class ProxySchemeUnknown(AssertionError, URLSchemeUnknown): + """ProxyManager does not support the supplied scheme""" + # TODO(t-8ch): Stop inheriting from AssertionError in v2.0. def __init__(self, scheme): - message = "Not supported proxy scheme %s" % scheme + # 'localhost' is here because our URL parser parses + # localhost:8080 -> scheme=localhost, remove if we fix this. 
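The ``localhost`` special case above exists because of a quirk in urllib3's URL parser, which reads a bare ``host:port`` as ``scheme:path``. A quick illustration (assuming the vendored parser matches upstream 1.26; the plain ``urllib3`` import path is used here for readability):

.. code-block:: python

    from urllib3.util import parse_url

    # Without "//" the text before ":" is taken to be a scheme.
    print(parse_url("localhost:8080").scheme)         # "localhost"
    print(parse_url("http://localhost:8080").scheme)  # "http"

That is why a parsed scheme of ``"localhost"`` is normalized to ``None`` before the error message is built, as the code that follows shows.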
+ if scheme == "localhost": + scheme = None + if scheme is None: + message = "Proxy URL had no scheme, should start with http:// or https://" + else: + message = ( + "Proxy URL had unsupported scheme %s, should use http:// or https://" + % scheme + ) super(ProxySchemeUnknown, self).__init__(message) +class ProxySchemeUnsupported(ValueError): + """Fetching HTTPS resources through HTTPS proxies is unsupported""" + + pass + + class HeaderParsingError(HTTPError): - "Raised by assert_header_parsing, but we convert it to a log.warning statement." + """Raised by assert_header_parsing, but we convert it to a log.warning statement.""" def __init__(self, defects, unparsed_data): message = "%s, unparsed data: %r" % (defects or "Unknown", unparsed_data) @@ -268,5 +318,6 @@ def __init__(self, defects, unparsed_data): class UnrewindableBodyError(HTTPError): - "urllib3 encountered an error when trying to rewind a body" + """urllib3 encountered an error when trying to rewind a body""" + pass diff --git a/newrelic/packages/urllib3/fields.py b/newrelic/packages/urllib3/fields.py index 8715b2202b..9d630f491d 100644 --- a/newrelic/packages/urllib3/fields.py +++ b/newrelic/packages/urllib3/fields.py @@ -1,4 +1,5 @@ from __future__ import absolute_import + import email.utils import mimetypes import re @@ -26,7 +27,8 @@ def format_header_param_rfc2231(name, value): strategy defined in RFC 2231. Particularly useful for header parameters which might contain - non-ASCII values, like file names. This follows RFC 2388 Section 4.4. + non-ASCII values, like file names. This follows + `RFC 2388 Section 4.4 `_. :param name: The name of the parameter, a string expected to be ASCII only. @@ -65,7 +67,6 @@ def format_header_param_rfc2231(name, value): u"\u0022": u"%22", # Replace "\" with "\\". u"\u005C": u"\u005C\u005C", - u"\u005C": u"\u005C\u005C", } # All control characters from 0x00 to 0x1F *except* 0x1B. diff --git a/newrelic/packages/urllib3/filepost.py b/newrelic/packages/urllib3/filepost.py index b7b00992c6..36c9252c64 100644 --- a/newrelic/packages/urllib3/filepost.py +++ b/newrelic/packages/urllib3/filepost.py @@ -1,13 +1,13 @@ from __future__ import absolute_import + import binascii import codecs import os - from io import BytesIO +from .fields import RequestField from .packages import six from .packages.six import b -from .fields import RequestField writer = codecs.lookup("utf-8")[3] diff --git a/newrelic/packages/urllib3/packages/backports/makefile.py b/newrelic/packages/urllib3/packages/backports/makefile.py index a3156a69c0..b8fb2154b6 100644 --- a/newrelic/packages/urllib3/packages/backports/makefile.py +++ b/newrelic/packages/urllib3/packages/backports/makefile.py @@ -7,7 +7,6 @@ wants to create a "fake" socket object. """ import io - from socket import SocketIO diff --git a/newrelic/packages/urllib3/packages/six.py b/newrelic/packages/urllib3/packages/six.py index 314424099f..ba50acb062 100644 --- a/newrelic/packages/urllib3/packages/six.py +++ b/newrelic/packages/urllib3/packages/six.py @@ -1,4 +1,4 @@ -# Copyright (c) 2010-2019 Benjamin Peterson +# Copyright (c) 2010-2020 Benjamin Peterson # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal @@ -29,7 +29,7 @@ import types __author__ = "Benjamin Peterson " -__version__ = "1.12.0" +__version__ = "1.16.0" # Useful for very coarse version differentiation. 
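One practical effect of the exceptions rework above: ``ProxySchemeUnknown`` now inherits from the new ``URLSchemeUnknown`` (itself a ``LocationValueError``), so proxy and destination scheme problems can be caught through a single base class. A small sketch, assuming the vendored copy behaves like upstream urllib3 1.26:

.. code-block:: python

    from urllib3.exceptions import ProxySchemeUnknown, URLSchemeUnknown

    try:
        raise ProxySchemeUnknown("socks9")
    except URLSchemeUnknown as exc:
        # "Proxy URL had unsupported scheme socks9, should use http:// or https://"
        print(exc)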
@@ -71,6 +71,11 @@ def __len__(self): MAXSIZE = int((1 << 63) - 1) del X +if PY34: + from importlib.util import spec_from_loader +else: + spec_from_loader = None + def _add_doc(func, doc): """Add documentation to a function.""" @@ -182,6 +187,11 @@ def find_module(self, fullname, path=None): return self return None + def find_spec(self, fullname, path, target=None): + if fullname in self.known_modules: + return spec_from_loader(fullname, self) + return None + def __get_module(self, fullname): try: return self.known_modules[fullname] @@ -220,6 +230,12 @@ def get_code(self, fullname): get_source = get_code # same as get_code + def create_module(self, spec): + return self.load_module(spec.name) + + def exec_module(self, module): + pass + _importer = _SixMetaPathImporter(__name__) @@ -260,9 +276,19 @@ class _MovedItems(_LazyModule): ), MovedModule("builtins", "__builtin__"), MovedModule("configparser", "ConfigParser"), + MovedModule( + "collections_abc", + "collections", + "collections.abc" if sys.version_info >= (3, 3) else "collections", + ), MovedModule("copyreg", "copy_reg"), MovedModule("dbm_gnu", "gdbm", "dbm.gnu"), - MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"), + MovedModule("dbm_ndbm", "dbm", "dbm.ndbm"), + MovedModule( + "_dummy_thread", + "dummy_thread", + "_dummy_thread" if sys.version_info < (3, 9) else "_thread", + ), MovedModule("http_cookiejar", "cookielib", "http.cookiejar"), MovedModule("http_cookies", "Cookie", "http.cookies"), MovedModule("html_entities", "htmlentitydefs", "html.entities"), @@ -307,7 +333,9 @@ class _MovedItems(_LazyModule): ] # Add windows specific modules. if sys.platform == "win32": - _moved_attributes += [MovedModule("winreg", "_winreg")] + _moved_attributes += [ + MovedModule("winreg", "_winreg"), + ] for attr in _moved_attributes: setattr(_MovedItems, attr.name, attr) @@ -476,7 +504,7 @@ class Module_six_moves_urllib_robotparser(_LazyModule): _urllib_robotparser_moved_attributes = [ - MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser") + MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"), ] for attr in _urllib_robotparser_moved_attributes: setattr(Module_six_moves_urllib_robotparser, attr.name, attr) @@ -678,9 +706,11 @@ def u(s): if sys.version_info[1] <= 1: _assertRaisesRegex = "assertRaisesRegexp" _assertRegex = "assertRegexpMatches" + _assertNotRegex = "assertNotRegexpMatches" else: _assertRaisesRegex = "assertRaisesRegex" _assertRegex = "assertRegex" + _assertNotRegex = "assertNotRegex" else: def b(s): @@ -707,6 +737,7 @@ def indexbytes(buf, i): _assertCountEqual = "assertItemsEqual" _assertRaisesRegex = "assertRaisesRegexp" _assertRegex = "assertRegexpMatches" + _assertNotRegex = "assertNotRegexpMatches" _add_doc(b, """Byte literal""") _add_doc(u, """Text literal""") @@ -723,6 +754,10 @@ def assertRegex(self, *args, **kwargs): return getattr(self, _assertRegex)(*args, **kwargs) +def assertNotRegex(self, *args, **kwargs): + return getattr(self, _assertNotRegex)(*args, **kwargs) + + if PY3: exec_ = getattr(moves.builtins, "exec") @@ -750,7 +785,7 @@ def exec_(_code_, _globs_=None, _locs_=None): del frame elif _locs_ is None: _locs_ = _globs_ - exec("""exec _code_ in _globs_, _locs_""") + exec ("""exec _code_ in _globs_, _locs_""") exec_( """def reraise(tp, value, tb=None): @@ -762,18 +797,7 @@ def exec_(_code_, _globs_=None, _locs_=None): ) -if sys.version_info[:2] == (3, 2): - exec_( - """def raise_from(value, from_value): - try: - if from_value is None: - raise value - raise value from 
from_value - finally: - value = None -""" - ) -elif sys.version_info[:2] > (3, 2): +if sys.version_info[:2] > (3,): exec_( """def raise_from(value, from_value): try: @@ -863,19 +887,41 @@ def print_(*args, **kwargs): _add_doc(reraise, """Reraise an exception.""") if sys.version_info[0:2] < (3, 4): + # This does exactly the same what the :func:`py3:functools.update_wrapper` + # function does on Python versions after 3.2. It sets the ``__wrapped__`` + # attribute on ``wrapper`` object and it doesn't raise an error if any of + # the attributes mentioned in ``assigned`` and ``updated`` are missing on + # ``wrapped`` object. + def _update_wrapper( + wrapper, + wrapped, + assigned=functools.WRAPPER_ASSIGNMENTS, + updated=functools.WRAPPER_UPDATES, + ): + for attr in assigned: + try: + value = getattr(wrapped, attr) + except AttributeError: + continue + else: + setattr(wrapper, attr, value) + for attr in updated: + getattr(wrapper, attr).update(getattr(wrapped, attr, {})) + wrapper.__wrapped__ = wrapped + return wrapper + + _update_wrapper.__doc__ = functools.update_wrapper.__doc__ def wraps( wrapped, assigned=functools.WRAPPER_ASSIGNMENTS, updated=functools.WRAPPER_UPDATES, ): - def wrapper(f): - f = functools.wraps(wrapped, assigned, updated)(f) - f.__wrapped__ = wrapped - return f - - return wrapper + return functools.partial( + _update_wrapper, wrapped=wrapped, assigned=assigned, updated=updated + ) + wraps.__doc__ = functools.wraps.__doc__ else: wraps = functools.wraps @@ -888,7 +934,15 @@ def with_metaclass(meta, *bases): # the actual metaclass. class metaclass(type): def __new__(cls, name, this_bases, d): - return meta(name, bases, d) + if sys.version_info[:2] >= (3, 7): + # This version introduced PEP 560 that requires a bit + # of extra care (we mimic what is done by __build_class__). + resolved_bases = types.resolve_bases(bases) + if resolved_bases is not bases: + d["__orig_bases__"] = bases + else: + resolved_bases = bases + return meta(name, resolved_bases, d) @classmethod def __prepare__(cls, name, this_bases): @@ -928,12 +982,11 @@ def ensure_binary(s, encoding="utf-8", errors="strict"): - `str` -> encoded to `bytes` - `bytes` -> `bytes` """ + if isinstance(s, binary_type): + return s if isinstance(s, text_type): return s.encode(encoding, errors) - elif isinstance(s, binary_type): - return s - else: - raise TypeError("not expecting type '%s'" % type(s)) + raise TypeError("not expecting type '%s'" % type(s)) def ensure_str(s, encoding="utf-8", errors="strict"): @@ -947,12 +1000,15 @@ def ensure_str(s, encoding="utf-8", errors="strict"): - `str` -> `str` - `bytes` -> decoded to `str` """ - if not isinstance(s, (text_type, binary_type)): - raise TypeError("not expecting type '%s'" % type(s)) + # Optimization: Fast return for the common case. + if type(s) is str: + return s if PY2 and isinstance(s, text_type): - s = s.encode(encoding, errors) + return s.encode(encoding, errors) elif PY3 and isinstance(s, binary_type): - s = s.decode(encoding, errors) + return s.decode(encoding, errors) + elif not isinstance(s, (text_type, binary_type)): + raise TypeError("not expecting type '%s'" % type(s)) return s @@ -977,7 +1033,7 @@ def ensure_text(s, encoding="utf-8", errors="strict"): def python_2_unicode_compatible(klass): """ - A decorator that defines __unicode__ and __str__ methods under Python 2. + A class decorator that defines __unicode__ and __str__ methods under Python 2. Under Python 3 it does nothing. 
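The reworked ``ensure_binary``/``ensure_str`` helpers above now return early when the input is already the target type instead of falling through a chain of type checks. Their Python 3 behavior, sketched against the vendored six 1.16.0:

.. code-block:: python

    from urllib3.packages import six

    assert six.ensure_binary("abc") == b"abc"   # str is encoded
    assert six.ensure_binary(b"abc") == b"abc"  # bytes pass through
    assert six.ensure_str(b"abc") == "abc"      # bytes are decoded
    assert six.ensure_str("abc") == "abc"       # fast path: type(s) is str

``ensure_str`` also gained the ``type(s) is str`` fast return, which skips the ``isinstance`` checks entirely in the common case.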
To support Python 2 and 3 with a single code base, define a __str__ method diff --git a/newrelic/packages/urllib3/packages/ssl_match_hostname/__init__.py b/newrelic/packages/urllib3/packages/ssl_match_hostname/__init__.py index 75b6bb1cf0..ef3fde5206 100644 --- a/newrelic/packages/urllib3/packages/ssl_match_hostname/__init__.py +++ b/newrelic/packages/urllib3/packages/ssl_match_hostname/__init__.py @@ -1,19 +1,24 @@ import sys try: - # Our match_hostname function is the same as 3.5's, so we only want to + # Our match_hostname function is the same as 3.10's, so we only want to # import the match_hostname function if it's at least that good. - if sys.version_info < (3, 5): + # We also fallback on Python 3.10+ because our code doesn't emit + # deprecation warnings and is the same as Python 3.10 otherwise. + if sys.version_info < (3, 5) or sys.version_info >= (3, 10): raise ImportError("Fallback to vendored code") from ssl import CertificateError, match_hostname except ImportError: try: # Backport of the function from a pypi module - from backports.ssl_match_hostname import CertificateError, match_hostname + from backports.ssl_match_hostname import ( # type: ignore + CertificateError, + match_hostname, + ) except ImportError: # Our vendored copy - from ._implementation import CertificateError, match_hostname + from ._implementation import CertificateError, match_hostname # type: ignore # Not needed, but documenting what we provide. __all__ = ("CertificateError", "match_hostname") diff --git a/newrelic/packages/urllib3/poolmanager.py b/newrelic/packages/urllib3/poolmanager.py index e2bd3bd8db..3a31a285bf 100644 --- a/newrelic/packages/urllib3/poolmanager.py +++ b/newrelic/packages/urllib3/poolmanager.py @@ -1,24 +1,24 @@ from __future__ import absolute_import + import collections import functools import logging -import warnings from ._collections import RecentlyUsedContainer -from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool -from .connectionpool import port_by_scheme +from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool, port_by_scheme from .exceptions import ( LocationValueError, MaxRetryError, ProxySchemeUnknown, - InvalidProxyConfigurationWarning, + ProxySchemeUnsupported, + URLSchemeUnknown, ) from .packages import six from .packages.six.moves.urllib.parse import urljoin from .request import RequestMethods -from .util.url import parse_url +from .util.proxy import connection_requires_http_tunnel from .util.retry import Retry - +from .util.url import parse_url __all__ = ["PoolManager", "ProxyManager", "proxy_from_url"] @@ -59,6 +59,7 @@ "key_headers", # dict "key__proxy", # parsed proxy url "key__proxy_headers", # dict + "key__proxy_config", # class "key_socket_options", # list of (level (int), optname (int), value (int or str)) tuples "key__socks_options", # dict "key_assert_hostname", # bool or string @@ -70,6 +71,9 @@ #: All custom key schemes should include the fields in this key at a minimum. 
 PoolKey = collections.namedtuple("PoolKey", _key_fields)

+_proxy_config_fields = ("ssl_context", "use_forwarding_for_https")
+ProxyConfig = collections.namedtuple("ProxyConfig", _proxy_config_fields)
+

 def _default_key_normalizer(key_class, request_context):
     """
@@ -161,6 +165,7 @@ class PoolManager(RequestMethods):
     """

     proxy = None
+    proxy_config = None

     def __init__(self, num_pools=10, headers=None, **connection_pool_kw):
         RequestMethods.__init__(self, headers)
@@ -182,7 +187,7 @@ def __exit__(self, exc_type, exc_val, exc_tb):

     def _new_pool(self, scheme, host, port, request_context=None):
         """
-        Create a new :class:`ConnectionPool` based on host, port, scheme, and
+        Create a new :class:`urllib3.connectionpool.ConnectionPool` based on host, port, scheme, and
         any additional pool keyword arguments.

         If ``request_context`` is provided, it is provided as keyword arguments
@@ -218,7 +223,7 @@ def clear(self):

     def connection_from_host(self, host, port=None, scheme="http", pool_kwargs=None):
         """
-        Get a :class:`ConnectionPool` based on the host, port, and scheme.
+        Get a :class:`urllib3.connectionpool.ConnectionPool` based on the host, port, and scheme.

         If ``port`` isn't given, it will be derived from the ``scheme`` using
         ``urllib3.connectionpool.port_by_scheme``. If ``pool_kwargs`` is
@@ -241,20 +246,22 @@ def connection_from_host(self, host, port=None, scheme="http", pool_kwargs=None)

     def connection_from_context(self, request_context):
         """
-        Get a :class:`ConnectionPool` based on the request context.
+        Get a :class:`urllib3.connectionpool.ConnectionPool` based on the request context.

         ``request_context`` must at least contain the ``scheme`` key and its
         value must be a key in ``key_fn_by_scheme`` instance variable.
         """
         scheme = request_context["scheme"].lower()
-        pool_key_constructor = self.key_fn_by_scheme[scheme]
+        pool_key_constructor = self.key_fn_by_scheme.get(scheme)
+        if not pool_key_constructor:
+            raise URLSchemeUnknown(scheme)
         pool_key = pool_key_constructor(request_context)

         return self.connection_from_pool_key(pool_key, request_context=request_context)

     def connection_from_pool_key(self, pool_key, request_context=None):
         """
-        Get a :class:`ConnectionPool` based on the provided pool key.
+        Get a :class:`urllib3.connectionpool.ConnectionPool` based on the provided pool key.

         ``pool_key`` should be a namedtuple that only contains immutable
         objects. At a minimum it must have the ``scheme``, ``host``, and
@@ -312,9 +319,39 @@ def _merge_pool_kwargs(self, override):
                     base_pool_kwargs[key] = value
         return base_pool_kwargs

+    def _proxy_requires_url_absolute_form(self, parsed_url):
+        """
+        Indicates if the proxy requires the complete destination URL in the
+        request. Normally this is only needed when not using an HTTP CONNECT
+        tunnel.
+        """
+        if self.proxy is None:
+            return False
+
+        return not connection_requires_http_tunnel(
+            self.proxy, self.proxy_config, parsed_url.scheme
+        )
+
+    def _validate_proxy_scheme_url_selection(self, url_scheme):
+        """
+        Validates that we're not attempting to do TLS in TLS connections on
+        Python2 or with unsupported SSL implementations.
+ """ + if self.proxy is None or url_scheme != "https": + return + + if self.proxy.scheme != "https": + return + + if six.PY2 and not self.proxy_config.use_forwarding_for_https: + raise ProxySchemeUnsupported( + "Contacting HTTPS destinations through HTTPS proxies " + "'via CONNECT tunnels' is not supported in Python 2" + ) + def urlopen(self, method, url, redirect=True, **kw): """ - Same as :meth:`urllib3.connectionpool.HTTPConnectionPool.urlopen` + Same as :meth:`urllib3.HTTPConnectionPool.urlopen` with custom cross-host redirect logic and only sends the request-uri portion of the ``url``. @@ -322,6 +359,8 @@ def urlopen(self, method, url, redirect=True, **kw): :class:`urllib3.connectionpool.ConnectionPool` can be chosen for it. """ u = parse_url(url) + self._validate_proxy_scheme_url_selection(u.scheme) + conn = self.connection_from_host(u.host, port=u.port, scheme=u.scheme) kw["assert_same_host"] = False @@ -330,7 +369,7 @@ def urlopen(self, method, url, redirect=True, **kw): if "headers" not in kw: kw["headers"] = self.headers.copy() - if self.proxy is not None and u.scheme == "http": + if self._proxy_requires_url_absolute_form(u): response = conn.urlopen(method, url, **kw) else: response = conn.urlopen(method, u.request_uri, **kw) @@ -392,6 +431,19 @@ class ProxyManager(PoolManager): HTTPS/CONNECT case they are sent only once. Could be used for proxy authentication. + :param proxy_ssl_context: + The proxy SSL context is used to establish the TLS connection to the + proxy when using HTTPS proxies. + + :param use_forwarding_for_https: + (Defaults to False) If set to True will forward requests to the HTTPS + proxy to be made on behalf of the client instead of creating a TLS + tunnel via the CONNECT method. **Enabling this flag means that request + and response headers and content will be visible from the HTTPS proxy** + whereas tunneling keeps request and response headers and content + private. IP address, target hostname, SNI, and port are always visible + to an HTTPS proxy even when this flag is disabled. + Example: >>> proxy = urllib3.ProxyManager('http://localhost:3128/') >>> r1 = proxy.request('GET', 'http://google.com/') @@ -411,6 +463,8 @@ def __init__( num_pools=10, headers=None, proxy_headers=None, + proxy_ssl_context=None, + use_forwarding_for_https=False, **connection_pool_kw ): @@ -421,18 +475,22 @@ def __init__( proxy_url.port, ) proxy = parse_url(proxy_url) - if not proxy.port: - port = port_by_scheme.get(proxy.scheme, 80) - proxy = proxy._replace(port=port) if proxy.scheme not in ("http", "https"): raise ProxySchemeUnknown(proxy.scheme) + if not proxy.port: + port = port_by_scheme.get(proxy.scheme, 80) + proxy = proxy._replace(port=port) + self.proxy = proxy self.proxy_headers = proxy_headers or {} + self.proxy_ssl_context = proxy_ssl_context + self.proxy_config = ProxyConfig(proxy_ssl_context, use_forwarding_for_https) connection_pool_kw["_proxy"] = self.proxy connection_pool_kw["_proxy_headers"] = self.proxy_headers + connection_pool_kw["_proxy_config"] = self.proxy_config super(ProxyManager, self).__init__(num_pools, headers, **connection_pool_kw) @@ -461,27 +519,13 @@ def _set_proxy_headers(self, url, headers=None): headers_.update(headers) return headers_ - def _validate_proxy_scheme_url_selection(self, url_scheme): - if url_scheme == "https" and self.proxy.scheme == "https": - warnings.warn( - "Your proxy configuration specified an HTTPS scheme for the proxy. " - "Are you sure you want to use HTTPS to contact the proxy? 
" - "This most likely indicates an error in your configuration. " - "Read this issue for more info: " - "https://github.com/urllib3/urllib3/issues/1850", - InvalidProxyConfigurationWarning, - stacklevel=3, - ) - def urlopen(self, method, url, redirect=True, **kw): "Same as HTTP(S)ConnectionPool.urlopen, ``url`` must be absolute." u = parse_url(url) - self._validate_proxy_scheme_url_selection(u.scheme) - - if u.scheme == "http": - # For proxied HTTPS requests, httplib sets the necessary headers - # on the CONNECT to the proxy. For HTTP, we'll definitely - # need to set 'Host' at the very least. + if not connection_requires_http_tunnel(self.proxy, self.proxy_config, u.scheme): + # For connections using HTTP CONNECT, httplib sets the necessary + # headers on the CONNECT to the proxy. If we're not using CONNECT, + # we'll definitely need to set 'Host' at the very least. headers = kw.get("headers", self.headers) kw["headers"] = self._set_proxy_headers(url, headers) diff --git a/newrelic/packages/urllib3/request.py b/newrelic/packages/urllib3/request.py index 55f160bbf1..398386a5b9 100644 --- a/newrelic/packages/urllib3/request.py +++ b/newrelic/packages/urllib3/request.py @@ -3,15 +3,14 @@ from .filepost import encode_multipart_formdata from .packages.six.moves.urllib.parse import urlencode - __all__ = ["RequestMethods"] class RequestMethods(object): """ Convenience mixin for classes who implement a :meth:`urlopen` method, such - as :class:`~urllib3.connectionpool.HTTPConnectionPool` and - :class:`~urllib3.poolmanager.PoolManager`. + as :class:`urllib3.HTTPConnectionPool` and + :class:`urllib3.PoolManager`. Provides behavior for making common types of HTTP request methods and decides which type of request field encoding to use. @@ -111,9 +110,9 @@ def request_encode_body( the body. This is useful for request methods like POST, PUT, PATCH, etc. When ``encode_multipart=True`` (default), then - :meth:`urllib3.filepost.encode_multipart_formdata` is used to encode + :func:`urllib3.encode_multipart_formdata` is used to encode the payload with the appropriate content type. Otherwise - :meth:`urllib.urlencode` is used with the + :func:`urllib.parse.urlencode` is used with the 'application/x-www-form-urlencoded' content type. 
         Multipart encoding must be used when posting files, and it's reasonably
diff --git a/newrelic/packages/urllib3/response.py b/newrelic/packages/urllib3/response.py
index 43a85acbc1..38693f4fc6 100644
--- a/newrelic/packages/urllib3/response.py
+++ b/newrelic/packages/urllib3/response.py
@@ -1,10 +1,11 @@
 from __future__ import absolute_import
-from contextlib import contextmanager
-import zlib
+
 import io
 import logging
-from socket import timeout as SocketTimeout
+import zlib
+from contextlib import contextmanager
 from socket import error as SocketError
+from socket import timeout as SocketTimeout

 try:
     import brotli
@@ -12,19 +13,20 @@
     brotli = None

 from ._collections import HTTPHeaderDict
+from .connection import BaseSSLError, HTTPException
 from .exceptions import (
     BodyNotHttplibCompatible,
-    ProtocolError,
     DecodeError,
-    ReadTimeoutError,
-    ResponseNotChunked,
+    HTTPError,
     IncompleteRead,
+    InvalidChunkLength,
     InvalidHeader,
-    HTTPError,
+    ProtocolError,
+    ReadTimeoutError,
+    ResponseNotChunked,
+    SSLError,
 )
-from .packages.six import string_types as basestring, PY3
-from .packages.six.moves import http_client as httplib
-from .connection import HTTPException, BaseSSLError
+from .packages import six
 from .util.response import is_fp_closed, is_response_to_head

 log = logging.getLogger(__name__)
@@ -156,13 +158,13 @@ class HTTPResponse(io.IOBase):
     """
     HTTP Response container.

-    Backwards-compatible to httplib's HTTPResponse but the response ``body`` is
+    Backwards-compatible with :class:`http.client.HTTPResponse` but the response ``body`` is
     loaded and decoded on-demand when the ``data`` property is accessed.

     This class is also compatible with the Python standard library's :mod:`io`
     module, and can hence be treated as a readable object in the context of that
     framework.

-    Extra parameters for behaviour not present in httplib.HTTPResponse:
+    Extra parameters for behaviour not present in :class:`http.client.HTTPResponse`:

     :param preload_content:
         If True, the response's body will be preloaded during construction.
@@ -172,7 +174,7 @@ class is also compatible with the Python standard library's :mod:`io`
         'content-encoding' header.

     :param original_response:
-        When this HTTPResponse wrapper is generated from an httplib.HTTPResponse
+        When this HTTPResponse wrapper is generated from an :class:`http.client.HTTPResponse`
         object, it's convenient to include the original for debug purposes. It's
         otherwise unused.

@@ -232,7 +234,7 @@ def __init__(
         self.msg = msg
         self._request_url = request_url

-        if body and isinstance(body, (basestring, bytes)):
+        if body and isinstance(body, (six.string_types, bytes)):
             self._body = body

         self._pool = pool
@@ -290,7 +292,7 @@ def drain_conn(self):

     @property
     def data(self):
-        # For backwords-compat with earlier urllib3 0.4 and earlier.
+        # For backwards-compat with earlier urllib3 0.4 and earlier.
         if self._body:
             return self._body
@@ -307,8 +309,8 @@ def isclosed(self):
     def tell(self):
         """
         Obtain the number of bytes pulled over the wire so far. May differ from
-        the amount of content returned by :meth:``HTTPResponse.read`` if bytes
-        are encoded on the wire (e.g, compressed).
+        the amount of content returned by :meth:``urllib3.response.HTTPResponse.read``
+        if bytes are encoded on the wire (e.g., compressed).
         """
         return self._fp_bytes_read

@@ -442,10 +444,9 @@ def _error_catcher(self):
         except BaseSSLError as e:
             # FIXME: Is there a better way to differentiate between SSLErrors?
-            if "read operation timed out" not in str(e):  # Defensive:
-                # This shouldn't happen but just in case we're missing an edge
-                # case, let's avoid swallowing SSL errors.
-                raise
+            if "read operation timed out" not in str(e):
+                # SSL errors related to framing/MAC get wrapped and reraised here
+                raise SSLError(e)

             raise ReadTimeoutError(self._pool, None, "Read timed out.")

@@ -479,7 +480,7 @@ def _error_catcher(self):

     def read(self, amt=None, decode_content=None, cache_content=False):
         """
-        Similar to :meth:`httplib.HTTPResponse.read`, but with two additional
+        Similar to :meth:`http.client.HTTPResponse.read`, but with two additional
         parameters: ``decode_content`` and ``cache_content``.

         :param amt:
@@ -580,7 +581,7 @@ def stream(self, amt=2 ** 16, decode_content=None):
     @classmethod
     def from_httplib(ResponseCls, r, **response_kw):
         """
-        Given an :class:`httplib.HTTPResponse` instance ``r``, return a
+        Given an :class:`http.client.HTTPResponse` instance ``r``, return a
         corresponding :class:`urllib3.response.HTTPResponse` object.

         Remaining parameters are passed to the HTTPResponse constructor, along
@@ -589,11 +590,11 @@ def from_httplib(ResponseCls, r, **response_kw):
         headers = r.msg

         if not isinstance(headers, HTTPHeaderDict):
-            if PY3:
-                headers = HTTPHeaderDict(headers.items())
-            else:
+            if six.PY2:
                 # Python 2.7
                 headers = HTTPHeaderDict.from_httplib(headers)
+            else:
+                headers = HTTPHeaderDict(headers.items())

         # HTTPResponse objects in Python 3 don't have a .strict attribute
         strict = getattr(r, "strict", 0)
@@ -609,7 +610,7 @@ def from_httplib(ResponseCls, r, **response_kw):
         )
         return resp

-    # Backwards-compatibility methods for httplib.HTTPResponse
+    # Backwards-compatibility methods for http.client.HTTPResponse
    def getheaders(self):
         return self.headers

@@ -679,8 +680,8 @@ def readinto(self, b):
     def supports_chunked_reads(self):
         """
         Checks if the underlying file-like object looks like a
-        httplib.HTTPResponse object. We do this by testing for the fp
-        attribute. If it is present we assume it returns raw chunks as
+        :class:`http.client.HTTPResponse` object. We do this by testing for
+        the fp attribute. If it is present we assume it returns raw chunks as
         processed by read_chunked().
         """
         return hasattr(self._fp, "fp")
@@ -697,7 +698,7 @@ def _update_chunk_length(self):
         except ValueError:
             # Invalid chunked protocol response, abort.
             self.close()
-            raise httplib.IncompleteRead(line)
+            raise InvalidChunkLength(self, line)

     def _handle_chunk(self, amt):
         returned_chunk = None
@@ -744,7 +745,7 @@ def read_chunked(self, amt=None, decode_content=None):
             )
         if not self.supports_chunked_reads():
             raise BodyNotHttplibCompatible(
-                "Body should be httplib.HTTPResponse like. "
+                "Body should be http.client.HTTPResponse like. "
                 "It should have an fp attribute which returns raw chunks."
             )
diff --git a/newrelic/packages/urllib3/util/__init__.py b/newrelic/packages/urllib3/util/__init__.py
index a96c73a9d8..4547fc522b 100644
--- a/newrelic/packages/urllib3/util/__init__.py
+++ b/newrelic/packages/urllib3/util/__init__.py
@@ -2,23 +2,23 @@
 # For backwards compatibility, provide imports that used to be here.
from .connection import is_connection_dropped -from .request import make_headers +from .request import SKIP_HEADER, SKIPPABLE_HEADERS, make_headers from .response import is_fp_closed +from .retry import Retry from .ssl_ import ( - SSLContext, + ALPN_PROTOCOLS, HAS_SNI, IS_PYOPENSSL, IS_SECURETRANSPORT, + PROTOCOL_TLS, + SSLContext, assert_fingerprint, resolve_cert_reqs, resolve_ssl_version, ssl_wrap_socket, - PROTOCOL_TLS, ) -from .timeout import current_time, Timeout - -from .retry import Retry -from .url import get_host, parse_url, split_first, Url +from .timeout import Timeout, current_time +from .url import Url, get_host, parse_url, split_first from .wait import wait_for_read, wait_for_write __all__ = ( @@ -27,6 +27,7 @@ "IS_SECURETRANSPORT", "SSLContext", "PROTOCOL_TLS", + "ALPN_PROTOCOLS", "Retry", "Timeout", "Url", @@ -43,4 +44,6 @@ "ssl_wrap_socket", "wait_for_read", "wait_for_write", + "SKIP_HEADER", + "SKIPPABLE_HEADERS", ) diff --git a/newrelic/packages/urllib3/util/connection.py b/newrelic/packages/urllib3/util/connection.py index 86f0a3b00e..30b2d174de 100644 --- a/newrelic/packages/urllib3/util/connection.py +++ b/newrelic/packages/urllib3/util/connection.py @@ -1,7 +1,12 @@ from __future__ import absolute_import + import socket -from .wait import NoWayToWaitForSocketError, wait_for_read + +from ..exceptions import LocationParseError + from ..contrib import _appengine_environ +from ..packages import six +from .wait import NoWayToWaitForSocketError, wait_for_read def is_connection_dropped(conn): # Platform-specific @@ -9,7 +14,7 @@ def is_connection_dropped(conn): # Platform-specific Returns True if the connection is dropped and should be closed. :param conn: - :class:`httplib.HTTPConnection` object. + :class:`http.client.HTTPConnection` object. Note: For platforms like AppEngine, this will always return ``False`` to let the platform handle connection recycling transparently for us. @@ -42,7 +47,7 @@ def create_connection( port)``) and return the socket object. Passing the optional *timeout* parameter will set the timeout on the socket instance before attempting to connect. If no *timeout* is supplied, the - global default timeout setting returned by :func:`getdefaulttimeout` + global default timeout setting returned by :func:`socket.getdefaulttimeout` is used. If *source_address* is set it must be a tuple of (host, port) for the socket to bind as a source address before making the connection. An host of '' or port 0 tells the OS to use the default. @@ -58,6 +63,13 @@ def create_connection( # The original create_connection function always returns all records. family = allowed_gai_family() + try: + host.encode("idna") + except UnicodeError: + return six.raise_from( + LocationParseError(u"'%s', label empty or too long" % host), None + ) + for res in socket.getaddrinfo(host, port, family, socket.SOCK_STREAM): af, socktype, proto, canonname, sa = res sock = None @@ -106,7 +118,7 @@ def allowed_gai_family(): def _has_ipv6(host): - """ Returns True if the system can bind an IPv6 address. 
""" + """Returns True if the system can bind an IPv6 address.""" sock = None has_ipv6 = False diff --git a/newrelic/packages/urllib3/util/proxy.py b/newrelic/packages/urllib3/util/proxy.py new file mode 100644 index 0000000000..2199cc7b7f --- /dev/null +++ b/newrelic/packages/urllib3/util/proxy.py @@ -0,0 +1,57 @@ +from .ssl_ import create_urllib3_context, resolve_cert_reqs, resolve_ssl_version + + +def connection_requires_http_tunnel( + proxy_url=None, proxy_config=None, destination_scheme=None +): + """ + Returns True if the connection requires an HTTP CONNECT through the proxy. + + :param URL proxy_url: + URL of the proxy. + :param ProxyConfig proxy_config: + Proxy configuration from poolmanager.py + :param str destination_scheme: + The scheme of the destination. (i.e https, http, etc) + """ + # If we're not using a proxy, no way to use a tunnel. + if proxy_url is None: + return False + + # HTTP destinations never require tunneling, we always forward. + if destination_scheme == "http": + return False + + # Support for forwarding with HTTPS proxies and HTTPS destinations. + if ( + proxy_url.scheme == "https" + and proxy_config + and proxy_config.use_forwarding_for_https + ): + return False + + # Otherwise always use a tunnel. + return True + + +def create_proxy_ssl_context( + ssl_version, cert_reqs, ca_certs=None, ca_cert_dir=None, ca_cert_data=None +): + """ + Generates a default proxy ssl context if one hasn't been provided by the + user. + """ + ssl_context = create_urllib3_context( + ssl_version=resolve_ssl_version(ssl_version), + cert_reqs=resolve_cert_reqs(cert_reqs), + ) + + if ( + not ca_certs + and not ca_cert_dir + and not ca_cert_data + and hasattr(ssl_context, "load_default_certs") + ): + ssl_context.load_default_certs() + + return ssl_context diff --git a/newrelic/packages/urllib3/util/queue.py b/newrelic/packages/urllib3/util/queue.py index d3d379a199..41784104ee 100644 --- a/newrelic/packages/urllib3/util/queue.py +++ b/newrelic/packages/urllib3/util/queue.py @@ -1,4 +1,5 @@ import collections + from ..packages import six from ..packages.six.moves import queue diff --git a/newrelic/packages/urllib3/util/request.py b/newrelic/packages/urllib3/util/request.py index 3b7bb54daf..25103383ec 100644 --- a/newrelic/packages/urllib3/util/request.py +++ b/newrelic/packages/urllib3/util/request.py @@ -1,8 +1,16 @@ from __future__ import absolute_import + from base64 import b64encode -from ..packages.six import b, integer_types from ..exceptions import UnrewindableBodyError +from ..packages.six import b, integer_types + +# Pass as a value within ``headers`` to skip +# emitting some HTTP headers that are added automatically. +# The only headers that are supported are ``Accept-Encoding``, +# ``Host``, and ``User-Agent``. 
+SKIP_HEADER = "@@@SKIP_HEADER@@@"
+SKIPPABLE_HEADERS = frozenset(["accept-encoding", "host", "user-agent"])

 ACCEPT_ENCODING = "gzip,deflate"

 try:
diff --git a/newrelic/packages/urllib3/util/response.py b/newrelic/packages/urllib3/util/response.py
index 715868dd10..5ea609cced 100644
--- a/newrelic/packages/urllib3/util/response.py
+++ b/newrelic/packages/urllib3/util/response.py
@@ -1,7 +1,9 @@
 from __future__ import absolute_import
-from ..packages.six.moves import http_client as httplib
+
+from email.errors import MultipartInvariantViolationDefect, StartBoundaryNotFoundDefect

 from ..exceptions import HeaderParsingError
+from ..packages.six.moves import http_client as httplib


 def is_fp_closed(obj):
@@ -42,8 +44,7 @@ def assert_header_parsing(headers):

     Only works on Python 3.

-    :param headers: Headers to verify.
-    :type headers: `httplib.HTTPMessage`.
+    :param http.client.HTTPMessage headers: Headers to verify.

     :raises urllib3.exceptions.HeaderParsingError:
         If parsing errors are found.
@@ -66,6 +67,25 @@ def assert_header_parsing(headers):
         if isinstance(payload, (bytes, str)):
             unparsed_data = payload

+    if defects:
+        # httplib is assuming a response body is available
+        # when parsing headers even when httplib only sends
+        # header data to parse_headers(). This results in
+        # defects on multipart responses in particular.
+        # See: https://github.com/urllib3/urllib3/issues/800
+
+        # So we ignore the following defects:
+        # - StartBoundaryNotFoundDefect:
+        #     The claimed start boundary was never found.
+        # - MultipartInvariantViolationDefect:
+        #     A message claimed to be a multipart but no subparts were found.
+        defects = [
+            defect
+            for defect in defects
+            if not isinstance(
+                defect, (StartBoundaryNotFoundDefect, MultipartInvariantViolationDefect)
+            )
+        ]
+
     if defects or unparsed_data:
         raise HeaderParsingError(defects=defects, unparsed_data=unparsed_data)

@@ -76,8 +96,9 @@ def is_response_to_head(response):
     Checks whether the request of a response has been a HEAD-request.
     Handles the quirks of AppEngine.

-    :param conn:
-    :type conn: :class:`httplib.HTTPResponse`
+    :param http.client.HTTPResponse response:
+        Response to check if the originating request
+        used 'HEAD' as a method.
     """
     # FIXME: Can we do this somehow without accessing private httplib _method?
     method = response._method
diff --git a/newrelic/packages/urllib3/util/retry.py b/newrelic/packages/urllib3/util/retry.py
index ee30c91b14..c7dc42f1d6 100644
--- a/newrelic/packages/urllib3/util/retry.py
+++ b/newrelic/packages/urllib3/util/retry.py
@@ -1,23 +1,24 @@
 from __future__ import absolute_import
-import time
+
+import email
 import logging
+import re
+import time
+import warnings
 from collections import namedtuple
 from itertools import takewhile
-import email
-import re

 from ..exceptions import (
     ConnectTimeoutError,
+    InvalidHeader,
     MaxRetryError,
     ProtocolError,
+    ProxyError,
     ReadTimeoutError,
     ResponseError,
-    InvalidHeader,
-    ProxyError,
 )
 from ..packages import six

-
 log = logging.getLogger(__name__)


@@ -27,8 +28,51 @@
 )


+# TODO: In v2 we can remove this sentinel and metaclass with deprecated options.
+_Default = object()
+
+
+class _RetryMeta(type):
+    @property
+    def DEFAULT_METHOD_WHITELIST(cls):
+        warnings.warn(
+            "Using 'Retry.DEFAULT_METHOD_WHITELIST' is deprecated and "
+            "will be removed in v2.0. Use 'Retry.DEFAULT_ALLOWED_METHODS' instead",
+            DeprecationWarning,
+        )
+        return cls.DEFAULT_ALLOWED_METHODS
+
+    @DEFAULT_METHOD_WHITELIST.setter
+    def DEFAULT_METHOD_WHITELIST(cls, value):
+        warnings.warn(
+            "Using 'Retry.DEFAULT_METHOD_WHITELIST' is deprecated and "
+            "will be removed in v2.0. Use 'Retry.DEFAULT_ALLOWED_METHODS' instead",
+            DeprecationWarning,
+        )
+        cls.DEFAULT_ALLOWED_METHODS = value
+
+    @property
+    def DEFAULT_REDIRECT_HEADERS_BLACKLIST(cls):
+        warnings.warn(
+            "Using 'Retry.DEFAULT_REDIRECT_HEADERS_BLACKLIST' is deprecated and "
+            "will be removed in v2.0. Use 'Retry.DEFAULT_REMOVE_HEADERS_ON_REDIRECT' instead",
+            DeprecationWarning,
+        )
+        return cls.DEFAULT_REMOVE_HEADERS_ON_REDIRECT
+
+    @DEFAULT_REDIRECT_HEADERS_BLACKLIST.setter
+    def DEFAULT_REDIRECT_HEADERS_BLACKLIST(cls, value):
+        warnings.warn(
+            "Using 'Retry.DEFAULT_REDIRECT_HEADERS_BLACKLIST' is deprecated and "
+            "will be removed in v2.0. Use 'Retry.DEFAULT_REMOVE_HEADERS_ON_REDIRECT' instead",
+            DeprecationWarning,
+        )
+        cls.DEFAULT_REMOVE_HEADERS_ON_REDIRECT = value
+
+
+@six.add_metaclass(_RetryMeta)
 class Retry(object):
-    """ Retry configuration.
+    """Retry configuration.

     Each retry attempt will create a new Retry object with updated values, so
     they can be safely reused.
@@ -54,8 +98,7 @@ class Retry(object):
         Total number of retries to allow. Takes precedence over other counts.

         Set to ``None`` to remove this constraint and fall back on other
-        counts. It's a good idea to set this to some sensibly-high value to
-        account for unexpected edge cases and avoid infinite retry loops.
+        counts.

         Set to ``0`` to fail on the first retry.

@@ -96,18 +139,35 @@ class Retry(object):

         Set to ``0`` to fail on the first retry of this type.

-    :param iterable method_whitelist:
+    :param int other:
+        How many times to retry on other errors.
+
+        Other errors are errors that are not connect, read, redirect or status errors.
+        These errors might be raised after the request was sent to the server, so the
+        request might have side-effects.
+
+        Set to ``0`` to fail on the first retry of this type.
+
+        If ``total`` is not set, it's a good idea to set this to 0 to account
+        for unexpected edge cases and avoid infinite retry loops.
+
+    :param iterable allowed_methods:
         Set of uppercased HTTP method verbs that we should retry on.

         By default, we only retry on methods which are considered to be
         idempotent (multiple requests with the same parameters end with the
-        same state). See :attr:`Retry.DEFAULT_METHOD_WHITELIST`.
+        same state). See :attr:`Retry.DEFAULT_ALLOWED_METHODS`.

         Set to a ``False`` value to retry on any verb.

+        .. warning::
+
+            Previously this parameter was named ``method_whitelist``; that
+            usage is deprecated in v1.26.0 and will be removed in v2.0.
+
     :param iterable status_forcelist:
         A set of integer HTTP status codes that we should force a retry on.
-        A retry is initiated if the request method is in ``method_whitelist``
+        A retry is initiated if the request method is in ``allowed_methods``
         and the response status code is in ``status_forcelist``.

         By default, this is disabled with ``None``.
@@ -148,13 +208,16 @@ class Retry(object):
         request.
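    Example of the renamed method filter (editor's sketch, not upstream text;
    assumes the standalone urllib3 >= 1.26 package):

        import warnings
        from urllib3.util.retry import Retry

        retry = Retry(total=3, allowed_methods=frozenset(["GET", "PUT"]))

        with warnings.catch_warnings(record=True) as caught:
            warnings.simplefilter("always")
            legacy = Retry(method_whitelist=frozenset(["GET"]))  # deprecated spelling
        assert caught[0].category is DeprecationWarning
        assert legacy.allowed_methods == frozenset(["GET"])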
""" - DEFAULT_METHOD_WHITELIST = frozenset( + #: Default methods to be used for ``allowed_methods`` + DEFAULT_ALLOWED_METHODS = frozenset( ["HEAD", "GET", "PUT", "DELETE", "OPTIONS", "TRACE"] ) + #: Default status codes to be used for ``status_forcelist`` RETRY_AFTER_STATUS_CODES = frozenset([413, 429, 503]) - DEFAULT_REDIRECT_HEADERS_BLACKLIST = frozenset(["Authorization"]) + #: Default headers to be used for ``remove_headers_on_redirect`` + DEFAULT_REMOVE_HEADERS_ON_REDIRECT = frozenset(["Authorization"]) #: Maximum backoff time. BACKOFF_MAX = 120 @@ -166,20 +229,43 @@ def __init__( read=None, redirect=None, status=None, - method_whitelist=DEFAULT_METHOD_WHITELIST, + other=None, + allowed_methods=_Default, status_forcelist=None, backoff_factor=0, raise_on_redirect=True, raise_on_status=True, history=None, respect_retry_after_header=True, - remove_headers_on_redirect=DEFAULT_REDIRECT_HEADERS_BLACKLIST, + remove_headers_on_redirect=_Default, + # TODO: Deprecated, remove in v2.0 + method_whitelist=_Default, ): + if method_whitelist is not _Default: + if allowed_methods is not _Default: + raise ValueError( + "Using both 'allowed_methods' and " + "'method_whitelist' together is not allowed. " + "Instead only use 'allowed_methods'" + ) + warnings.warn( + "Using 'method_whitelist' with Retry is deprecated and " + "will be removed in v2.0. Use 'allowed_methods' instead", + DeprecationWarning, + stacklevel=2, + ) + allowed_methods = method_whitelist + if allowed_methods is _Default: + allowed_methods = self.DEFAULT_ALLOWED_METHODS + if remove_headers_on_redirect is _Default: + remove_headers_on_redirect = self.DEFAULT_REMOVE_HEADERS_ON_REDIRECT + self.total = total self.connect = connect self.read = read self.status = status + self.other = other if redirect is False or total is False: redirect = 0 @@ -187,7 +273,7 @@ def __init__( self.redirect = redirect self.status_forcelist = status_forcelist or set() - self.method_whitelist = method_whitelist + self.allowed_methods = allowed_methods self.backoff_factor = backoff_factor self.raise_on_redirect = raise_on_redirect self.raise_on_status = raise_on_status @@ -204,7 +290,7 @@ def new(self, **kw): read=self.read, redirect=self.redirect, status=self.status, - method_whitelist=self.method_whitelist, + other=self.other, status_forcelist=self.status_forcelist, backoff_factor=self.backoff_factor, raise_on_redirect=self.raise_on_redirect, @@ -213,12 +299,29 @@ def new(self, **kw): remove_headers_on_redirect=self.remove_headers_on_redirect, respect_retry_after_header=self.respect_retry_after_header, ) + + # TODO: If already given in **kw we use what's given to us + # If not given we need to figure out what to pass. We decide + # based on whether our class has the 'method_whitelist' property + # and if so we pass the deprecated 'method_whitelist' otherwise + # we use 'allowed_methods'. Remove in v2.0 + if "method_whitelist" not in kw and "allowed_methods" not in kw: + if "method_whitelist" in self.__dict__: + warnings.warn( + "Using 'method_whitelist' with Retry is deprecated and " + "will be removed in v2.0. 
Use 'allowed_methods' instead", + DeprecationWarning, + ) + params["method_whitelist"] = self.allowed_methods + else: + params["allowed_methods"] = self.allowed_methods + params.update(kw) return type(self)(**params) @classmethod def from_int(cls, retries, redirect=True, default=None): - """ Backwards-compatibility for the old retries format.""" + """Backwards-compatibility for the old retries format.""" if retries is None: retries = default if default is not None else cls.DEFAULT @@ -231,7 +334,7 @@ def from_int(cls, retries, redirect=True, default=None): return new_retries def get_backoff_time(self): - """ Formula for computing the current backoff + """Formula for computing the current backoff :rtype: float """ @@ -252,10 +355,17 @@ def parse_retry_after(self, retry_after): if re.match(r"^\s*[0-9]+\s*$", retry_after): seconds = int(retry_after) else: - retry_date_tuple = email.utils.parsedate(retry_after) + retry_date_tuple = email.utils.parsedate_tz(retry_after) if retry_date_tuple is None: raise InvalidHeader("Invalid Retry-After header: %s" % retry_after) - retry_date = time.mktime(retry_date_tuple) + if retry_date_tuple[9] is None: # Python 2 + # Assume UTC if no timezone was specified + # On Python2.7, parsedate_tz returns None for a timezone offset + # instead of 0 if no timezone is given, where mktime_tz treats + # a None timezone offset as local time. + retry_date_tuple = retry_date_tuple[:9] + (0,) + retry_date_tuple[10:] + + retry_date = email.utils.mktime_tz(retry_date_tuple) seconds = retry_date - time.time() if seconds < 0: @@ -264,7 +374,7 @@ def parse_retry_after(self, retry_after): return seconds def get_retry_after(self, response): - """ Get the value of Retry-After in seconds. """ + """Get the value of Retry-After in seconds.""" retry_after = response.getheader("Retry-After") @@ -288,7 +398,7 @@ def _sleep_backoff(self): time.sleep(backoff) def sleep(self, response=None): - """ Sleep between retry attempts. + """Sleep between retry attempts. This method will respect a server's ``Retry-After`` response header and sleep the duration of the time requested. If that is not present, it @@ -304,7 +414,7 @@ def sleep(self, response=None): self._sleep_backoff() def _is_connection_error(self, err): - """ Errors when we're fairly sure that the server did not receive the + """Errors when we're fairly sure that the server did not receive the request, so it should be safe to retry. """ if isinstance(err, ProxyError): @@ -312,22 +422,33 @@ def _is_connection_error(self, err): return isinstance(err, ConnectTimeoutError) def _is_read_error(self, err): - """ Errors that occur after the request has been started, so we should + """Errors that occur after the request has been started, so we should assume that the server began processing it. """ return isinstance(err, (ReadTimeoutError, ProtocolError)) def _is_method_retryable(self, method): - """ Checks if a given HTTP method should be retried upon, depending if - it is included on the method whitelist. + """Checks if a given HTTP method should be retried upon, depending if + it is included in the allowed_methods """ - if self.method_whitelist and method.upper() not in self.method_whitelist: - return False + # TODO: For now favor if the Retry implementation sets its own method_whitelist + # property outside of our constructor to avoid breaking custom implementations. + if "method_whitelist" in self.__dict__: + warnings.warn( + "Using 'method_whitelist' with Retry is deprecated and " + "will be removed in v2.0. 
Use 'allowed_methods' instead", + DeprecationWarning, + ) + allowed_methods = self.method_whitelist + else: + allowed_methods = self.allowed_methods + if allowed_methods and method.upper() not in allowed_methods: + return False return True def is_retry(self, method, status_code, has_retry_after=False): - """ Is this method/status code retryable? (Based on whitelists and control + """Is this method/status code retryable? (Based on allowlists and control variables such as the number of total retries to allow, whether to respect the Retry-After header, whether this header is present, and whether the returned status code is on the list of status codes to @@ -347,8 +468,15 @@ def is_retry(self, method, status_code, has_retry_after=False): ) def is_exhausted(self): - """ Are we out of retries? """ - retry_counts = (self.total, self.connect, self.read, self.redirect, self.status) + """Are we out of retries?""" + retry_counts = ( + self.total, + self.connect, + self.read, + self.redirect, + self.status, + self.other, + ) retry_counts = list(filter(None, retry_counts)) if not retry_counts: return False @@ -364,7 +492,7 @@ def increment( _pool=None, _stacktrace=None, ): - """ Return a new Retry object with incremented retry counters. + """Return a new Retry object with incremented retry counters. :param response: A response object, or None, if the server did not return a response. @@ -386,6 +514,7 @@ def increment( read = self.read redirect = self.redirect status_count = self.status + other = self.other cause = "unknown" status = None redirect_location = None @@ -404,6 +533,11 @@ def increment( elif read is not None: read -= 1 + elif error: + # Other retry? + if other is not None: + other -= 1 + elif response and response.get_redirect_location(): # Redirect retry? if redirect is not None: @@ -414,7 +548,7 @@ def increment( else: # Incrementing because of a server error like a 500 in - # status_forcelist and a the given method is in the whitelist + # status_forcelist and the given method is in the allowed_methods cause = ResponseError.GENERIC_ERROR if response and response.status: if status_count is not None: @@ -432,6 +566,7 @@ def increment( read=read, redirect=redirect, status=status_count, + other=other, history=history, ) @@ -448,6 +583,20 @@ def __repr__(self): "read={self.read}, redirect={self.redirect}, status={self.status})" ).format(cls=type(self), self=self) + def __getattr__(self, item): + if item == "method_whitelist": + # TODO: Remove this deprecated alias in v2.0 + warnings.warn( + "Using 'method_whitelist' with Retry is deprecated and " + "will be removed in v2.0. 
Use 'allowed_methods' instead", + DeprecationWarning, + ) + return self.allowed_methods + try: + return getattr(super(Retry, self), item) + except AttributeError: + return getattr(Retry, item) + # For backwards compatibility (equivalent to pre-v1.9): Retry.DEFAULT = Retry(3) diff --git a/newrelic/packages/urllib3/util/ssl_.py b/newrelic/packages/urllib3/util/ssl_.py index 3d89a56c08..8f867812a5 100644 --- a/newrelic/packages/urllib3/util/ssl_.py +++ b/newrelic/packages/urllib3/util/ssl_.py @@ -1,22 +1,27 @@ from __future__ import absolute_import -import errno -import warnings + import hmac import os import sys - +import warnings from binascii import hexlify, unhexlify from hashlib import md5, sha1, sha256 -from .url import IPV4_RE, BRACELESS_IPV6_ADDRZ_RE -from ..exceptions import SSLError, InsecurePlatformWarning, SNIMissingWarning +from ..exceptions import ( + InsecurePlatformWarning, + ProxySchemeUnsupported, + SNIMissingWarning, + SSLError, +) from ..packages import six - +from .url import BRACELESS_IPV6_ADDRZ_RE, IPV4_RE SSLContext = None +SSLTransport = None HAS_SNI = False IS_PYOPENSSL = False IS_SECURETRANSPORT = False +ALPN_PROTOCOLS = ["http/1.1"] # Maps the length of a digest to a possible hash function producing this digest HASHFUNC_MAP = {32: md5, 40: sha1, 64: sha256} @@ -39,11 +44,21 @@ def _const_compare_digest_backport(a, b): try: # Test for SSL features import ssl - from ssl import wrap_socket, CERT_REQUIRED + from ssl import CERT_REQUIRED, wrap_socket +except ImportError: + pass + +try: from ssl import HAS_SNI # Has SNI? except ImportError: pass +try: + from .ssltransport import SSLTransport +except ImportError: + pass + + try: # Platform-specific: Python 3.6 from ssl import PROTOCOL_TLS @@ -56,14 +71,25 @@ def _const_compare_digest_backport(a, b): except ImportError: PROTOCOL_SSLv23 = PROTOCOL_TLS = 2 +try: + from ssl import PROTOCOL_TLS_CLIENT +except ImportError: + PROTOCOL_TLS_CLIENT = PROTOCOL_TLS + try: - from ssl import OP_NO_SSLv2, OP_NO_SSLv3, OP_NO_COMPRESSION + from ssl import OP_NO_COMPRESSION, OP_NO_SSLv2, OP_NO_SSLv3 except ImportError: OP_NO_SSLv2, OP_NO_SSLv3 = 0x1000000, 0x2000000 OP_NO_COMPRESSION = 0x20000 +try: # OP_NO_TICKET was added in Python 3.6 + from ssl import OP_NO_TICKET +except ImportError: + OP_NO_TICKET = 0x4000 + + # A secure default. # Sources for more information on TLS ciphers: # @@ -138,7 +164,7 @@ def wrap_socket(self, socket, server_hostname=None, server_side=False): "urllib3 from configuring SSL appropriately and may cause " "certain SSL connections to fail. You can upgrade to a newer " "version of Python to solve this. For more information, see " - "https://urllib3.readthedocs.io/en/latest/advanced-usage.html" + "https://urllib3.readthedocs.io/en/1.26.x/advanced-usage.html" "#ssl-warnings", InsecurePlatformWarning, ) @@ -250,14 +276,18 @@ def create_urllib3_context( ``ssl.CERT_REQUIRED``. :param options: Specific OpenSSL options. These default to ``ssl.OP_NO_SSLv2``, - ``ssl.OP_NO_SSLv3``, ``ssl.OP_NO_COMPRESSION``. + ``ssl.OP_NO_SSLv3``, ``ssl.OP_NO_COMPRESSION``, and ``ssl.OP_NO_TICKET``. :param ciphers: Which cipher suites to allow the server to select. 
     :returns:
         Constructed SSLContext object with specified options
     :rtype: SSLContext
     """
-    context = SSLContext(ssl_version or PROTOCOL_TLS)
+    # PROTOCOL_TLS is deprecated in Python 3.10
+    if not ssl_version or ssl_version == PROTOCOL_TLS:
+        ssl_version = PROTOCOL_TLS_CLIENT
+
+    context = SSLContext(ssl_version)

     context.set_ciphers(ciphers or DEFAULT_CIPHERS)

@@ -273,6 +303,11 @@ def create_urllib3_context(
         # Disable compression to prevent CRIME attacks for OpenSSL 1.0+
         # (issue #309)
         options |= OP_NO_COMPRESSION
+        # TLSv1.2 only. Unless set explicitly, do not request tickets.
+        # This may save some bandwidth on wire, and although the ticket is encrypted,
+        # there is a risk associated with it being on wire,
+        # if the server is not rotating its ticketing keys properly.
+        options |= OP_NO_TICKET

     context.options |= options

@@ -287,18 +322,32 @@ def create_urllib3_context(
     ) is not None:
         context.post_handshake_auth = True

-    context.verify_mode = cert_reqs
-    if (
-        getattr(context, "check_hostname", None) is not None
-    ):  # Platform-specific: Python 3.2
-        # We do our own verification, including fingerprints and alternative
-        # hostnames. So disable it here
-        context.check_hostname = False
+    def disable_check_hostname():
+        if (
+            getattr(context, "check_hostname", None) is not None
+        ):  # Platform-specific: Python 3.2
+            # We do our own verification, including fingerprints and alternative
+            # hostnames. So disable it here
+            context.check_hostname = False
+
+    # The order of the below lines setting verify_mode and check_hostname
+    # matter due to safe-guards SSLContext has to prevent an SSLContext with
+    # check_hostname=True, verify_mode=NONE/OPTIONAL. This is made even more
+    # complex because we don't know whether PROTOCOL_TLS_CLIENT will be used
+    # or not so we don't know the initial state of the freshly created SSLContext.
+    if cert_reqs == ssl.CERT_REQUIRED:
+        context.verify_mode = cert_reqs
+        disable_check_hostname()
+    else:
+        disable_check_hostname()
+        context.verify_mode = cert_reqs

     # Enable logging of TLS session keys via defacto standard environment variable
-    # 'SSLKEYLOGFILE', if the feature is available (Python 3.8+).
+    # 'SSLKEYLOGFILE', if the feature is available (Python 3.8+). Skip empty values.
     if hasattr(context, "keylog_filename"):
-        context.keylog_filename = os.environ.get("SSLKEYLOGFILE")
+        sslkeylogfile = os.environ.get("SSLKEYLOGFILE")
+        if sslkeylogfile:
+            context.keylog_filename = sslkeylogfile

     return context

@@ -316,6 +365,7 @@ def ssl_wrap_socket(
     ca_cert_dir=None,
     key_password=None,
     ca_cert_data=None,
+    tls_in_tls=False,
 ):
     """
     All arguments except for server_hostname, ssl_context, and ca_cert_dir have
@@ -337,6 +387,8 @@ def ssl_wrap_socket(
     :param ca_cert_data:
         Optional string containing CA certificates in PEM format suitable for
         passing as the cadata parameter to SSLContext.load_verify_locations()
+    :param tls_in_tls:
+        Use SSLTransport to wrap the existing socket.
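    The careful verify_mode/check_hostname ordering above exists because
    ``PROTOCOL_TLS_CLIENT`` arrives with hostname checking pre-enabled.
    A stdlib-only illustration (editor's sketch):

        import ssl

        ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
        assert ctx.check_hostname and ctx.verify_mode == ssl.CERT_REQUIRED

        # Relaxing verification must disable check_hostname first; doing it
        # in the other order raises ValueError -- hence disable_check_hostname().
        ctx.check_hostname = False
        ctx.verify_mode = ssl.CERT_NONE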
""" context = ssl_context if context is None: @@ -348,14 +400,8 @@ def ssl_wrap_socket( if ca_certs or ca_cert_dir or ca_cert_data: try: context.load_verify_locations(ca_certs, ca_cert_dir, ca_cert_data) - except IOError as e: # Platform-specific: Python 2.7 + except (IOError, OSError) as e: raise SSLError(e) - # Py33 raises FileNotFoundError which subclasses OSError - # These are not equivalent unless we check the errno attribute - except OSError as e: # Platform-specific: Python 3.3 and beyond - if e.errno == errno.ENOENT: - raise SSLError(e) - raise elif ssl_context is None and hasattr(context, "load_default_certs"): # try to load OS default certs; works well on Windows (require Python3.4+) @@ -373,28 +419,39 @@ def ssl_wrap_socket( else: context.load_cert_chain(certfile, keyfile, key_password) + try: + if hasattr(context, "set_alpn_protocols"): + context.set_alpn_protocols(ALPN_PROTOCOLS) + except NotImplementedError: # Defensive: in CI, we always have set_alpn_protocols + pass + # If we detect server_hostname is an IP address then the SNI # extension should not be used according to RFC3546 Section 3.1 - # We shouldn't warn the user if SNI isn't available but we would - # not be using SNI anyways due to IP address for server_hostname. - if ( - server_hostname is not None and not is_ipaddress(server_hostname) - ) or IS_SECURETRANSPORT: - if HAS_SNI and server_hostname is not None: - return context.wrap_socket(sock, server_hostname=server_hostname) - + use_sni_hostname = server_hostname and not is_ipaddress(server_hostname) + # SecureTransport uses server_hostname in certificate verification. + send_sni = (use_sni_hostname and HAS_SNI) or ( + IS_SECURETRANSPORT and server_hostname + ) + # Do not warn the user if server_hostname is an invalid SNI hostname. + if not HAS_SNI and use_sni_hostname: warnings.warn( "An HTTPS request has been made, but the SNI (Server Name " "Indication) extension to TLS is not available on this platform. " "This may cause the server to present an incorrect TLS " "certificate, which can cause validation failures. You can upgrade to " "a newer version of Python to solve this. For more information, see " - "https://urllib3.readthedocs.io/en/latest/advanced-usage.html" + "https://urllib3.readthedocs.io/en/1.26.x/advanced-usage.html" "#ssl-warnings", SNIMissingWarning, ) - return context.wrap_socket(sock) + if send_sni: + ssl_sock = _ssl_wrap_socket_impl( + sock, context, tls_in_tls, server_hostname=server_hostname + ) + else: + ssl_sock = _ssl_wrap_socket_impl(sock, context, tls_in_tls) + return ssl_sock def is_ipaddress(hostname): @@ -419,3 +476,20 @@ def _is_key_file_encrypted(key_file): return True return False + + +def _ssl_wrap_socket_impl(sock, ssl_context, tls_in_tls, server_hostname=None): + if tls_in_tls: + if not SSLTransport: + # Import error, ssl is not available. 
+            raise ProxySchemeUnsupported(
+                "TLS in TLS requires support for the 'ssl' module"
+            )
+
+        SSLTransport._validate_ssl_context_for_tls_in_tls(ssl_context)
+        return SSLTransport(sock, ssl_context, server_hostname)
+
+    if server_hostname:
+        return ssl_context.wrap_socket(sock, server_hostname=server_hostname)
+    else:
+        return ssl_context.wrap_socket(sock)
diff --git a/newrelic/packages/urllib3/util/ssltransport.py b/newrelic/packages/urllib3/util/ssltransport.py
new file mode 100644
index 0000000000..c2186bced9
--- /dev/null
+++ b/newrelic/packages/urllib3/util/ssltransport.py
@@ -0,0 +1,221 @@
+import io
+import socket
+import ssl
+
+from urllib3.exceptions import ProxySchemeUnsupported
+from urllib3.packages import six
+
+SSL_BLOCKSIZE = 16384
+
+
+class SSLTransport:
+    """
+    The SSLTransport wraps an existing socket and establishes an SSL connection.
+
+    Contrary to Python's implementation of SSLSocket, it allows you to chain
+    multiple TLS connections together. It's particularly useful if you need to
+    implement TLS within TLS.
+
+    The class supports most of the socket API operations.
+    """
+
+    @staticmethod
+    def _validate_ssl_context_for_tls_in_tls(ssl_context):
+        """
+        Raises a ProxySchemeUnsupported if the provided ssl_context can't be used
+        for TLS in TLS.
+
+        The only requirement is that the ssl_context provides the 'wrap_bio'
+        method.
+        """
+
+        if not hasattr(ssl_context, "wrap_bio"):
+            if six.PY2:
+                raise ProxySchemeUnsupported(
+                    "TLS in TLS requires SSLContext.wrap_bio() which isn't "
+                    "supported on Python 2"
+                )
+            else:
+                raise ProxySchemeUnsupported(
+                    "TLS in TLS requires SSLContext.wrap_bio() which isn't "
+                    "available on non-native SSLContext"
+                )
+
+    def __init__(
+        self, socket, ssl_context, server_hostname=None, suppress_ragged_eofs=True
+    ):
+        """
+        Create an SSLTransport around socket using the provided ssl_context.
+        """
+        self.incoming = ssl.MemoryBIO()
+        self.outgoing = ssl.MemoryBIO()
+
+        self.suppress_ragged_eofs = suppress_ragged_eofs
+        self.socket = socket
+
+        self.sslobj = ssl_context.wrap_bio(
+            self.incoming, self.outgoing, server_hostname=server_hostname
+        )
+
+        # Perform initial handshake.
+        self._ssl_io_loop(self.sslobj.do_handshake)
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, *_):
+        self.close()
+
+    def fileno(self):
+        return self.socket.fileno()
+
+    def read(self, len=1024, buffer=None):
+        return self._wrap_ssl_read(len, buffer)
+
+    def recv(self, len=1024, flags=0):
+        if flags != 0:
+            raise ValueError("non-zero flags not allowed in calls to recv")
+        return self._wrap_ssl_read(len)
+
+    def recv_into(self, buffer, nbytes=None, flags=0):
+        if flags != 0:
+            raise ValueError("non-zero flags not allowed in calls to recv_into")
+        if buffer and (nbytes is None):
+            nbytes = len(buffer)
+        elif nbytes is None:
+            nbytes = 1024
+        return self.read(nbytes, buffer)
+
+    def sendall(self, data, flags=0):
+        if flags != 0:
+            raise ValueError("non-zero flags not allowed in calls to sendall")
+        count = 0
+        with memoryview(data) as view, view.cast("B") as byte_view:
+            amount = len(byte_view)
+            while count < amount:
+                v = self.send(byte_view[count:])
+                count += v
+
+    def send(self, data, flags=0):
+        if flags != 0:
+            raise ValueError("non-zero flags not allowed in calls to send")
+        response = self._ssl_io_loop(self.sslobj.write, data)
+        return response
+
+    def makefile(
+        self, mode="r", buffering=None, encoding=None, errors=None, newline=None
+    ):
+        """
+        Python's httpclient uses makefile and buffered io when reading HTTP
+        messages and we need to support it.
+
+        This is unfortunately a copy and paste of socket.py makefile with small
+        changes to point to the socket directly.
+        """
+        if not set(mode) <= {"r", "w", "b"}:
+            raise ValueError("invalid mode %r (only r, w, b allowed)" % (mode,))
+
+        writing = "w" in mode
+        reading = "r" in mode or not writing
+        assert reading or writing
+        binary = "b" in mode
+        rawmode = ""
+        if reading:
+            rawmode += "r"
+        if writing:
+            rawmode += "w"
+        raw = socket.SocketIO(self, rawmode)
+        self.socket._io_refs += 1
+        if buffering is None:
+            buffering = -1
+        if buffering < 0:
+            buffering = io.DEFAULT_BUFFER_SIZE
+        if buffering == 0:
+            if not binary:
+                raise ValueError("unbuffered streams must be binary")
+            return raw
+        if reading and writing:
+            buffer = io.BufferedRWPair(raw, raw, buffering)
+        elif reading:
+            buffer = io.BufferedReader(raw, buffering)
+        else:
+            assert writing
+            buffer = io.BufferedWriter(raw, buffering)
+        if binary:
+            return buffer
+        text = io.TextIOWrapper(buffer, encoding, errors, newline)
+        text.mode = mode
+        return text
+
+    def unwrap(self):
+        self._ssl_io_loop(self.sslobj.unwrap)
+
+    def close(self):
+        self.socket.close()
+
+    def getpeercert(self, binary_form=False):
+        return self.sslobj.getpeercert(binary_form)
+
+    def version(self):
+        return self.sslobj.version()
+
+    def cipher(self):
+        return self.sslobj.cipher()
+
+    def selected_alpn_protocol(self):
+        return self.sslobj.selected_alpn_protocol()
+
+    def selected_npn_protocol(self):
+        return self.sslobj.selected_npn_protocol()
+
+    def shared_ciphers(self):
+        return self.sslobj.shared_ciphers()
+
+    def compression(self):
+        return self.sslobj.compression()
+
+    def settimeout(self, value):
+        self.socket.settimeout(value)
+
+    def gettimeout(self):
+        return self.socket.gettimeout()
+
+    def _decref_socketios(self):
+        self.socket._decref_socketios()
+
+    def _wrap_ssl_read(self, len, buffer=None):
+        try:
+            return self._ssl_io_loop(self.sslobj.read, len, buffer)
+        except ssl.SSLError as e:
+            if e.errno == ssl.SSL_ERROR_EOF and self.suppress_ragged_eofs:
+                return 0  # eof, return 0.
+            else:
+                raise
+
+    def _ssl_io_loop(self, func, *args):
+        """Performs an I/O loop between incoming/outgoing and the socket."""
+        should_loop = True
+        ret = None
+
+        while should_loop:
+            errno = None
+            try:
+                ret = func(*args)
+            except ssl.SSLError as e:
+                if e.errno not in (ssl.SSL_ERROR_WANT_READ, ssl.SSL_ERROR_WANT_WRITE):
+                    # WANT_READ, and WANT_WRITE are expected, others are not.
+                    raise e
+                errno = e.errno
+
+            buf = self.outgoing.read()
+            self.socket.sendall(buf)
+
+            if errno is None:
+                should_loop = False
+            elif errno == ssl.SSL_ERROR_WANT_READ:
+                buf = self.socket.recv(SSL_BLOCKSIZE)
+                if buf:
+                    self.incoming.write(buf)
+                else:
+                    self.incoming.write_eof()
+        return ret
diff --git a/newrelic/packages/urllib3/util/timeout.py b/newrelic/packages/urllib3/util/timeout.py
index b61fea75c5..ff69593b05 100644
--- a/newrelic/packages/urllib3/util/timeout.py
+++ b/newrelic/packages/urllib3/util/timeout.py
@@ -1,9 +1,10 @@
 from __future__ import absolute_import

+import time
+
 # The default socket timeout, used by httplib to indicate that no timeout was
 # specified by the user
 from socket import _GLOBAL_DEFAULT_TIMEOUT
-import time

 from ..exceptions import TimeoutStateError

@@ -17,22 +18,28 @@


 class Timeout(object):
-    """ Timeout configuration.
+    """Timeout configuration.
+
+    Timeouts can be defined as a default for a pool:
+
+    .. code-block:: python
+
+        timeout = Timeout(connect=2.0, read=7.0)
+        http = PoolManager(timeout=timeout)
+        response = http.request('GET', 'http://example.com/')

-    Timeouts can be defined as a default for a pool::
+    Or per-request (which overrides the default for the pool):

-        timeout = Timeout(connect=2.0, read=7.0)
-        http = PoolManager(timeout=timeout)
-        response = http.request('GET', 'http://example.com/')
+    .. code-block:: python

-    Or per-request (which overrides the default for the pool)::
+        response = http.request('GET', 'http://example.com/', timeout=Timeout(10))

-        response = http.request('GET', 'http://example.com/', timeout=Timeout(10))
+    Timeouts can be disabled by setting all the parameters to ``None``:

-    Timeouts can be disabled by setting all the parameters to ``None``::
+    .. code-block:: python

-        no_timeout = Timeout(connect=None, read=None)
-        response = http.request('GET', 'http://example.com/, timeout=no_timeout)
+        no_timeout = Timeout(connect=None, read=None)
+        response = http.request('GET', 'http://example.com/', timeout=no_timeout)


     :param total:
@@ -43,7 +50,7 @@ class Timeout(object):

         Defaults to None.

-    :type total: integer, float, or None
+    :type total: int, float, or None

     :param connect:
         The maximum amount of time (in seconds) to wait for a connection
@@ -53,7 +60,7 @@ class Timeout(object):
         `_.
         None will set an infinite timeout for connection attempts.

-    :type connect: integer, float, or None
+    :type connect: int, float, or None

     :param read:
         The maximum amount of time (in seconds) to wait between consecutive
@@ -63,7 +70,7 @@ class Timeout(object):
         `_.
         None will set an infinite timeout.

-    :type read: integer, float, or None
+    :type read: int, float, or None

     .. note::

@@ -111,7 +118,7 @@ def __repr__(self):

     @classmethod
     def _validate_timeout(cls, value, name):
-        """ Check that a timeout attribute is valid.
+        """Check that a timeout attribute is valid.

         :param value: The timeout value to validate
         :param name: The name of the timeout attribute to validate. This is
@@ -157,7 +164,7 @@ def _validate_timeout(cls, value, name):

     @classmethod
     def from_float(cls, timeout):
-        """ Create a new Timeout from a legacy timeout value.
+ """Create a new Timeout from a legacy timeout value. The timeout value used by httplib.py sets the same timeout on the connect(), and recv() socket requests. This creates a :class:`Timeout` @@ -172,7 +179,7 @@ def from_float(cls, timeout): return Timeout(read=timeout, connect=timeout) def clone(self): - """ Create a copy of the timeout object + """Create a copy of the timeout object Timeout properties are stored per-pool but each request needs a fresh Timeout object to ensure each one has its own start/stop configured. @@ -186,7 +193,7 @@ def clone(self): return Timeout(connect=self._connect, read=self._read, total=self.total) def start_connect(self): - """ Start the timeout clock, used during a connect() attempt + """Start the timeout clock, used during a connect() attempt :raises urllib3.exceptions.TimeoutStateError: if you attempt to start a timer that has been started already. @@ -197,7 +204,7 @@ def start_connect(self): return self._start_connect def get_connect_duration(self): - """ Gets the time elapsed since the call to :meth:`start_connect`. + """Gets the time elapsed since the call to :meth:`start_connect`. :return: Elapsed time in seconds. :rtype: float @@ -212,7 +219,7 @@ def get_connect_duration(self): @property def connect_timeout(self): - """ Get the value to use when setting a connection timeout. + """Get the value to use when setting a connection timeout. This will be a positive float or integer, the value None (never timeout), or the default system timeout. @@ -230,7 +237,7 @@ def connect_timeout(self): @property def read_timeout(self): - """ Get the value for the read timeout. + """Get the value for the read timeout. This assumes some time has elapsed in the connection timeout and computes the read timeout appropriately. diff --git a/newrelic/packages/urllib3/util/url.py b/newrelic/packages/urllib3/util/url.py index 793324e5fd..81a03da9e3 100644 --- a/newrelic/packages/urllib3/util/url.py +++ b/newrelic/packages/urllib3/util/url.py @@ -1,11 +1,11 @@ from __future__ import absolute_import + import re from collections import namedtuple from ..exceptions import LocationParseError from ..packages import six - url_attrs = ["scheme", "auth", "host", "port", "path", "query", "fragment"] # We only want to normalize urls with an HTTP(S) scheme. 
@@ -63,12 +63,12 @@
 BRACELESS_IPV6_ADDRZ_RE = re.compile("^" + IPV6_ADDRZ_PAT[2:-2] + "$")
 ZONE_ID_RE = re.compile("(" + ZONE_ID_PAT + r")\]$")

-SUBAUTHORITY_PAT = (u"^(?:(.*)@)?(%s|%s|%s)(?::([0-9]{0,5}))?$") % (
+_HOST_PORT_PAT = ("^(%s|%s|%s)(?::([0-9]{0,5}))?$") % (
     REG_NAME_PAT,
     IPV4_PAT,
     IPV6_ADDRZ_PAT,
 )
-SUBAUTHORITY_RE = re.compile(SUBAUTHORITY_PAT, re.UNICODE | re.DOTALL)
+_HOST_PORT_RE = re.compile(_HOST_PORT_PAT, re.UNICODE | re.DOTALL)

 UNRESERVED_CHARS = set(
     "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789._-~"
@@ -365,7 +365,9 @@ def parse_url(url):
         scheme = scheme.lower()

     if authority:
-        auth, host, port = SUBAUTHORITY_RE.match(authority).groups()
+        auth, _, host_port = authority.rpartition("@")
+        auth = auth or None
+        host, port = _HOST_PORT_RE.match(host_port).groups()
         if auth and normalize_uri:
             auth = _encode_invalid_chars(auth, USERINFO_CHARS)
         if port == "":
diff --git a/newrelic/packages/urllib3/util/wait.py b/newrelic/packages/urllib3/util/wait.py
index d71d2fd722..c280646c7b 100644
--- a/newrelic/packages/urllib3/util/wait.py
+++ b/newrelic/packages/urllib3/util/wait.py
@@ -1,7 +1,7 @@
 import errno
-from functools import partial
 import select
 import sys
+from functools import partial

 try:
     from time import monotonic
@@ -140,14 +140,14 @@ def wait_for_socket(*args, **kwargs):


 def wait_for_read(sock, timeout=None):
-    """ Waits for reading to be available on a given socket.
+    """Waits for reading to be available on a given socket.
     Returns True if the socket is readable, or False if the timeout expired.
     """
     return wait_for_socket(sock, read=True, timeout=timeout)


 def wait_for_write(sock, timeout=None):
-    """ Waits for writing to be available on a given socket.
+    """Waits for writing to be available on a given socket.
     Returns True if the socket is writable, or False if the timeout expired.
     """
     return wait_for_socket(sock, write=True, timeout=timeout)
diff --git a/pyproject.toml b/pyproject.toml
index bfea728370..a1b2989afc 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -6,7 +6,7 @@ include = '\.pyi?$'
 profile = "black"

 [tool.pylint.messages_control]
-disable = "B101,C0103,C0114,C0115,C0116,C0302,C0415,E0401,E1120,E122,E126,E127,E128,E203,E501,E722,R0201,R0205,R0401,R0801,R0902,R0903,R0904,R0912,R0913,R0914,R0915,R1710,R1725,W0201,W0212,W0223,W0603,W0612,W0613,W0702,W0703,W0706,W504,line-too-long,redefined-outer-name"
+disable = "B101,C0103,C0114,C0115,C0116,C0209,C0302,C0415,E0401,E1120,E122,E126,E127,E128,E203,E501,E722,R0201,R0205,R0401,R0801,R0902,R0903,R0904,R0911,R0912,R0913,R0914,R0915,R1705,R1710,R1725,W0201,W0212,W0223,W0402,W0603,W0612,W0613,W0702,W0703,W0706,W504,line-too-long,redefined-outer-name"

 [tool.pylint.format]
 max-line-length = "120"
diff --git a/tests/adapter_gunicorn/worker.py b/tests/adapter_gunicorn/worker.py
index 1d3c16e9ab..5d3a984980 100644
--- a/tests/adapter_gunicorn/worker.py
+++ b/tests/adapter_gunicorn/worker.py
@@ -13,6 +13,7 @@
 # limitations under the License.
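With the `rpartition("@")` change above, userinfo may itself contain '@'; only the last '@' separates credentials from host:port. Sketch against the standalone urllib3 >= 1.26 package:

    from urllib3.util.url import parse_url

    url = parse_url("http://user:p@ss@example.com:8080/path")
    assert url.host == "example.com" and url.port == 8080
    # url.auth keeps everything before the last '@'
    # (percent-encoded as needed during normalization).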
 import asyncio
+
 from gunicorn.workers.sync import SyncWorker


@@ -24,17 +25,17 @@ def __init__(self, asgi):
         self.iterable = []

     def __call__(self, environ, start_response):
-        loop = asyncio.get_event_loop()
-        instance = self.asgi({'type': 'http'})
+        loop = asyncio.new_event_loop()
+        instance = self.asgi({"type": "http"})
         loop.run_until_complete(instance(self.receive, self.send))
-        start_response('200 OK', [])
-        return [b'PONG']
+        start_response("200 OK", [])
+        return [b"PONG"]

     async def send(self, message):
         pass

     async def receive(self):
-        return {'type': 'http.request'}
+        return {"type": "http.request"}


 class AsgiWorker(SyncWorker):
diff --git a/tests/adapter_uvicorn/test_uvicorn.py b/tests/adapter_uvicorn/test_uvicorn.py
index e977236c9b..c93e719e84 100644
--- a/tests/adapter_uvicorn/test_uvicorn.py
+++ b/tests/adapter_uvicorn/test_uvicorn.py
@@ -13,7 +13,6 @@
 # limitations under the License.

 import asyncio
-import functools
 import logging
 import socket
 import threading
@@ -21,24 +20,22 @@

 import pytest
 import uvicorn
-from uvicorn.config import Config
-from uvicorn.main import Server
-
-from newrelic.api.asgi_application import ASGIApplicationWrapper
-from newrelic.common.object_names import callable_name
 from testing_support.fixtures import (
     override_application_settings,
+    raise_background_exceptions,
     validate_transaction_errors,
     validate_transaction_metrics,
-    raise_background_exceptions,
     wait_for_background_threads,
 )
 from testing_support.sample_asgi_applications import (
-    simple_app_v2_raw,
     AppWithCall,
     AppWithCallRaw,
+    simple_app_v2_raw,
 )
+from uvicorn.config import Config
+from uvicorn.main import Server
+
+from newrelic.common.object_names import callable_name

 UVICORN_VERSION = tuple(int(v) for v in uvicorn.__version__.split(".")[:2])

@@ -56,15 +53,11 @@ def get_open_port():
         simple_app_v2_raw,
         pytest.param(
             AppWithCallRaw(),
-            marks=pytest.mark.skipif(
-                UVICORN_VERSION < (0, 6), reason="ASGI3 unsupported"
-            ),
+            marks=pytest.mark.skipif(UVICORN_VERSION < (0, 6), reason="ASGI3 unsupported"),
         ),
         pytest.param(
             AppWithCall(),
-            marks=pytest.mark.skipif(
-                UVICORN_VERSION < (0, 6), reason="ASGI3 unsupported"
-            ),
+            marks=pytest.mark.skipif(UVICORN_VERSION < (0, 6), reason="ASGI3 unsupported"),
         ),
     ),
     ids=("raw", "class_with_call", "class_with_call_double_wrapped"),
@@ -102,12 +95,15 @@ async def on_tick():
                 pass

         server.run()

-    thread = threading.Thread(target=server_run)
+    thread = threading.Thread(target=server_run, daemon=True)
     thread.start()
     ready.wait()
     yield port
-    loops[0].stop()
-    thread.join(timeout=0.2)
+    _ = [loop.stop() for loop in loops]  # Stop all loops
+    thread.join(timeout=1)
+
+    if thread.is_alive():
+        raise RuntimeError("Thread failed to exit in time.")


 @override_application_settings({"transaction_name.naming_scheme": "framework"})
diff --git a/tests/agent_features/_test_async_coroutine_trace.py b/tests/agent_features/_test_async_coroutine_trace.py
index 39b85e2d2a..96cb3c7dcb 100644
--- a/tests/agent_features/_test_async_coroutine_trace.py
+++ b/tests/agent_features/_test_async_coroutine_trace.py
@@ -14,42 +14,40 @@

 import asyncio
 import functools
-import pytest
 import time

+import pytest
+from testing_support.fixtures import (
+    capture_transaction_metrics,
+    validate_transaction_metrics,
+)
+
 from newrelic.api.background_task import background_task
 from newrelic.api.database_trace import database_trace
 from newrelic.api.datastore_trace import datastore_trace
-from newrelic.api.function_trace import function_trace
 from newrelic.api.external_trace import external_trace
+from newrelic.api.function_trace import function_trace
 from newrelic.api.memcache_trace import memcache_trace
 from newrelic.api.message_trace import message_trace
-from testing_support.fixtures import (validate_transaction_metrics,
-        capture_transaction_metrics)
-
-
-@pytest.mark.parametrize('trace,metric', [
-    (functools.partial(function_trace, name='simple_gen'),
-            'Function/simple_gen'),
-    (functools.partial(external_trace, library='lib', url='http://foo.com'),
-            'External/foo.com/lib/'),
-    (functools.partial(database_trace, 'select * from foo'),
-            'Datastore/statement/None/foo/select'),
-    (functools.partial(datastore_trace, 'lib', 'foo', 'bar'),
-            'Datastore/statement/lib/foo/bar'),
-    (functools.partial(message_trace, 'lib', 'op', 'typ', 'name'),
-            'MessageBroker/lib/typ/op/Named/name'),
-    (functools.partial(memcache_trace, 'cmd'),
-            'Memcache/cmd'),
-])
-def test_awaitable_timing(trace, metric):
+
+@pytest.mark.parametrize(
+    "trace,metric",
+    [
+        (functools.partial(function_trace, name="simple_gen"), "Function/simple_gen"),
+        (functools.partial(external_trace, library="lib", url="http://foo.com"), "External/foo.com/lib/"),
+        (functools.partial(database_trace, "select * from foo"), "Datastore/statement/None/foo/select"),
+        (functools.partial(datastore_trace, "lib", "foo", "bar"), "Datastore/statement/lib/foo/bar"),
+        (functools.partial(message_trace, "lib", "op", "typ", "name"), "MessageBroker/lib/typ/op/Named/name"),
+        (functools.partial(memcache_trace, "cmd"), "Memcache/cmd"),
+    ],
+)
+def test_awaitable_timing(event_loop, trace, metric):
     @trace()
     async def coro():
         await asyncio.sleep(0.1)

-    @background_task(name='test_awaitable')
+    @background_task(name="test_awaitable")
     async def parent():
         await coro()

@@ -58,45 +56,41 @@ async def parent():

     @capture_transaction_metrics(metrics, full_metrics)
     @validate_transaction_metrics(
-            'test_awaitable',
-            background_task=True,
-            scoped_metrics=[(metric, 1)],
-            rollup_metrics=[(metric, 1)])
+        "test_awaitable", background_task=True, scoped_metrics=[(metric, 1)], rollup_metrics=[(metric, 1)]
+    )
     def _test():
-        loop = asyncio.get_event_loop()
-        loop.run_until_complete(parent())
+        event_loop.run_until_complete(parent())

     _test()

     # Check that coroutines time the total call time (including pauses)
-    metric_key = (metric, '')
+    metric_key = (metric, "")
     assert full_metrics[metric_key].total_call_time >= 0.1


-@pytest.mark.parametrize('trace,metric', [
-    (functools.partial(function_trace, name='simple_gen'),
-            'Function/simple_gen'),
-    (functools.partial(external_trace, library='lib', url='http://foo.com'),
-            'External/foo.com/lib/'),
-    (functools.partial(database_trace, 'select * from foo'),
-            'Datastore/statement/None/foo/select'),
-    (functools.partial(datastore_trace, 'lib', 'foo', 'bar'),
-            'Datastore/statement/lib/foo/bar'),
-    (functools.partial(message_trace, 'lib', 'op', 'typ', 'name'),
-            'MessageBroker/lib/typ/op/Named/name'),
-    (functools.partial(memcache_trace, 'cmd'),
-            'Memcache/cmd'),
-])
-@pytest.mark.parametrize('yield_from', [True, False])
-@pytest.mark.parametrize('use_await', [True, False])
-@pytest.mark.parametrize('coro_decorator_first', [True, False])
-def test_asyncio_decorator_timing(trace, metric, yield_from,
-        use_await, coro_decorator_first):
+@pytest.mark.parametrize(
+    "trace,metric",
+    [
+        (functools.partial(function_trace, name="simple_gen"), "Function/simple_gen"),
+        (functools.partial(external_trace, library="lib", url="http://foo.com"), "External/foo.com/lib/"),
+        (functools.partial(database_trace, "select * from foo"), "Datastore/statement/None/foo/select"),
+        (functools.partial(datastore_trace, "lib", "foo", "bar"), "Datastore/statement/lib/foo/bar"),
+        (functools.partial(message_trace, "lib", "op", "typ", "name"), "MessageBroker/lib/typ/op/Named/name"),
+        (functools.partial(memcache_trace, "cmd"), "Memcache/cmd"),
+    ],
+)
+@pytest.mark.parametrize("yield_from", [True, False])
+@pytest.mark.parametrize("use_await", [True, False])
+@pytest.mark.parametrize("coro_decorator_first", [True, False])
+def test_asyncio_decorator_timing(event_loop, trace, metric, yield_from, use_await, coro_decorator_first):
     if yield_from:
+
         def coro():
             yield from asyncio.sleep(0.1)
+
     else:
+
         def coro():
             time.sleep(0.1)

@@ -106,30 +100,30 @@ def coro():
         coro = asyncio.coroutine(trace()(coro))

     if use_await:
+
         async def parent():
             await coro()
+
     else:
+
         @asyncio.coroutine
         def parent():
             yield from coro()

-    parent = background_task(name='test_awaitable')(parent)
+    parent = background_task(name="test_awaitable")(parent)

     metrics = []
     full_metrics = {}

     @capture_transaction_metrics(metrics, full_metrics)
     @validate_transaction_metrics(
-            'test_awaitable',
-            background_task=True,
-            scoped_metrics=[(metric, 1)],
-            rollup_metrics=[(metric, 1)])
+        "test_awaitable", background_task=True, scoped_metrics=[(metric, 1)], rollup_metrics=[(metric, 1)]
+    )
     def _test():
-        loop = asyncio.get_event_loop()
-        loop.run_until_complete(parent())
+        event_loop.run_until_complete(parent())

     _test()

     # Check that coroutines time the total call time (including pauses)
-    metric_key = (metric, '')
+    metric_key = (metric, "")
     assert full_metrics[metric_key].total_call_time >= 0.1
diff --git a/tests/agent_features/_test_async_coroutine_transaction.py b/tests/agent_features/_test_async_coroutine_transaction.py
index 63e642081f..bb0ef471f5 100644
--- a/tests/agent_features/_test_async_coroutine_transaction.py
+++ b/tests/agent_features/_test_async_coroutine_transaction.py
@@ -16,11 +16,10 @@

 from newrelic.api.transaction import current_transaction

-loop = asyncio.get_event_loop()
-

-def native_coroutine_test(transaction, nr_enabled=True, does_hang=False,
-        call_exit=False, runtime_error=False):
+def native_coroutine_test(
+    event_loop, transaction, nr_enabled=True, does_hang=False, call_exit=False, runtime_error=False
+):
     @transaction
     async def task():
         txn = current_transaction()
@@ -35,7 +34,7 @@ async def task():

         try:
             if does_hang:
-                await loop.create_future()
+                await event_loop.create_future()
             else:
                 await asyncio.sleep(0.0)
         except GeneratorExit:
diff --git a/tests/agent_features/conftest.py b/tests/agent_features/conftest.py
index 1e31957c56..d3cadbd467 100644
--- a/tests/agent_features/conftest.py
+++ b/tests/agent_features/conftest.py
@@ -12,6 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

+import pytest
 from testing_support.fixtures import (  # noqa: F401; pylint: disable=W0611
     code_coverage_fixture,
     collector_agent_registration_fixture,
@@ -59,3 +60,5 @@
         "test_asgi_distributed_tracing.py",
         "test_asgi_w3c_trace_context.py",
     ]
+else:
+    from testing_support.fixture.event_loop import event_loop
diff --git a/tests/agent_features/test_asgi_transaction.py b/tests/agent_features/test_asgi_transaction.py
index 279d0aa6d8..1820efa86d 100644
--- a/tests/agent_features/test_asgi_transaction.py
+++ b/tests/agent_features/test_asgi_transaction.py
@@ -155,23 +155,21 @@ def test_multiple_calls_to_asgi_wrapper(caplog):
     assert not caplog.records


-def test_non_http_scope_v3():
+def test_non_http_scope_v3(event_loop):
     async def _test():
         await simple_app_v3({"type": "cookies"}, None, None)

-    loop = asyncio.get_event_loop()
     with pytest.raises(ValueError):
-        loop.run_until_complete(_test())
+        event_loop.run_until_complete(_test())


-def test_non_http_scope_v2():
+def test_non_http_scope_v2(event_loop):
     async def _test():
         call_me = simple_app_v2({"type": "cookies"})
         await call_me(None, None)

-    loop = asyncio.get_event_loop()
     with pytest.raises(ValueError):
-        loop.run_until_complete(_test())
+        event_loop.run_until_complete(_test())


 @pytest.mark.parametrize("app", (simple_app_v3_wrapped, simple_app_v2_wrapped, simple_app_v2_init_exc))
diff --git a/tests/agent_features/test_async_context_propagation.py b/tests/agent_features/test_async_context_propagation.py
index 530b5fde4a..ea850c095d 100644
--- a/tests/agent_features/test_async_context_propagation.py
+++ b/tests/agent_features/test_async_context_propagation.py
@@ -13,35 +13,38 @@
 # limitations under the License.

 import pytest
-from newrelic.api.background_task import background_task, BackgroundTask
+from testing_support.fixtures import (
+    function_not_called,
+    override_generic_settings,
+    validate_transaction_metrics,
+)
+
 from newrelic.api.application import application_instance as application
-from newrelic.api.time_trace import current_trace
-from newrelic.api.function_trace import FunctionTrace, function_trace
+from newrelic.api.background_task import BackgroundTask, background_task
 from newrelic.api.database_trace import database_trace
 from newrelic.api.datastore_trace import datastore_trace
 from newrelic.api.external_trace import external_trace
+from newrelic.api.function_trace import FunctionTrace, function_trace
 from newrelic.api.memcache_trace import memcache_trace
 from newrelic.api.message_trace import message_trace
-
-from newrelic.core.trace_cache import trace_cache
+from newrelic.api.time_trace import current_trace
 from newrelic.core.config import global_settings
-from testing_support.fixtures import (validate_transaction_metrics,
-        override_generic_settings, function_not_called)
+from newrelic.core.trace_cache import trace_cache


-@function_trace('waiter3')
+@function_trace("waiter3")
 async def child():
     pass


 async def waiter(asyncio, event, wait):
-    with FunctionTrace(name='waiter1', terminal=True):
+    with FunctionTrace(name="waiter1", terminal=True):
         event.set()

         # Block until the parent says to exit
         await wait.wait()

-    with FunctionTrace(name='waiter2', terminal=True):
+    with FunctionTrace(name="waiter2", terminal=True):
         pass

     await child()
@@ -56,7 +59,7 @@ async def task(asyncio, trace, event, wait):
     await waiter(asyncio, event, wait)


-@background_task(name='test_context_propagation')
+@background_task(name="test_context_propagation")
 async def _test(asyncio, schedule, nr_enabled=True):
     trace = current_trace()

@@ -67,8 +70,7 @@
async def _test(asyncio, schedule, nr_enabled=True): events = [asyncio.Event() for _ in range(2)] wait = asyncio.Event() - tasks = [schedule(task(asyncio, trace, events[idx], wait)) - for idx in range(2)] + tasks = [schedule(task(asyncio, trace, events[idx], wait)) for idx in range(2)] await asyncio.gather(*(e.wait() for e in events)) @@ -83,46 +85,49 @@ async def _test(asyncio, schedule, nr_enabled=True): return trace -@pytest.mark.parametrize('set_loop', (True, False)) -@pytest.mark.parametrize('schedule', ( - 'create_task', - 'ensure_future', -)) +@pytest.mark.parametrize("set_loop", (True, False)) +@pytest.mark.parametrize( + "schedule", + ( + "create_task", + "ensure_future", + ), +) @validate_transaction_metrics( - 'test_context_propagation', + "test_context_propagation", background_task=True, scoped_metrics=( - ('Function/waiter1', 2), - ('Function/waiter2', 2), - ('Function/waiter3', 2), + ("Function/waiter1", 2), + ("Function/waiter2", 2), + ("Function/waiter3", 2), ), ) -def test_context_propagation(schedule, set_loop): +def test_context_propagation(event_loop, schedule, set_loop): import asyncio _loop = None if set_loop: + class TestEventLoop(asyncio.SelectorEventLoop): def create_task(self, coro, **kwargs): return asyncio.tasks.Task(coro, loop=self, **kwargs) - _loop = asyncio.get_event_loop() - asyncio.set_event_loop(TestEventLoop()) - - loop = asyncio.get_event_loop() + _loop = event_loop + event_loop = TestEventLoop() + asyncio.set_event_loop(event_loop) exceptions = [] def handle_exception(loop, context): exceptions.append(context) - loop.set_exception_handler(handle_exception) + event_loop.set_exception_handler(handle_exception) - schedule = getattr(asyncio, schedule, None) or getattr(loop, schedule) + schedule = getattr(asyncio, schedule, None) or getattr(event_loop, schedule) # Keep the trace around so that it's not removed from the trace cache # through reference counting (for testing) - _ = loop.run_until_complete(_test(asyncio, schedule)) + _ = event_loop.run_until_complete(_test(asyncio, schedule)) # The agent should have removed all traces from the cache since # run_until_complete has terminated (all callbacks scheduled inside the @@ -135,44 +140,49 @@ def handle_exception(loop, context): asyncio.set_event_loop(_loop) -@override_generic_settings(global_settings(), { - 'enabled': False, -}) -@function_not_called('newrelic.core.stats_engine', - 'StatsEngine.record_transaction') -def test_nr_disabled(): +@override_generic_settings( + global_settings(), + { + "enabled": False, + }, +) +@function_not_called("newrelic.core.stats_engine", "StatsEngine.record_transaction") +def test_nr_disabled(event_loop): import asyncio - schedule = asyncio.ensure_future - loop = asyncio.get_event_loop() + schedule = asyncio.ensure_future exceptions = [] def handle_exception(loop, context): exceptions.append(context) - loop.set_exception_handler(handle_exception) + event_loop.set_exception_handler(handle_exception) - loop.run_until_complete(_test(asyncio, schedule, nr_enabled=False)) + event_loop.run_until_complete(_test(asyncio, schedule, nr_enabled=False)) # Assert that no exceptions have occurred assert not exceptions, exceptions -@pytest.mark.parametrize('trace', [ - function_trace(name='simple_gen'), - external_trace(library='lib', url='http://foo.com'), - database_trace('select * from foo'), - datastore_trace('lib', 'foo', 'bar'), - message_trace('lib', 'op', 'typ', 'name'), - memcache_trace('cmd'), -]) -def test_two_transactions(trace): +@pytest.mark.parametrize( + "trace", + [ + 
function_trace(name="simple_gen"), + external_trace(library="lib", url="http://foo.com"), + database_trace("select * from foo"), + datastore_trace("lib", "foo", "bar"), + message_trace("lib", "op", "typ", "name"), + memcache_trace("cmd"), + ], +) +def test_two_transactions(event_loop, trace): """ Instantiate a coroutine in one transaction and await it in another. This should not cause any errors. """ import asyncio + tasks = [] ready = asyncio.Event() @@ -200,16 +210,16 @@ async def await_task(): done.set() - afut = asyncio.ensure_future(create_coro()) - bfut = asyncio.ensure_future(await_task()) - asyncio.get_event_loop().run_until_complete(asyncio.gather(afut, bfut)) + afut = asyncio.ensure_future(create_coro(), loop=event_loop) + bfut = asyncio.ensure_future(await_task(), loop=event_loop) + event_loop.run_until_complete(asyncio.gather(afut, bfut)) # Sentinel left in cache transaction exited async def sentinel_in_cache_txn_exited(asyncio, bg): event = asyncio.Event() - with BackgroundTask(application(), 'fg') as txn: + with BackgroundTask(application(), "fg") as txn: _ = txn.root_span task = asyncio.ensure_future(bg(event)) @@ -221,8 +231,8 @@ async def sentinel_in_cache_txn_exited(asyncio, bg): async def trace_in_cache_txn_exited(asyncio, bg): event = asyncio.Event() - with BackgroundTask(application(), 'fg'): - with FunctionTrace('fg') as _: + with BackgroundTask(application(), "fg"): + with FunctionTrace("fg") as _: task = asyncio.ensure_future(bg(event)) await event.wait() @@ -233,31 +243,36 @@ async def trace_in_cache_txn_exited(asyncio, bg): async def trace_in_cache_txn_active(asyncio, bg): event = asyncio.Event() - with BackgroundTask(application(), 'fg'): - with FunctionTrace('fg') as _: + with BackgroundTask(application(), "fg"): + with FunctionTrace("fg") as _: task = asyncio.ensure_future(bg(event)) await event.wait() return task -@pytest.mark.parametrize('fg', (sentinel_in_cache_txn_exited, - trace_in_cache_txn_exited, - trace_in_cache_txn_active,)) -def test_transaction_exit_trace_cache(fg): +@pytest.mark.parametrize( + "fg", + ( + sentinel_in_cache_txn_exited, + trace_in_cache_txn_exited, + trace_in_cache_txn_active, + ), +) +def test_transaction_exit_trace_cache(event_loop, fg): """ Verifying that the use of ensure_future will not cause errors when traces remain in the trace cache after transaction exit """ import asyncio + exceptions = [] def handle_exception(loop, context): exceptions.append(context) async def bg(event): - with BackgroundTask( - application(), 'bg'): + with BackgroundTask(application(), "bg"): event.set() async def handler(): @@ -265,9 +280,8 @@ async def handler(): await task def _test(): - loop = asyncio.get_event_loop() - loop.set_exception_handler(handle_exception) - return loop.run_until_complete(handler()) + event_loop.set_exception_handler(handle_exception) + return event_loop.run_until_complete(handler()) _test() @@ -279,18 +293,18 @@ def _test(): assert not exceptions, exceptions -def test_incomplete_traces_exit_when_root_exits(): +def test_incomplete_traces_exit_when_root_exits(event_loop): """Verifies that child traces in the same task are exited when the root exits""" import asyncio - @function_trace(name='child') + @function_trace(name="child") async def child(start, end): start.set() await end.wait() - @background_task(name='parent') + @background_task(name="parent") async def parent(): start = asyncio.Event() end = asyncio.Event() @@ -300,29 +314,27 @@ async def parent(): return task @validate_transaction_metrics( - 'parent', 
background_task=True, - scoped_metrics=[('Function/child', 1)], + "parent", + background_task=True, + scoped_metrics=[("Function/child", 1)], ) def test(loop): return loop.run_until_complete(parent()) - loop = asyncio.get_event_loop() - task = test(loop) - loop.run_until_complete(task) + task = test(event_loop) + event_loop.run_until_complete(task) -def test_incomplete_traces_with_multiple_transactions(): +def test_incomplete_traces_with_multiple_transactions(event_loop): import asyncio - loop = asyncio.get_event_loop() - - @background_task(name='dummy') + @background_task(name="dummy") async def dummy(): task = asyncio.ensure_future(child(True)) await end.wait() await task - @function_trace(name='child') + @function_trace(name="child") async def child(running_at_end=False): trace = current_trace() start.set() @@ -332,25 +344,27 @@ async def child(running_at_end=False): else: assert current_trace() is not trace - @background_task(name='parent') + @background_task(name="parent") async def parent(): task = asyncio.ensure_future(child()) await start.wait() return task @validate_transaction_metrics( - 'parent', background_task=True, - scoped_metrics=[('Function/child', 1)], + "parent", + background_task=True, + scoped_metrics=[("Function/child", 1)], ) def parent_assertions(task): - return loop.run_until_complete(task) + return event_loop.run_until_complete(task) @validate_transaction_metrics( - 'dummy', background_task=True, - scoped_metrics=[('Function/child', 1)], + "dummy", + background_task=True, + scoped_metrics=[("Function/child", 1)], ) def dummy_assertions(task): - return loop.run_until_complete(task) + return event_loop.run_until_complete(task) async def startup(): return asyncio.Event(), asyncio.Event() @@ -361,11 +375,11 @@ async def start_dummy(): start.clear() return dummy_task - start, end = loop.run_until_complete(startup()) + start, end = event_loop.run_until_complete(startup()) # Kick start dummy transaction (forcing an ensure_future on another # transaction) - dummy_task = loop.run_until_complete(start_dummy()) + dummy_task = event_loop.run_until_complete(start_dummy()) # start and end a transaction, forcing the child to truncate child_task = parent_assertions(parent()) @@ -378,10 +392,11 @@ async def start_dummy(): # Wait for dummy/parent->child to terminate dummy_assertions(dummy_task) - loop.run_until_complete(child_task) + event_loop.run_until_complete(child_task) + @validate_transaction_metrics("Parent", background_task=True) -def test_transaction_end_on_different_task(): +def test_transaction_end_on_different_task(event_loop): import asyncio txn = BackgroundTask(application(), name="Parent") @@ -408,5 +423,4 @@ async def test(): task = await asyncio.ensure_future(parent()) await task - loop = asyncio.get_event_loop() - loop.run_until_complete(test()) + event_loop.run_until_complete(test()) diff --git a/tests/agent_features/test_async_timing.py b/tests/agent_features/test_async_timing.py index 70fe90b736..0198f151a8 100644 --- a/tests/agent_features/test_async_timing.py +++ b/tests/agent_features/test_async_timing.py @@ -13,26 +13,25 @@ # limitations under the License. 
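Note on the event_loop fixture: the hunks above replace every direct asyncio.get_event_loop() call with an event_loop parameter, which pytest resolves to the fixture imported in conftest.py from testing_support.fixture.event_loop. Creating an implicit loop through asyncio.get_event_loop() is deprecated on newer interpreters, so the suite now owns loop creation and teardown in one place. A minimal sketch of such a fixture, assuming the shared implementation follows the conventional pytest pattern (the real testing_support/fixture/event_loop.py may differ):

import asyncio

import pytest


@pytest.fixture
def event_loop():
    # Give each test a dedicated loop instead of relying on the
    # deprecated implicit loop from asyncio.get_event_loop().
    loop = asyncio.new_event_loop()
    asyncio.set_event_loop(loop)
    try:
        yield loop
    finally:
        loop.close()

Because pytest injects fixtures by parameter name, adding event_loop to a test signature, as done throughout this patch, is enough to hand the test a fresh loop.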
import pytest -from newrelic.common.object_wrapper import function_wrapper +from testing_support.fixtures import capture_transaction_metrics + from newrelic.api.background_task import background_task from newrelic.api.function_trace import function_trace -from testing_support.fixtures import capture_transaction_metrics +from newrelic.common.object_wrapper import function_wrapper -asyncio = pytest.importorskip('asyncio') +asyncio = pytest.importorskip("asyncio") def validate_total_time_value_greater_than(value, concurrent=False): - @function_wrapper def _validate_total_time_value(wrapped, instance, args, kwargs): metrics = {} - result = capture_transaction_metrics([], metrics)( - wrapped)(*args, **kwargs) - total_time = metrics[('OtherTransactionTotalTime', '')][1] + result = capture_transaction_metrics([], metrics)(wrapped)(*args, **kwargs) + total_time = metrics[("OtherTransactionTotalTime", "")][1] # Assert total call time is at least that value assert total_time >= value - duration = metrics[('OtherTransaction/all', '')][1] + duration = metrics[("OtherTransaction/all", "")][1] if concurrent: # If there is concurrent work, the total_time must be strictly # greater than the duration @@ -44,13 +43,13 @@ def _validate_total_time_value(wrapped, instance, args, kwargs): return _validate_total_time_value -@function_trace(name='child') +@function_trace(name="child") @asyncio.coroutine def child(): yield from asyncio.sleep(0.1) -@background_task(name='parent') +@background_task(name="parent") @asyncio.coroutine def parent(calls): coros = [child() for _ in range(calls)] @@ -59,12 +58,10 @@ def parent(calls): @validate_total_time_value_greater_than(0.2) -def test_total_time_sync(): - loop = asyncio.get_event_loop() - loop.run_until_complete(parent(1)) +def test_total_time_sync(event_loop): + event_loop.run_until_complete(parent(1)) @validate_total_time_value_greater_than(0.3, concurrent=True) -def test_total_time_async(): - loop = asyncio.get_event_loop() - loop.run_until_complete(parent(2)) +def test_total_time_async(event_loop): + event_loop.run_until_complete(parent(2)) diff --git a/tests/agent_features/test_coroutine_trace.py b/tests/agent_features/test_coroutine_trace.py index e261be9558..b155375734 100644 --- a/tests/agent_features/test_coroutine_trace.py +++ b/tests/agent_features/test_coroutine_trace.py @@ -12,41 +12,42 @@ # See the License for the specific language governing permissions and # limitations under the License. 
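Note on the coroutine style: test_async_timing.py above and test_coroutine_trace.py below keep the generator-based @asyncio.coroutine / yield from form so the same files can run across the interpreter matrix. On Python 3.10 the decorator still works but emits a DeprecationWarning; it has been deprecated since 3.8 and was removed in 3.11. For reference, the two spellings are equivalent:

import asyncio


# Generator-based coroutine, as used in these tests:
@asyncio.coroutine
def child():
    yield from asyncio.sleep(0.1)


# Native coroutine, the only form accepted from Python 3.11 onward:
async def child_native():
    await asyncio.sleep(0.1)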
-import gc import functools -import pytest +import gc import sys import time -from testing_support.fixtures import (validate_transaction_metrics, - capture_transaction_metrics, validate_transaction_errors, - validate_tt_parenting) +import pytest +from testing_support.fixtures import ( + capture_transaction_metrics, + validate_transaction_errors, + validate_transaction_metrics, + validate_tt_parenting, +) + from newrelic.api.background_task import background_task from newrelic.api.database_trace import database_trace from newrelic.api.datastore_trace import datastore_trace -from newrelic.api.function_trace import function_trace from newrelic.api.external_trace import external_trace +from newrelic.api.function_trace import function_trace from newrelic.api.memcache_trace import memcache_trace from newrelic.api.message_trace import message_trace -is_pypy = hasattr(sys, 'pypy_version_info') -asyncio = pytest.importorskip('asyncio') - - -@pytest.mark.parametrize('trace,metric', [ - (functools.partial(function_trace, name='simple_gen'), - 'Function/simple_gen'), - (functools.partial(external_trace, library='lib', url='http://foo.com'), - 'External/foo.com/lib/'), - (functools.partial(database_trace, 'select * from foo'), - 'Datastore/statement/None/foo/select'), - (functools.partial(datastore_trace, 'lib', 'foo', 'bar'), - 'Datastore/statement/lib/foo/bar'), - (functools.partial(message_trace, 'lib', 'op', 'typ', 'name'), - 'MessageBroker/lib/typ/op/Named/name'), - (functools.partial(memcache_trace, 'cmd'), - 'Memcache/cmd'), -]) +is_pypy = hasattr(sys, "pypy_version_info") +asyncio = pytest.importorskip("asyncio") + + +@pytest.mark.parametrize( + "trace,metric", + [ + (functools.partial(function_trace, name="simple_gen"), "Function/simple_gen"), + (functools.partial(external_trace, library="lib", url="http://foo.com"), "External/foo.com/lib/"), + (functools.partial(database_trace, "select * from foo"), "Datastore/statement/None/foo/select"), + (functools.partial(datastore_trace, "lib", "foo", "bar"), "Datastore/statement/lib/foo/bar"), + (functools.partial(message_trace, "lib", "op", "typ", "name"), "MessageBroker/lib/typ/op/Named/name"), + (functools.partial(memcache_trace, "cmd"), "Memcache/cmd"), + ], +) def test_coroutine_timing(trace, metric): @trace() def simple_gen(): @@ -59,11 +60,9 @@ def simple_gen(): @capture_transaction_metrics(metrics, full_metrics) @validate_transaction_metrics( - 'test_coroutine_timing', - background_task=True, - scoped_metrics=[(metric, 1)], - rollup_metrics=[(metric, 1)]) - @background_task(name='test_coroutine_timing') + "test_coroutine_timing", background_task=True, scoped_metrics=[(metric, 1)], rollup_metrics=[(metric, 1)] + ) + @background_task(name="test_coroutine_timing") def _test(): for _ in simple_gen(): pass @@ -71,24 +70,32 @@ def _test(): _test() # Check that coroutines time the total call time (including pauses) - metric_key = (metric, '') + metric_key = (metric, "") assert full_metrics[metric_key].total_call_time >= 0.2 @validate_tt_parenting( - ('TransactionNode', [ - ('FunctionNode', [ - ('FunctionNode', []), - ('FunctionNode', []), - ]), - ], -)) -@validate_transaction_metrics('test_coroutine_siblings', - background_task=True, - scoped_metrics=[('Function/child', 2)], - rollup_metrics=[('Function/child', 2)]) -@background_task(name='test_coroutine_siblings') -def test_coroutine_siblings(): + ( + "TransactionNode", + [ + ( + "FunctionNode", + [ + ("FunctionNode", []), + ("FunctionNode", []), + ], + ), + ], + ) +) +@validate_transaction_metrics( + 
"test_coroutine_siblings", + background_task=True, + scoped_metrics=[("Function/child", 2)], + rollup_metrics=[("Function/child", 2)], +) +@background_task(name="test_coroutine_siblings") +def test_coroutine_siblings(event_loop): # The expected segment map looks like this # parent # | child @@ -97,7 +104,7 @@ def test_coroutine_siblings(): # This test checks for the presence of 2 child metrics (which wouldn't be # the case if child was a child of child since child is terminal) - @function_trace('child', terminal=True) + @function_trace("child", terminal=True) @asyncio.coroutine def child(wait, event=None): if event: @@ -121,30 +128,31 @@ def middle(): yield from child_1 yield from child_0 - @function_trace('parent') + @function_trace("parent") @asyncio.coroutine def parent(): yield from asyncio.ensure_future(middle()) - loop = asyncio.get_event_loop() - loop.run_until_complete(parent()) + event_loop.run_until_complete(parent()) class MyException(Exception): pass -@validate_transaction_metrics('test_coroutine_error', - background_task=True, - scoped_metrics=[('Function/coro', 1)], - rollup_metrics=[('Function/coro', 1)]) -@validate_transaction_errors(errors=['test_coroutine_trace:MyException']) +@validate_transaction_metrics( + "test_coroutine_error", + background_task=True, + scoped_metrics=[("Function/coro", 1)], + rollup_metrics=[("Function/coro", 1)], +) +@validate_transaction_errors(errors=["test_coroutine_trace:MyException"]) def test_coroutine_error(): - @function_trace(name='coro') + @function_trace(name="coro") def coro(): yield - @background_task(name='test_coroutine_error') + @background_task(name="test_coroutine_error") def _test(): gen = coro() gen.send(None) @@ -154,13 +162,15 @@ def _test(): _test() -@validate_transaction_metrics('test_coroutine_caught_exception', - background_task=True, - scoped_metrics=[('Function/coro', 1)], - rollup_metrics=[('Function/coro', 1)]) +@validate_transaction_metrics( + "test_coroutine_caught_exception", + background_task=True, + scoped_metrics=[("Function/coro", 1)], + rollup_metrics=[("Function/coro", 1)], +) @validate_transaction_errors(errors=[]) def test_coroutine_caught_exception(): - @function_trace(name='coro') + @function_trace(name="coro") def coro(): for _ in range(2): time.sleep(0.1) @@ -173,7 +183,7 @@ def coro(): full_metrics = {} @capture_transaction_metrics(metrics, full_metrics) - @background_task(name='test_coroutine_caught_exception') + @background_task(name="test_coroutine_caught_exception") def _test(): gen = coro() # kickstart the generator (the try/except logic is inside the @@ -188,24 +198,26 @@ def _test(): # The ValueError should not be reraised _test() - assert full_metrics[('Function/coro', '')].total_call_time >= 0.2 + assert full_metrics[("Function/coro", "")].total_call_time >= 0.2 -@validate_transaction_metrics('test_coroutine_handles_terminal_nodes', - background_task=True, - scoped_metrics=[('Function/parent', 1), ('Function/coro', None)], - rollup_metrics=[('Function/parent', 1), ('Function/coro', None)]) +@validate_transaction_metrics( + "test_coroutine_handles_terminal_nodes", + background_task=True, + scoped_metrics=[("Function/parent", 1), ("Function/coro", None)], + rollup_metrics=[("Function/parent", 1), ("Function/coro", None)], +) def test_coroutine_handles_terminal_nodes(): # sometimes coroutines can be called underneath terminal nodes # In this case, the trace shouldn't actually be created and we also # shouldn't get any errors - @function_trace(name='coro') + @function_trace(name="coro") def coro(): 
yield time.sleep(0.1) - @function_trace(name='parent', terminal=True) + @function_trace(name="parent", terminal=True) def parent(): # parent calls child for _ in coro(): @@ -215,23 +227,25 @@ def parent(): full_metrics = {} @capture_transaction_metrics(metrics, full_metrics) - @background_task(name='test_coroutine_handles_terminal_nodes') + @background_task(name="test_coroutine_handles_terminal_nodes") def _test(): parent() _test() - metric_key = ('Function/parent', '') + metric_key = ("Function/parent", "") assert full_metrics[metric_key].total_exclusive_call_time >= 0.1 -@validate_transaction_metrics('test_coroutine_close_ends_trace', - background_task=True, - scoped_metrics=[('Function/coro', 1)], - rollup_metrics=[('Function/coro', 1)]) -@background_task(name='test_coroutine_close_ends_trace') +@validate_transaction_metrics( + "test_coroutine_close_ends_trace", + background_task=True, + scoped_metrics=[("Function/coro", 1)], + rollup_metrics=[("Function/coro", 1)], +) +@background_task(name="test_coroutine_close_ends_trace") def test_coroutine_close_ends_trace(): - @function_trace(name='coro') + @function_trace(name="coro") def coro(): yield @@ -248,25 +262,32 @@ def coro(): @validate_tt_parenting( - ('TransactionNode', [ - ('FunctionNode', [ - ('FunctionNode', []), - ]), - ], -)) -@validate_transaction_metrics('test_coroutine_parents', - background_task=True, - scoped_metrics=[('Function/child', 1), ('Function/parent', 1)], - rollup_metrics=[('Function/child', 1), ('Function/parent', 1)]) + ( + "TransactionNode", + [ + ( + "FunctionNode", + [ + ("FunctionNode", []), + ], + ), + ], + ) +) +@validate_transaction_metrics( + "test_coroutine_parents", + background_task=True, + scoped_metrics=[("Function/child", 1), ("Function/parent", 1)], + rollup_metrics=[("Function/child", 1), ("Function/parent", 1)], +) def test_coroutine_parents(): - - @function_trace(name='child') + @function_trace(name="child") def child(): yield time.sleep(0.1) yield - @function_trace(name='parent') + @function_trace(name="parent") def parent(): time.sleep(0.1) yield @@ -277,7 +298,7 @@ def parent(): full_metrics = {} @capture_transaction_metrics(metrics, full_metrics) - @background_task(name='test_coroutine_parents') + @background_task(name="test_coroutine_parents") def _test(): for _ in parent(): pass @@ -286,64 +307,67 @@ def _test(): # Check that the child time is subtracted from the parent time (parenting # relationship is correctly established) - key = ('Function/parent', '') + key = ("Function/parent", "") assert full_metrics[key].total_exclusive_call_time < 0.2 -@validate_transaction_metrics('test_throw_yields_a_value', - background_task=True, - scoped_metrics=[('Function/coro', 1)], - rollup_metrics=[('Function/coro', 1)]) -@background_task(name='test_throw_yields_a_value') +@validate_transaction_metrics( + "test_throw_yields_a_value", + background_task=True, + scoped_metrics=[("Function/coro", 1)], + rollup_metrics=[("Function/coro", 1)], +) +@background_task(name="test_throw_yields_a_value") def test_throw_yields_a_value(): - - @function_trace(name='coro') + @function_trace(name="coro") def coro(): for _ in range(2): try: yield except MyException: - yield 'foobar' + yield "foobar" c = coro() # kickstart the coroutine next(c) - assert c.throw(MyException) == 'foobar' + assert c.throw(MyException) == "foobar" # finish consumption of the coroutine if necessary for _ in c: pass -@pytest.mark.parametrize('trace', [ - function_trace(name='simple_gen'), - external_trace(library='lib', url='http://foo.com'), - 
database_trace('select * from foo'), - datastore_trace('lib', 'foo', 'bar'), - message_trace('lib', 'op', 'typ', 'name'), - memcache_trace('cmd'), -]) +@pytest.mark.parametrize( + "trace", + [ + function_trace(name="simple_gen"), + external_trace(library="lib", url="http://foo.com"), + database_trace("select * from foo"), + datastore_trace("lib", "foo", "bar"), + message_trace("lib", "op", "typ", "name"), + memcache_trace("cmd"), + ], +) def test_coroutine_functions_outside_of_transaction(trace): - @trace def coro(): for _ in range(2): - yield 'foo' + yield "foo" - assert [_ for _ in coro()] == ['foo', 'foo'] + assert [_ for _ in coro()] == ["foo", "foo"] @validate_transaction_metrics( - 'test_catching_generator_exit_causes_runtime_error', - background_task=True, - scoped_metrics=[('Function/coro', 1)], - rollup_metrics=[('Function/coro', 1)]) -@background_task(name='test_catching_generator_exit_causes_runtime_error') + "test_catching_generator_exit_causes_runtime_error", + background_task=True, + scoped_metrics=[("Function/coro", 1)], + rollup_metrics=[("Function/coro", 1)], +) +@background_task(name="test_catching_generator_exit_causes_runtime_error") def test_catching_generator_exit_causes_runtime_error(): - - @function_trace(name='coro') + @function_trace(name="coro") def coro(): try: yield @@ -366,13 +390,13 @@ def coro(): @validate_transaction_metrics( - 'test_coroutine_time_excludes_creation_time', - background_task=True, - scoped_metrics=[('Function/coro', 1)], - rollup_metrics=[('Function/coro', 1)]) + "test_coroutine_time_excludes_creation_time", + background_task=True, + scoped_metrics=[("Function/coro", 1)], + rollup_metrics=[("Function/coro", 1)], +) def test_coroutine_time_excludes_creation_time(): - - @function_trace(name='coro') + @function_trace(name="coro") def coro(): yield @@ -380,7 +404,7 @@ def coro(): full_metrics = {} @capture_transaction_metrics(metrics, full_metrics) - @background_task(name='test_coroutine_time_excludes_creation_time') + @background_task(name="test_coroutine_time_excludes_creation_time") def _test(): gen = coro() time.sleep(0.1) @@ -391,13 +415,12 @@ def _test(): # check that the trace does not include the time between creation and # consumption - assert full_metrics[('Function/coro', '')].total_call_time < 0.1 + assert full_metrics[("Function/coro", "")].total_call_time < 0.1 -@pytest.mark.parametrize('nr_transaction', [True, False]) +@pytest.mark.parametrize("nr_transaction", [True, False]) def test_incomplete_coroutine(nr_transaction): - - @function_trace(name='coro') + @function_trace(name="coro") def coro(): for _ in range(5): yield @@ -423,24 +446,25 @@ def _test(): # https://bitbucket.org/pypy/pypy/issues/736 del c import gc + gc.collect() if nr_transaction: _test = validate_transaction_metrics( - 'test_incomplete_coroutine', - background_task=True, - scoped_metrics=[('Function/coro', 1)], - rollup_metrics=[('Function/coro', 1)], - )(background_task(name='test_incomplete_coroutine')(_test)) + "test_incomplete_coroutine", + background_task=True, + scoped_metrics=[("Function/coro", 1)], + rollup_metrics=[("Function/coro", 1)], + )(background_task(name="test_incomplete_coroutine")(_test)) _test() -def test_trace_outlives_transaction(): +def test_trace_outlives_transaction(event_loop): task = [] running, finish = asyncio.Event(), asyncio.Event() - @function_trace(name='coro') + @function_trace(name="coro") @asyncio.coroutine def _coro(): running.set() @@ -451,20 +475,18 @@ def parent(): task.append(asyncio.ensure_future(_coro())) yield from 
running.wait() - loop = asyncio.get_event_loop() - @validate_transaction_metrics( - 'test_trace_outlives_transaction', + "test_trace_outlives_transaction", background_task=True, - scoped_metrics=(('Function/coro', None),), + scoped_metrics=(("Function/coro", None),), ) - @background_task(name='test_trace_outlives_transaction') + @background_task(name="test_trace_outlives_transaction") def _test(): - loop.run_until_complete(parent()) + event_loop.run_until_complete(parent()) _test() finish.set() - loop.run_until_complete(task.pop()) + event_loop.run_until_complete(task.pop()) if sys.version_info >= (3, 5): diff --git a/tests/agent_features/test_coroutine_transaction.py b/tests/agent_features/test_coroutine_transaction.py index d89757f193..1a55385c70 100644 --- a/tests/agent_features/test_coroutine_transaction.py +++ b/tests/agent_features/test_coroutine_transaction.py @@ -13,17 +13,21 @@ # limitations under the License. import asyncio -import pytest import sys -from newrelic.core.config import global_settings -from newrelic.api.transaction import current_transaction +import pytest +from testing_support.fixtures import ( + capture_transaction_metrics, + override_generic_settings, + validate_transaction_errors, +) + from newrelic.api.background_task import background_task -from newrelic.api.web_transaction import web_transaction -from newrelic.api.message_transaction import message_transaction from newrelic.api.function_trace import function_trace -from testing_support.fixtures import (validate_transaction_errors, - capture_transaction_metrics, override_generic_settings) +from newrelic.api.message_transaction import message_transaction +from newrelic.api.transaction import current_transaction +from newrelic.api.web_transaction import web_transaction +from newrelic.core.config import global_settings if sys.version_info >= (3, 5): from _test_async_coroutine_transaction import native_coroutine_test @@ -31,13 +35,9 @@ native_coroutine_test = None settings = global_settings() -loop = asyncio.get_event_loop() - -def coroutine_test(transaction, nr_enabled=True, does_hang=False, - call_exit=False, runtime_error=False): - loop = asyncio.get_event_loop() +def coroutine_test(event_loop, transaction, nr_enabled=True, does_hang=False, call_exit=False, runtime_error=False): @transaction @asyncio.coroutine def task(): @@ -73,80 +73,92 @@ def task(): test_matrix.append(native_coroutine_test) -@pytest.mark.parametrize('num_coroutines', (2,)) -@pytest.mark.parametrize('create_test_task', test_matrix) -@pytest.mark.parametrize('transaction,metric', [ - (background_task(name='test'), 'OtherTransaction/Function/test'), - (message_transaction('lib', 'dest_type', 'dest_name'), - 'OtherTransaction/Message/lib/dest_type/Named/dest_name'), -]) -@pytest.mark.parametrize('nr_enabled,call_exit', ( +@pytest.mark.parametrize("num_coroutines", (2,)) +@pytest.mark.parametrize("create_test_task", test_matrix) +@pytest.mark.parametrize( + "transaction,metric", + [ + (background_task(name="test"), "OtherTransaction/Function/test"), + ( + message_transaction("lib", "dest_type", "dest_name"), + "OtherTransaction/Message/lib/dest_type/Named/dest_name", + ), + ], +) +@pytest.mark.parametrize( + "nr_enabled,call_exit", + ( (False, False), (True, False), (True, True), -)) -def test_async_coroutine_send(num_coroutines, create_test_task, transaction, - metric, call_exit, nr_enabled): + ), +) +def test_async_coroutine_send(event_loop, num_coroutines, create_test_task, transaction, metric, call_exit, nr_enabled): metrics = [] - tasks = 
[create_test_task( - transaction, nr_enabled=nr_enabled, call_exit=call_exit) - for _ in range(num_coroutines)] + tasks = [ + create_test_task(event_loop, transaction, nr_enabled=nr_enabled, call_exit=call_exit) + for _ in range(num_coroutines) + ] - @override_generic_settings(settings, {'enabled': nr_enabled}) + @override_generic_settings(settings, {"enabled": nr_enabled}) @capture_transaction_metrics(metrics) def _test_async_coroutine_send(): - loop = asyncio.get_event_loop() driver = asyncio.gather(*[t() for t in tasks]) - loop.run_until_complete(driver) + event_loop.run_until_complete(driver) _test_async_coroutine_send() if nr_enabled: - assert metrics.count((metric, '')) == num_coroutines, metrics + assert metrics.count((metric, "")) == num_coroutines, metrics else: assert not metrics, metrics -@pytest.mark.parametrize('num_coroutines', (2,)) -@pytest.mark.parametrize('create_test_task', test_matrix) -@pytest.mark.parametrize('transaction,metric', [ - (background_task(name='test'), 'OtherTransaction/Function/test'), - (message_transaction('lib', 'dest_type', 'dest_name'), - 'OtherTransaction/Message/lib/dest_type/Named/dest_name'), -]) -def test_async_coroutine_send_disabled(num_coroutines, create_test_task, - transaction, metric): +@pytest.mark.parametrize("num_coroutines", (2,)) +@pytest.mark.parametrize("create_test_task", test_matrix) +@pytest.mark.parametrize( + "transaction,metric", + [ + (background_task(name="test"), "OtherTransaction/Function/test"), + ( + message_transaction("lib", "dest_type", "dest_name"), + "OtherTransaction/Message/lib/dest_type/Named/dest_name", + ), + ], +) +def test_async_coroutine_send_disabled(event_loop, num_coroutines, create_test_task, transaction, metric): metrics = [] - tasks = [create_test_task(transaction, call_exit=True) - for _ in range(num_coroutines)] + tasks = [create_test_task(event_loop, transaction, call_exit=True) for _ in range(num_coroutines)] @capture_transaction_metrics(metrics) def _test_async_coroutine_send(): - loop = asyncio.get_event_loop() driver = asyncio.gather(*[t() for t in tasks]) - loop.run_until_complete(driver) + event_loop.run_until_complete(driver) _test_async_coroutine_send() - assert metrics.count((metric, '')) == num_coroutines, metrics - - -@pytest.mark.parametrize('num_coroutines', (2,)) -@pytest.mark.parametrize('create_test_task', test_matrix) -@pytest.mark.parametrize('transaction,metric', [ - (background_task(name='test'), 'OtherTransaction/Function/test'), - (message_transaction('lib', 'dest_type', 'dest_name'), - 'OtherTransaction/Message/lib/dest_type/Named/dest_name'), -]) + assert metrics.count((metric, "")) == num_coroutines, metrics + + +@pytest.mark.parametrize("num_coroutines", (2,)) +@pytest.mark.parametrize("create_test_task", test_matrix) +@pytest.mark.parametrize( + "transaction,metric", + [ + (background_task(name="test"), "OtherTransaction/Function/test"), + ( + message_transaction("lib", "dest_type", "dest_name"), + "OtherTransaction/Message/lib/dest_type/Named/dest_name", + ), + ], +) @validate_transaction_errors([]) -def test_async_coroutine_throw_cancel(num_coroutines, create_test_task, - transaction, metric): +def test_async_coroutine_throw_cancel(event_loop, num_coroutines, create_test_task, transaction, metric): metrics = [] - tasks = [create_test_task(transaction) - for _ in range(num_coroutines)] + tasks = [create_test_task(event_loop, transaction) for _ in range(num_coroutines)] @asyncio.coroutine def task_c(): @@ -158,28 +170,30 @@ def task_c(): 
@capture_transaction_metrics(metrics) def _test_async_coroutine_throw_cancel(): - loop = asyncio.get_event_loop() - loop.run_until_complete(task_c()) + event_loop.run_until_complete(task_c()) _test_async_coroutine_throw_cancel() - assert metrics.count((metric, '')) == num_coroutines, metrics - - -@pytest.mark.parametrize('num_coroutines', (2,)) -@pytest.mark.parametrize('create_test_task', test_matrix) -@pytest.mark.parametrize('transaction,metric', [ - (background_task(name='test'), 'OtherTransaction/Function/test'), - (message_transaction('lib', 'dest_type', 'dest_name'), - 'OtherTransaction/Message/lib/dest_type/Named/dest_name'), -]) -@validate_transaction_errors(['builtins:ValueError']) -def test_async_coroutine_throw_error(num_coroutines, create_test_task, - transaction, metric): + assert metrics.count((metric, "")) == num_coroutines, metrics + + +@pytest.mark.parametrize("num_coroutines", (2,)) +@pytest.mark.parametrize("create_test_task", test_matrix) +@pytest.mark.parametrize( + "transaction,metric", + [ + (background_task(name="test"), "OtherTransaction/Function/test"), + ( + message_transaction("lib", "dest_type", "dest_name"), + "OtherTransaction/Message/lib/dest_type/Named/dest_name", + ), + ], +) +@validate_transaction_errors(["builtins:ValueError"]) +def test_async_coroutine_throw_error(event_loop, num_coroutines, create_test_task, transaction, metric): metrics = [] - tasks = [create_test_task(transaction) - for _ in range(num_coroutines)] + tasks = [create_test_task(event_loop, transaction) for _ in range(num_coroutines)] @asyncio.coroutine def task_c(): @@ -191,30 +205,32 @@ def task_c(): @capture_transaction_metrics(metrics) def _test_async_coroutine_throw_error(): - loop = asyncio.get_event_loop() - loop.run_until_complete(task_c()) + event_loop.run_until_complete(task_c()) _test_async_coroutine_throw_error() - assert metrics.count((metric, '')) == num_coroutines, metrics - assert metrics.count(('Errors/' + metric, '')) == num_coroutines, metrics - assert metrics.count(('Errors/all', '')) == num_coroutines, metrics - - -@pytest.mark.parametrize('num_coroutines', (1,)) -@pytest.mark.parametrize('create_test_task', test_matrix) -@pytest.mark.parametrize('transaction,metric', [ - (background_task(name='test'), 'OtherTransaction/Function/test'), - (message_transaction('lib', 'dest_type', 'dest_name'), - 'OtherTransaction/Message/lib/dest_type/Named/dest_name'), -]) -@pytest.mark.parametrize('start_coroutines', (False, True)) -def test_async_coroutine_close(num_coroutines, create_test_task, transaction, - metric, start_coroutines): + assert metrics.count((metric, "")) == num_coroutines, metrics + assert metrics.count(("Errors/" + metric, "")) == num_coroutines, metrics + assert metrics.count(("Errors/all", "")) == num_coroutines, metrics + + +@pytest.mark.parametrize("num_coroutines", (1,)) +@pytest.mark.parametrize("create_test_task", test_matrix) +@pytest.mark.parametrize( + "transaction,metric", + [ + (background_task(name="test"), "OtherTransaction/Function/test"), + ( + message_transaction("lib", "dest_type", "dest_name"), + "OtherTransaction/Message/lib/dest_type/Named/dest_name", + ), + ], +) +@pytest.mark.parametrize("start_coroutines", (False, True)) +def test_async_coroutine_close(event_loop, num_coroutines, create_test_task, transaction, metric, start_coroutines): metrics = [] - tasks = [create_test_task(transaction) - for _ in range(num_coroutines)] + tasks = [create_test_task(event_loop, transaction) for _ in range(num_coroutines)] @asyncio.coroutine def 
task_c(): @@ -229,31 +245,33 @@ def task_c(): @capture_transaction_metrics(metrics) def _test_async_coroutine_close(): - loop = asyncio.get_event_loop() - loop.run_until_complete(task_c()) + event_loop.run_until_complete(task_c()) _test_async_coroutine_close() if start_coroutines: - assert metrics.count((metric, '')) == num_coroutines, metrics + assert metrics.count((metric, "")) == num_coroutines, metrics else: assert not metrics -@pytest.mark.parametrize('num_coroutines', (1,)) -@pytest.mark.parametrize('create_test_task', test_matrix) -@pytest.mark.parametrize('transaction,metric', [ - (background_task(name='test'), 'OtherTransaction/Function/test'), - (message_transaction('lib', 'dest_type', 'dest_name'), - 'OtherTransaction/Message/lib/dest_type/Named/dest_name'), -]) -@validate_transaction_errors(['builtins:RuntimeError']) -def test_async_coroutine_close_raises_error(num_coroutines, create_test_task, - transaction, metric): +@pytest.mark.parametrize("num_coroutines", (1,)) +@pytest.mark.parametrize("create_test_task", test_matrix) +@pytest.mark.parametrize( + "transaction,metric", + [ + (background_task(name="test"), "OtherTransaction/Function/test"), + ( + message_transaction("lib", "dest_type", "dest_name"), + "OtherTransaction/Message/lib/dest_type/Named/dest_name", + ), + ], +) +@validate_transaction_errors(["builtins:RuntimeError"]) +def test_async_coroutine_close_raises_error(event_loop, num_coroutines, create_test_task, transaction, metric): metrics = [] - tasks = [create_test_task(transaction, runtime_error=True) - for _ in range(num_coroutines)] + tasks = [create_test_task(event_loop, transaction, runtime_error=True) for _ in range(num_coroutines)] @asyncio.coroutine def task_c(): @@ -269,24 +287,28 @@ def task_c(): @capture_transaction_metrics(metrics) def _test_async_coroutine_close_raises_error(): - loop = asyncio.get_event_loop() - loop.run_until_complete(task_c()) + event_loop.run_until_complete(task_c()) _test_async_coroutine_close_raises_error() - assert metrics.count((metric, '')) == num_coroutines, metrics - assert metrics.count(('Errors/all', '')) == num_coroutines, metrics - - -@pytest.mark.parametrize('transaction,metric,arguments', [ - (web_transaction, 'Apdex/Function/%s', lambda name: ([], {'name': name})), - (message_transaction, 'OtherTransaction/Message/lib/dest_type/Named/%s', - lambda name: (['lib', 'dest_type', name], {})), - (background_task, 'OtherTransaction/Function/%s', - lambda name: ([], {'name': name}))]) -def test_deferred_async_background_task(transaction, metric, arguments): - loop = asyncio.get_event_loop() - deferred_metric = (metric % 'deferred', '') + assert metrics.count((metric, "")) == num_coroutines, metrics + assert metrics.count(("Errors/all", "")) == num_coroutines, metrics + + +@pytest.mark.parametrize( + "transaction,metric,arguments", + [ + (web_transaction, "Apdex/Function/%s", lambda name: ([], {"name": name})), + ( + message_transaction, + "OtherTransaction/Message/lib/dest_type/Named/%s", + lambda name: (["lib", "dest_type", name], {}), + ), + (background_task, "OtherTransaction/Function/%s", lambda name: ([], {"name": name})), + ], +) +def test_deferred_async_background_task(event_loop, transaction, metric, arguments): + deferred_metric = (metric % "deferred", "") args, kwargs = arguments("deferred") @@ -295,7 +317,7 @@ def test_deferred_async_background_task(transaction, metric, arguments): def child_task(): yield from asyncio.sleep(0) - main_metric = (metric % 'main', '') + main_metric = (metric % "main", "") args, kwargs 
= arguments("main") @@ -303,7 +325,7 @@ def child_task(): @asyncio.coroutine def parent_task(): yield from asyncio.sleep(0) - return loop.create_task(child_task()) + return event_loop.create_task(child_task()) @asyncio.coroutine def test_runner(): @@ -314,7 +336,7 @@ def test_runner(): @capture_transaction_metrics(metrics) def _test(): - loop.run_until_complete(test_runner()) + event_loop.run_until_complete(test_runner()) _test() @@ -322,16 +344,20 @@ def _test(): assert deferred_metric in metrics -@pytest.mark.parametrize('transaction,metric,arguments', [ - (web_transaction, 'Apdex/Function/%s', lambda name: ([], {'name': name})), - (message_transaction, 'OtherTransaction/Message/lib/dest_type/Named/%s', - lambda name: (['lib', 'dest_type', name], {})), - (background_task, 'OtherTransaction/Function/%s', - lambda name: ([], {'name': name}))]) -def test_child_transaction_when_parent_is_running( - transaction, metric, arguments): - loop = asyncio.get_event_loop() - deferred_metric = (metric % 'deferred', '') +@pytest.mark.parametrize( + "transaction,metric,arguments", + [ + (web_transaction, "Apdex/Function/%s", lambda name: ([], {"name": name})), + ( + message_transaction, + "OtherTransaction/Message/lib/dest_type/Named/%s", + lambda name: (["lib", "dest_type", name], {}), + ), + (background_task, "OtherTransaction/Function/%s", lambda name: ([], {"name": name})), + ], +) +def test_child_transaction_when_parent_is_running(event_loop, transaction, metric, arguments): + deferred_metric = (metric % "deferred", "") args, kwargs = arguments("deferred") @@ -340,20 +366,20 @@ def test_child_transaction_when_parent_is_running( def child_task(): yield from asyncio.sleep(0) - main_metric = (metric % 'main', '') + main_metric = (metric % "main", "") args, kwargs = arguments("main") @transaction(*args, **kwargs) @asyncio.coroutine def parent_task(): - yield from loop.create_task(child_task()) + yield from event_loop.create_task(child_task()) metrics = [] @capture_transaction_metrics(metrics) def _test(): - loop.run_until_complete(parent_task()) + event_loop.run_until_complete(parent_task()) _test() @@ -361,15 +387,20 @@ def _test(): assert deferred_metric in metrics -@pytest.mark.parametrize('transaction,metric,arguments', [ - (web_transaction, 'Apdex/Function/%s', lambda name: ([], {'name': name})), - (message_transaction, 'OtherTransaction/Message/lib/dest_type/Named/%s', - lambda name: (['lib', 'dest_type', name], {})), - (background_task, 'OtherTransaction/Function/%s', - lambda name: ([], {'name': name}))]) -def test_nested_coroutine_inside_sync(transaction, metric, arguments): - loop = asyncio.get_event_loop() - child_metric = (metric % 'child', '') +@pytest.mark.parametrize( + "transaction,metric,arguments", + [ + (web_transaction, "Apdex/Function/%s", lambda name: ([], {"name": name})), + ( + message_transaction, + "OtherTransaction/Message/lib/dest_type/Named/%s", + lambda name: (["lib", "dest_type", name], {}), + ), + (background_task, "OtherTransaction/Function/%s", lambda name: ([], {"name": name})), + ], +) +def test_nested_coroutine_inside_sync(event_loop, transaction, metric, arguments): + child_metric = (metric % "child", "") args, kwargs = arguments("child") @@ -378,7 +409,7 @@ def test_nested_coroutine_inside_sync(transaction, metric, arguments): def child_task(): yield from asyncio.sleep(0) - main_metric = (metric % 'main', '') + main_metric = (metric % "main", "") args, kwargs = arguments("main") metrics = [] @@ -386,7 +417,7 @@ def child_task(): 
@capture_transaction_metrics(metrics) @transaction(*args, **kwargs) def parent(): - loop.run_until_complete(child_task()) + event_loop.run_until_complete(child_task()) parent() @@ -394,15 +425,20 @@ def parent(): assert child_metric not in metrics -@pytest.mark.parametrize('transaction,metric,arguments', [ - (web_transaction, 'Apdex/Function/%s', lambda name: ([], {'name': name})), - (message_transaction, 'OtherTransaction/Message/lib/dest_type/Named/%s', - lambda name: (['lib', 'dest_type', name], {})), - (background_task, 'OtherTransaction/Function/%s', - lambda name: ([], {'name': name}))]) -def test_nested_coroutine_task_already_active(transaction, metric, arguments): - loop = asyncio.get_event_loop() - deferred_metric = (metric % 'deferred', '') +@pytest.mark.parametrize( + "transaction,metric,arguments", + [ + (web_transaction, "Apdex/Function/%s", lambda name: ([], {"name": name})), + ( + message_transaction, + "OtherTransaction/Message/lib/dest_type/Named/%s", + lambda name: (["lib", "dest_type", name], {}), + ), + (background_task, "OtherTransaction/Function/%s", lambda name: ([], {"name": name})), + ], +) +def test_nested_coroutine_task_already_active(event_loop, transaction, metric, arguments): + deferred_metric = (metric % "deferred", "") args, kwargs = arguments("deferred") @@ -415,20 +451,20 @@ def child_task(): def child_trace(): yield from child_task() - main_metric = (metric % 'main', '') + main_metric = (metric % "main", "") args, kwargs = arguments("main") @transaction(*args, **kwargs) @asyncio.coroutine def parent_task(): - yield from loop.create_task(child_trace()) + yield from event_loop.create_task(child_trace()) metrics = [] @capture_transaction_metrics(metrics) def _test(): - loop.run_until_complete(parent_task()) + event_loop.run_until_complete(parent_task()) _test() diff --git a/tests/agent_features/test_event_loop_wait_time.py b/tests/agent_features/test_event_loop_wait_time.py index d8891b82d6..ccf57c9a4d 100644 --- a/tests/agent_features/test_event_loop_wait_time.py +++ b/tests/agent_features/test_event_loop_wait_time.py @@ -12,16 +12,21 @@ # See the License for the specific language governing permissions and # limitations under the License. 
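Note on asyncio.Event and the loop argument: further down in this file, test_blocking_task_on_different_loop wraps asyncio.Event(loop=...) in a try/except TypeError. The explicit loop parameter on asyncio synchronization primitives was deprecated in Python 3.8 and removed in 3.10, where passing it raises TypeError. A small helper expressing the same guard (the helper name is illustrative, not part of this patch):

import asyncio


def make_event(loop=None):
    # Bind the event to an explicit loop where the interpreter still
    # allows it (Python <= 3.9); on 3.10+ the keyword is gone.
    if loop is None:
        return asyncio.Event()
    try:
        return asyncio.Event(loop=loop)
    except TypeError:
        return asyncio.Event()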
-import pytest import asyncio import time -from newrelic.api.transaction import current_transaction + +import pytest +from testing_support.fixtures import ( + override_application_settings, + validate_transaction_event_attributes, + validate_transaction_metrics, + validate_transaction_trace_attributes, +) + from newrelic.api.background_task import background_task -from newrelic.api.function_trace import function_trace, FunctionTrace +from newrelic.api.function_trace import FunctionTrace, function_trace +from newrelic.api.transaction import current_transaction from newrelic.core.trace_cache import trace_cache -from testing_support.fixtures import (validate_transaction_metrics, - override_application_settings, validate_transaction_event_attributes, - validate_transaction_trace_attributes) @background_task(name="block") @@ -61,31 +66,27 @@ def wait_for_loop(ready, done, times=1): @pytest.mark.parametrize( - 'blocking_transaction_active,event_loop_visibility_enabled', ( - (True, True), - (False, True), - (False, False), -)) -def test_record_event_loop_wait( - blocking_transaction_active, - event_loop_visibility_enabled): + "blocking_transaction_active,event_loop_visibility_enabled", + ( + (True, True), + (False, True), + (False, False), + ), +) +def test_record_event_loop_wait(event_loop, blocking_transaction_active, event_loop_visibility_enabled): import asyncio metric_count = 2 if event_loop_visibility_enabled else None - execute_attributes = { - 'intrinsic': ('eventLoopTime',), 'agent': (), 'user': ()} - wait_attributes = { - 'intrinsic': ('eventLoopWait',), 'agent': (), 'user': ()} + execute_attributes = {"intrinsic": ("eventLoopTime",), "agent": (), "user": ()} + wait_attributes = {"intrinsic": ("eventLoopWait",), "agent": (), "user": ()} if event_loop_visibility_enabled: - wait_attributes = {'required_params': wait_attributes} - execute_attributes = {'required_params': execute_attributes} + wait_attributes = {"required_params": wait_attributes} + execute_attributes = {"required_params": execute_attributes} else: - wait_attributes = {'forgone_params': wait_attributes} - execute_attributes = {'forgone_params': execute_attributes} + wait_attributes = {"forgone_params": wait_attributes} + execute_attributes = {"forgone_params": execute_attributes} - scoped = ( - ("EventLoop/Wait/OtherTransaction/Function/block", metric_count), - ) + scoped = (("EventLoop/Wait/OtherTransaction/Function/block", metric_count),) rollup = ( ("EventLoop/Wait/all", metric_count), ("EventLoop/Wait/allOther", metric_count), @@ -99,10 +100,12 @@ def test_record_event_loop_wait( index = 0 if blocking_transaction_active else -1 - @override_application_settings({ - 'event_loop_visibility.enabled': event_loop_visibility_enabled, - 'distributed_tracing.enabled': True, - }) + @override_application_settings( + { + "event_loop_visibility.enabled": event_loop_visibility_enabled, + "distributed_tracing.enabled": True, + } + ) @validate_transaction_trace_attributes( index=index + 1, **execute_attributes, @@ -123,20 +126,22 @@ def test_record_event_loop_wait( index=index, ) def _test(): - asyncio.get_event_loop().run_until_complete(future) + event_loop.run_until_complete(future) _test() -@override_application_settings({ - 'event_loop_visibility.blocking_threshold': 0, -}) +@override_application_settings( + { + "event_loop_visibility.blocking_threshold": 0, + } +) def test_record_event_loop_wait_outside_task(): # Insert a random trace into the trace cache - trace = FunctionTrace(name='testing') + trace = 
FunctionTrace(name="testing") trace_cache()._cache[0] = trace - @background_task(name='test_record_event_loop_wait_outside_task') + @background_task(name="test_record_event_loop_wait_outside_task") def _test(): yield @@ -152,12 +157,15 @@ def _test(): def test_blocking_task_on_different_loop(): loops = [asyncio.new_event_loop() for _ in range(2)] - waiter_events = [asyncio.Event(loop=loops[0]) for _ in range(2)] - waiter = wait_for_loop(*waiter_events, times=1) + try: + waiter_events = [asyncio.Event(loop=loops[0]) for _ in range(2)] + blocker_events = [asyncio.Event(loop=loops[1]) for _ in range(2)] + except TypeError: + waiter_events = [asyncio.Event() for _ in range(2)] + blocker_events = [asyncio.Event() for _ in range(2)] - blocker_events = [asyncio.Event(loop=loops[1]) for _ in range(2)] - blocker = block_loop(*blocker_events, - blocking_transaction_active=False, times=1) + waiter = wait_for_loop(*waiter_events, times=1) + blocker = block_loop(*blocker_events, blocking_transaction_active=False, times=1) waiter_task = loops[0].create_task(waiter) blocker_task = loops[1].create_task(blocker) @@ -174,9 +182,8 @@ def test_blocking_task_on_different_loop(): loops[0].run_until_complete(waiter_task) -def test_record_event_loop_wait_on_different_task(): +def test_record_event_loop_wait_on_different_task(event_loop): import asyncio - loop = asyncio.get_event_loop() async def recorder(ready, wait): ready.set() @@ -186,10 +193,10 @@ async def recorder(ready, wait): @background_task(name="test_record_event_loop_wait_on_different_task") async def transaction(): coroutine_start, transaction_exit = asyncio.Event(), asyncio.Event() - task = loop.create_task(recorder(coroutine_start, transaction_exit)) + task = event_loop.create_task(recorder(coroutine_start, transaction_exit)) await coroutine_start.wait() current_transaction().__exit__(None, None, None) transaction_exit.set() await task - loop.run_until_complete(transaction()) \ No newline at end of file + event_loop.run_until_complete(transaction()) diff --git a/tests/agent_unittests/conftest.py b/tests/agent_unittests/conftest.py index 65fc36c779..012e6ca4b1 100644 --- a/tests/agent_unittests/conftest.py +++ b/tests/agent_unittests/conftest.py @@ -62,7 +62,7 @@ def global_settings(request, monkeypatch): monkeypatch.delenv("NEW_RELIC_HOST", raising=False) monkeypatch.delenv("NEW_RELIC_LICENSE_KEY", raising=False) - if "env" in request.funcargnames: + if "env" in request.fixturenames: env = request.getfixturevalue("env") for k, v in env.items(): monkeypatch.setenv(k, v) diff --git a/tests/agent_unittests/test_http_client.py b/tests/agent_unittests/test_http_client.py index 0e20b4c041..b1fc4b4f4e 100644 --- a/tests/agent_unittests/test_http_client.py +++ b/tests/agent_unittests/test_http_client.py @@ -19,6 +19,10 @@ import zlib import pytest +from testing_support.mock_external_http_server import ( + BaseHTTPServer, + MockExternalHTTPServer, +) from newrelic.common import certs from newrelic.common.agent_http import ( @@ -34,10 +38,6 @@ from newrelic.core.stats_engine import CustomMetrics from newrelic.network.exceptions import NetworkInterfaceException from newrelic.packages.urllib3.util import Url -from testing_support.mock_external_http_server import ( - BaseHTTPServer, - MockExternalHTTPServer, -) try: from StringIO import StringIO @@ -80,7 +80,10 @@ def do_CONNECT(self): handler = type( "ResponseHandler", - (BaseHTTPServer.BaseHTTPRequestHandler, object,), + ( + BaseHTTPServer.BaseHTTPRequestHandler, + object, + ), {"do_GET": handler, "do_POST": 
handler, "do_CONNECT": do_CONNECT}, ) self.httpd = BaseHTTPServer.HTTPServer(("localhost", self.port), handler) @@ -99,13 +102,22 @@ def reset(self): class SecureServer(InsecureServer): def __init__(self, *args, **kwargs): super(SecureServer, self).__init__(*args, **kwargs) - self.httpd.socket = ssl.wrap_socket( - self.httpd.socket, - server_side=True, - keyfile=SERVER_CERT, - certfile=SERVER_CERT, - do_handshake_on_connect=False, - ) + try: + self.context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER) + self.context.load_cert_chain(certfile=SERVER_CERT, keyfile=SERVER_CERT) + self.httpd.socket = self.context.wrap_socket( + sock=self.httpd.socket, + server_side=True, + do_handshake_on_connect=False, + ) + except (AttributeError, TypeError): + self.httpd.socket = ssl.wrap_socket( + self.httpd.socket, + server_side=True, + keyfile=SERVER_CERT, + certfile=SERVER_CERT, + do_handshake_on_connect=False, + ) @pytest.fixture(scope="module") @@ -126,7 +138,14 @@ def insecure_server(): (None, "host", 0, None, None, None), ("http", None, 8080, None, None, None), ("http", "host", 0, None, None, Url(scheme="http", host="host", port=None)), - ("http", "host", 8080, None, None, Url(scheme="http", host="host", port=8080),), + ( + "http", + "host", + 8080, + None, + None, + Url(scheme="http", host="host", port=8080), + ), ( "http", "https://host:8081", @@ -167,9 +186,7 @@ def test_proxy_parsing(scheme, host, port, username, password, expected): @pytest.mark.parametrize("method", ("GET", "POST")) def test_http_no_payload(server, method): - with HttpClient( - "localhost", server.port, disable_certificate_validation=True - ) as client: + with HttpClient("localhost", server.port, disable_certificate_validation=True) as client: connection = client._connection_attr status, data = client.send_request(method=method, headers={"foo": "bar"}) @@ -206,9 +223,7 @@ def test_http_no_payload(server, method): def test_non_ok_response(client_cls, server): internal_metrics = CustomMetrics() - with client_cls( - "localhost", server.port, disable_certificate_validation=True - ) as client: + with client_cls("localhost", server.port, disable_certificate_validation=True) as client: with InternalTraceContext(internal_metrics): status, _ = client.send_request(method="PUT") @@ -226,7 +241,11 @@ def test_non_ok_response(client_cls, server): def test_http_close_connection(server): - client = HttpClient("localhost", server.port, disable_certificate_validation=True,) + client = HttpClient( + "localhost", + server.port, + disable_certificate_validation=True, + ) status, _ = client.send_request() assert status == 200 @@ -270,9 +289,7 @@ def test_http_payload_compression(server, client_cls, method, threshold): compression_threshold=threshold, ) as client: with InternalTraceContext(internal_metrics): - status, data = client.send_request( - payload=payload, params={"method": "test"} - ) + status, data = client.send_request(payload=payload, params={"method": "test"}) assert status == 200 data = data.split(b"\n") @@ -281,29 +298,18 @@ def test_http_payload_compression(server, client_cls, method, threshold): internal_metrics = dict(internal_metrics.metrics()) if client_cls is ApplicationModeClient: - assert internal_metrics["Supportability/Python/Collector/Output/Bytes/test"][ - :2 - ] == [1, payload_byte_len,] + assert internal_metrics["Supportability/Python/Collector/Output/Bytes/test"][:2] == [ + 1, + payload_byte_len, + ] if threshold < 20: # Verify compression time is recorded - assert ( - 
internal_metrics["Supportability/Python/Collector/ZLIB/Compress/test"][ - 0 - ] - == 1 - ) - assert ( - internal_metrics["Supportability/Python/Collector/ZLIB/Compress/test"][ - 1 - ] - > 0 - ) + assert internal_metrics["Supportability/Python/Collector/ZLIB/Compress/test"][0] == 1 + assert internal_metrics["Supportability/Python/Collector/ZLIB/Compress/test"][1] > 0 # Verify the original payload length is recorded - assert internal_metrics["Supportability/Python/Collector/ZLIB/Bytes/test"][ - :2 - ] == [1, len(payload)] + assert internal_metrics["Supportability/Python/Collector/ZLIB/Bytes/test"][:2] == [1, len(payload)] assert len(internal_metrics) == 3 else: @@ -369,9 +375,7 @@ def __init__(self, *args, **kwargs): assert internal_metrics[cert_metric][-3:-1] == [1, 1] -@pytest.mark.parametrize( - "auth", ((None, None), ("username", None), ("username", "password")) -) +@pytest.mark.parametrize("auth", ((None, None), ("username", None), ("username", "password"))) def test_ssl_via_ssl_proxy(server, auth): proxy_user, proxy_pass = auth with HttpClient( @@ -389,9 +393,7 @@ def test_ssl_via_ssl_proxy(server, auth): assert status == 200 data = data.decode("utf-8") data = data.split("\n") - assert data[0].startswith( - "POST https://localhost:1/agent_listener/invoke_raw_method " - ) + assert data[0].startswith("POST https://localhost:1/agent_listener/invoke_raw_method ") proxy_auth = None for header in data[1:-1]: @@ -404,9 +406,7 @@ def test_ssl_via_ssl_proxy(server, auth): auth_expected = proxy_user if proxy_pass: auth_expected = auth_expected + ":" + proxy_pass - auth_expected = "Basic " + base64.b64encode( - auth_expected.encode("utf-8") - ).decode("utf-8") + auth_expected = "Basic " + base64.b64encode(auth_expected.encode("utf-8")).decode("utf-8") assert proxy_auth == auth_expected else: assert not proxy_auth @@ -428,9 +428,7 @@ def test_non_ssl_via_ssl_proxy(server): assert status == 200 data = data.decode("utf-8") data = data.split("\n") - assert data[0].startswith( - "POST http://localhost:1/agent_listener/invoke_raw_method " - ) + assert data[0].startswith("POST http://localhost:1/agent_listener/invoke_raw_method ") assert server.httpd.connect_host is None @@ -448,16 +446,12 @@ def test_non_ssl_via_non_ssl_proxy(insecure_server): assert status == 200 data = data.decode("utf-8") data = data.split("\n") - assert data[0].startswith( - "POST http://localhost:1/agent_listener/invoke_raw_method " - ) + assert data[0].startswith("POST http://localhost:1/agent_listener/invoke_raw_method ") assert insecure_server.httpd.connect_host is None -@pytest.mark.parametrize( - "auth", ((None, None), ("username", None), ("username", "password")) -) +@pytest.mark.parametrize("auth", ((None, None), ("username", None), ("username", "password"))) def test_ssl_via_non_ssl_proxy(insecure_server, auth): proxy_user, proxy_pass = auth with HttpClient( @@ -480,13 +474,8 @@ def test_ssl_via_non_ssl_proxy(insecure_server, auth): auth_expected = proxy_user if proxy_pass: auth_expected = auth_expected + ":" + proxy_pass - auth_expected = "Basic " + base64.b64encode( - auth_expected.encode("utf-8") - ).decode("utf-8") - assert ( - insecure_server.httpd.connect_headers["proxy-authorization"] - == auth_expected - ) + auth_expected = "Basic " + base64.b64encode(auth_expected.encode("utf-8")).decode("utf-8") + assert insecure_server.httpd.connect_headers["proxy-authorization"] == auth_expected else: assert "proxy-authorization" not in insecure_server.httpd.connect_headers assert insecure_server.httpd.connect_host == 
"localhost" @@ -496,9 +485,7 @@ def test_ssl_via_non_ssl_proxy(insecure_server, auth): def test_max_payload_does_not_send(insecure_server): - with InsecureHttpClient( - "localhost", insecure_server.port, max_payload_size_in_bytes=0 - ) as client: + with InsecureHttpClient("localhost", insecure_server.port, max_payload_size_in_bytes=0) as client: status, data = client.send_request(payload=b"*") assert status == 413 @@ -556,7 +543,8 @@ def test_serverless_mode_client(): for method in methods: params = {"method": method} status, data = client.send_request( - params=params, payload=json.dumps({"method": method}).encode("utf-8"), + params=params, + payload=json.dumps({"method": method}).encode("utf-8"), ) assert status == 200 @@ -626,8 +614,7 @@ def test_audit_logging(server, insecure_server, client_cls, proxy_host, exceptio connection = "direct" assert internal_metrics == { "Supportability/Python/Collector/Failures": [1, 0, 0, 0, 0, 0], - "Supportability/Python/Collector/Failures/%s" - % connection: [1, 0, 0, 0, 0, 0], + "Supportability/Python/Collector/Failures/%s" % connection: [1, 0, 0, 0, 0, 0], "Supportability/Python/Collector/Exception/%s" % exc: [1, 0, 0, 0, 0, 0], } else: diff --git a/tests/application_gearman/test_gearman.py b/tests/application_gearman/test_gearman.py index fc091af603..7ddc13fdc3 100644 --- a/tests/application_gearman/test_gearman.py +++ b/tests/application_gearman/test_gearman.py @@ -14,9 +14,10 @@ from __future__ import print_function -import gearman -import threading import os +import threading + +import gearman from newrelic.api.background_task import background_task @@ -25,15 +26,21 @@ gm_client = None -GEARMAND_HOST = os.environ.get('GEARMAND_PORT_4730_TCP_ADDR', 'localhost') -GEARMAND_PORT = os.environ.get('GEARMAND_PORT_4730_TCP_PORT', '4730') +GEARMAND_HOST = os.environ.get("GEARMAND_PORT_4730_TCP_ADDR", "localhost") +GEARMAND_PORT = os.environ.get("GEARMAND_PORT_4730_TCP_PORT", "4730") -GEARMAND_ADDR = '%s:%s' % (GEARMAND_HOST, GEARMAND_PORT) +GEARMAND_ADDR = "%s:%s" % (GEARMAND_HOST, GEARMAND_PORT) -class GearmanWorker(gearman.GearmanWorker): +class GearmanWorker(gearman.GearmanWorker): def after_poll(self, any_activity): - return not worker_event.isSet() + try: + worker_event_set = worker_event.is_set() + except TypeError: + worker_event_set = worker_event.isSet() + + return not worker_event_set + def setup_module(module): global worker_thread @@ -41,14 +48,14 @@ def setup_module(module): gm_worker = GearmanWorker([GEARMAND_ADDR]) def task_listener_reverse(gearman_worker, gearman_job): - return ''.join(reversed(gearman_job.data)) + return "".join(reversed(gearman_job.data)) def task_listener_exception(gearman_worker, gearman_job): - raise RuntimeError('error') + raise RuntimeError("error") - gm_worker.set_client_id('gearman-instrumentation-tests') - gm_worker.register_task('reverse', task_listener_reverse) - gm_worker.register_task('exception', task_listener_exception) + gm_worker.set_client_id("gearman-instrumentation-tests") + gm_worker.register_task("reverse", task_listener_reverse) + gm_worker.register_task("exception", task_listener_exception) def startup(): gm_worker.work(poll_timeout=1.0) @@ -60,16 +67,19 @@ def startup(): gm_client = gearman.GearmanClient([GEARMAND_ADDR]) + def teardown_module(module): worker_event.set() worker_thread.join() + @background_task() def test_successful(): - completed_job_request = gm_client.submit_job('reverse', 'data') + completed_job_request = gm_client.submit_job("reverse", "data") assert 
completed_job_request.complete + @background_task() def test_exception(): - completed_job_request = gm_client.submit_job('exception', 'data') + completed_job_request = gm_client.submit_job("exception", "data") assert completed_job_request.complete diff --git a/tests/base_requirements.txt b/tests/base_requirements.txt deleted file mode 100644 index f526bd82a3..0000000000 --- a/tests/base_requirements.txt +++ /dev/null @@ -1,3 +0,0 @@ -pytest==4.6.2 -pytest-cov -WebTest==2.0.30 diff --git a/tests/coroutines_asyncio/conftest.py b/tests/coroutines_asyncio/conftest.py index 9f5c78053f..aa412d38a5 100644 --- a/tests/coroutines_asyncio/conftest.py +++ b/tests/coroutines_asyncio/conftest.py @@ -13,26 +13,27 @@ # limitations under the License. import pytest - +from testing_support.fixture.event_loop import event_loop from testing_support.fixtures import ( - collector_available_fixture, - code_coverage_fixture, - collector_agent_registration_fixture) + code_coverage_fixture, + collector_agent_registration_fixture, + collector_available_fixture, +) _coverage_source = [ - 'newrelic.hooks.coroutines_asyncio', + "newrelic.hooks.coroutines_asyncio", ] code_coverage = code_coverage_fixture(source=_coverage_source) _default_settings = { - 'transaction_tracer.explain_threshold': 0.0, - 'transaction_tracer.transaction_threshold': 0.0, - 'transaction_tracer.stack_trace_threshold': 0.0, - 'debug.log_data_collector_payloads': True, - 'debug.record_transaction_failure': True, + "transaction_tracer.explain_threshold": 0.0, + "transaction_tracer.transaction_threshold": 0.0, + "transaction_tracer.stack_trace_threshold": 0.0, + "debug.log_data_collector_payloads": True, + "debug.record_transaction_failure": True, } collector_agent_registration = collector_agent_registration_fixture( - app_name='Python Agent Test (coroutines_asyncio)', - default_settings=_default_settings) + app_name="Python Agent Test (coroutines_asyncio)", default_settings=_default_settings +) diff --git a/tests/coroutines_asyncio/test_context_propagation.py b/tests/coroutines_asyncio/test_context_propagation.py index f45cc63dbb..10d0ecd526 100644 --- a/tests/coroutines_asyncio/test_context_propagation.py +++ b/tests/coroutines_asyncio/test_context_propagation.py @@ -12,37 +12,42 @@ # See the License for the specific language governing permissions and # limitations under the License. 
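The conftest change above pulls in a shared event_loop fixture from testing_support.fixture.event_loop; the remaining files in this patch thread that fixture through their tests in place of per-test asyncio.get_event_loop() calls. The fixture's body is not part of this diff, but a plausible minimal implementation, assuming session scope and explicit teardown, looks like this:

import asyncio

import pytest


@pytest.fixture(scope="session")
def event_loop():
    # Hypothetical sketch: create an isolated loop instead of calling
    # asyncio.get_event_loop(), which emits a DeprecationWarning on
    # Python 3.10+ when no loop is running.
    loop = asyncio.new_event_loop()
    asyncio.set_event_loop(loop)
    yield loop
    loop.close()

Any test that declares an event_loop parameter then receives this loop and drives its coroutines with event_loop.run_until_complete(...).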
-import uvloop +import sys + import pytest -from newrelic.api.background_task import background_task, BackgroundTask +import uvloop +from testing_support.fixtures import ( + function_not_called, + override_generic_settings, + validate_transaction_metrics, +) + from newrelic.api.application import application_instance as application -from newrelic.api.time_trace import current_trace -from newrelic.api.function_trace import FunctionTrace, function_trace +from newrelic.api.background_task import BackgroundTask, background_task from newrelic.api.database_trace import database_trace from newrelic.api.datastore_trace import datastore_trace from newrelic.api.external_trace import external_trace +from newrelic.api.function_trace import FunctionTrace, function_trace from newrelic.api.memcache_trace import memcache_trace from newrelic.api.message_trace import message_trace - -from newrelic.core.trace_cache import trace_cache +from newrelic.api.time_trace import current_trace from newrelic.core.config import global_settings -from testing_support.fixtures import (validate_transaction_metrics, - override_generic_settings, function_not_called) +from newrelic.core.trace_cache import trace_cache -@function_trace('waiter3') +@function_trace("waiter3") async def child(): pass async def waiter(asyncio, event, wait): - with FunctionTrace(name='waiter1', terminal=True): + with FunctionTrace(name="waiter1", terminal=True): event.set() # Block until the parent says to exit await wait.wait() - with FunctionTrace(name='waiter2', terminal=True): + with FunctionTrace(name="waiter2", terminal=True): pass await child() @@ -57,7 +62,7 @@ async def task(asyncio, trace, event, wait): await waiter(asyncio, event, wait) -@background_task(name='test_context_propagation') +@background_task(name="test_context_propagation") async def _test(asyncio, schedule, nr_enabled=True): trace = current_trace() @@ -68,8 +73,7 @@ async def _test(asyncio, schedule, nr_enabled=True): events = [asyncio.Event() for _ in range(2)] wait = asyncio.Event() - tasks = [schedule(task(asyncio, trace, events[idx], wait)) - for idx in range(2)] + tasks = [schedule(task(asyncio, trace, events[idx], wait)) for idx in range(2)] await asyncio.gather(*(e.wait() for e in events)) @@ -84,37 +88,39 @@ async def _test(asyncio, schedule, nr_enabled=True): return trace -@pytest.mark.parametrize('loop_policy', (None, uvloop.EventLoopPolicy())) -@pytest.mark.parametrize('schedule', ( - 'create_task', - 'ensure_future', -)) +@pytest.mark.parametrize("loop_policy", (None, uvloop.EventLoopPolicy())) +@pytest.mark.parametrize( + "schedule", + ( + "create_task", + "ensure_future", + ), +) @validate_transaction_metrics( - 'test_context_propagation', + "test_context_propagation", background_task=True, scoped_metrics=( - ('Function/waiter1', 2), - ('Function/waiter2', 2), - ('Function/waiter3', 2), + ("Function/waiter1", 2), + ("Function/waiter2", 2), + ("Function/waiter3", 2), ), ) -def test_context_propagation(schedule, loop_policy): +def test_context_propagation(event_loop, schedule, loop_policy): import asyncio - asyncio.set_event_loop_policy(loop_policy) - loop = asyncio.get_event_loop() + asyncio.set_event_loop_policy(loop_policy) exceptions = [] def handle_exception(loop, context): exceptions.append(context) - loop.set_exception_handler(handle_exception) + event_loop.set_exception_handler(handle_exception) - schedule = getattr(asyncio, schedule, None) or getattr(loop, schedule) + schedule = getattr(asyncio, schedule, None) or getattr(event_loop, schedule) # Keep the 
trace around so that it's not removed from the trace cache # through reference counting (for testing) - _ = loop.run_until_complete(_test(asyncio, schedule)) + _ = event_loop.run_until_complete(_test(asyncio, schedule)) # The agent should have removed all traces from the cache since # run_until_complete has terminated (all callbacks scheduled inside the @@ -125,44 +131,49 @@ def handle_exception(loop, context): assert not exceptions, exceptions -@override_generic_settings(global_settings(), { - 'enabled': False, -}) -@function_not_called('newrelic.core.stats_engine', - 'StatsEngine.record_transaction') -def test_nr_disabled(): +@override_generic_settings( + global_settings(), + { + "enabled": False, + }, +) +@function_not_called("newrelic.core.stats_engine", "StatsEngine.record_transaction") +def test_nr_disabled(event_loop): import asyncio - schedule = asyncio.ensure_future - loop = asyncio.get_event_loop() + schedule = asyncio.ensure_future exceptions = [] def handle_exception(loop, context): exceptions.append(context) - loop.set_exception_handler(handle_exception) + event_loop.set_exception_handler(handle_exception) - loop.run_until_complete(_test(asyncio, schedule, nr_enabled=False)) + event_loop.run_until_complete(_test(asyncio, schedule, nr_enabled=False)) # Assert that no exceptions have occurred assert not exceptions, exceptions -@pytest.mark.parametrize('trace', [ - function_trace(name='simple_gen'), - external_trace(library='lib', url='http://foo.com'), - database_trace('select * from foo'), - datastore_trace('lib', 'foo', 'bar'), - message_trace('lib', 'op', 'typ', 'name'), - memcache_trace('cmd'), -]) -def test_two_transactions(trace): +@pytest.mark.parametrize( + "trace", + [ + function_trace(name="simple_gen"), + external_trace(library="lib", url="http://foo.com"), + database_trace("select * from foo"), + datastore_trace("lib", "foo", "bar"), + message_trace("lib", "op", "typ", "name"), + memcache_trace("cmd"), + ], +) +def test_two_transactions(event_loop, trace): """ Instantiate a coroutine in one transaction and await it in another. This should not cause any errors. 
""" import asyncio + tasks = [] ready = asyncio.Event() @@ -190,16 +201,21 @@ async def await_task(): done.set() - afut = asyncio.ensure_future(create_coro()) - bfut = asyncio.ensure_future(await_task()) - asyncio.get_event_loop().run_until_complete(asyncio.gather(afut, bfut)) + if sys.version_info >= (3, 10, 0): + afut = asyncio.ensure_future(create_coro(), loop=event_loop) + bfut = asyncio.ensure_future(await_task(), loop=event_loop) + event_loop.run_until_complete(asyncio.gather(afut, bfut)) + else: + afut = asyncio.ensure_future(create_coro()) + bfut = asyncio.ensure_future(await_task()) + asyncio.get_event_loop().run_until_complete(asyncio.gather(afut, bfut)) # Sentinel left in cache transaction exited async def sentinel_in_cache_txn_exited(asyncio, bg): event = asyncio.Event() - with BackgroundTask(application(), 'fg') as txn: + with BackgroundTask(application(), "fg") as txn: _ = txn.root_span task = asyncio.ensure_future(bg(event)) @@ -211,8 +227,8 @@ async def sentinel_in_cache_txn_exited(asyncio, bg): async def trace_in_cache_txn_exited(asyncio, bg): event = asyncio.Event() - with BackgroundTask(application(), 'fg'): - with FunctionTrace('fg') as _: + with BackgroundTask(application(), "fg"): + with FunctionTrace("fg") as _: task = asyncio.ensure_future(bg(event)) await event.wait() @@ -223,31 +239,36 @@ async def trace_in_cache_txn_exited(asyncio, bg): async def trace_in_cache_txn_active(asyncio, bg): event = asyncio.Event() - with BackgroundTask(application(), 'fg'): - with FunctionTrace('fg') as _: + with BackgroundTask(application(), "fg"): + with FunctionTrace("fg") as _: task = asyncio.ensure_future(bg(event)) await event.wait() return task -@pytest.mark.parametrize('fg', (sentinel_in_cache_txn_exited, - trace_in_cache_txn_exited, - trace_in_cache_txn_active,)) -def test_transaction_exit_trace_cache(fg): +@pytest.mark.parametrize( + "fg", + ( + sentinel_in_cache_txn_exited, + trace_in_cache_txn_exited, + trace_in_cache_txn_active, + ), +) +def test_transaction_exit_trace_cache(event_loop, fg): """ Verifying that the use of ensure_future will not cause errors when traces remain in the trace cache after transaction exit """ import asyncio + exceptions = [] def handle_exception(loop, context): exceptions.append(context) async def bg(event): - with BackgroundTask( - application(), 'bg'): + with BackgroundTask(application(), "bg"): event.set() async def handler(): @@ -255,9 +276,8 @@ async def handler(): await task def _test(): - loop = asyncio.get_event_loop() - loop.set_exception_handler(handle_exception) - return loop.run_until_complete(handler()) + event_loop.set_exception_handler(handle_exception) + return event_loop.run_until_complete(handler()) _test() @@ -269,18 +289,18 @@ def _test(): assert not exceptions, exceptions -def test_incomplete_traces_exit_when_root_exits(): +def test_incomplete_traces_exit_when_root_exits(event_loop): """Verifies that child traces in the same task are exited when the root exits""" import asyncio - @function_trace(name='child') + @function_trace(name="child") async def child(start, end): start.set() await end.wait() - @background_task(name='parent') + @background_task(name="parent") async def parent(): start = asyncio.Event() end = asyncio.Event() @@ -290,29 +310,27 @@ async def parent(): return task @validate_transaction_metrics( - 'parent', background_task=True, - scoped_metrics=[('Function/child', 1)], + "parent", + background_task=True, + scoped_metrics=[("Function/child", 1)], ) def test(loop): return loop.run_until_complete(parent()) - 
loop = asyncio.get_event_loop() - task = test(loop) - loop.run_until_complete(task) + task = test(event_loop) + event_loop.run_until_complete(task) -def test_incomplete_traces_with_multiple_transactions(): +def test_incomplete_traces_with_multiple_transactions(event_loop): import asyncio - loop = asyncio.get_event_loop() - - @background_task(name='dummy') + @background_task(name="dummy") async def dummy(): task = asyncio.ensure_future(child(True)) await end.wait() await task - @function_trace(name='child') + @function_trace(name="child") async def child(running_at_end=False): trace = current_trace() start.set() @@ -322,25 +340,27 @@ async def child(running_at_end=False): else: assert current_trace() is not trace - @background_task(name='parent') + @background_task(name="parent") async def parent(): task = asyncio.ensure_future(child()) await start.wait() return task @validate_transaction_metrics( - 'parent', background_task=True, - scoped_metrics=[('Function/child', 1)], + "parent", + background_task=True, + scoped_metrics=[("Function/child", 1)], ) def parent_assertions(task): - return loop.run_until_complete(task) + return event_loop.run_until_complete(task) @validate_transaction_metrics( - 'dummy', background_task=True, - scoped_metrics=[('Function/child', 1)], + "dummy", + background_task=True, + scoped_metrics=[("Function/child", 1)], ) def dummy_assertions(task): - return loop.run_until_complete(task) + return event_loop.run_until_complete(task) async def startup(): return asyncio.Event(), asyncio.Event() @@ -351,11 +371,11 @@ async def start_dummy(): start.clear() return dummy_task - start, end = loop.run_until_complete(startup()) + start, end = event_loop.run_until_complete(startup()) # Kick start dummy transaction (forcing an ensure_future on another # transaction) - dummy_task = loop.run_until_complete(start_dummy()) + dummy_task = event_loop.run_until_complete(start_dummy()) # start and end a transaction, forcing the child to truncate child_task = parent_assertions(parent()) @@ -368,10 +388,11 @@ async def start_dummy(): # Wait for dummy/parent->child to terminate dummy_assertions(dummy_task) - loop.run_until_complete(child_task) + event_loop.run_until_complete(child_task) + @validate_transaction_metrics("Parent", background_task=True) -def test_transaction_end_on_different_task(): +def test_transaction_end_on_different_task(event_loop): import asyncio txn = BackgroundTask(application(), name="Parent") @@ -398,5 +419,4 @@ async def test(): task = await asyncio.ensure_future(parent()) await task - loop = asyncio.get_event_loop() - loop.run_until_complete(test()) \ No newline at end of file + event_loop.run_until_complete(test()) diff --git a/tests/datastore_asyncpg/conftest.py b/tests/datastore_asyncpg/conftest.py index 0c4d926456..00720a55f6 100644 --- a/tests/datastore_asyncpg/conftest.py +++ b/tests/datastore_asyncpg/conftest.py @@ -12,23 +12,27 @@ # See the License for the specific language governing permissions and # limitations under the License. 
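The sys.version_info gate in test_two_transactions above exists because asyncio.get_event_loop() is deprecated on Python 3.10 when no loop is running, so asyncio.ensure_future() can no longer be left to discover the loop implicitly; on 3.10+ the test hands it the fixture loop directly. A minimal sketch of the same gate, assuming a loop created by the caller:

import asyncio
import sys


async def work():
    await asyncio.sleep(0)
    return "done"


loop = asyncio.new_event_loop()
try:
    if sys.version_info >= (3, 10):
        # Pass the loop explicitly rather than relying on the deprecated
        # implicit lookup performed by ensure_future().
        fut = asyncio.ensure_future(work(), loop=loop)
    else:
        asyncio.set_event_loop(loop)
        fut = asyncio.ensure_future(work())
    assert loop.run_until_complete(fut) == "done"
finally:
    loop.close()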
-from testing_support.fixtures import (code_coverage_fixture, # noqa - collector_agent_registration_fixture, collector_available_fixture) +from testing_support.fixture.event_loop import event_loop +from testing_support.fixtures import code_coverage_fixture # noqa +from testing_support.fixtures import ( + collector_agent_registration_fixture, + collector_available_fixture, +) _default_settings = { - 'transaction_tracer.explain_threshold': 0.0, - 'transaction_tracer.transaction_threshold': 0.0, - 'transaction_tracer.stack_trace_threshold': 0.0, - 'debug.log_data_collector_payloads': True, - 'debug.record_transaction_failure': True, + "transaction_tracer.explain_threshold": 0.0, + "transaction_tracer.transaction_threshold": 0.0, + "transaction_tracer.stack_trace_threshold": 0.0, + "debug.log_data_collector_payloads": True, + "debug.record_transaction_failure": True, } collector_agent_registration = collector_agent_registration_fixture( - app_name='Python Agent Test (datastore_asyncpg)', - default_settings=_default_settings) + app_name="Python Agent Test (datastore_asyncpg)", default_settings=_default_settings +) _coverage_source = [ - 'newrelic.hooks.database_asyncpg', + "newrelic.hooks.database_asyncpg", ] code_coverage = code_coverage_fixture(source=_coverage_source) diff --git a/tests/datastore_asyncpg/test_multiple_dbs.py b/tests/datastore_asyncpg/test_multiple_dbs.py index 457f8d4c79..a24eaa3885 100644 --- a/tests/datastore_asyncpg/test_multiple_dbs.py +++ b/tests/datastore_asyncpg/test_multiple_dbs.py @@ -13,23 +13,21 @@ # limitations under the License. import asyncio + import asyncpg import pytest - +from testing_support.db_settings import postgresql_settings from testing_support.fixtures import ( - validate_transaction_metrics, override_application_settings, + validate_transaction_metrics, ) from testing_support.util import instance_hostname -from testing_support.db_settings import postgresql_settings from newrelic.api.background_task import background_task DB_MULTIPLE_SETTINGS = postgresql_settings() -ASYNCPG_VERSION = tuple( - int(x) for x in getattr(asyncpg, "__version__", "0.0").split(".")[:2] -) +ASYNCPG_VERSION = tuple(int(x) for x in getattr(asyncpg, "__version__", "0.0").split(".")[:2]) if ASYNCPG_VERSION < (0, 11): CONNECT_METRICS = [] @@ -97,9 +95,7 @@ (_instance_metric_name_2, STATEMENT_COUNT), ] ) - _disable_rollup_metrics.extend( - [(_instance_metric_name_1, None), (_instance_metric_name_2, None)] - ) + _disable_rollup_metrics.extend([(_instance_metric_name_1, None), (_instance_metric_name_2, None)]) # Query @@ -118,9 +114,7 @@ async def _exercise_db(): port=postgresql1["port"], ) try: - await connection.execute( - "SELECT setting from pg_settings where name='server_version'" - ) + await connection.execute("SELECT setting from pg_settings where name='server_version'") finally: await connection.close() @@ -132,9 +126,7 @@ async def _exercise_db(): port=postgresql2["port"], ) try: - await connection.execute( - "SELECT setting from pg_settings where name='server_version'" - ) + await connection.execute("SELECT setting from pg_settings where name='server_version'") finally: await connection.close() @@ -154,9 +146,8 @@ async def _exercise_db(): background_task=True, ) @background_task() -def test_multiple_databases_enable_instance(): - loop = asyncio.get_event_loop() - loop.run_until_complete(_exercise_db()) +def test_multiple_databases_enable_instance(event_loop): + event_loop.run_until_complete(_exercise_db()) @pytest.mark.skipif( @@ -171,6 +162,5 @@ def 
test_multiple_databases_enable_instance(): background_task=True, ) @background_task() -def test_multiple_databases_disable_instance(): - loop = asyncio.get_event_loop() - loop.run_until_complete(_exercise_db()) +def test_multiple_databases_disable_instance(event_loop): + event_loop.run_until_complete(_exercise_db()) diff --git a/tests/datastore_asyncpg/test_query.py b/tests/datastore_asyncpg/test_query.py index be68249242..eb44cfd164 100644 --- a/tests/datastore_asyncpg/test_query.py +++ b/tests/datastore_asyncpg/test_query.py @@ -12,29 +12,27 @@ # See the License for the specific language governing permissions and # limitations under the License. +import asyncio import os import random -import pytest -import asyncio -import asyncpg - from io import BytesIO -from newrelic.api.background_task import background_task +import asyncpg +import pytest +from testing_support.db_settings import postgresql_settings from testing_support.fixtures import ( validate_transaction_metrics, validate_tt_collector_json, ) from testing_support.util import instance_hostname -from testing_support.db_settings import postgresql_settings + +from newrelic.api.background_task import background_task DB_SETTINGS = postgresql_settings()[0] PG_PREFIX = "Datastore/operation/Postgres/" -ASYNCPG_VERSION = tuple( - int(x) for x in getattr(asyncpg, "__version__", "0.0").split(".")[:2] -) +ASYNCPG_VERSION = tuple(int(x) for x in getattr(asyncpg, "__version__", "0.0").split(".")[:2]) if ASYNCPG_VERSION < (0, 11): CONNECT_METRICS = () @@ -43,9 +41,8 @@ @pytest.fixture -def conn(): - loop = asyncio.get_event_loop() - conn = loop.run_until_complete( +def conn(event_loop): + conn = event_loop.run_until_complete( asyncpg.connect( user=DB_SETTINGS["user"], password=DB_SETTINGS["password"], @@ -55,7 +52,7 @@ def conn(): ) ) yield conn - loop.run_until_complete(conn.close()) + event_loop.run_until_complete(conn.close()) @validate_transaction_metrics( @@ -64,14 +61,12 @@ def conn(): scoped_metrics=((PG_PREFIX + "select", 1),), rollup_metrics=(("Datastore/all", 1),), ) -@validate_tt_collector_json( - datastore_params={"port_path_or_id": str(DB_SETTINGS["port"])} -) +@validate_tt_collector_json(datastore_params={"port_path_or_id": str(DB_SETTINGS["port"])}) @background_task(name="test_single") @pytest.mark.parametrize("method", ("execute",)) -def test_single(method, conn): +def test_single(event_loop, method, conn): _method = getattr(conn, method) - asyncio.get_event_loop().run_until_complete(_method("""SELECT 0""")) + event_loop.run_until_complete(_method("""SELECT 0""")) @validate_transaction_metrics( @@ -85,9 +80,9 @@ def test_single(method, conn): ) @background_task(name="test_prepared_single") @pytest.mark.parametrize("method", ("fetch", "fetchrow", "fetchval")) -def test_prepared_single(method, conn): +def test_prepared_single(event_loop, method, conn): _method = getattr(conn, method) - asyncio.get_event_loop().run_until_complete(_method("""SELECT 0""")) + event_loop.run_until_complete(_method("""SELECT 0""")) @validate_transaction_metrics( @@ -97,25 +92,20 @@ def test_prepared_single(method, conn): rollup_metrics=(("Datastore/all", 1),), ) @background_task(name="test_prepare") -def test_prepare(conn): - loop = asyncio.get_event_loop() - loop.run_until_complete(conn.prepare("""SELECT 0""")) +def test_prepare(event_loop, conn): + event_loop.run_until_complete(conn.prepare("""SELECT 0""")) @pytest.fixture -def table(conn): +def table(event_loop, conn): table_name = "table_%d" % os.getpid() - asyncio.get_event_loop().run_until_complete( 
- conn.execute("""create table %s (a integer, b real, c text)""" % table_name) - ) + event_loop.run_until_complete(conn.execute("""create table %s (a integer, b real, c text)""" % table_name)) return table_name -@pytest.mark.skipif( - ASYNCPG_VERSION < (0, 11), reason="Copy wasn't implemented before 0.11" -) +@pytest.mark.skipif(ASYNCPG_VERSION < (0, 11), reason="Copy wasn't implemented before 0.11") @validate_transaction_metrics( "test_copy", background_task=True, @@ -126,7 +116,7 @@ def table(conn): rollup_metrics=(("Datastore/all", 4),), ) @background_task(name="test_copy") -def test_copy(table, conn): +def test_copy(event_loop, table, conn): async def amain(): await conn.copy_records_to_table(table, records=[(1, 2, "3"), (4, 5, "6")]) await conn.copy_from_table(table, output=BytesIO()) @@ -135,8 +125,7 @@ async def amain(): # 2 statements await conn.copy_from_query("""SELECT 0""", output=BytesIO()) - loop = asyncio.get_event_loop() - loop.run_until_complete(amain()) + event_loop.run_until_complete(amain()) @validate_transaction_metrics( @@ -149,9 +138,8 @@ async def amain(): rollup_metrics=(("Datastore/all", 2),), ) @background_task(name="test_select_many") -def test_select_many(conn): - loop = asyncio.get_event_loop() - loop.run_until_complete(conn.executemany("""SELECT $1::int""", ((1,), (2,)))) +def test_select_many(event_loop, conn): + event_loop.run_until_complete(conn.executemany("""SELECT $1::int""", ((1,), (2,)))) @validate_transaction_metrics( @@ -165,13 +153,12 @@ def test_select_many(conn): rollup_metrics=(("Datastore/all", 3),), ) @background_task(name="test_transaction") -def test_transaction(conn): +def test_transaction(event_loop, conn): async def amain(): async with conn.transaction(): await conn.execute("""SELECT 0""") - loop = asyncio.get_event_loop() - loop.run_until_complete(amain()) + event_loop.run_until_complete(amain()) @validate_transaction_metrics( @@ -186,7 +173,7 @@ async def amain(): rollup_metrics=(("Datastore/all", 7),), ) @background_task(name="test_cursor") -def test_cursor(conn): +def test_cursor(event_loop, conn): async def amain(): async with conn.transaction(): async for record in conn.cursor("SELECT generate_series(0, 0)", prefetch=1): @@ -194,8 +181,7 @@ async def amain(): await conn.cursor("SELECT 0") - loop = asyncio.get_event_loop() - loop.run_until_complete(amain()) + event_loop.run_until_complete(amain()) @pytest.mark.skipif( @@ -207,20 +193,15 @@ async def amain(): background_task=True, rollup_metrics=[ ( - "Datastore/instance/Postgres/" - + instance_hostname("localhost") - + "//.s.PGSQL.THIS_FILE_BETTER_NOT_EXIST", + "Datastore/instance/Postgres/" + instance_hostname("localhost") + "//.s.PGSQL.THIS_FILE_BETTER_NOT_EXIST", 1, ) ], ) @background_task(name="test_unix_socket_connect") -def test_unix_socket_connect(): - loop = asyncio.get_event_loop() +def test_unix_socket_connect(event_loop): with pytest.raises(OSError): - loop.run_until_complete( - asyncpg.connect("postgres://?host=/.s.PGSQL.THIS_FILE_BETTER_NOT_EXIST") - ) + event_loop.run_until_complete(asyncpg.connect("postgres://?host=/.s.PGSQL.THIS_FILE_BETTER_NOT_EXIST")) @pytest.mark.skipif( @@ -233,7 +214,7 @@ def test_unix_socket_connect(): scoped_metrics=((PG_PREFIX + "connect", 2),), ) @background_task(name="test_pool_acquire") -def test_pool_acquire(): +def test_pool_acquire(event_loop): async def amain(): pool = await asyncpg.create_pool( user=DB_SETTINGS["user"], @@ -252,5 +233,4 @@ async def amain(): finally: await pool.close() - loop = asyncio.get_event_loop() - 
loop.run_until_complete(amain()) + event_loop.run_until_complete(amain()) diff --git a/tests/external_httpx/conftest.py b/tests/external_httpx/conftest.py index d15d0f8f37..bad35d45e9 100644 --- a/tests/external_httpx/conftest.py +++ b/tests/external_httpx/conftest.py @@ -15,6 +15,7 @@ import asyncio import pytest +from testing_support.fixture.event_loop import event_loop as loop from testing_support.fixtures import ( code_coverage_fixture, collector_agent_registration_fixture, @@ -46,8 +47,3 @@ def httpx(): import httpx return httpx - - -@pytest.fixture(scope="session") -def loop(): - return asyncio.get_event_loop() diff --git a/tests/framework_aiohttp/_target_application.py b/tests/framework_aiohttp/_target_application.py index 11d1fa5545..207c754863 100644 --- a/tests/framework_aiohttp/_target_application.py +++ b/tests/framework_aiohttp/_target_application.py @@ -138,8 +138,10 @@ def fetch(method, url, loop): try: _method = getattr(session, method) - response = yield from asyncio.wait_for( - _method(url), timeout=None, loop=loop) + try: + response = yield from asyncio.wait_for(_method(url), timeout=None, loop=loop) + except TypeError: + response = yield from asyncio.wait_for(_method(url), timeout=None) text = yield from response.text() finally: @@ -154,7 +156,10 @@ def fetch(method, url, loop): @asyncio.coroutine def fetch_multiple(method, loop, url): coros = [fetch(method, url, loop) for _ in range(2)] - responses = yield from asyncio.gather(*coros, loop=loop) + try: + responses = yield from asyncio.gather(*coros, loop=loop) + except TypeError: + responses = yield from asyncio.gather(*coros) return '\n'.join(responses) diff --git a/tests/framework_aiohttp/conftest.py b/tests/framework_aiohttp/conftest.py index 1735277ee2..eccf71a721 100644 --- a/tests/framework_aiohttp/conftest.py +++ b/tests/framework_aiohttp/conftest.py @@ -12,44 +12,47 @@ # See the License for the specific language governing permissions and # limitations under the License. 
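The try/except TypeError fallbacks added to fetch() and fetch_multiple() above handle Python 3.10's removal of the loop parameter from asyncio.wait_for() and asyncio.gather(): on 3.10+ passing loop= raises TypeError, while older interpreters still accept it. A minimal sketch of the same shim for gather, where retrying is safe because the TypeError is raised at call time, before any of the coroutines is scheduled:

import asyncio


async def gather_compat(loop, *coros):
    # Try the pre-3.10 signature first; fall back once the removed
    # loop keyword raises TypeError on Python 3.10+.
    try:
        return await asyncio.gather(*coros, loop=loop)
    except TypeError:
        return await asyncio.gather(*coros)

fetch_multiple() above applies the identical pattern inline.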
-import sys import asyncio from collections import namedtuple -from aiohttp.test_utils import (AioHTTPTestCase, - TestClient as _TestClient) -from _target_application import make_app import pytest - -from testing_support.fixtures import (code_coverage_fixture, - collector_agent_registration_fixture, collector_available_fixture) +from _target_application import make_app +from aiohttp.test_utils import AioHTTPTestCase +from aiohttp.test_utils import TestClient as _TestClient +from testing_support.fixture.event_loop import event_loop +from testing_support.fixtures import ( + code_coverage_fixture, + collector_agent_registration_fixture, + collector_available_fixture, +) from testing_support.mock_external_http_server import ( - MockExternalHTTPHResponseHeadersServer, MockExternalHTTPServer) + MockExternalHTTPHResponseHeadersServer, + MockExternalHTTPServer, +) _coverage_source = [ - 'newrelic.hooks.framework_aiohttp', + "newrelic.hooks.framework_aiohttp", ] code_coverage = code_coverage_fixture(source=_coverage_source) _default_settings = { - 'transaction_tracer.explain_threshold': 0.0, - 'transaction_tracer.transaction_threshold': 0.0, - 'transaction_tracer.stack_trace_threshold': 0.0, - 'debug.log_data_collector_payloads': True, - 'debug.record_transaction_failure': True, + "transaction_tracer.explain_threshold": 0.0, + "transaction_tracer.transaction_threshold": 0.0, + "transaction_tracer.stack_trace_threshold": 0.0, + "debug.log_data_collector_payloads": True, + "debug.record_transaction_failure": True, } collector_agent_registration = collector_agent_registration_fixture( - app_name='Python Agent Test (framework_aiohttp)', - default_settings=_default_settings) + app_name="Python Agent Test (framework_aiohttp)", default_settings=_default_settings +) -ServerInfo = namedtuple('ServerInfo', ('base_metric', 'url')) +ServerInfo = namedtuple("ServerInfo", ("base_metric", "url")) class SimpleAiohttpApp(AioHTTPTestCase): - def __init__(self, server_cls, middleware, *args, **kwargs): super(SimpleAiohttpApp, self).__init__(*args, **kwargs) self.server_cls = server_cls @@ -69,17 +72,15 @@ def _get_client(self, app_or_server): """Return a TestClient instance.""" client_constructor_arg = app_or_server - scheme = 'http' - host = '127.0.0.1' + scheme = "http" + host = "127.0.0.1" server_kwargs = {} if self.server_cls: - test_server = self.server_cls(app_or_server, scheme=scheme, - host=host, **server_kwargs) + test_server = self.server_cls(app_or_server, scheme=scheme, host=host, **server_kwargs) client_constructor_arg = test_server try: - return _TestClient(client_constructor_arg, - loop=self.loop) + return _TestClient(client_constructor_arg, loop=self.loop) except TypeError: return _TestClient(client_constructor_arg) @@ -89,11 +90,11 @@ def _get_client(self, app_or_server): @pytest.fixture() def aiohttp_app(request): try: - middleware = request.getfixturevalue('middleware') + middleware = request.getfixturevalue("middleware") except: middleware = None try: - server_cls = request.getfixturevalue('server_cls') + server_cls = request.getfixturevalue("server_cls") except: server_cls = None case = SimpleAiohttpApp(server_cls=server_cls, middleware=middleware) @@ -102,15 +103,15 @@ def aiohttp_app(request): case.tearDown() -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def mock_header_server(): def handler(self): - if self.command != 'GET': + if self.command != "GET": self.send_response(501) self.end_headers() return - response = str(self.headers).encode('utf-8') + response = 
str(self.headers).encode("utf-8") self.send_response(200) self.end_headers() self.wfile.write(response) @@ -118,8 +119,9 @@ def handler(self): with MockExternalHTTPHResponseHeadersServer(handler=handler) as _server: yield _server + @pytest.fixture(scope="session") -def mock_external_http_server(): +def mock_external_http_server(): response_values = [] def respond_with_cat_header(self): @@ -128,15 +130,15 @@ def respond_with_cat_header(self): for header, value in headers: self.send_header(header, value) self.end_headers() - self.wfile.write(b'') + self.wfile.write(b"") with MockExternalHTTPServer(handler=respond_with_cat_header) as server: yield (server, response_values) -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def local_server_info(mock_header_server): - host_port = '127.0.0.1:%d' % mock_header_server.port - metric = 'External/%s/aiohttp/' % host_port - url = 'http://' + host_port + host_port = "127.0.0.1:%d" % mock_header_server.port + metric = "External/%s/aiohttp/" % host_port + url = "http://" + host_port return ServerInfo(metric, url) diff --git a/tests/framework_aiohttp/test_client.py b/tests/framework_aiohttp/test_client.py index 5559c4f361..b2d23dd235 100644 --- a/tests/framework_aiohttp/test_client.py +++ b/tests/framework_aiohttp/test_client.py @@ -12,18 +12,20 @@ # See the License for the specific language governing permissions and # limitations under the License. -import aiohttp import asyncio + +import aiohttp import pytest +from testing_support.fixtures import validate_transaction_metrics from yarl import URL from newrelic.api.background_task import background_task from newrelic.api.function_trace import function_trace -from testing_support.fixtures import validate_transaction_metrics -version_info = tuple(int(_) for _ in aiohttp.__version__.split('.')[:2]) -skipif_aiohttp3 = pytest.mark.skipif(version_info >= (3, 0), - reason='This version of aiohttp does not support yield from syntax') +version_info = tuple(int(_) for _ in aiohttp.__version__.split(".")[:2]) +skipif_aiohttp3 = pytest.mark.skipif( + version_info >= (3, 0), reason="This version of aiohttp does not support yield from syntax" +) @asyncio.coroutine @@ -35,7 +37,7 @@ def fetch(method, url): yield from response.text() -@background_task(name='fetch_multiple') +@background_task(name="fetch_multiple") @asyncio.coroutine def fetch_multiple(method, url): coros = [fetch(method, url) for _ in range(2)] @@ -59,21 +61,21 @@ def task(loop, method, exc_expected, url): test_matrix = ( - ('get', False), - ('post', True), - ('options', True), - ('head', True), - ('put', True), - ('patch', True), - ('delete', True), + ("get", False), + ("post", True), + ("options", True), + ("head", True), + ("put", True), + ("patch", True), + ("delete", True), ) @skipif_aiohttp3 -@pytest.mark.parametrize('method,exc_expected', test_matrix) -def test_client_yield_from(local_server_info, method, exc_expected): +@pytest.mark.parametrize("method,exc_expected", test_matrix) +def test_client_yield_from(event_loop, local_server_info, method, exc_expected): @validate_transaction_metrics( - 'fetch_multiple', + "fetch_multiple", background_task=True, scoped_metrics=[ (local_server_info.base_metric + method.upper(), 2), @@ -83,18 +85,17 @@ def test_client_yield_from(local_server_info, method, exc_expected): ], ) def task_test(): - loop = asyncio.get_event_loop() - task(loop, method, exc_expected, local_server_info.url) + task(event_loop, method, exc_expected, local_server_info.url) task_test() @skipif_aiohttp3 -def 
test_client_yarl_yield_from(local_server_info): - method = 'get' +def test_client_yarl_yield_from(event_loop, local_server_info): + method = "get" @validate_transaction_metrics( - 'fetch_multiple', + "fetch_multiple", background_task=True, scoped_metrics=[ (local_server_info.base_metric + method.upper(), 2), @@ -104,31 +105,27 @@ def test_client_yarl_yield_from(local_server_info): ], ) def task_test(): - loop = asyncio.get_event_loop() - task(loop, method, False, URL(local_server_info.url)) + task(event_loop, method, False, URL(local_server_info.url)) task_test() @skipif_aiohttp3 -@pytest.mark.parametrize('method,exc_expected', test_matrix) -def test_client_no_txn_yield_from(local_server_info, method, exc_expected): - +@pytest.mark.parametrize("method,exc_expected", test_matrix) +def test_client_no_txn_yield_from(event_loop, local_server_info, method, exc_expected): def task_test(): - loop = asyncio.get_event_loop() - task(loop, method, exc_expected, local_server_info.url) + task(event_loop, method, exc_expected, local_server_info.url) task_test() @skipif_aiohttp3 -@pytest.mark.parametrize('method,exc_expected', test_matrix) -def test_client_throw_yield_from(local_server_info, method, exc_expected): - +@pytest.mark.parametrize("method,exc_expected", test_matrix) +def test_client_throw_yield_from(event_loop, local_server_info, method, exc_expected): class ThrowerException(ValueError): pass - @background_task(name='test_client_throw_yield_from') + @background_task(name="test_client_throw_yield_from") @asyncio.coroutine def self_driving_thrower(): with aiohttp.ClientSession() as session: @@ -141,7 +138,7 @@ def self_driving_thrower(): coro.throw(ThrowerException()) @validate_transaction_metrics( - 'test_client_throw_yield_from', + "test_client_throw_yield_from", background_task=True, scoped_metrics=[ (local_server_info.base_metric + method.upper(), 1), @@ -151,19 +148,17 @@ def self_driving_thrower(): ], ) def task_test(): - loop = asyncio.get_event_loop() with pytest.raises(ThrowerException): - loop.run_until_complete(self_driving_thrower()) + event_loop.run_until_complete(self_driving_thrower()) task_test() @skipif_aiohttp3 -@pytest.mark.parametrize('method,exc_expected', test_matrix) -def test_client_close_yield_from(local_server_info, method, exc_expected): - - @background_task(name='test_client_close_yield_from') +@pytest.mark.parametrize("method,exc_expected", test_matrix) +def test_client_close_yield_from(event_loop, local_server_info, method, exc_expected): + @background_task(name="test_client_close_yield_from") @asyncio.coroutine def self_driving_closer(): with aiohttp.ClientSession() as session: @@ -176,7 +171,7 @@ def self_driving_closer(): coro.close() @validate_transaction_metrics( - 'test_client_close_yield_from', + "test_client_close_yield_from", background_task=True, scoped_metrics=[ (local_server_info.base_metric + method.upper(), 1), @@ -186,8 +181,7 @@ def self_driving_closer(): ], ) def task_test(): - loop = asyncio.get_event_loop() - loop.run_until_complete(self_driving_closer()) + event_loop.run_until_complete(self_driving_closer()) task_test() @@ -195,34 +189,32 @@ def task_test(): test_ws_matrix = ( # the 127.0.0.1 server does not accept websocket requests, hence an # exception is expected but a metric will still be created - ('ws_connect', True), + ("ws_connect", True), ) @skipif_aiohttp3 -@pytest.mark.parametrize('method,exc_expected', test_ws_matrix) -def test_ws_connect_yield_from(local_server_info, method, exc_expected): - 
+@pytest.mark.parametrize("method,exc_expected", test_ws_matrix) +def test_ws_connect_yield_from(event_loop, local_server_info, method, exc_expected): @validate_transaction_metrics( - 'fetch_multiple', + "fetch_multiple", background_task=True, scoped_metrics=[ - (local_server_info.base_metric + 'GET', 2), + (local_server_info.base_metric + "GET", 2), ], rollup_metrics=[ - (local_server_info.base_metric + 'GET', 2), + (local_server_info.base_metric + "GET", 2), ], ) def task_test(): - loop = asyncio.get_event_loop() - task(loop, method, exc_expected, local_server_info.url) + task(event_loop, method, exc_expected, local_server_info.url) task_test() @skipif_aiohttp3 -@pytest.mark.parametrize('method,exc_expected', test_matrix) -def test_create_task_yield_from(local_server_info, method, exc_expected): +@pytest.mark.parametrize("method,exc_expected", test_matrix) +def test_create_task_yield_from(event_loop, local_server_info, method, exc_expected): # `loop.create_task` returns a Task object which uses the coroutine's # `send` method, not `__next__` @@ -235,14 +227,14 @@ def fetch_task(loop): resp.raise_for_status() yield from resp.text() - @background_task(name='test_create_task_yield_from') + @background_task(name="test_create_task_yield_from") @asyncio.coroutine def fetch_multiple(loop): coros = [fetch_task(loop) for _ in range(2)] return asyncio.gather(*coros, return_exceptions=True) @validate_transaction_metrics( - 'test_create_task_yield_from', + "test_create_task_yield_from", background_task=True, scoped_metrics=[ (local_server_info.base_metric + method.upper(), 2), @@ -252,8 +244,7 @@ def fetch_multiple(loop): ], ) def task_test(): - loop = asyncio.get_event_loop() - result = loop.run_until_complete(fetch_multiple(loop)) + result = event_loop.run_until_complete(fetch_multiple(event_loop)) if exc_expected: assert isinstance(result[0], _expected_error_class) assert isinstance(result[1], _expected_error_class) @@ -264,8 +255,8 @@ def task_test(): @skipif_aiohttp3 -@pytest.mark.parametrize('method,exc_expected', test_matrix) -def test_terminal_node_yield_from(local_server_info, method, exc_expected): +@pytest.mark.parametrize("method,exc_expected", test_matrix) +def test_terminal_node_yield_from(event_loop, local_server_info, method, exc_expected): """ This test injects a terminal node into a simple background task workflow. 
It was added to validate a bug where our coro.send() wrapper would fail @@ -273,11 +264,9 @@ def test_terminal_node_yield_from(local_server_info, method, exc_expected): """ def task_test(): - loop = asyncio.get_event_loop() - @function_trace(terminal=True) def execute_task(): - task(loop, method, exc_expected, local_server_info.url) + task(event_loop, method, exc_expected, local_server_info.url) execute_task() diff --git a/tests/framework_aiohttp/test_client_async_await.py b/tests/framework_aiohttp/test_client_async_await.py index a7b7b681af..1e3eb79ec5 100644 --- a/tests/framework_aiohttp/test_client_async_await.py +++ b/tests/framework_aiohttp/test_client_async_await.py @@ -67,7 +67,7 @@ def task(loop, method, exc_expected, url): @cat_enabled @pytest.mark.parametrize("method,exc_expected", test_matrix) -def test_client_async_await(local_server_info, method, exc_expected): +def test_client_async_await(event_loop, local_server_info, method, exc_expected): @validate_transaction_metrics( "fetch_multiple", background_task=True, @@ -79,14 +79,13 @@ def test_client_async_await(local_server_info, method, exc_expected): ], ) def task_test(): - loop = asyncio.get_event_loop() - task(loop, method, exc_expected, local_server_info.url) + task(event_loop, method, exc_expected, local_server_info.url) task_test() @cat_enabled -def test_client_yarl_async_await(local_server_info): +def test_client_yarl_async_await(event_loop, local_server_info): method = "get" @validate_transaction_metrics( @@ -100,24 +99,22 @@ def test_client_yarl_async_await(local_server_info): ], ) def task_test(): - loop = asyncio.get_event_loop() - task(loop, method, False, URL(local_server_info.url)) + task(event_loop, method, False, URL(local_server_info.url)) task_test() @pytest.mark.parametrize("method,exc_expected", test_matrix) @cat_enabled -def test_client_no_txn_async_await(local_server_info, method, exc_expected): +def test_client_no_txn_async_await(event_loop, local_server_info, method, exc_expected): def task_test(): - loop = asyncio.get_event_loop() - task(loop, method, exc_expected, local_server_info.url) + task(event_loop, method, exc_expected, local_server_info.url) task_test() @pytest.mark.parametrize("method,exc_expected", test_matrix) -def test_client_throw_async_await(local_server_info, method, exc_expected): +def test_client_throw_async_await(event_loop, local_server_info, method, exc_expected): class ThrowerException(ValueError): pass @@ -143,16 +140,14 @@ async def self_driving_thrower(): ], ) def task_test(): - loop = asyncio.get_event_loop() - with pytest.raises(ThrowerException): - loop.run_until_complete(self_driving_thrower()) + event_loop.run_until_complete(self_driving_thrower()) task_test() @pytest.mark.parametrize("method,exc_expected", test_matrix) -def test_client_close_async_await(local_server_info, method, exc_expected): +def test_client_close_async_await(event_loop, local_server_info, method, exc_expected): @background_task(name="test_client_close_async_await") async def self_driving_closer(): async with aiohttp.ClientSession() as session: @@ -175,15 +170,14 @@ async def self_driving_closer(): ], ) def task_test(): - loop = asyncio.get_event_loop() - loop.run_until_complete(self_driving_closer()) + event_loop.run_until_complete(self_driving_closer()) task_test() @pytest.mark.parametrize("method,exc_expected", test_matrix) @cat_enabled -def test_await_request_async_await(local_server_info, method, exc_expected): +def test_await_request_async_await(event_loop, local_server_info, method, 
exc_expected): async def request_with_await(): async with aiohttp.ClientSession() as session: coro = session._request(method.upper(), local_server_info.url) @@ -205,10 +199,9 @@ async def request_with_await(): ) @background_task(name="test_await_request_async_await") def task_test(): - loop = asyncio.get_event_loop() coros = [request_with_await() for _ in range(2)] future = asyncio.gather(*coros, return_exceptions=True) - text_list = loop.run_until_complete(future) + text_list = event_loop.run_until_complete(future) if exc_expected: assert isinstance(text_list[0], _expected_error_class), text_list[0].__class__ assert isinstance(text_list[1], _expected_error_class), text_list[1].__class__ @@ -226,7 +219,7 @@ def task_test(): @pytest.mark.parametrize("method,exc_expected", test_ws_matrix) -def test_ws_connect_async_await(local_server_info, method, exc_expected): +def test_ws_connect_async_await(event_loop, local_server_info, method, exc_expected): @validate_transaction_metrics( "fetch_multiple", background_task=True, @@ -238,15 +231,14 @@ def test_ws_connect_async_await(local_server_info, method, exc_expected): ], ) def task_test(): - loop = asyncio.get_event_loop() - task(loop, method, exc_expected, local_server_info.url) + task(event_loop, method, exc_expected, local_server_info.url) task_test() @pytest.mark.parametrize("method,exc_expected", test_matrix) @cat_enabled -def test_create_task_async_await(local_server_info, method, exc_expected): +def test_create_task_async_await(event_loop, local_server_info, method, exc_expected): # `loop.create_task` returns a Task object which uses the coroutine's # `send` method, not `__next__` @@ -274,8 +266,7 @@ async def fetch_multiple(loop): ], ) def task_test(): - loop = asyncio.get_event_loop() - result = loop.run_until_complete(fetch_multiple(loop)) + result = event_loop.run_until_complete(fetch_multiple(event_loop)) if exc_expected: assert isinstance(result[0], _expected_error_class), result[0].__class__ assert isinstance(result[1], _expected_error_class), result[1].__class__ @@ -287,7 +278,7 @@ def task_test(): @pytest.mark.parametrize("method,exc_expected", test_matrix) @cat_enabled -def test_terminal_parent_async_await(local_server_info, method, exc_expected): +def test_terminal_parent_async_await(event_loop, local_server_info, method, exc_expected): """ This test injects a terminal node into a simple background task workflow. It was added to validate a bug where our coro.send() wrapper would fail @@ -295,11 +286,9 @@ def test_terminal_parent_async_await(local_server_info, method, exc_expected): """ def task_test(): - loop = asyncio.get_event_loop() - @function_trace(terminal=True) def execute_task(): - task(loop, method, exc_expected, local_server_info.url) + task(event_loop, method, exc_expected, local_server_info.url) execute_task() diff --git a/tests/framework_aiohttp/test_client_cat.py b/tests/framework_aiohttp/test_client_cat.py index f444713c8d..a830c2269f 100644 --- a/tests/framework_aiohttp/test_client_cat.py +++ b/tests/framework_aiohttp/test_client_cat.py @@ -12,23 +12,28 @@ # See the License for the specific language governing permissions and # limitations under the License. 
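A pattern worth noting in the client tests above: results are collected with asyncio.gather(..., return_exceptions=True), and the tests then assert on the returned exception instances instead of letting one failing request cancel its siblings. A minimal self-contained sketch, with ValueError standing in for the aiohttp client errors:

import asyncio


async def ok():
    return "ok"


async def boom():
    raise ValueError("boom")


loop = asyncio.new_event_loop()
try:
    # return_exceptions=True converts raised exceptions into result
    # values, so both outcomes can be inspected after the fact.
    results = loop.run_until_complete(
        asyncio.gather(ok(), boom(), return_exceptions=True)
    )
    assert results[0] == "ok"
    assert isinstance(results[1], ValueError)
finally:
    loop.close()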
-import pytest
 import asyncio
-import aiohttp
 import os
+import aiohttp
+import pytest
+from testing_support.external_fixtures import create_incoming_headers
+from testing_support.fixtures import (
+    override_application_settings,
+    validate_transaction_metrics,
+)
+from testing_support.validators.validate_cross_process_headers import (
+    validate_cross_process_headers,
+)
+from testing_support.validators.validate_external_node_params import (
+    validate_external_node_params,
+)
+
 from newrelic.api.background_task import background_task
 from newrelic.api.external_trace import ExternalTrace
 from newrelic.api.transaction import current_transaction
-from testing_support.fixtures import (override_application_settings,
-        validate_transaction_metrics)
-from testing_support.external_fixtures import create_incoming_headers
-from testing_support.validators.validate_cross_process_headers import validate_cross_process_headers
-from testing_support.validators.validate_external_node_params import validate_external_node_params
-
-
-version_info = tuple(int(_) for _ in aiohttp.__version__.split('.')[:2])
+version_info = tuple(int(_) for _ in aiohttp.__version__.split(".")[:2])
 
 if version_info < (2, 0):
     _expected_error_class = aiohttp.errors.HttpProcessingError
@@ -41,10 +46,10 @@ def fetch(url, headers=None, raise_for_status=False, connector=None):
     kwargs = {}
     if version_info >= (2, 0):
-        kwargs = {'raise_for_status': raise_for_status}
+        kwargs = {"raise_for_status": raise_for_status}
 
     session = aiohttp.ClientSession(connector=connector, **kwargs)
-    request = session._request('GET', url, headers=headers)
+    request = session._request("GET", url, headers=headers)
     headers = {}
 
     try:
@@ -55,11 +60,11 @@ def fetch(url, headers=None, raise_for_status=False, connector=None):
             return headers
 
     response_text = yield from response.text()
-    for header in response_text.split('\n'):
+    for header in response_text.split("\n"):
         if not header:
             continue
         try:
-            h, v = header.split(':', 1)
+            h, v = header.split(":", 1)
         except ValueError:
             continue
         headers[h.strip()] = v.strip()
@@ -68,28 +73,25 @@ def fetch(url, headers=None, raise_for_status=False, connector=None):
     return headers
 
 
-@pytest.mark.parametrize('cat_enabled', (True, False))
-@pytest.mark.parametrize('distributed_tracing', (True, False))
-@pytest.mark.parametrize('span_events', (True, False))
-def test_outbound_cross_process_headers(cat_enabled, distributed_tracing,
-        span_events, mock_header_server):
-
-    @background_task(name='test_outbound_cross_process_headers')
+@pytest.mark.parametrize("cat_enabled", (True, False))
+@pytest.mark.parametrize("distributed_tracing", (True, False))
+@pytest.mark.parametrize("span_events", (True, False))
+def test_outbound_cross_process_headers(event_loop, cat_enabled, distributed_tracing, span_events, mock_header_server):
+    @background_task(name="test_outbound_cross_process_headers")
     @asyncio.coroutine
     def _test():
-        headers = yield from fetch(
-            'http://127.0.0.1:%d' % mock_header_server.port)
+        headers = yield from fetch("http://127.0.0.1:%d" % mock_header_server.port)
         transaction = current_transaction()
         transaction._test_request_headers = headers
 
         if distributed_tracing:
-            assert 'newrelic' in headers
+            assert "newrelic" in headers
         elif cat_enabled:
             assert ExternalTrace.cat_id_key in headers
             assert ExternalTrace.cat_transaction_key in headers
         else:
-            assert 'newrelic' not in headers
+            assert "newrelic" not in headers
             assert ExternalTrace.cat_id_key not in headers
             assert ExternalTrace.cat_transaction_key not in headers
@@ -101,91 +103,82 @@ def _validate():
         _validate()
 
-    @override_application_settings({
-        'cross_application_tracer.enabled': cat_enabled,
-        'distributed_tracing.enabled': distributed_tracing,
-        'span_events.enabled': span_events,
-    })
+    @override_application_settings(
+        {
+            "cross_application_tracer.enabled": cat_enabled,
+            "distributed_tracing.enabled": distributed_tracing,
+            "span_events.enabled": span_events,
+        }
+    )
     def test():
-        loop = asyncio.get_event_loop()
-        loop.run_until_complete(_test())
+        event_loop.run_until_complete(_test())
 
     test()
 
 
 _nr_key = ExternalTrace.cat_id_key
 _customer_headers_tests = [
-    {'Test-Header': 'Test Data 1'},
-    {_nr_key.title(): 'Test Data 2'},
+    {"Test-Header": "Test Data 1"},
+    {_nr_key.title(): "Test Data 2"},
 ]
 
 
-@pytest.mark.parametrize('customer_headers', _customer_headers_tests)
-def test_outbound_cross_process_headers_custom_headers(customer_headers,
-        mock_header_server):
+@pytest.mark.parametrize("customer_headers", _customer_headers_tests)
+def test_outbound_cross_process_headers_custom_headers(event_loop, customer_headers, mock_header_server):
 
-    loop = asyncio.get_event_loop()
-    headers = loop.run_until_complete(
-        background_task()(fetch)(
-            'http://127.0.0.1:%d' % mock_header_server.port,
-            customer_headers.copy()))
+    headers = event_loop.run_until_complete(
+        background_task()(fetch)("http://127.0.0.1:%d" % mock_header_server.port, customer_headers.copy())
+    )
 
     # always honor customer headers
     for expected_header, expected_value in customer_headers.items():
         assert headers.get(expected_header) == expected_value
 
 
-def test_outbound_cross_process_headers_no_txn(mock_header_server):
-
-    loop = asyncio.get_event_loop()
-    headers = loop.run_until_complete(fetch(
-        'http://127.0.0.1:%d' % mock_header_server.port))
+def test_outbound_cross_process_headers_no_txn(event_loop, mock_header_server):
+    headers = event_loop.run_until_complete(fetch("http://127.0.0.1:%d" % mock_header_server.port))
 
     assert not headers.get(ExternalTrace.cat_id_key)
     assert not headers.get(ExternalTrace.cat_transaction_key)
 
 
-def test_outbound_cross_process_headers_exception(mock_header_server):
-
-    @background_task(name='test_outbound_cross_process_headers_exception')
+def test_outbound_cross_process_headers_exception(event_loop, mock_header_server):
+    @background_task(name="test_outbound_cross_process_headers_exception")
    @asyncio.coroutine
    def test():
        # corrupt the transaction object to force an error
        transaction = current_transaction()
        guid = transaction.guid
-        delattr(transaction, 'guid')
+        delattr(transaction, "guid")
 
        try:
-            headers = yield from fetch('http://127.0.0.1:%d' % mock_header_server.port)
+            headers = yield from fetch("http://127.0.0.1:%d" % mock_header_server.port)
 
            assert not headers.get(ExternalTrace.cat_id_key)
            assert not headers.get(ExternalTrace.cat_transaction_key)
        finally:
            transaction.guid = guid
 
-    loop = asyncio.get_event_loop()
-    loop.run_until_complete(test())
+    event_loop.run_until_complete(test())
 
 
 class PoorResolvingConnector(aiohttp.TCPConnector):
     @asyncio.coroutine
     def _resolve_host(self, host, port, *args, **kwargs):
-        res = [{'hostname': host, 'host': host, 'port': 1234,
-            'family': self._family, 'proto': 0, 'flags': 0}]
-        hosts = yield from super(PoorResolvingConnector,
-            self)._resolve_host(host, port, *args, **kwargs)
+        res = [{"hostname": host, "host": host, "port": 1234, "family": self._family, "proto": 0, "flags": 0}]
+        hosts = yield from super(PoorResolvingConnector, self)._resolve_host(host, port, *args, **kwargs)
         for hinfo in hosts:
             res.append(hinfo)
         return res
 
 
-@pytest.mark.parametrize('cat_enabled', [True, False])
-@pytest.mark.parametrize('response_code', [200, 404])
-@pytest.mark.parametrize('raise_for_status', [True, False])
-@pytest.mark.parametrize('connector_class',
-        [None, PoorResolvingConnector])  # None will use default
-def test_process_incoming_headers(cat_enabled, response_code,
-        raise_for_status, connector_class, mock_external_http_server):
+@pytest.mark.parametrize("cat_enabled", [True, False])
+@pytest.mark.parametrize("response_code", [200, 404])
+@pytest.mark.parametrize("raise_for_status", [True, False])
+@pytest.mark.parametrize("connector_class", [None, PoorResolvingConnector])  # None will use default
+def test_process_incoming_headers(
+    event_loop, cat_enabled, response_code, raise_for_status, connector_class, mock_external_http_server
+):
 
     # It was discovered via packnsend that the `throw` method of the `_request`
     # coroutine is used in the case of poorly resolved hosts. An older version
@@ -195,60 +188,56 @@ def test_process_incoming_headers(cat_enabled, response_code,
     # always called and thus makes sure the trace is not ended before
     # StopIteration is called.
     server, response_values = mock_external_http_server
-    address = 'http://127.0.0.1:%d' % server.port
+    address = "http://127.0.0.1:%d" % server.port
     port = server.port
 
     _test_cross_process_response_scoped_metrics = [
-        ('ExternalTransaction/127.0.0.1:%d/1#2/test' % port, 1 if cat_enabled
-            else None)]
+        ("ExternalTransaction/127.0.0.1:%d/1#2/test" % port, 1 if cat_enabled else None)
+    ]
 
     _test_cross_process_response_rollup_metrics = [
-        ('External/all', 1),
-        ('External/allOther', 1),
-        ('External/127.0.0.1:%d/all' % port, 1),
-        ('ExternalApp/127.0.0.1:%d/1#2/all' % port, 1 if cat_enabled else None),
-        ('ExternalTransaction/127.0.0.1:%d/1#2/test' % port, 1 if cat_enabled
-            else None)]
+        ("External/all", 1),
+        ("External/allOther", 1),
+        ("External/127.0.0.1:%d/all" % port, 1),
+        ("ExternalApp/127.0.0.1:%d/1#2/all" % port, 1 if cat_enabled else None),
+        ("ExternalTransaction/127.0.0.1:%d/1#2/test" % port, 1 if cat_enabled else None),
+    ]
 
     _test_cross_process_response_external_node_params = [
-        ('cross_process_id', '1#2'),
-        ('external_txn_name', 'test'),
-        ('transaction_guid', '0123456789012345')]
+        ("cross_process_id", "1#2"),
+        ("external_txn_name", "test"),
+        ("transaction_guid", "0123456789012345"),
+    ]
 
     _test_cross_process_response_external_node_forgone_params = [
-        k for k, v in _test_cross_process_response_external_node_params]
+        k for k, v in _test_cross_process_response_external_node_params
+    ]
 
     connector = connector_class() if connector_class else None
-
-    @background_task(name='test_process_incoming_headers')
+    @background_task(name="test_process_incoming_headers")
     async def _test():
         transaction = current_transaction()
         headers = create_incoming_headers(transaction)
         response_values.append((headers, response_code))
 
-        await fetch(
-            address,
-            raise_for_status=raise_for_status,
-            connector=connector)
+        await fetch(address, raise_for_status=raise_for_status, connector=connector)
 
-    @override_application_settings({
-        'cross_application_tracer.enabled': cat_enabled,
-        'distributed_tracing.enabled': False
-    })
+    @override_application_settings(
+        {"cross_application_tracer.enabled": cat_enabled, "distributed_tracing.enabled": False}
+    )
     @validate_transaction_metrics(
-        'test_process_incoming_headers',
-        scoped_metrics=_test_cross_process_response_scoped_metrics,
-        rollup_metrics=_test_cross_process_response_rollup_metrics,
-        background_task=True)
+        "test_process_incoming_headers",
+        scoped_metrics=_test_cross_process_response_scoped_metrics,
+        rollup_metrics=_test_cross_process_response_rollup_metrics,
+        background_task=True,
+    )
     @validate_external_node_params(
-        params=(_test_cross_process_response_external_node_params if
-            cat_enabled else []),
-        forgone_params=([] if cat_enabled else
-            _test_cross_process_response_external_node_forgone_params))
+        params=(_test_cross_process_response_external_node_params if cat_enabled else []),
+        forgone_params=([] if cat_enabled else _test_cross_process_response_external_node_forgone_params),
+    )
     def test():
-        loop = asyncio.get_event_loop()
-        loop.run_until_complete(_test())
+        event_loop.run_until_complete(_test())
 
     test()
diff --git a/tests/framework_aiohttp/test_server.py b/tests/framework_aiohttp/test_server.py
index d27d642ab8..6120ea8d8f 100644
--- a/tests/framework_aiohttp/test_server.py
+++ b/tests/framework_aiohttp/test_server.py
@@ -149,7 +149,12 @@ def fetch():
 
     @asyncio.coroutine
     def multi_fetch(loop):
         coros = [fetch() for i in range(2)]
-        combined = asyncio.gather(*coros, loop=loop)
+
+        try:
+            combined = asyncio.gather(*coros)
+        except TypeError:
+            combined = asyncio.gather(*coros, loop=loop)
+
         responses = yield from combined
         return responses
diff --git a/tests/framework_ariadne/test_application_async.py b/tests/framework_ariadne/test_application_async.py
index 7b9cf18c4b..06d5f4d358 100644
--- a/tests/framework_ariadne/test_application_async.py
+++ b/tests/framework_ariadne/test_application_async.py
@@ -85,7 +85,7 @@ async def coro():
         assert "storage" in str(response.get("data"))
         assert "abc" in str(response.get("data"))
 
-    loop = asyncio.get_event_loop()
+    loop = asyncio.new_event_loop()
     loop.run_until_complete(coro())
 
     _test()
diff --git a/tests/framework_graphql/test_application_async.py b/tests/framework_graphql/test_application_async.py
index 7bbd2bbe80..63e50492ea 100644
--- a/tests/framework_graphql/test_application_async.py
+++ b/tests/framework_graphql/test_application_async.py
@@ -1,18 +1,17 @@
 import asyncio
+
 import pytest
-from testing_support.fixtures import (
-    dt_enabled,
-    validate_transaction_metrics,
-)
+from test_application import is_graphql_2
+from testing_support.fixtures import dt_enabled, validate_transaction_metrics
 from testing_support.validators.validate_span_events import validate_span_events
 
-from newrelic.api.background_task import background_task
-from test_application import is_graphql_2
+from newrelic.api.background_task import background_task
 
 
 @pytest.fixture(scope="session")
 def graphql_run_async():
-    from graphql import graphql, __version__ as version
+    from graphql import __version__ as version
+    from graphql import graphql
 
     major_version = int(version.split(".")[0])
     if major_version == 2:
@@ -81,9 +80,7 @@ def test_query_and_mutation_async(app, graphql_run_async, is_graphql_2):
     @background_task()
     def _test():
         async def coro():
-            response = await graphql_run_async(
-                app, 'mutation { storage_add(string: "abc") }'
-            )
+            response = await graphql_run_async(app, 'mutation { storage_add(string: "abc") }')
             assert not response.errors
             response = await graphql_run_async(app, "query { storage }")
             assert not response.errors
@@ -92,7 +89,7 @@ async def coro():
             assert "storage" in str(response.data)
             assert "abc" in str(response.data)
 
-        loop = asyncio.get_event_loop()
+        loop = asyncio.new_event_loop()
         loop.run_until_complete(coro())
 
     _test()
diff --git a/tests/framework_sanic/conftest.py b/tests/framework_sanic/conftest.py
index cd588dccd0..f8b5dac376 100644
--- a/tests/framework_sanic/conftest.py
+++ b/tests/framework_sanic/conftest.py
@@ -12,53 +12,59 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+import asyncio
+
 import pytest
+from testing_support.fixtures import (
+    code_coverage_fixture,
+    collector_agent_registration_fixture,
+    collector_available_fixture,
+)
 
 from newrelic.common.object_wrapper import transient_function_wrapper
-from testing_support.fixtures import (code_coverage_fixture,
-        collector_agent_registration_fixture, collector_available_fixture)
-
-import asyncio
-
 
 _coverage_source = [
-    'newrelic.hooks.framework_sanic',
+    "newrelic.hooks.framework_sanic",
 ]
 
 code_coverage = code_coverage_fixture(source=_coverage_source)
 
 _default_settings = {
-    'transaction_tracer.explain_threshold': 0.0,
-    'transaction_tracer.transaction_threshold': 0.0,
-    'transaction_tracer.stack_trace_threshold': 0.0,
-    'debug.log_data_collector_payloads': True,
-    'debug.record_transaction_failure': True,
+    "transaction_tracer.explain_threshold": 0.0,
+    "transaction_tracer.transaction_threshold": 0.0,
+    "transaction_tracer.stack_trace_threshold": 0.0,
+    "debug.log_data_collector_payloads": True,
+    "debug.record_transaction_failure": True,
 }
 
 collector_agent_registration = collector_agent_registration_fixture(
-    app_name='Python Agent Test (framework_sanic)',
-    default_settings=_default_settings)
+    app_name="Python Agent Test (framework_sanic)", default_settings=_default_settings
+)
 
 RESPONSES = []
+loop = None
+
+
 def create_request_class(app, method, url, headers=None, loop=None):
     from sanic.request import Request
+
     try:
         _request = Request(
             method=method.upper(),
-            url_bytes=url.encode('utf-8'),
+            url_bytes=url.encode("utf-8"),
             headers=headers,
-            version='1.0',
+            version="1.0",
             transport=None,
         )
     except TypeError:
         _request = Request(
             app=app,
             method=method.upper(),
-            url_bytes=url.encode('utf-8'),
+            url_bytes=url.encode("utf-8"),
             headers=headers,
-            version='1.0',
+            version="1.0",
             transport=None,
         )
 
@@ -89,9 +95,10 @@ def create_request_coroutine(app, method, url, headers=None, loop=None):
     try:
         coro = app.handle_request(create_request_class(app, method, url, headers, loop=loop))
     except TypeError:
+
         def write_callback(response):
             response.raw_headers = response.output()
-            if b'write_response_error' in response.raw_headers:
+            if b"write_response_error" in response.raw_headers:
                 raise ValueError("write_response_error")
 
             if response not in RESPONSES:
@@ -112,7 +119,10 @@ async def stream_callback(response):
 
 
 def request(app, method, url, headers=None):
-    loop = asyncio.get_event_loop()
+    global loop
+    if loop is None:
+        loop = asyncio.new_event_loop()
+
     coro = create_request_coroutine(app, method, url, headers, loop)
     loop.run_until_complete(coro)
     return RESPONSES.pop()
@@ -129,11 +139,14 @@ def fetch(self, method, url, headers=None):
 @pytest.fixture()
 def app():
     from _target_application import app
+
     return TestApplication(app)
 
+
 @pytest.fixture(autouse=True)
 def capture_requests(monkeypatch):
     from sanic.response import BaseHTTPResponse
+
     original = BaseHTTPResponse.__init__
 
     def capture(*args, **kwargs):
diff --git a/tests/framework_strawberry/test_application_async.py b/tests/framework_strawberry/test_application_async.py
index 3af55b6d46..bac2c06940 100644
--- a/tests/framework_strawberry/test_application_async.py
+++ b/tests/framework_strawberry/test_application_async.py
@@ -26,6 +26,9 @@ def execute(schema, *args, **kwargs):
 ]
 
+loop = asyncio.new_event_loop()
+
+
 def test_basic(app, graphql_run_async):
     from graphql import __version__ as version
@@ -48,7 +51,6 @@ async def coro():
         response = await graphql_run_async(app, "{ hello_async }")
         assert not response.errors
 
-    loop = asyncio.get_event_loop()
     loop.run_until_complete(coro())
 
     _test()
@@ -122,7 +124,6 @@ async def coro():
         assert "storage" in str(response.data)
         assert "abc" in str(response.data)
 
-    loop = asyncio.get_event_loop()
     loop.run_until_complete(coro())
 
     _test()
diff --git a/tests/testing_support/asgi_testing.py b/tests/testing_support/asgi_testing.py
index 747b0e3247..3b45686794 100644
--- a/tests/testing_support/asgi_testing.py
+++ b/tests/testing_support/asgi_testing.py
@@ -34,7 +34,7 @@ def __init__(self, asgi_application):
         self.asgi_application = asgi_application
 
     def make_request(self, method, path, params=None, headers=None, body=None):
-        loop = asyncio.get_event_loop()
+        loop = asyncio.new_event_loop()
         coro = self.make_request_async(method, path, params, headers, body)
         return loop.run_until_complete(coro)
 
@@ -49,9 +49,7 @@ async def make_request_async(self, method, path, params, headers, body):
         scope = self.generate_input(method, path, params, headers, body)
 
         try:
-            awaitable = self.asgi_application(
-                scope, self.input_queue.get, self.output_queue.put
-            )
+            awaitable = self.asgi_application(scope, self.input_queue.get, self.output_queue.put)
         except TypeError:
             instance = self.asgi_application(scope)
             awaitable = instance(self.input_queue.get, self.output_queue.put)
diff --git a/tests/testing_support/fixture/__init__.py b/tests/testing_support/fixture/__init__.py
new file mode 100644
index 0000000000..8030baccf7
--- /dev/null
+++ b/tests/testing_support/fixture/__init__.py
@@ -0,0 +1,13 @@
+# Copyright 2010 New Relic, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
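The next file, tests/testing_support/fixture/event_loop.py, is the backbone of these test changes: a session-scoped pytest fixture that replaces the scattered asyncio.get_event_loop() calls, which emit deprecation warnings on Python 3.10 when no loop is running. Below is a minimal, self-contained sketch of how a test consumes such a fixture; the do_work coroutine and test_do_work function are hypothetical stand-ins, and only the fixture shape mirrors the patch (the fixture in the patch never closes the loop and additionally guards Python 2 via six.PY2).

import asyncio

import pytest


@pytest.fixture(scope="session")
def event_loop():
    # One explicitly created loop for the whole test session, rather than
    # the implicit loop creation asyncio.get_event_loop() used to provide.
    loop = asyncio.new_event_loop()
    asyncio.set_event_loop(loop)
    yield loop
    loop.close()  # sketch hygiene; the fixture in the patch leaves the loop open


async def do_work():
    # Hypothetical coroutine standing in for the real test bodies above.
    await asyncio.sleep(0)
    return "done"


def test_do_work(event_loop):
    # Tests accept the fixture as an argument and drive coroutines through it,
    # matching the event_loop.run_until_complete(...) calls in the diffs above.
    assert event_loop.run_until_complete(do_work()) == "done"

Session scope keeps every test on the same loop, which avoids objects bound to one loop being awaited from another.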
diff --git a/tests/testing_support/fixture/event_loop.py b/tests/testing_support/fixture/event_loop.py
new file mode 100644
index 0000000000..ab89694459
--- /dev/null
+++ b/tests/testing_support/fixture/event_loop.py
@@ -0,0 +1,16 @@
+import pytest
+
+from newrelic.packages import six
+
+# Guard against Python 2 crashes
+if six.PY2:
+    event_loop = None
+else:
+
+    @pytest.fixture(scope="session")
+    def event_loop():
+        from asyncio import new_event_loop, set_event_loop
+
+        loop = new_event_loop()
+        set_event_loop(loop)
+        yield loop
diff --git a/tox.ini b/tox.ini
index 13006aea59..60ac0ee353 100644
--- a/tox.ini
+++ b/tox.ini
@@ -42,99 +42,101 @@
 [tox]
 setupdir = {toxinidir}
 envlist =
-    python-adapter_cheroot-{py27,py36,py37,py38,py39},
-    python-adapter_gevent-{py27,py36,py37,py38},
+    python-adapter_cheroot-{py27,py36,py37,py38,py39,py310},
+    python-adapter_gevent-{py27,py36,py37,py38,py310},
     python-adapter_gunicorn-{py36}-aiohttp1-gunicorn{19,latest},
-    python-adapter_gunicorn-{py36,py37,py38,py39}-aiohttp3-gunicornlatest,
+    python-adapter_gunicorn-{py36,py37,py38,py39,py310}-aiohttp3-gunicornlatest,
     python-adapter_uvicorn-{py36,py37}-uvicorn03,
     ; Temporarily testing py36 on the uvicorn version preceding v0.15
     python-adapter_uvicorn-{py36}-uvicorn014
-    python-adapter_uvicorn-{py37,py38,py39}-uvicornlatest,
-    python-agent_features-{py27,py36,py37,py38,py39}-{with,without}_extensions,
+    python-adapter_uvicorn-{py37,py38,py39,py310}-uvicornlatest,
+    python-agent_features-{py27,py36,py37,py38,py39,py310}-{with,without}_extensions,
     python-agent_features-{pypy,pypy3}-without_extensions,
-    python-agent_streaming-{py27,py36,py37,py38,py39}-{with,without}_extensions,
-    python-agent_unittests-{py27,py36,py37,py38,py39}-{with,without}_extensions,
+    python-agent_streaming-{py27,py36,py37,py38,py39,py310}-{with,without}_extensions,
+    python-agent_unittests-{py27,py36,py37,py38,py39,py310}-{with,without}_extensions,
     python-agent_unittests-{pypy,pypy3}-without_extensions,
-    python-application_celery-{py27,py36,py37,py38,py39,pypy,pypy3},
+    python-application_celery-{py27,py36,py37,py38,py39,py310,pypy,pypy3},
     gearman-application_gearman-{py27,pypy},
     python-component_djangorestframework-py27-djangorestframework0300,
-    python-component_djangorestframework-{py36,py37,py38,py39}-djangorestframeworklatest,
-    python-component_flask_rest-{py27,py36,py37,py38,pypy,pypy3},
+    python-component_djangorestframework-{py36,py37,py38,py39,py310}-djangorestframeworklatest,
+    python-component_flask_rest-{py27,py36,py37,py38,py39,pypy,pypy3},
     python-component_tastypie-{py27,py36,py37,py38,py39,pypy,pypy3},
-    python-coroutines_asyncio-{py36,py37,py38,py39,pypy3},
-    python-cross_agent-{py27,py36,py37,py38,py39}-{with,without}_extensions,
+    python-coroutines_asyncio-{py36,py37,py38,py39,py310,pypy3},
+    python-cross_agent-{py27,py36,py37,py38,py39,py310}-{with,without}_extensions,
     python-cross_agent-pypy-without_extensions,
-    postgres-datastore_asyncpg-{py36,py37,py38,py39},
-    memcached-datastore_bmemcached-{pypy,py27,py36,py37,py38,py39}-memcached030,
+    postgres-datastore_asyncpg-{py36,py37,py38,py39,py310},
+    memcached-datastore_bmemcached-{pypy,py27,py36,py37,py38,py39,py310}-memcached030,
     elasticsearchserver01-datastore_pyelasticsearch-{py27,py36,pypy},
     elasticsearchserver01-datastore_elasticsearch-py27-elasticsearch{00,01,02,05},
-    elasticsearchserver07-datastore_elasticsearch-{py27,py36,py37,py38,py39,pypy,pypy3}-elasticsearch{07},
-    memcached-datastore_memcache-{py27,py36,py37,py38,py39,pypy,pypy3}-memcached01,
+    elasticsearchserver07-datastore_elasticsearch-{py27,py36,py37,py38,py39,py310,pypy,pypy3}-elasticsearch{07},
+    memcached-datastore_memcache-{py27,py36,py37,py38,py39,py310,pypy,pypy3}-memcached01,
     mysql-datastore_mysql-mysql080023-py27,
-    mysql-datastore_mysql-mysqllatest-{py36,py37,py38},
+    mysql-datastore_mysql-mysqllatest-{py36,py37,py38,py39,py310},
     postgres-datastore_postgresql-{py36,py37,py38,py39},
-    postgres-datastore_psycopg2-{py27,py36,py37,py38}-psycopg20208,
+    postgres-datastore_psycopg2-{py27,py36,py37,py38,py39,py310}-psycopg20208,
     postgres-datastore_psycopg2cffi-{py27,py36,pypy}-psycopg2cffi{0207,0208},
-    postgres-datastore_psycopg2cffi-py37-psycopg2cffi0208,
+    postgres-datastore_psycopg2cffi-{py37,py38,py39,py310}-psycopg2cffi0208,
     memcached-datastore_pylibmc-{py27,py36,py37},
-    memcached-datastore_pymemcache-{py27,py36,py37,py38,py39,pypy,pypy3},
-    mongodb-datastore_pymongo-{py27,py36,py37,py38,py39,pypy}-pymongo{03},
-    mysql-datastore_pymysql-{py27,py36,py37,py38,py39,pypy,pypy3},
-    solr-datastore_pysolr-{py27,py36,py37,py38,py39,pypy,pypy3},
-    redis-datastore_redis-{py27,py36,py37,py38,py39,pypy,pypy3},
+    memcached-datastore_pymemcache-{py27,py36,py37,py38,py39,py310,pypy,pypy3},
+    mongodb-datastore_pymongo-{py27,py36,py37,py38,py39,py310,pypy}-pymongo{03},
+    mysql-datastore_pymysql-{py27,py36,py37,py38,py39,py310,pypy,pypy3},
+    solr-datastore_pysolr-{py27,py36,py37,py38,py39,py310,pypy,pypy3},
+    redis-datastore_redis-{py27,py36,py37,py38,py39,py310,pypy,pypy3},
     solr-datastore_solrpy-{py27,pypy}-solrpy{00,01},
-    python-datastore_sqlite-{py27,py36,py37,py38,py39,pypy,pypy3},
+    python-datastore_sqlite-{py27,py36,py37,py38,py39,py310,pypy,pypy3},
     memcached-datastore_umemcache-{py27,pypy},
-    python-external_boto3-{py27,py36,py37,py38,py39}-boto01,
-    python-external_botocore-{py27,py36,py37,py38,py39},
+    python-external_boto3-{py27,py36,py37,py38,py39,py310}-boto01,
+    python-external_botocore-{py27,py36,py37,py38,py39,py310},
     python-external_feedparser-py27-feedparser{05,06},
-    python-external_http-{py27,py36,py37,py38,py39,pypy},
-    python-external_httplib-{py27,py36,py37,py38,py39,pypy,pypy3},
-    python-external_httplib2-{py27,py36,py37,py38,py39,pypy,pypy3},
-    python-external_httpx-{py36,py37,py38,py39},
-    python-external_requests-{py27,py36,py37,py38,py39,pypy,pypy3},
+    python-external_http-{py27,py36,py37,py38,py39,py310,pypy},
+    python-external_httplib-{py27,py36,py37,py38,py39,py310,pypy,pypy3},
+    python-external_httplib2-{py27,py36,py37,py38,py39,py310,pypy,pypy3},
+    python-external_httpx-{py36,py37,py38,py39,py310},
+    python-external_requests-{py27,py36,py37,py38,py39,py310,pypy,pypy3},
     python-external_urllib3-{py27,py37,pypy}-urllib3{0109},
-    python-external_urllib3-{py27,py36,py37,py38,py39,pypy,pypy3}-urllib3latest,
+    python-external_urllib3-{py27,py36,py37,py38,py39,py310,pypy,pypy3}-urllib3latest,
     python-framework_aiohttp-{py36}-aiohttp{01,0202,02},
     python-framework_aiohttp-{py36,py37}-aiohttp0304,
-    python-framework_aiohttp-{py36,py37,py38,py39,pypy3}-aiohttp03,
-    python-framework_ariadne-{py36,py37,py38,py39}-ariadnelatest,
+    python-framework_aiohttp-{py36,py37,py38,py39,py310,pypy3}-aiohttp03,
+    python-framework_ariadne-{py36,py37,py38,py39,py310}-ariadnelatest,
     python-framework_ariadne-py37-ariadne{0011,0012,0013},
     python-framework_bottle-py27-bottle{0008,0009,0010},
-    python-framework_bottle-{py27,py36,py37,py38,py39}-bottle{0011,0012},
+    python-framework_bottle-{py27,py36,py37,py38,py39,pypy3}-bottle{0011,0012},
+    python-framework_bottle-py310-bottle0012,
     python-framework_bottle-pypy-bottle{0008,0009,0010,0011,0012},
-    python-framework_bottle-pypy3-bottle{0011,0012},
-    python-framework_cherrypy-{py36,py37,py38,py39,pypy3}-CherryPy18,
+    python-framework_cherrypy-{py36,py37,py38,py39,py310,pypy3}-CherryPy18,
     python-framework_cherrypy-{py36,py37}-CherryPy0302,
     python-framework_cherrypy-pypy3-CherryPy0303,
     python-framework_django-{pypy,py27}-Django0103,
     python-framework_django-{pypy,py27,py37}-Django0108,
-    python-framework_django-py39-Django{0200,0201,0202,0300,0301,latest},
-    python-framework_django-{py36,py37,py38,py39}-Django0302,
+    python-framework_django-{py39}-Django{0200,0201,0202,0300,0301,latest},
+    python-framework_django-{py36,py37,py38,py39,py310}-Django0302,
     python-framework_falcon-{py27,py36,py37,py38,py39,pypy,pypy3}-falcon0103,
-    python-framework_falcon-{py36,py37,py38,py39,pypy3}-falcon{0200,master},
-    python-framework_fastapi-{py36,py37,py38},
+    python-framework_falcon-{py36,py37,py38,py39,py310,pypy3}-falcon{0200,master},
+    python-framework_fastapi-{py36,py37,py38,py39,py310},
     python-framework_flask-{pypy,py27}-flask0012,
-    python-framework_flask-{pypy,py27,py36,py37,py38,pypy3}-flask0101,
-    python-framework_flask-{py37,py38,pypy3}-flask{latest,master},
-    python-framework_graphene-{py27,py36,py37,py38,py39,pypy,pypy3}-graphenelatest,
+    python-framework_flask-{pypy,py27,py36,py37,py38,py39,py310,pypy3}-flask0101,
+    python-framework_flask-{py37,py38,py39,py310,pypy3}-flask{latest,master},
+    python-framework_graphene-{py27,py36,py37,py38,py39,py310,pypy,pypy3}-graphenelatest,
     python-framework_graphene-py37-graphene{0200,0201},
-    python-framework_graphql-{py27,py36,py37,py38,pypy,pypy3}-graphql02,
-    python-framework_graphql-{py36,py37,py38,py39,pypy3}-graphql03,
-    python-framework_graphql-py37-graphql{0202,0203,0300,0301,master},
+    python-framework_graphql-{py27,py36,py37,py38,py39,py310,pypy,pypy3}-graphql02,
+    python-framework_graphql-{py36,py37,py38,py39,py310,pypy3}-graphql03,
+    ; Temporarily disabling tests on graphql master
+    python-framework_graphql-py37-graphql{0202,0203,0300,0301},
     grpc-framework_grpc-{py27,py36}-grpc0125,
-    grpc-framework_grpc-{py36,py37,py38,py39}-grpclatest,
+    grpc-framework_grpc-{py36,py37,py38,py39,py310}-grpclatest,
     python-framework_pyramid-{pypy,py27,py38}-Pyramid0104,
-    python-framework_pyramid-{pypy,py27,pypy3,py36,py37,py38,py39}-Pyramid0110-cornice,
-    python-framework_pyramid-{pypy3,py36,py37,py38,py39}-Pyramidmaster,
-    python-framework_sanic-{py38,pypy3}-sanic{190301,1906,1812,1912,200904,210300}
-    python-framework_sanic-{py36,py37,py38,pypy3}-saniclatest
-    python-framework_starlette-{py36,py37,py38,py39,pypy3}-starlette{0014,latest},
-    python-framework_strawberry-{py37,py38,py39}-strawberrylatest,
-    libcurl-framework_tornado-{py36,py37,py38,py39,pypy3}-tornado0600,
-    libcurl-framework_tornado-{py36,py37,py38,py39,pypy3}-tornadomaster,
-    rabbitmq-messagebroker_pika-{py27,py36,py37,py38,py39,pypy,pypy3}-pika{0.13,latest},
-    python-template_mako-{py27,py36,py37,py38,py39}
+    python-framework_pyramid-{pypy,py27,pypy3,py36,py37,py38,py39,py310}-Pyramid0110-cornice,
+    python-framework_pyramid-{pypy3,py36,py37,py38,py39,py310}-Pyramidmaster,
+    python-framework_sanic-{py38,pypy3}-sanic{190301,1906,1812,1912,200904,210300},
+    python-framework_sanic-{py36,py37,py38,py310,pypy3}-saniclatest,
+    python-framework_starlette-{py36,py37,py38,py39,py310,pypy3}-starlette{0014,latest},
+    python-framework_strawberry-{py37,py38,py39,py310}-strawberrylatest,
+    libcurl-framework_tornado-{py36,py37,py38,py39,py310,pypy3}-tornado0600,
+    libcurl-framework_tornado-{py36,py37,py38,py39,py310,pypy3}-tornadomaster,
+    rabbitmq-messagebroker_pika-{py27,py36,py37,py38,py39,pypy,pypy3}-pika0.13,
+    rabbitmq-messagebroker_pika-{py27,py36,py37,py38,py39,py310,pypy,pypy3}-pikalatest,
+    python-template_mako-{py27,py36,py37,py38,py39,py310}
 
 [pytest]
 usefixtures =
@@ -144,6 +146,14 @@ usefixtures =
 
 [testenv]
 deps =
+    # Base Dependencies
+    {py36,py37,py38,py39,py310,pypy3}: pytest==6.2.5
+    {py27,pypy}: pytest==4.6.11
+    iniconfig
+    pytest-cov
+    WebTest==2.0.35
+
+    # Test Suite Dependencies
     adapter_cheroot: cheroot
     adapter_gevent: WSGIProxy2
     adapter_gevent: gevent
@@ -323,7 +333,7 @@ commands =
     py.test -v []
 
 install_command=
-    pip install -r {toxinidir}/tests/base_requirements.txt {opts} {packages}
+    pip install {opts} {packages}
 
 extras =
     agent_streaming: infinite-tracing
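Two Python 3.10 patterns recur throughout the diffs above: create and install event loops explicitly instead of calling the deprecated asyncio.get_event_loop() from synchronous code, and stop passing the loop= keyword that 3.10 removed from asyncio.gather, keeping a TypeError fallback for the older environments still in this matrix. A compressed sketch of both patterns follows; fetch_one is a hypothetical stand-in for the fetch coroutines used in these tests.

import asyncio


async def fetch_one(i):
    # Hypothetical stand-in for the fetch() coroutines in the tests above.
    await asyncio.sleep(0)
    return i


def run_all(count=2):
    # Pattern 1: create and install a loop explicitly; get_event_loop()
    # emits a DeprecationWarning on 3.10 when called with no running loop.
    loop = asyncio.new_event_loop()
    asyncio.set_event_loop(loop)

    coros = [fetch_one(i) for i in range(count)]
    try:
        # Pattern 2: Python 3.10 removed the loop argument from asyncio.gather.
        combined = asyncio.gather(*coros)
    except TypeError:
        # Mirrors the fallback in test_server.py above for environments
        # where gather still needs the loop passed explicitly.
        combined = asyncio.gather(*coros, loop=loop)

    try:
        return loop.run_until_complete(combined)
    finally:
        loop.close()


if __name__ == "__main__":
    print(run_all())  # -> [0, 1]

Attempting the 3.10-compatible call first and falling back on TypeError is the same shape used in test_server.py, the Sanic conftest, and asgi_testing.py above, and lets one test body span several interpreter and library versions.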