From cd23f56e6d84133de4a10169970f5b3dc9330a74 Mon Sep 17 00:00:00 2001 From: Uma Annamalai Date: Thu, 21 Jul 2022 09:56:09 -0700 Subject: [PATCH 01/49] Enable log forwarding by default. (#587) * Enable log forwarding by default. (#583) * Enable log forwarding by default. * Fix catmap fixture for log forwarding compatibility. * Use isinstance check in fixtures. * Update data usage supportability metrics. (#582) * Update data usage supportability metric name. * Fix compression bug and add testing. Co-authored-by: Lalleh Rafeei Co-authored-by: Nyenty Ayuk-Enow * Update harvest_loop tests. * Update compression test to check method1 payload length for consistency. Co-authored-by: Lalleh Rafeei Co-authored-by: Nyenty Ayuk-Enow Co-authored-by: Lalleh Rafeei Co-authored-by: Nyenty Ayuk-Enow --- newrelic/common/agent_http.py | 22 ++++++++---- newrelic/core/config.py | 2 +- tests/agent_unittests/test_harvest_loop.py | 2 +- tests/agent_unittests/test_http_client.py | 39 +++++++++++++++------- tests/testing_support/fixtures.py | 4 +-- 5 files changed, 46 insertions(+), 23 deletions(-) diff --git a/newrelic/common/agent_http.py b/newrelic/common/agent_http.py index 239892000..e9d9a00aa 100644 --- a/newrelic/common/agent_http.py +++ b/newrelic/common/agent_http.py @@ -524,24 +524,32 @@ def _supportability_request(params, payload, body, compression_time): # ********* # Used only for supportability metrics. Do not use to drive business # logic! + # payload: uncompressed + # body: compressed agent_method = params and params.get("method") # ********* - if agent_method and body: + if agent_method and payload: # Compression was applied if compression_time is not None: internal_metric( - "Supportability/Python/Collector/ZLIB/Bytes/%s" % agent_method, - len(payload), + "Supportability/Python/Collector/%s/ZLIB/Bytes" % agent_method, + len(body), ) internal_metric( - "Supportability/Python/Collector/ZLIB/Compress/%s" % agent_method, + "Supportability/Python/Collector/ZLIB/Bytes", len(body) + ) + internal_metric( + "Supportability/Python/Collector/%s/ZLIB/Compress" % agent_method, compression_time, ) - internal_metric( - "Supportability/Python/Collector/Output/Bytes/%s" % agent_method, - len(body), + "Supportability/Python/Collector/%s/Output/Bytes" % agent_method, + len(payload), + ) + # Top level metric to aggregate overall bytes being sent + internal_metric( + "Supportability/Python/Collector/Output/Bytes", len(payload) ) @staticmethod diff --git a/newrelic/core/config.py b/newrelic/core/config.py index 57f408b87..60520c113 100644 --- a/newrelic/core/config.py +++ b/newrelic/core/config.py @@ -812,7 +812,7 @@ def default_host(license_key): _settings.application_logging.enabled = _environ_as_bool("NEW_RELIC_APPLICATION_LOGGING_ENABLED", default=True) _settings.application_logging.forwarding.enabled = _environ_as_bool( - "NEW_RELIC_APPLICATION_LOGGING_FORWARDING_ENABLED", default=False + "NEW_RELIC_APPLICATION_LOGGING_FORWARDING_ENABLED", default=True ) _settings.application_logging.metrics.enabled = _environ_as_bool( "NEW_RELIC_APPLICATION_LOGGING_METRICS_ENABLED", default=True diff --git a/tests/agent_unittests/test_harvest_loop.py b/tests/agent_unittests/test_harvest_loop.py index 7a1f1702d..7760e1307 100644 --- a/tests/agent_unittests/test_harvest_loop.py +++ b/tests/agent_unittests/test_harvest_loop.py @@ -896,7 +896,7 @@ def test_default_events_harvested(allowlist_event): num_seen = 0 if (allowlist_event != "span_event_data") else 1 assert app._stats_engine.span_events.num_seen == num_seen - assert 
app._stats_engine.metrics_count() == 1
+    assert app._stats_engine.metrics_count() == 4
 
 
 @failing_endpoint("analytic_event_data")
diff --git a/tests/agent_unittests/test_http_client.py b/tests/agent_unittests/test_http_client.py
index b1fc4b4f4..a5c340d6a 100644
--- a/tests/agent_unittests/test_http_client.py
+++ b/tests/agent_unittests/test_http_client.py
@@ -289,32 +289,47 @@ def test_http_payload_compression(server, client_cls, method, threshold):
         compression_threshold=threshold,
     ) as client:
         with InternalTraceContext(internal_metrics):
-            status, data = client.send_request(payload=payload, params={"method": "test"})
+            status, data = client.send_request(payload=payload, params={"method": "method1"})
+
+    # Sending one additional request to validate metric aggregation for top level data usage supportability metrics
+    with client_cls(
+        "localhost",
+        server.port,
+        disable_certificate_validation=True,
+        compression_method=method,
+        compression_threshold=threshold,
+    ) as client:
+        with InternalTraceContext(internal_metrics):
+            status, data = client.send_request(payload=payload, params={"method": "method2"})
 
     assert status == 200
 
     data = data.split(b"\n")
     sent_payload = data[-1]
     payload_byte_len = len(sent_payload)
-
     internal_metrics = dict(internal_metrics.metrics())
     if client_cls is ApplicationModeClient:
-        assert internal_metrics["Supportability/Python/Collector/Output/Bytes/test"][:2] == [
+        assert internal_metrics["Supportability/Python/Collector/method1/Output/Bytes"][:2] == [
             1,
-            payload_byte_len,
+            len(payload),
+        ]
+        assert internal_metrics["Supportability/Python/Collector/Output/Bytes"][:2] == [
+            2,
+            len(payload)*2,
         ]
 
         if threshold < 20:
             # Verify compression time is recorded
-            assert internal_metrics["Supportability/Python/Collector/ZLIB/Compress/test"][0] == 1
-            assert internal_metrics["Supportability/Python/Collector/ZLIB/Compress/test"][1] > 0
-
-            # Verify the original payload length is recorded
-            assert internal_metrics["Supportability/Python/Collector/ZLIB/Bytes/test"][:2] == [1, len(payload)]
-
-            assert len(internal_metrics) == 3
+            assert internal_metrics["Supportability/Python/Collector/method1/ZLIB/Compress"][0] == 1
+            assert internal_metrics["Supportability/Python/Collector/method1/ZLIB/Compress"][1] > 0
+
+            # Verify the compressed payload length is recorded
+            assert internal_metrics["Supportability/Python/Collector/method1/ZLIB/Bytes"][:2] == [1, payload_byte_len]
+            assert internal_metrics["Supportability/Python/Collector/ZLIB/Bytes"][:2] == [2, payload_byte_len*2]
+
+            assert len(internal_metrics) == 8
         else:
             # Verify no ZLIB compression metrics were sent
-            assert len(internal_metrics) == 1
+            assert len(internal_metrics) == 3
     else:
         assert not internal_metrics
 
diff --git a/tests/testing_support/fixtures.py b/tests/testing_support/fixtures.py
index 71bfea670..a89730377 100644
--- a/tests/testing_support/fixtures.py
+++ b/tests/testing_support/fixtures.py
@@ -2625,8 +2625,8 @@ def _validate_analytics_sample_data(wrapped, instance, args, kwargs):
 
         _new_wrapped = _capture_samples(wrapped)
         result = _new_wrapped(*args, **kwargs)
-
-        _samples = [s for s in samples if s[0]["type"] == "Transaction"]
+        # Check the type of s[0] because it is an integer if s is a LogEventNode
+        _samples = [s for s in samples if not isinstance(s[0], int) and s[0]["type"] == "Transaction"]
 
         assert _samples, "No Transaction events captured."
for sample in _samples: assert isinstance(sample, list) From c80abfaabe80a3b98dec88093e6e2ed2c2353c6d Mon Sep 17 00:00:00 2001 From: Hannah Stepanek Date: Wed, 27 Jul 2022 13:36:00 -0700 Subject: [PATCH 02/49] Remove unknown/useless pylint disable options (#589) Fix the following errors we've been seeing in CI: ``` pyproject.toml:1:0: W0012: Unknown option value for '--disable', expected a valid pylint message and got 'B101' (unknown-option-value) pyproject.toml:1:0: W0012: Unknown option value for '--disable', expected a valid pylint message and got 'E122' (unknown-option-value) pyproject.toml:1:0: W0012: Unknown option value for '--disable', expected a valid pylint message and got 'E126' (unknown-option-value) pyproject.toml:1:0: W0012: Unknown option value for '--disable', expected a valid pylint message and got 'E127' (unknown-option-value) pyproject.toml:1:0: W0012: Unknown option value for '--disable', expected a valid pylint message and got 'E128' (unknown-option-value) pyproject.toml:1:0: W0012: Unknown option value for '--disable', expected a valid pylint message and got 'E203' (unknown-option-value) pyproject.toml:1:0: W0012: Unknown option value for '--disable', expected a valid pylint message and got 'E501' (unknown-option-value) pyproject.toml:1:0: W0012: Unknown option value for '--disable', expected a valid pylint message and got 'E722' (unknown-option-value) pyproject.toml:1:0: W0012: Unknown option value for '--disable', expected a valid pylint message and got 'W504' (unknown-option-value) ``` --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index a1b2989af..871f3c5ba 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -6,7 +6,7 @@ include = '\.pyi?$' profile = "black" [tool.pylint.messages_control] -disable = "B101,C0103,C0114,C0115,C0116,C0209,C0302,C0415,E0401,E1120,E122,E126,E127,E128,E203,E501,E722,R0201,R0205,R0401,R0801,R0902,R0903,R0904,R0911,R0912,R0913,R0914,R0915,R1705,R1710,R1725,W0201,W0212,W0223,W0402,W0603,W0612,W0613,W0702,W0703,W0706,W504,line-too-long,redefined-outer-name" +disable = "C0103,C0114,C0115,C0116,C0209,C0302,C0415,E0401,E1120,R0205,R0401,R0801,R0902,R0903,R0904,R0911,R0912,R0913,R0914,R0915,R1705,R1710,R1725,W0201,W0212,W0223,W0402,W0603,W0612,W0613,W0702,W0703,W0706,line-too-long,redefined-outer-name" [tool.pylint.format] max-line-length = "120" From d7fc9cd985da5203899bf37f3aa804c05bdba45d Mon Sep 17 00:00:00 2001 From: Timothy Pansino <11214426+TimPansino@users.noreply.github.com> Date: Wed, 3 Aug 2022 13:29:04 -0700 Subject: [PATCH 03/49] Fix MySQL metric name in tests (#592) * Fix mysql metric name test * Fix version gating * Fix version gating again * Remove py36 testing * Add cache ignore for pip * Pip cache purge * Revert "Pip cache purge" This reverts commit 3b9eac288e83593bbcc7d580a1a86ac67dbeeae7. 
* Remove caching changes --- tests/datastore_mysql/test_database.py | 10 ++++++++-- tox.ini | 2 +- 2 files changed, 9 insertions(+), 3 deletions(-) diff --git a/tests/datastore_mysql/test_database.py b/tests/datastore_mysql/test_database.py index 06bdaba5d..0991d6df0 100644 --- a/tests/datastore_mysql/test_database.py +++ b/tests/datastore_mysql/test_database.py @@ -25,8 +25,14 @@ DB_NAMESPACE = DB_SETTINGS["namespace"] DB_PROCEDURE = "hello_" + DB_NAMESPACE +mysql_version = tuple(int(x) for x in mysql.connector.__version__.split(".")[:3]) +if mysql_version >= (8, 0, 30): + _connector_metric_name = 'Function/mysql.connector.pooling:connect' +else: + _connector_metric_name = 'Function/mysql.connector:connect' + _test_execute_via_cursor_scoped_metrics = [ - ('Function/mysql.connector:connect', 1), + (_connector_metric_name, 1), ('Datastore/statement/MySQL/datastore_mysql_%s/select' % DB_NAMESPACE, 1), ('Datastore/statement/MySQL/datastore_mysql_%s/insert' % DB_NAMESPACE, 1), ('Datastore/statement/MySQL/datastore_mysql_%s/update' % DB_NAMESPACE, 1), @@ -103,7 +109,7 @@ def test_execute_via_cursor(table_name): connection.commit() _test_connect_using_alias_scoped_metrics = [ - ('Function/mysql.connector:connect', 1), + (_connector_metric_name, 1), ('Datastore/statement/MySQL/datastore_mysql_%s/select' % DB_NAMESPACE, 1), ('Datastore/statement/MySQL/datastore_mysql_%s/insert' % DB_NAMESPACE, 1), ('Datastore/statement/MySQL/datastore_mysql_%s/update' % DB_NAMESPACE, 1), diff --git a/tox.ini b/tox.ini index 724b65217..d0a8ce780 100644 --- a/tox.ini +++ b/tox.ini @@ -74,7 +74,7 @@ envlist = elasticsearchserver07-datastore_elasticsearch-{py27,py36,py37,py38,py39,py310,pypy,pypy36}-elasticsearch{07}, memcached-datastore_memcache-{py27,py36,py37,py38,py39,py310,pypy,pypy36}-memcached01, mysql-datastore_mysql-mysql080023-py27, - mysql-datastore_mysql-mysqllatest-{py36,py37,py38,py39,py310}, + mysql-datastore_mysql-mysqllatest-{py37,py38,py39,py310}, postgres-datastore_postgresql-{py36,py37,py38,py39}, postgres-datastore_psycopg2-{py27,py36,py37,py38,py39,py310}-psycopg20208, postgres-datastore_psycopg2cffi-{py27,py36,pypy}-psycopg2cffi{0207,0208}, From 28fff631d5fb549b898e0890eec4441b99c5dd4b Mon Sep 17 00:00:00 2001 From: Hannah Stepanek Date: Wed, 3 Aug 2022 13:31:28 -0700 Subject: [PATCH 04/49] Use pyproject.toml for bandit & exclude tests (#586) * Since bandit now supports pyproject.toml as a config file, use that instead of cli options. * Exclude tests from bandit. There are many cases in tests where we might do something that violates bandit rules and that's ok. Co-authored-by: Timothy Pansino <11214426+TimPansino@users.noreply.github.com> --- .mega-linter.yml | 4 ++-- pyproject.toml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.mega-linter.yml b/.mega-linter.yml index b947f9f4b..39764b5c3 100644 --- a/.mega-linter.yml +++ b/.mega-linter.yml @@ -29,7 +29,7 @@ PYTHON_FLAKE8_CONFIG_FILE: setup.cfg PYTHON_BLACK_CONFIG_FILE: pyproject.toml PYTHON_PYLINT_CONFIG_FILE: pyproject.toml PYTHON_ISORT_CONFIG_FILE: pyproject.toml +PYTHON_BANDIT_CONFIG_FILE: pyproject.toml +PYTHON_BANDIT_FILTER_REGEX_EXCLUDE: ./tests -# Bandit's next release supports pyproject.toml. 
Until then, add config by cli option -PYTHON_BANDIT_ARGUMENTS: --skip=B110,B101,B404 PYTHON_PYLINT_ARGUMENTS: "--fail-under=0 --fail-on=E" diff --git a/pyproject.toml b/pyproject.toml index 871f3c5ba..65be1548b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -15,4 +15,4 @@ max-line-length = "120" good-names = "exc,val,tb" [tool.bandit] -skips = ["B110"] +skips = ["B110", "B101", "B404"] From 601efc378a6e13cd0148d90cf4dbb582088a71b3 Mon Sep 17 00:00:00 2001 From: Timothy Pansino <11214426+TimPansino@users.noreply.github.com> Date: Wed, 3 Aug 2022 13:35:35 -0700 Subject: [PATCH 05/49] Upgrade MegaLinter to v6 (#599) * Upgrade megalinter to v6 * Apply linter fixes --- .github/workflows/deploy-python.yml | 2 +- .github/workflows/mega-linter.yml | 12 ++--- .github/workflows/tests.yml | 78 ++++++++++++++--------------- .gitignore | 2 +- .mega-linter.yml | 2 +- 5 files changed, 48 insertions(+), 48 deletions(-) diff --git a/.github/workflows/deploy-python.yml b/.github/workflows/deploy-python.yml index 4a0054c1a..d238f0720 100644 --- a/.github/workflows/deploy-python.yml +++ b/.github/workflows/deploy-python.yml @@ -24,7 +24,7 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 with: persist-credentials: false fetch-depth: 0 diff --git a/.github/workflows/mega-linter.yml b/.github/workflows/mega-linter.yml index 75ab9a4b1..d378752dc 100644 --- a/.github/workflows/mega-linter.yml +++ b/.github/workflows/mega-linter.yml @@ -1,6 +1,6 @@ --- # Mega-Linter GitHub Action configuration file -# More info at https://megalinter.github.io +# More info at https://oxsecurity.github.io/megalinter name: Mega-Linter on: @@ -25,7 +25,7 @@ jobs: steps: # Git Checkout - name: Checkout Code - uses: actions/checkout@v2 + uses: actions/checkout@v3 with: token: ${{ secrets.PAT || secrets.GITHUB_TOKEN }} fetch-depth: 0 @@ -34,11 +34,11 @@ jobs: - name: Mega-Linter id: ml # You can override Mega-Linter flavor used to have faster performances - # More info at https://megalinter.github.io/flavors/ - uses: megalinter/megalinter/flavors/python@v5 + # More info at https://oxsecurity.github.io/megalinter/flavors/ + uses: oxsecurity/megalinter/flavors/python@v6 env: # All available variables are described in documentation - # https://megalinter.github.io/configuration/ + # https://oxsecurity.github.io/megalinter/configuration/ VALIDATE_ALL_CODEBASE: ${{ github.event_name == 'push' && github.ref == 'refs/heads/main' }} # Validates all source when push on main, else just the git diff with main. 
Set 'true' if you always want to lint all sources DEFAULT_BRANCH: ${{ github.event_name == 'pull_request' && github.base_ref || 'main' }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} @@ -51,7 +51,7 @@ jobs: with: name: Mega-Linter reports path: | - report + megalinter-reports mega-linter.log # Create pull request if applicable (for now works only on PR from same repository, not from forks) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index b96a10186..d00cf18e8 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -80,7 +80,7 @@ jobs: timeout-minutes: 30 steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 # Set up all versions of python # Setup PyPy2 and Python 2.7 after Python 3 to prevent overwriting aliases @@ -98,7 +98,7 @@ jobs: with: python-version: "pypy-2.7" architecture: x64 - + - uses: actions/setup-python@v3 with: python-version: "3.6" @@ -123,7 +123,7 @@ jobs: with: python-version: "3.10" architecture: x64 - + - uses: actions/setup-python@v3 with: python-version: "2.7" @@ -160,7 +160,7 @@ jobs: timeout-minutes: 30 steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 # Set up all versions of python # Setup PyPy2 and Python 2.7 after Python 3 to prevent overwriting aliases @@ -178,7 +178,7 @@ jobs: with: python-version: "pypy-2.7" architecture: x64 - + - uses: actions/setup-python@v3 with: python-version: "3.6" @@ -203,7 +203,7 @@ jobs: with: python-version: "3.10" architecture: x64 - + - uses: actions/setup-python@v3 with: python-version: "2.7" @@ -240,7 +240,7 @@ jobs: timeout-minutes: 30 steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 # Set up all versions of python # Setup PyPy2 and Python 2.7 after Python 3 to prevent overwriting aliases @@ -258,7 +258,7 @@ jobs: with: python-version: "pypy-2.7" architecture: x64 - + - uses: actions/setup-python@v3 with: python-version: "3.6" @@ -283,7 +283,7 @@ jobs: with: python-version: "3.10" architecture: x64 - + - uses: actions/setup-python@v3 with: python-version: "2.7" @@ -341,7 +341,7 @@ jobs: --health-retries 5 steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 # Set up all versions of python # Setup PyPy2 and Python 2.7 after Python 3 to prevent overwriting aliases @@ -359,7 +359,7 @@ jobs: with: python-version: "pypy-2.7" architecture: x64 - + - uses: actions/setup-python@v3 with: python-version: "3.6" @@ -384,7 +384,7 @@ jobs: with: python-version: "3.10" architecture: x64 - + - uses: actions/setup-python@v3 with: python-version: "2.7" @@ -439,7 +439,7 @@ jobs: --health-retries 5 steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 # Set up all versions of python # Setup PyPy2 and Python 2.7 after Python 3 to prevent overwriting aliases @@ -457,7 +457,7 @@ jobs: with: python-version: "pypy-2.7" architecture: x64 - + - uses: actions/setup-python@v3 with: python-version: "3.6" @@ -482,7 +482,7 @@ jobs: with: python-version: "3.10" architecture: x64 - + - uses: actions/setup-python@v3 with: python-version: "2.7" @@ -532,7 +532,7 @@ jobs: --health-retries 5 steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 # Set up all versions of python # Setup PyPy2 and Python 2.7 after Python 3 to prevent overwriting aliases @@ -550,7 +550,7 @@ jobs: with: python-version: "pypy-2.7" architecture: x64 - + - uses: actions/setup-python@v3 with: python-version: "3.6" @@ -575,7 +575,7 @@ jobs: with: python-version: "3.10" architecture: x64 - + - uses: actions/setup-python@v3 with: python-version: "2.7" @@ -627,7 
+627,7 @@ jobs: --health-retries 5 steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 # Set up all versions of python # Setup PyPy2 and Python 2.7 after Python 3 to prevent overwriting aliases @@ -645,7 +645,7 @@ jobs: with: python-version: "pypy-2.7" architecture: x64 - + - uses: actions/setup-python@v3 with: python-version: "3.6" @@ -670,7 +670,7 @@ jobs: with: python-version: "3.10" architecture: x64 - + - uses: actions/setup-python@v3 with: python-version: "2.7" @@ -720,7 +720,7 @@ jobs: --health-retries 5 steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 # Set up all versions of python # Setup PyPy2 and Python 2.7 after Python 3 to prevent overwriting aliases @@ -738,7 +738,7 @@ jobs: with: python-version: "pypy-2.7" architecture: x64 - + - uses: actions/setup-python@v3 with: python-version: "3.6" @@ -763,7 +763,7 @@ jobs: with: python-version: "3.10" architecture: x64 - + - uses: actions/setup-python@v3 with: python-version: "2.7" @@ -814,7 +814,7 @@ jobs: --health-retries 5 steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 # Set up all versions of python # Setup PyPy2 and Python 2.7 after Python 3 to prevent overwriting aliases @@ -832,7 +832,7 @@ jobs: with: python-version: "pypy-2.7" architecture: x64 - + - uses: actions/setup-python@v3 with: python-version: "3.6" @@ -857,7 +857,7 @@ jobs: with: python-version: "3.10" architecture: x64 - + - uses: actions/setup-python@v3 with: python-version: "2.7" @@ -907,7 +907,7 @@ jobs: --health-retries 5 steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 # Set up all versions of python # Setup PyPy2 and Python 2.7 after Python 3 to prevent overwriting aliases @@ -925,7 +925,7 @@ jobs: with: python-version: "pypy-2.7" architecture: x64 - + - uses: actions/setup-python@v3 with: python-version: "3.6" @@ -950,7 +950,7 @@ jobs: with: python-version: "3.10" architecture: x64 - + - uses: actions/setup-python@v3 with: python-version: "2.7" @@ -1002,7 +1002,7 @@ jobs: --health-retries 5 steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 # Set up all versions of python # Setup PyPy2 and Python 2.7 after Python 3 to prevent overwriting aliases @@ -1020,7 +1020,7 @@ jobs: with: python-version: "pypy-2.7" architecture: x64 - + - uses: actions/setup-python@v3 with: python-version: "3.6" @@ -1045,7 +1045,7 @@ jobs: with: python-version: "3.10" architecture: x64 - + - uses: actions/setup-python@v3 with: python-version: "2.7" @@ -1097,7 +1097,7 @@ jobs: --health-retries 5 steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 # Set up all versions of python # Setup PyPy2 and Python 2.7 after Python 3 to prevent overwriting aliases @@ -1115,7 +1115,7 @@ jobs: with: python-version: "pypy-2.7" architecture: x64 - + - uses: actions/setup-python@v3 with: python-version: "3.6" @@ -1140,7 +1140,7 @@ jobs: with: python-version: "3.10" architecture: x64 - + - uses: actions/setup-python@v3 with: python-version: "2.7" @@ -1189,7 +1189,7 @@ jobs: --health-retries 5 steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 # Set up all versions of python # Setup PyPy2 and Python 2.7 after Python 3 to prevent overwriting aliases @@ -1207,7 +1207,7 @@ jobs: with: python-version: "pypy-2.7" architecture: x64 - + - uses: actions/setup-python@v3 with: python-version: "3.6" @@ -1232,7 +1232,7 @@ jobs: with: python-version: "3.10" architecture: x64 - + - uses: actions/setup-python@v3 with: python-version: "2.7" diff --git a/.gitignore b/.gitignore index dadd8da50..8226b0e97 
100644 --- a/.gitignore +++ b/.gitignore @@ -1,5 +1,5 @@ # Linter -report/ +megalinter-reports/ # Byte-compiled / optimized / DLL files __pycache__/ diff --git a/.mega-linter.yml b/.mega-linter.yml index 39764b5c3..66dc1c74c 100644 --- a/.mega-linter.yml +++ b/.mega-linter.yml @@ -1,5 +1,5 @@ # Configuration file for Mega-Linter -# See all available variables at https://megalinter.github.io/configuration/ and in linters documentation +# See all available variables at https://oxsecurity.github.io/megalinter/configuration/ and in linters documentation APPLY_FIXES: none # all, none, or list of linter keys DEFAULT_BRANCH: main # Usually master or main From 9726a42aee973e2fd4d7fd0af0eeedd96a25a92a Mon Sep 17 00:00:00 2001 From: Timothy Pansino <11214426+TimPansino@users.noreply.github.com> Date: Wed, 3 Aug 2022 15:06:24 -0700 Subject: [PATCH 06/49] Fix django version for tastypie (#600) --- tox.ini | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tox.ini b/tox.ini index d0a8ce780..fc662e88a 100644 --- a/tox.ini +++ b/tox.ini @@ -196,9 +196,9 @@ deps = component_graphqlserver: jinja2<3.1 component_tastypie-tastypie0143: django-tastypie<0.14.4 component_tastypie-{py27,pypy}-tastypie0143: django<1.12 - component_tastypie-{py36,py37,py38,py39,pypy36}-tastypie0143: django<3.0.1 + component_tastypie-{py36,py37,py38,py39,py310,pypy36,pypy37}-tastypie0143: django<3.0.1 component_tastypie-tastypielatest: django-tastypie - component_tastypie-tastypielatest: django + component_tastypie-tastypielatest: django<4.1 coroutines_asyncio: uvloop cross_agent: mock==1.0.1 cross_agent: requests From e4c5f9757ddf9afb0629f2f25170b5ab30745190 Mon Sep 17 00:00:00 2001 From: Timothy Pansino <11214426+TimPansino@users.noreply.github.com> Date: Wed, 3 Aug 2022 15:22:55 -0700 Subject: [PATCH 07/49] Composite Setup Python Action (#594) * Add composite setup-python matrix action * Fix actions path * Clean out dependency install * [Mega-Linter] Apply linters fixes * Remove fail-fast from matrix strategy Co-authored-by: TimPansino --- .../actions/setup-python-matrix/action.yml | 55 ++ .github/workflows/tests.yml | 702 +----------------- 2 files changed, 81 insertions(+), 676 deletions(-) create mode 100644 .github/actions/setup-python-matrix/action.yml diff --git a/.github/actions/setup-python-matrix/action.yml b/.github/actions/setup-python-matrix/action.yml new file mode 100644 index 000000000..99c7f4d59 --- /dev/null +++ b/.github/actions/setup-python-matrix/action.yml @@ -0,0 +1,55 @@ +name: "setup-python-matrix" +description: "Sets up all versions of python required for matrix testing in this repo." 
+runs: + using: "composite" + steps: + - uses: actions/setup-python@v3 + with: + python-version: "pypy-3.6" + architecture: x64 + + - uses: actions/setup-python@v3 + with: + python-version: "pypy-3.7" + architecture: x64 + + - uses: actions/setup-python@v3 + with: + python-version: "pypy-2.7" + architecture: x64 + + - uses: actions/setup-python@v3 + with: + python-version: "3.6" + architecture: x64 + + - uses: actions/setup-python@v3 + with: + python-version: "3.7" + architecture: x64 + + - uses: actions/setup-python@v3 + with: + python-version: "3.8" + architecture: x64 + + - uses: actions/setup-python@v3 + with: + python-version: "3.9" + architecture: x64 + + - uses: actions/setup-python@v3 + with: + python-version: "3.10" + architecture: x64 + + - uses: actions/setup-python@v3 + with: + python-version: "2.7" + architecture: x64 + + - name: Install Dependencies + shell: bash + run: | + python3.10 -m pip install -U pip + python3.10 -m pip install -U wheel setuptools tox virtualenv!=20.0.24 diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index d00cf18e8..b9abc6745 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -51,6 +51,7 @@ jobs: TOTAL_GROUPS: 20 strategy: + fail-fast: false matrix: group-number: [ @@ -81,58 +82,7 @@ jobs: steps: - uses: actions/checkout@v3 - - # Set up all versions of python - # Setup PyPy2 and Python 2.7 after Python 3 to prevent overwriting aliases - - uses: actions/setup-python@v3 - with: - python-version: "pypy-3.6" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "pypy-3.7" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "pypy-2.7" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.6" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.7" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.8" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.9" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.10" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "2.7" - architecture: x64 - - - name: Install Dependencies - run: | - python3.10 -m pip install -U pip - python3.10 -m pip install -U wheel setuptools tox virtualenv!=20.0.24 + - uses: ./.github/actions/setup-python-matrix - name: Get Environments id: get-envs @@ -153,6 +103,7 @@ jobs: TOTAL_GROUPS: 1 strategy: + fail-fast: false matrix: group-number: [1] @@ -161,58 +112,7 @@ jobs: steps: - uses: actions/checkout@v3 - - # Set up all versions of python - # Setup PyPy2 and Python 2.7 after Python 3 to prevent overwriting aliases - - uses: actions/setup-python@v3 - with: - python-version: "pypy-3.6" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "pypy-3.7" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "pypy-2.7" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.6" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.7" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.8" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.9" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.10" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "2.7" - 
architecture: x64 - - - name: Install Dependencies - run: | - python3.10 -m pip install -U pip - python3.10 -m pip install -U wheel setuptools tox virtualenv!=20.0.24 + - uses: ./.github/actions/setup-python-matrix - name: Get Environments id: get-envs @@ -233,6 +133,7 @@ jobs: TOTAL_GROUPS: 1 strategy: + fail-fast: false matrix: group-number: [1] @@ -241,58 +142,7 @@ jobs: steps: - uses: actions/checkout@v3 - - # Set up all versions of python - # Setup PyPy2 and Python 2.7 after Python 3 to prevent overwriting aliases - - uses: actions/setup-python@v3 - with: - python-version: "pypy-3.6" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "pypy-3.7" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "pypy-2.7" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.6" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.7" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.8" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.9" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.10" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "2.7" - architecture: x64 - - - name: Install Dependencies - run: | - python3.10 -m pip install -U pip - python3.10 -m pip install -U wheel setuptools tox virtualenv!=20.0.24 + - uses: ./.github/actions/setup-python-matrix # Special case packages - name: Install libcurl-dev @@ -319,6 +169,7 @@ jobs: TOTAL_GROUPS: 2 strategy: + fail-fast: false matrix: group-number: [1, 2] @@ -342,58 +193,7 @@ jobs: steps: - uses: actions/checkout@v3 - - # Set up all versions of python - # Setup PyPy2 and Python 2.7 after Python 3 to prevent overwriting aliases - - uses: actions/setup-python@v3 - with: - python-version: "pypy-3.6" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "pypy-3.7" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "pypy-2.7" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.6" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.7" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.8" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.9" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.10" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "2.7" - architecture: x64 - - - name: Install Dependencies - run: | - python3.10 -m pip install -U pip - python3.10 -m pip install -U wheel setuptools tox virtualenv!=20.0.24 + - uses: ./.github/actions/setup-python-matrix - name: Get Environments id: get-envs @@ -414,6 +214,7 @@ jobs: TOTAL_GROUPS: 2 strategy: + fail-fast: false matrix: group-number: [1, 2] @@ -440,58 +241,7 @@ jobs: steps: - uses: actions/checkout@v3 - - # Set up all versions of python - # Setup PyPy2 and Python 2.7 after Python 3 to prevent overwriting aliases - - uses: actions/setup-python@v3 - with: - python-version: "pypy-3.6" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "pypy-3.7" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "pypy-2.7" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.6" - architecture: x64 - - - uses: 
actions/setup-python@v3 - with: - python-version: "3.7" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.8" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.9" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.10" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "2.7" - architecture: x64 - - - name: Install Dependencies - run: | - python3.10 -m pip install -U pip - python3.10 -m pip install -U wheel setuptools tox virtualenv!=20.0.24 + - uses: ./.github/actions/setup-python-matrix - name: Get Environments id: get-envs @@ -512,6 +262,7 @@ jobs: TOTAL_GROUPS: 2 strategy: + fail-fast: false matrix: group-number: [1, 2] @@ -533,58 +284,7 @@ jobs: steps: - uses: actions/checkout@v3 - - # Set up all versions of python - # Setup PyPy2 and Python 2.7 after Python 3 to prevent overwriting aliases - - uses: actions/setup-python@v3 - with: - python-version: "pypy-3.6" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "pypy-3.7" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "pypy-2.7" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.6" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.7" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.8" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.9" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.10" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "2.7" - architecture: x64 - - - name: Install Dependencies - run: | - python3.10 -m pip install -U pip - python3.10 -m pip install -U wheel setuptools tox virtualenv!=20.0.24 + - uses: ./.github/actions/setup-python-matrix - name: Get Environments id: get-envs @@ -605,6 +305,7 @@ jobs: TOTAL_GROUPS: 1 strategy: + fail-fast: false matrix: group-number: [1] @@ -628,58 +329,7 @@ jobs: steps: - uses: actions/checkout@v3 - - # Set up all versions of python - # Setup PyPy2 and Python 2.7 after Python 3 to prevent overwriting aliases - - uses: actions/setup-python@v3 - with: - python-version: "pypy-3.6" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "pypy-3.7" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "pypy-2.7" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.6" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.7" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.8" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.9" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.10" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "2.7" - architecture: x64 - - - name: Install Dependencies - run: | - python3.10 -m pip install -U pip - python3.10 -m pip install -U wheel setuptools tox virtualenv!=20.0.24 + - uses: ./.github/actions/setup-python-matrix - name: Get Environments id: get-envs @@ -700,6 +350,7 @@ jobs: TOTAL_GROUPS: 2 strategy: + fail-fast: false matrix: group-number: [1, 2] @@ -721,58 +372,7 @@ jobs: steps: - uses: actions/checkout@v3 - - # Set up all versions of python - # Setup PyPy2 and Python 2.7 after Python 3 to prevent 
overwriting aliases - - uses: actions/setup-python@v3 - with: - python-version: "pypy-3.6" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "pypy-3.7" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "pypy-2.7" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.6" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.7" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.8" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.9" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.10" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "2.7" - architecture: x64 - - - name: Install Dependencies - run: | - python3.10 -m pip install -U pip - python3.10 -m pip install -U wheel setuptools tox virtualenv!=20.0.24 + - uses: ./.github/actions/setup-python-matrix - name: Get Environments id: get-envs @@ -793,6 +393,7 @@ jobs: TOTAL_GROUPS: 1 strategy: + fail-fast: false matrix: group-number: [1] @@ -815,58 +416,7 @@ jobs: steps: - uses: actions/checkout@v3 - - # Set up all versions of python - # Setup PyPy2 and Python 2.7 after Python 3 to prevent overwriting aliases - - uses: actions/setup-python@v3 - with: - python-version: "pypy-3.6" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "pypy-3.7" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "pypy-2.7" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.6" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.7" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.8" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.9" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.10" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "2.7" - architecture: x64 - - - name: Install Dependencies - run: | - python3.10 -m pip install -U pip - python3.10 -m pip install -U wheel setuptools tox virtualenv!=20.0.24 + - uses: ./.github/actions/setup-python-matrix - name: Get Environments id: get-envs @@ -887,6 +437,7 @@ jobs: TOTAL_GROUPS: 1 strategy: + fail-fast: false matrix: group-number: [1] @@ -908,58 +459,7 @@ jobs: steps: - uses: actions/checkout@v3 - - # Set up all versions of python - # Setup PyPy2 and Python 2.7 after Python 3 to prevent overwriting aliases - - uses: actions/setup-python@v3 - with: - python-version: "pypy-3.6" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "pypy-3.7" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "pypy-2.7" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.6" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.7" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.8" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.9" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.10" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "2.7" - architecture: x64 - - - name: Install Dependencies - run: | - python3.10 -m pip install -U pip - python3.10 -m pip 
install -U wheel setuptools tox virtualenv!=20.0.24 + - uses: ./.github/actions/setup-python-matrix - name: Get Environments id: get-envs @@ -980,6 +480,7 @@ jobs: TOTAL_GROUPS: 1 strategy: + fail-fast: false matrix: group-number: [1] @@ -1003,58 +504,7 @@ jobs: steps: - uses: actions/checkout@v3 - - # Set up all versions of python - # Setup PyPy2 and Python 2.7 after Python 3 to prevent overwriting aliases - - uses: actions/setup-python@v3 - with: - python-version: "pypy-3.6" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "pypy-3.7" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "pypy-2.7" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.6" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.7" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.8" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.9" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.10" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "2.7" - architecture: x64 - - - name: Install Dependencies - run: | - python3.10 -m pip install -U pip - python3.10 -m pip install -U wheel setuptools tox virtualenv!=20.0.24 + - uses: ./.github/actions/setup-python-matrix - name: Get Environments id: get-envs @@ -1075,6 +525,7 @@ jobs: TOTAL_GROUPS: 1 strategy: + fail-fast: false matrix: group-number: [1] @@ -1098,58 +549,7 @@ jobs: steps: - uses: actions/checkout@v3 - - # Set up all versions of python - # Setup PyPy2 and Python 2.7 after Python 3 to prevent overwriting aliases - - uses: actions/setup-python@v3 - with: - python-version: "pypy-3.6" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "pypy-3.7" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "pypy-2.7" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.6" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.7" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.8" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.9" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.10" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "2.7" - architecture: x64 - - - name: Install Dependencies - run: | - python3.10 -m pip install -U pip - python3.10 -m pip install -U wheel setuptools tox virtualenv!=20.0.24 + - uses: ./.github/actions/setup-python-matrix - name: Get Environments id: get-envs @@ -1170,6 +570,7 @@ jobs: TOTAL_GROUPS: 1 strategy: + fail-fast: false matrix: group-number: [1] @@ -1190,58 +591,7 @@ jobs: steps: - uses: actions/checkout@v3 - - # Set up all versions of python - # Setup PyPy2 and Python 2.7 after Python 3 to prevent overwriting aliases - - uses: actions/setup-python@v3 - with: - python-version: "pypy-3.6" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "pypy-3.7" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "pypy-2.7" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.6" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.7" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - 
python-version: "3.8" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.9" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "3.10" - architecture: x64 - - - uses: actions/setup-python@v3 - with: - python-version: "2.7" - architecture: x64 - - - name: Install Dependencies - run: | - python3.10 -m pip install -U pip - python3.10 -m pip install -U wheel setuptools tox virtualenv!=20.0.24 + - uses: ./.github/actions/setup-python-matrix - name: Get Environments id: get-envs From 91fecbfc32d49c8a225d36a92add258151514086 Mon Sep 17 00:00:00 2001 From: Timothy Pansino <11214426+TimPansino@users.noreply.github.com> Date: Wed, 3 Aug 2022 15:55:30 -0700 Subject: [PATCH 08/49] Add PyMongo v4 Testing (#540) * Add pymongo v4 testing * Add parameterized testing for mongo v3/v4 Co-authored-by: Lalleh Rafeei Co-authored-by: Hannah Stepanek Co-authored-by: Uma Annamalai Co-authored-by: Nyenty Ayuk * Fix linting issues. * [Mega-Linter] Apply linters fixes * Add suggestions from code review * [Mega-Linter] Apply linters fixes * Bump Tests * Fix py2 transaction name * [Mega-Linter] Apply linters fixes * Bump Tests Co-authored-by: Lalleh Rafeei Co-authored-by: Hannah Stepanek Co-authored-by: Uma Annamalai Co-authored-by: Nyenty Ayuk Co-authored-by: TimPansino --- tests/datastore_pymongo/conftest.py | 28 +- tests/datastore_pymongo/test_pymongo.py | 394 ++++++++++++++---------- tox.ini | 4 +- 3 files changed, 257 insertions(+), 169 deletions(-) diff --git a/tests/datastore_pymongo/conftest.py b/tests/datastore_pymongo/conftest.py index 518b19325..8d279f2e2 100644 --- a/tests/datastore_pymongo/conftest.py +++ b/tests/datastore_pymongo/conftest.py @@ -12,26 +12,28 @@ # See the License for the specific language governing permissions and # limitations under the License. -import pytest - -from testing_support.fixtures import (code_coverage_fixture, - collector_agent_registration_fixture, collector_available_fixture) +from testing_support.fixtures import ( # noqa: F401; pylint: disable=W0611 + code_coverage_fixture, + collector_agent_registration_fixture, + collector_available_fixture, +) _coverage_source = [ - 'newrelic.hooks.datastore_pymongo', + "newrelic.hooks.datastore_pymongo", ] code_coverage = code_coverage_fixture(source=_coverage_source) _default_settings = { - 'transaction_tracer.explain_threshold': 0.0, - 'transaction_tracer.transaction_threshold': 0.0, - 'transaction_tracer.stack_trace_threshold': 0.0, - 'debug.log_data_collector_payloads': True, - 'debug.record_transaction_failure': True + "transaction_tracer.explain_threshold": 0.0, + "transaction_tracer.transaction_threshold": 0.0, + "transaction_tracer.stack_trace_threshold": 0.0, + "debug.log_data_collector_payloads": True, + "debug.record_transaction_failure": True, } collector_agent_registration = collector_agent_registration_fixture( - app_name='Python Agent Test (datastore_pymongo)', - default_settings=_default_settings, - linked_applications=['Python Agent Test (datastore)']) + app_name="Python Agent Test (datastore_pymongo)", + default_settings=_default_settings, + linked_applications=["Python Agent Test (datastore)"], +) diff --git a/tests/datastore_pymongo/test_pymongo.py b/tests/datastore_pymongo/test_pymongo.py index fa464fa12..09ea62e0b 100644 --- a/tests/datastore_pymongo/test_pymongo.py +++ b/tests/datastore_pymongo/test_pymongo.py @@ -12,17 +12,20 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import pymongo -import pytest import sqlite3 -from testing_support.fixtures import (validate_transaction_metrics, - validate_transaction_errors) +import pymongo from testing_support.db_settings import mongodb_settings -from testing_support.validators.validate_database_duration import validate_database_duration +from testing_support.fixtures import ( + validate_transaction_errors, + validate_transaction_metrics, +) +from testing_support.validators.validate_database_duration import ( + validate_database_duration, +) from newrelic.api.background_task import background_task - +from newrelic.packages import six DB_SETTINGS = mongodb_settings()[0] MONGODB_HOST = DB_SETTINGS["host"] @@ -30,10 +33,11 @@ MONGODB_COLLECTION = DB_SETTINGS["collection"] -def _exercise_mongo(db): +def _exercise_mongo_v3(db): db[MONGODB_COLLECTION].save({"x": 10}) db[MONGODB_COLLECTION].save({"x": 8}) db[MONGODB_COLLECTION].save({"x": 11}) + db[MONGODB_COLLECTION].find_one() for item in db[MONGODB_COLLECTION].find(): @@ -46,163 +50,243 @@ def _exercise_mongo(db): [item["x"] for item in db[MONGODB_COLLECTION].find().limit(2).skip(1)] - if pymongo.version_tuple >= (3, 0): - db[MONGODB_COLLECTION].initialize_unordered_bulk_op() - db[MONGODB_COLLECTION].initialize_ordered_bulk_op() - db[MONGODB_COLLECTION].bulk_write([pymongo.InsertOne({'x': 1})]) - db[MONGODB_COLLECTION].insert_one({'x': 300}) - db[MONGODB_COLLECTION].insert_many([{'x': 1} for i in range(20, 25)]) - db[MONGODB_COLLECTION].replace_one({'x': 1}, {'x': 2}) - db[MONGODB_COLLECTION].update_one({'x': 1}, {'$inc': {'x': 3}}) - db[MONGODB_COLLECTION].update_many({'x': 1}, {'$inc': {'x': 3}}) - db[MONGODB_COLLECTION].delete_one({'x': 4}) - db[MONGODB_COLLECTION].delete_many({'x': 4}) - db[MONGODB_COLLECTION].find_raw_batches() - db[MONGODB_COLLECTION].parallel_scan(1) - db[MONGODB_COLLECTION].create_indexes( - [pymongo.IndexModel([('x', pymongo.DESCENDING)])]) - db[MONGODB_COLLECTION].list_indexes() - db[MONGODB_COLLECTION].aggregate([]) - db[MONGODB_COLLECTION].aggregate_raw_batches([]) - db[MONGODB_COLLECTION].find_one_and_delete({'x': 10}) - db[MONGODB_COLLECTION].find_one_and_replace({'x': 300}, {'x': 301}) - db[MONGODB_COLLECTION].find_one_and_update({'x': 301}, {'$inc': {'x': 300}}) + db[MONGODB_COLLECTION].initialize_unordered_bulk_op() + db[MONGODB_COLLECTION].initialize_ordered_bulk_op() + db[MONGODB_COLLECTION].parallel_scan(1) + + db[MONGODB_COLLECTION].bulk_write([pymongo.InsertOne({"x": 1})]) + db[MONGODB_COLLECTION].insert_one({"x": 300}) + db[MONGODB_COLLECTION].insert_many([{"x": 1} for i in range(20, 25)]) + db[MONGODB_COLLECTION].replace_one({"x": 1}, {"x": 2}) + db[MONGODB_COLLECTION].update_one({"x": 1}, {"$inc": {"x": 3}}) + db[MONGODB_COLLECTION].update_many({"x": 1}, {"$inc": {"x": 3}}) + db[MONGODB_COLLECTION].delete_one({"x": 4}) + db[MONGODB_COLLECTION].delete_many({"x": 4}) + db[MONGODB_COLLECTION].find_raw_batches() + db[MONGODB_COLLECTION].create_indexes([pymongo.IndexModel([("x", pymongo.DESCENDING)])]) + db[MONGODB_COLLECTION].list_indexes() + db[MONGODB_COLLECTION].aggregate([]) + db[MONGODB_COLLECTION].aggregate_raw_batches([]) + db[MONGODB_COLLECTION].find_one_and_delete({"x": 10}) + db[MONGODB_COLLECTION].find_one_and_replace({"x": 300}, {"x": 301}) + db[MONGODB_COLLECTION].find_one_and_update({"x": 301}, {"$inc": {"x": 300}}) + + +def _exercise_mongo_v4(db): + db[MONGODB_COLLECTION].insert_one({"x": 10}) + db[MONGODB_COLLECTION].insert_one({"x": 8}) + db[MONGODB_COLLECTION].insert_one({"x": 11}) + 
db[MONGODB_COLLECTION].find_one() -# Common Metrics for tests that use _exercise_mongo(). + for item in db[MONGODB_COLLECTION].find(): + item["x"] -_all_count = 9 -if pymongo.version_tuple >= (3, 0): - _all_count += 19 - -_test_pymongo_scoped_metrics = [ - ('Datastore/statement/MongoDB/%s/save' % MONGODB_COLLECTION, 3), - ('Datastore/statement/MongoDB/%s/create_index' % MONGODB_COLLECTION, 1), - ('Datastore/statement/MongoDB/%s/find' % MONGODB_COLLECTION, 3), - ('Datastore/statement/MongoDB/%s/find_one' % MONGODB_COLLECTION, 1)] - -_test_pymongo_rollup_metrics = [ - ('Datastore/all', _all_count), - ('Datastore/allOther', _all_count), - ('Datastore/MongoDB/all', _all_count), - ('Datastore/MongoDB/allOther', _all_count), - ('Datastore/operation/MongoDB/save', 3), - ('Datastore/operation/MongoDB/create_index', 1), - ('Datastore/operation/MongoDB/find', 3), - ('Datastore/operation/MongoDB/find_one', 1), - ('Datastore/statement/MongoDB/%s/save' % MONGODB_COLLECTION, 3), - ('Datastore/statement/MongoDB/%s/create_index' % MONGODB_COLLECTION, 1), - ('Datastore/statement/MongoDB/%s/find' % MONGODB_COLLECTION, 3), - ('Datastore/statement/MongoDB/%s/find_one' % MONGODB_COLLECTION, 1)] - -if pymongo.version_tuple >= (3, 0): - _test_pymongo_scoped_metrics.extend([ - (('Datastore/statement/MongoDB/%s' % MONGODB_COLLECTION + - '/initialize_unordered_bulk_op'), 1), - (('Datastore/statement/MongoDB/%s' % MONGODB_COLLECTION + - '/initialize_ordered_bulk_op'), 1), - ('Datastore/statement/MongoDB/%s/bulk_write' % MONGODB_COLLECTION, 1), - ('Datastore/statement/MongoDB/%s/insert_one' % MONGODB_COLLECTION, 1), - ('Datastore/statement/MongoDB/%s/insert_many' % MONGODB_COLLECTION, 1), - ('Datastore/statement/MongoDB/%s/replace_one' % MONGODB_COLLECTION, 1), - ('Datastore/statement/MongoDB/%s/update_one' % MONGODB_COLLECTION, 1), - ('Datastore/statement/MongoDB/%s/delete_one' % MONGODB_COLLECTION, 1), - ('Datastore/statement/MongoDB/%s/delete_many' % MONGODB_COLLECTION, 1), - ('Datastore/statement/MongoDB/%s/find_raw_batches' % MONGODB_COLLECTION, 1), - ('Datastore/statement/MongoDB/%s/parallel_scan' % MONGODB_COLLECTION, 1), - ('Datastore/statement/MongoDB/%s/create_indexes' % MONGODB_COLLECTION, 1), - ('Datastore/statement/MongoDB/%s/list_indexes' % MONGODB_COLLECTION, 1), - ('Datastore/statement/MongoDB/%s/aggregate' % MONGODB_COLLECTION, 1), - ('Datastore/statement/MongoDB/%s/aggregate_raw_batches' % MONGODB_COLLECTION, 1), - ('Datastore/statement/MongoDB/%s/find_one_and_delete' % MONGODB_COLLECTION, 1), - ('Datastore/statement/MongoDB/%s/find_one_and_replace' % MONGODB_COLLECTION, 1), - ('Datastore/statement/MongoDB/%s/find_one_and_update' % MONGODB_COLLECTION, 1), - ]) - _test_pymongo_rollup_metrics.extend([ - ('Datastore/operation/MongoDB/initialize_unordered_bulk_op', 1), - ('Datastore/operation/MongoDB/initialize_ordered_bulk_op', 1), - ('Datastore/operation/MongoDB/bulk_write', 1), - ('Datastore/operation/MongoDB/insert_one', 1), - ('Datastore/operation/MongoDB/insert_many', 1), - ('Datastore/operation/MongoDB/replace_one', 1), - ('Datastore/operation/MongoDB/update_one', 1), - ('Datastore/operation/MongoDB/delete_one', 1), - ('Datastore/operation/MongoDB/delete_many', 1), - ('Datastore/operation/MongoDB/find_raw_batches', 1), - ('Datastore/operation/MongoDB/parallel_scan', 1), - ('Datastore/operation/MongoDB/create_indexes', 1), - ('Datastore/operation/MongoDB/list_indexes', 1), - ('Datastore/operation/MongoDB/aggregate', 1), - ('Datastore/operation/MongoDB/aggregate_raw_batches', 1), - 
('Datastore/operation/MongoDB/find_one_and_delete', 1), - ('Datastore/operation/MongoDB/find_one_and_replace', 1), - ('Datastore/operation/MongoDB/find_one_and_update', 1), - (('Datastore/statement/MongoDB/%s' % MONGODB_COLLECTION + - '/initialize_unordered_bulk_op'), 1), - (('Datastore/statement/MongoDB/%s' % MONGODB_COLLECTION + - '/initialize_ordered_bulk_op'), 1), - ('Datastore/statement/MongoDB/%s/bulk_write' % MONGODB_COLLECTION, 1), - ('Datastore/statement/MongoDB/%s/insert_one' % MONGODB_COLLECTION, 1), - ('Datastore/statement/MongoDB/%s/insert_many' % MONGODB_COLLECTION, 1), - ('Datastore/statement/MongoDB/%s/replace_one' % MONGODB_COLLECTION, 1), - ('Datastore/statement/MongoDB/%s/update_one' % MONGODB_COLLECTION, 1), - ('Datastore/statement/MongoDB/%s/delete_one' % MONGODB_COLLECTION, 1), - ('Datastore/statement/MongoDB/%s/delete_many' % MONGODB_COLLECTION, 1), - ('Datastore/statement/MongoDB/%s/find_raw_batches' % MONGODB_COLLECTION, 1), - ('Datastore/statement/MongoDB/%s/parallel_scan' % MONGODB_COLLECTION, 1), - ('Datastore/statement/MongoDB/%s/create_indexes' % MONGODB_COLLECTION, 1), - ('Datastore/statement/MongoDB/%s/list_indexes' % MONGODB_COLLECTION, 1), - ('Datastore/statement/MongoDB/%s/aggregate' % MONGODB_COLLECTION, 1), - ('Datastore/statement/MongoDB/%s/aggregate_raw_batches' % MONGODB_COLLECTION, 1), - ('Datastore/statement/MongoDB/%s/find_one_and_delete' % MONGODB_COLLECTION, 1), - ('Datastore/statement/MongoDB/%s/find_one_and_replace' % MONGODB_COLLECTION, 1), - ('Datastore/statement/MongoDB/%s/find_one_and_update' % MONGODB_COLLECTION, 1), - ]) - -# Add Connection metric - -_test_pymongo_connection_scoped_metrics = (_test_pymongo_scoped_metrics + - [('Function/pymongo.connection:Connection.__init__', 1)]) - -_test_pymongo_connection_rollup_metrics = (_test_pymongo_rollup_metrics + - [('Function/pymongo.connection:Connection.__init__', 1)]) - - -@pytest.mark.skipif(pymongo.version_tuple >= (3, 0), - reason='PyMongo version does not have pymongo.Connection.') -@validate_transaction_errors(errors=[]) -@validate_transaction_metrics( - 'test_pymongo:test_mongodb_connection_operation', - scoped_metrics=_test_pymongo_connection_scoped_metrics, - rollup_metrics=_test_pymongo_connection_rollup_metrics, - background_task=True) -@background_task() -def test_mongodb_connection_operation(): - connection = pymongo.Connection(MONGODB_HOST, MONGODB_PORT) - db = connection.test - _exercise_mongo(db) + db[MONGODB_COLLECTION].create_index("x") + for item in db[MONGODB_COLLECTION].find().sort("x", pymongo.ASCENDING): + item["x"] -# Add MongoClient metric + [item["x"] for item in db[MONGODB_COLLECTION].find().limit(2).skip(1)] -_test_pymongo_mongo_client_scoped_metrics = (_test_pymongo_scoped_metrics + - [('Function/pymongo.mongo_client:MongoClient.__init__', 1)]) + db[MONGODB_COLLECTION].bulk_write([pymongo.InsertOne({"x": 1})]) + db[MONGODB_COLLECTION].insert_one({"x": 300}) + db[MONGODB_COLLECTION].insert_many([{"x": 1} for i in range(20, 25)]) + db[MONGODB_COLLECTION].replace_one({"x": 1}, {"x": 2}) + db[MONGODB_COLLECTION].update_one({"x": 1}, {"$inc": {"x": 3}}) + db[MONGODB_COLLECTION].update_many({"x": 1}, {"$inc": {"x": 3}}) + db[MONGODB_COLLECTION].delete_one({"x": 4}) + db[MONGODB_COLLECTION].delete_many({"x": 4}) + db[MONGODB_COLLECTION].find_raw_batches() + db[MONGODB_COLLECTION].create_indexes([pymongo.IndexModel([("x", pymongo.DESCENDING)])]) + db[MONGODB_COLLECTION].list_indexes() + db[MONGODB_COLLECTION].aggregate([]) + 
db[MONGODB_COLLECTION].aggregate_raw_batches([]) + db[MONGODB_COLLECTION].find_one_and_delete({"x": 10}) + db[MONGODB_COLLECTION].find_one_and_replace({"x": 300}, {"x": 301}) + db[MONGODB_COLLECTION].find_one_and_update({"x": 301}, {"$inc": {"x": 300}}) -_test_pymongo_mongo_client_rollup_metrics = (_test_pymongo_rollup_metrics + - [('Function/pymongo.mongo_client:MongoClient.__init__', 1)]) +def _exercise_mongo(db): + if pymongo.version_tuple < (4, 0): + _exercise_mongo_v3(db) + else: + _exercise_mongo_v4(db) -@validate_transaction_errors(errors=[]) -@validate_transaction_metrics( - 'test_pymongo:test_mongodb_mongo_client_operation', - scoped_metrics=_test_pymongo_mongo_client_scoped_metrics, - rollup_metrics=_test_pymongo_mongo_client_rollup_metrics, - background_task=True) -@background_task() -def test_mongodb_mongo_client_operation(): - client = pymongo.MongoClient(MONGODB_HOST, MONGODB_PORT) - db = client.test - _exercise_mongo(db) + +# Common Metrics for tests that use _exercise_mongo(). + + +_test_pymongo_scoped_metrics_v3 = [ + ("Function/pymongo.mongo_client:MongoClient.__init__", 1), + ("Datastore/statement/MongoDB/%s/create_index" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/find" % MONGODB_COLLECTION, 3), + ("Datastore/statement/MongoDB/%s/find_one" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/save" % MONGODB_COLLECTION, 3), + ("Datastore/statement/MongoDB/%s" % MONGODB_COLLECTION + "/initialize_unordered_bulk_op", 1), + ("Datastore/statement/MongoDB/%s" % MONGODB_COLLECTION + "/initialize_ordered_bulk_op", 1), + ("Datastore/statement/MongoDB/%s/parallel_scan" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/insert_one" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/bulk_write" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/insert_many" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/replace_one" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/update_one" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/delete_one" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/delete_many" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/find_raw_batches" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/create_indexes" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/list_indexes" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/aggregate" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/aggregate_raw_batches" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/find_one_and_delete" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/find_one_and_replace" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/find_one_and_update" % MONGODB_COLLECTION, 1), +] + + +_test_pymongo_scoped_metrics_v4 = [ + ("Function/pymongo.mongo_client:MongoClient.__init__", 1), + ("Datastore/statement/MongoDB/%s/create_index" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/find" % MONGODB_COLLECTION, 3), + ("Datastore/statement/MongoDB/%s/find_one" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/insert_one" % MONGODB_COLLECTION, 4), + ("Datastore/statement/MongoDB/%s/bulk_write" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/insert_many" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/replace_one" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/update_one" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/delete_one" % 
MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/delete_many" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/find_raw_batches" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/create_indexes" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/list_indexes" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/aggregate" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/aggregate_raw_batches" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/find_one_and_delete" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/find_one_and_replace" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/find_one_and_update" % MONGODB_COLLECTION, 1), +] + +_test_pymongo_rollup_metrics_v3 = [ + ("Function/pymongo.mongo_client:MongoClient.__init__", 1), + ("Datastore/all", 28), + ("Datastore/allOther", 28), + ("Datastore/MongoDB/all", 28), + ("Datastore/MongoDB/allOther", 28), + ("Datastore/operation/MongoDB/create_index", 1), + ("Datastore/operation/MongoDB/find", 3), + ("Datastore/operation/MongoDB/find_one", 1), + ("Datastore/statement/MongoDB/%s/create_index" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/find" % MONGODB_COLLECTION, 3), + ("Datastore/statement/MongoDB/%s/find_one" % MONGODB_COLLECTION, 1), + ("Datastore/operation/MongoDB/save", 3), + ("Datastore/operation/MongoDB/initialize_unordered_bulk_op", 1), + ("Datastore/operation/MongoDB/initialize_ordered_bulk_op", 1), + ("Datastore/operation/MongoDB/parallel_scan", 1), + ("Datastore/statement/MongoDB/%s/save" % MONGODB_COLLECTION, 3), + (("Datastore/statement/MongoDB/%s" % MONGODB_COLLECTION + "/initialize_unordered_bulk_op"), 1), + (("Datastore/statement/MongoDB/%s" % MONGODB_COLLECTION + "/initialize_ordered_bulk_op"), 1), + ("Datastore/statement/MongoDB/%s/parallel_scan" % MONGODB_COLLECTION, 1), + ("Datastore/operation/MongoDB/bulk_write", 1), + ("Datastore/operation/MongoDB/insert_one", 1), + ("Datastore/operation/MongoDB/insert_many", 1), + ("Datastore/operation/MongoDB/replace_one", 1), + ("Datastore/operation/MongoDB/update_one", 1), + ("Datastore/operation/MongoDB/delete_one", 1), + ("Datastore/operation/MongoDB/delete_many", 1), + ("Datastore/operation/MongoDB/find_raw_batches", 1), + ("Datastore/operation/MongoDB/create_indexes", 1), + ("Datastore/operation/MongoDB/list_indexes", 1), + ("Datastore/operation/MongoDB/aggregate", 1), + ("Datastore/operation/MongoDB/aggregate_raw_batches", 1), + ("Datastore/operation/MongoDB/find_one_and_delete", 1), + ("Datastore/operation/MongoDB/find_one_and_replace", 1), + ("Datastore/operation/MongoDB/find_one_and_update", 1), + ("Datastore/statement/MongoDB/%s/bulk_write" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/insert_one" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/insert_many" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/replace_one" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/update_one" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/delete_one" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/delete_many" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/find_raw_batches" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/create_indexes" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/list_indexes" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/aggregate" % MONGODB_COLLECTION, 1), + ("Datastore/statement/MongoDB/%s/aggregate_raw_batches" % 
MONGODB_COLLECTION, 1),
+    ("Datastore/statement/MongoDB/%s/find_one_and_delete" % MONGODB_COLLECTION, 1),
+    ("Datastore/statement/MongoDB/%s/find_one_and_replace" % MONGODB_COLLECTION, 1),
+    ("Datastore/statement/MongoDB/%s/find_one_and_update" % MONGODB_COLLECTION, 1),
+]
+
+_test_pymongo_rollup_metrics_v4 = [
+    ("Function/pymongo.mongo_client:MongoClient.__init__", 1),
+    ("Datastore/all", 25),
+    ("Datastore/allOther", 25),
+    ("Datastore/MongoDB/all", 25),
+    ("Datastore/MongoDB/allOther", 25),
+    ("Datastore/operation/MongoDB/create_index", 1),
+    ("Datastore/operation/MongoDB/find", 3),
+    ("Datastore/operation/MongoDB/find_one", 1),
+    ("Datastore/statement/MongoDB/%s/create_index" % MONGODB_COLLECTION, 1),
+    ("Datastore/statement/MongoDB/%s/find" % MONGODB_COLLECTION, 3),
+    ("Datastore/statement/MongoDB/%s/find_one" % MONGODB_COLLECTION, 1),
+    ("Datastore/operation/MongoDB/bulk_write", 1),
+    ("Datastore/operation/MongoDB/insert_one", 4),
+    ("Datastore/operation/MongoDB/insert_many", 1),
+    ("Datastore/operation/MongoDB/replace_one", 1),
+    ("Datastore/operation/MongoDB/update_one", 1),
+    ("Datastore/operation/MongoDB/delete_one", 1),
+    ("Datastore/operation/MongoDB/delete_many", 1),
+    ("Datastore/operation/MongoDB/find_raw_batches", 1),
+    ("Datastore/operation/MongoDB/create_indexes", 1),
+    ("Datastore/operation/MongoDB/list_indexes", 1),
+    ("Datastore/operation/MongoDB/aggregate", 1),
+    ("Datastore/operation/MongoDB/aggregate_raw_batches", 1),
+    ("Datastore/operation/MongoDB/find_one_and_delete", 1),
+    ("Datastore/operation/MongoDB/find_one_and_replace", 1),
+    ("Datastore/operation/MongoDB/find_one_and_update", 1),
+    ("Datastore/statement/MongoDB/%s/bulk_write" % MONGODB_COLLECTION, 1),
+    ("Datastore/statement/MongoDB/%s/insert_one" % MONGODB_COLLECTION, 4),
+    ("Datastore/statement/MongoDB/%s/insert_many" % MONGODB_COLLECTION, 1),
+    ("Datastore/statement/MongoDB/%s/replace_one" % MONGODB_COLLECTION, 1),
+    ("Datastore/statement/MongoDB/%s/update_one" % MONGODB_COLLECTION, 1),
+    ("Datastore/statement/MongoDB/%s/delete_one" % MONGODB_COLLECTION, 1),
+    ("Datastore/statement/MongoDB/%s/delete_many" % MONGODB_COLLECTION, 1),
+    ("Datastore/statement/MongoDB/%s/find_raw_batches" % MONGODB_COLLECTION, 1),
+    ("Datastore/statement/MongoDB/%s/create_indexes" % MONGODB_COLLECTION, 1),
+    ("Datastore/statement/MongoDB/%s/list_indexes" % MONGODB_COLLECTION, 1),
+    ("Datastore/statement/MongoDB/%s/aggregate" % MONGODB_COLLECTION, 1),
+    ("Datastore/statement/MongoDB/%s/aggregate_raw_batches" % MONGODB_COLLECTION, 1),
+    ("Datastore/statement/MongoDB/%s/find_one_and_delete" % MONGODB_COLLECTION, 1),
+    ("Datastore/statement/MongoDB/%s/find_one_and_replace" % MONGODB_COLLECTION, 1),
+    ("Datastore/statement/MongoDB/%s/find_one_and_update" % MONGODB_COLLECTION, 1),
+]
+
+
+def test_mongodb_client_operation():
+    if pymongo.version_tuple < (4, 0):
+        _test_pymongo_client_scoped_metrics = _test_pymongo_scoped_metrics_v3
+        _test_pymongo_client_rollup_metrics = _test_pymongo_rollup_metrics_v3
+    else:
+        _test_pymongo_client_scoped_metrics = _test_pymongo_scoped_metrics_v4
+        _test_pymongo_client_rollup_metrics = _test_pymongo_rollup_metrics_v4
+
+    txn_name = "test_pymongo:test_mongodb_client_operation.<locals>._test" if six.PY3 else "test_pymongo:_test"
+
+    @validate_transaction_errors(errors=[])
+    @validate_transaction_metrics(
+        txn_name,
+        scoped_metrics=_test_pymongo_client_scoped_metrics,
+        rollup_metrics=_test_pymongo_client_rollup_metrics,
+        background_task=True,
+    )
+    @background_task()
+    def _test():
+        client = 
pymongo.MongoClient(MONGODB_HOST, MONGODB_PORT) + db = client.test + _exercise_mongo(db) + + _test() @validate_database_duration() diff --git a/tox.ini b/tox.ini index fc662e88a..0fa1e2b71 100644 --- a/tox.ini +++ b/tox.ini @@ -81,7 +81,8 @@ envlist = postgres-datastore_psycopg2cffi-{py37,py38,py39,py310}-psycopg2cffi0208, memcached-datastore_pylibmc-{py27,py36,py37}, memcached-datastore_pymemcache-{py27,py36,py37,py38,py39,py310,pypy,pypy36}, - mongodb-datastore_pymongo-{py27,py36,py37,py38,py39,py310,pypy}-pymongo{03}, + mongodb-datastore_pymongo-{py27,py36,py37,py38,py39,py310,pypy}-pymongo03, + mongodb-datastore_pymongo-{py37,py38,py39,py310,pypy,pypy37}-pymongo04, mysql-datastore_pymysql-{py27,py36,py37,py38,py39,py310,pypy,pypy36}, solr-datastore_pysolr-{py27,py36,py37,py38,py39,py310,pypy,pypy36}, redis-datastore_redis-{py27,py36,py37,py38,pypy,pypy36}-redis03, @@ -223,6 +224,7 @@ deps = datastore_pylibmc: pylibmc datastore_pymemcache: pymemcache datastore_pymongo-pymongo03: pymongo<4.0 + datastore_pymongo-pymongo04: pymongo<5.0 datastore_pymysql: PyMySQL<0.11 datastore_pysolr: pysolr<4.0 datastore_redis-redislatest: redis From b6377fae2abebf65443b1cd05b339284fd29edaa Mon Sep 17 00:00:00 2001 From: Timothy Pansino <11214426+TimPansino@users.noreply.github.com> Date: Thu, 4 Aug 2022 11:06:32 -0700 Subject: [PATCH 09/49] Fix PyPy coroutines testing to exclude uvloop (#601) * Fix PyPy coroutines testing to exclude uvloop * [Mega-Linter] Apply linters fixes * Bump Tests Co-authored-by: TimPansino --- tests/coroutines_asyncio/test_context_propagation.py | 11 +++++++++-- tox.ini | 4 ++-- 2 files changed, 11 insertions(+), 4 deletions(-) diff --git a/tests/coroutines_asyncio/test_context_propagation.py b/tests/coroutines_asyncio/test_context_propagation.py index 10d0ecd52..3beef38d0 100644 --- a/tests/coroutines_asyncio/test_context_propagation.py +++ b/tests/coroutines_asyncio/test_context_propagation.py @@ -15,7 +15,6 @@ import sys import pytest -import uvloop from testing_support.fixtures import ( function_not_called, override_generic_settings, @@ -34,6 +33,14 @@ from newrelic.core.config import global_settings from newrelic.core.trace_cache import trace_cache +# uvloop is not available on PyPy. 
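+# Import it defensively and build the loop-policy matrix from whatever is
+# importable, so the uvloop case is parametrized only on interpreters where
+# it can actually run.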
+try: + import uvloop + + loop_policies = (None, uvloop.EventLoopPolicy()) +except ImportError: + loop_policies = (None,) + @function_trace("waiter3") async def child(): @@ -88,7 +95,7 @@ async def _test(asyncio, schedule, nr_enabled=True): return trace -@pytest.mark.parametrize("loop_policy", (None, uvloop.EventLoopPolicy())) +@pytest.mark.parametrize("loop_policy", loop_policies) @pytest.mark.parametrize( "schedule", ( diff --git a/tox.ini b/tox.ini index 0fa1e2b71..6cd87f2be 100644 --- a/tox.ini +++ b/tox.ini @@ -64,7 +64,7 @@ envlist = python-component_graphqlserver-{py36,py37,py38,py39,py310}, python-component_tastypie-{py27,pypy}-tastypie0143, python-component_tastypie-{py36,py37,py38,py39,pypy36}-tastypie{0143,latest}, - python-coroutines_asyncio-{py36,py37,py38,py39,py310,pypy36}, + python-coroutines_asyncio-{py36,py37,py38,py39,py310,pypy37}, python-cross_agent-{py27,py36,py37,py38,py39,py310}-{with,without}_extensions, python-cross_agent-pypy-without_extensions, postgres-datastore_asyncpg-{py36,py37,py38,py39,py310}, @@ -200,7 +200,7 @@ deps = component_tastypie-{py36,py37,py38,py39,py310,pypy36,pypy37}-tastypie0143: django<3.0.1 component_tastypie-tastypielatest: django-tastypie component_tastypie-tastypielatest: django<4.1 - coroutines_asyncio: uvloop + coroutines_asyncio-{py36,py37,py38,py39,py310}: uvloop cross_agent: mock==1.0.1 cross_agent: requests datastore_asyncpg: asyncpg From 45982dda06be246a1a504dd99969a84768911324 Mon Sep 17 00:00:00 2001 From: Nyenty Ayuk Date: Thu, 4 Aug 2022 15:01:34 -0400 Subject: [PATCH 10/49] Add PYPY 3.7 tox tests (#590) * Updated tox file to include testing for pypy3.7 * Add PYPY 3.7 tox tests * Update pyramid tests * Update tox.ini * Update tox.ini * Update tox.ini Co-authored-by: Timothy Pansino <11214426+TimPansino@users.noreply.github.com> Co-authored-by: Tim Pansino --- tox.ini | 91 ++++++++++++++++++++++++++++----------------------------- 1 file changed, 45 insertions(+), 46 deletions(-) diff --git a/tox.ini b/tox.ini index 6cd87f2be..3fc7acac9 100644 --- a/tox.ini +++ b/tox.ini @@ -16,7 +16,7 @@ ; framework_aiohttp-aiohttp01: aiohttp<2 ; framework_aiohttp-aiohttp0202: aiohttp<2.3 ; 3. Python version required. Uses the standard tox definitions. (https://tox.readthedocs.io/en/latest/config.html#tox-environments) -; Examples: py27,py36,py37,py38,py39,pypy,pypy36 +; Examples: py27,py36,py37,py38,py39,pypy,pypy37 ; 4. Library and version (Optional). Used when testing multiple versions of the library, and may be omitted when only testing a single version. ; Versions should be specified with 2 digits per version number, so <3 becomes 02 and <3.5 becomes 0304. latest and master are also acceptable versions. ; Examples: uvicorn03, CherryPy0302, uvicornlatest @@ -28,7 +28,7 @@ ; 5. With or without New Relic C extensions (Optional). Used for testing agent features. 
; Examples: with_extensions, without_extensions ; envlist = -; python-agent_features-pypy36-without_extensions, +; python-agent_features-pypy37-without_extensions, ; python-agent_streaming-py37-{with,without}_extensions, ; ; Full Format: @@ -51,19 +51,19 @@ envlist = python-adapter_uvicorn-{py36}-uvicorn014 python-adapter_uvicorn-{py37,py38,py39,py310}-uvicornlatest, python-agent_features-{py27,py36,py37,py38,py39,py310}-{with,without}_extensions, - python-agent_features-{pypy,pypy36}-without_extensions, + python-agent_features-{pypy,pypy37}-without_extensions, python-agent_streaming-py27-grpc0125-{with,without}_extensions, python-agent_streaming-{py36,py37,py38,py39,py310}-{with,without}_extensions, python-agent_unittests-{py27,py36,py37,py38,py39,py310}-{with,without}_extensions, - python-agent_unittests-{pypy,pypy36}-without_extensions, - python-application_celery-{py27,py36,py37,py38,py39,py310,pypy,pypy36}, + python-agent_unittests-{pypy,pypy37}-without_extensions, + python-application_celery-{py27,py36,py37,py38,py39,py310,pypy,pypy37}, gearman-application_gearman-{py27,pypy}, python-component_djangorestframework-py27-djangorestframework0300, python-component_djangorestframework-{py36,py37,py38,py39,py310}-djangorestframeworklatest, - python-component_flask_rest-{py27,py36,py37,py38,py39,pypy,pypy36}, + python-component_flask_rest-{py27,py36,py37,py38,py39,pypy,pypy37}, python-component_graphqlserver-{py36,py37,py38,py39,py310}, python-component_tastypie-{py27,pypy}-tastypie0143, - python-component_tastypie-{py36,py37,py38,py39,pypy36}-tastypie{0143,latest}, + python-component_tastypie-{py36,py37,py38,py39,pypy37}-tastypie{0143,latest}, python-coroutines_asyncio-{py36,py37,py38,py39,py310,pypy37}, python-cross_agent-{py27,py36,py37,py38,py39,py310}-{with,without}_extensions, python-cross_agent-pypy-without_extensions, @@ -71,8 +71,8 @@ envlist = memcached-datastore_bmemcached-{pypy,py27,py36,py37,py38,py39,py310}-memcached030, elasticsearchserver01-datastore_pyelasticsearch-{py27,py36,pypy}, elasticsearchserver01-datastore_elasticsearch-py27-elasticsearch{00,01,02,05}, - elasticsearchserver07-datastore_elasticsearch-{py27,py36,py37,py38,py39,py310,pypy,pypy36}-elasticsearch{07}, - memcached-datastore_memcache-{py27,py36,py37,py38,py39,py310,pypy,pypy36}-memcached01, + elasticsearchserver07-datastore_elasticsearch-{py27,py36,py37,py38,py39,py310,pypy,pypy37}-elasticsearch{07}, + memcached-datastore_memcache-{py27,py36,py37,py38,py39,py310,pypy,pypy37}-memcached01, mysql-datastore_mysql-mysql080023-py27, mysql-datastore_mysql-mysqllatest-{py37,py38,py39,py310}, postgres-datastore_postgresql-{py36,py37,py38,py39}, @@ -80,74 +80,73 @@ envlist = postgres-datastore_psycopg2cffi-{py27,py36,pypy}-psycopg2cffi{0207,0208}, postgres-datastore_psycopg2cffi-{py37,py38,py39,py310}-psycopg2cffi0208, memcached-datastore_pylibmc-{py27,py36,py37}, - memcached-datastore_pymemcache-{py27,py36,py37,py38,py39,py310,pypy,pypy36}, - mongodb-datastore_pymongo-{py27,py36,py37,py38,py39,py310,pypy}-pymongo03, + memcached-datastore_pymemcache-{py27,py36,py37,py38,py39,py310,pypy,pypy37}, + mongodb-datastore_pymongo-{py27,py36,py37,py38,py39,py310,pypy}-pymongo{03}, mongodb-datastore_pymongo-{py37,py38,py39,py310,pypy,pypy37}-pymongo04, - mysql-datastore_pymysql-{py27,py36,py37,py38,py39,py310,pypy,pypy36}, - solr-datastore_pysolr-{py27,py36,py37,py38,py39,py310,pypy,pypy36}, - redis-datastore_redis-{py27,py36,py37,py38,pypy,pypy36}-redis03, - 
redis-datastore_redis-{py36,py37,py38,py39,py310,pypy36}-redis{0400,latest}, - redis-datastore_aioredis-{py36,py37,py38,py39,py310,pypy36}-aioredislatest, + mysql-datastore_pymysql-{py27,py36,py37,py38,py39,py310,pypy,pypy37}, + solr-datastore_pysolr-{py27,py36,py37,py38,py39,py310,pypy,pypy37}, + redis-datastore_redis-{py27,py36,py37,py38,pypy,pypy37}-redis03, + redis-datastore_redis-{py36,py37,py38,py39,py310,pypy37}-redis{0400,latest}, + redis-datastore_aioredis-{py36,py37,py38,py39,py310,pypy37}-aioredislatest, redis-datastore_aioredis-py39-aioredis01, - redis-datastore_aredis-{py36,py37,py38,py39,pypy36}-aredislatest, + redis-datastore_aredis-{py36,py37,py38,py39,pypy37}-aredislatest, solr-datastore_solrpy-{py27,pypy}-solrpy{00,01}, - python-datastore_sqlite-{py27,py36,py37,py38,py39,py310,pypy,pypy36}, + python-datastore_sqlite-{py27,py36,py37,py38,py39,py310,pypy,pypy37}, memcached-datastore_umemcache-{py27,pypy}, python-external_boto3-{py27,py36,py37,py38,py39,py310}-boto01, python-external_botocore-{py27,py36,py37,py38,py39,py310}, python-external_feedparser-py27-feedparser{05,06}, python-external_http-{py27,py36,py37,py38,py39,py310,pypy}, - python-external_httplib-{py27,py36,py37,py38,py39,py310,pypy,pypy36}, - python-external_httplib2-{py27,py36,py37,py38,py39,py310,pypy,pypy36}, + python-external_httplib-{py27,py36,py37,py38,py39,py310,pypy,pypy37}, + python-external_httplib2-{py27,py36,py37,py38,py39,py310,pypy,pypy37}, python-external_httpx-{py36,py37,py38,py39,py310}, - python-external_requests-{py27,py36,py37,py38,py39,py310,pypy,pypy36}, + python-external_requests-{py27,py36,py37,py38,py39,py310,pypy,pypy37}, python-external_urllib3-{py27,py37,pypy}-urllib3{0109}, - python-external_urllib3-{py27,py36,py37,py38,py39,py310,pypy,pypy36}-urllib3latest, - python-framework_aiohttp-{py36,py37,py38,py39,py310,pypy36}-aiohttp03, + python-external_urllib3-{py27,py36,py37,py38,py39,py310,pypy,pypy37}-urllib3latest, + python-framework_aiohttp-{py36,py37,py38,py39,py310,pypy37}-aiohttp03, python-framework_ariadne-{py36,py37,py38,py39,py310}-ariadnelatest, python-framework_ariadne-py37-ariadne{0011,0012,0013}, python-framework_bottle-py27-bottle{0008,0009,0010}, - python-framework_bottle-{py27,py36,py37,py38,py39,pypy36}-bottle{0011,0012}, + python-framework_bottle-{py27,py36,py37,py38,py39,pypy37}-bottle{0011,0012}, python-framework_bottle-py310-bottle0012, python-framework_bottle-pypy-bottle{0008,0009,0010,0011,0012}, - python-framework_cherrypy-{py36,py37,py38,py39,py310,pypy36}-CherryPy18, + python-framework_cherrypy-{py36,py37,py38,py39,py310,pypy37}-CherryPy18, python-framework_cherrypy-{py36,py37}-CherryPy0302, - python-framework_cherrypy-pypy36-CherryPy0303, + python-framework_cherrypy-pypy37-CherryPy0303, python-framework_django-{pypy,py27}-Django0103, python-framework_django-{pypy,py27,py37}-Django0108, python-framework_django-{py39}-Django{0200,0201,0202,0300,0301,latest}, python-framework_django-{py36,py37,py38,py39,py310}-Django0302, - python-framework_falcon-{py27,py36,py37,py38,py39,pypy,pypy36}-falcon0103, - python-framework_falcon-{py36,py37,py38,py39,py310,pypy36}-falcon{0200,master}, + python-framework_falcon-{py27,py36,py37,py38,py39,pypy,pypy37}-falcon0103, + python-framework_falcon-{py36,py37,py38,py39,py310,pypy37}-falcon{0200,master}, python-framework_fastapi-{py36,py37,py38,py39,py310}, python-framework_flask-{pypy,py27}-flask0012, - python-framework_flask-{pypy,py27,py36,py37,py38,py39,py310,pypy36}-flask0101, + 
python-framework_flask-{pypy,py27,py36,py37,py38,py39,py310,pypy37}-flask0101, ; temporarily disabling flaskmaster tests python-framework_flask-{py37,py38,py39,py310,pypy37}-flask{latest}, python-framework_graphene-{py36,py37,py38,py39,py310}-graphenelatest, - python-framework_graphene-{py27,py36,py37,py38,py39,pypy,pypy36}-graphene{0200,0201}, + python-framework_graphene-{py27,py36,py37,py38,py39,pypy,pypy37}-graphene{0200,0201}, python-framework_graphene-py310-graphene0201, - python-framework_graphql-{py27,py36,py37,py38,py39,py310,pypy,pypy36}-graphql02, - python-framework_graphql-{py36,py37,py38,py39,py310,pypy36}-graphql03, + python-framework_graphql-{py27,py36,py37,py38,py39,py310,pypy,pypy37}-graphql02, + python-framework_graphql-{py36,py37,py38,py39,py310,pypy37}-graphql03, python-framework_graphql-py37-graphql{0202,0203,0300,0301,0302,master}, grpc-framework_grpc-{py27,py36}-grpc0125, grpc-framework_grpc-{py36,py37,py38,py39,py310}-grpclatest, python-framework_pyramid-{pypy,py27,py38}-Pyramid0104, - python-framework_pyramid-{pypy,py27,pypy36,py36,py37,py38,py39,py310}-Pyramid0110-cornice, - ;temporarily disabling pypy36 on pyramid master - python-framework_pyramid-{py37,py38,py39,py310}-Pyramidmaster, - python-framework_sanic-{py38,pypy36}-sanic{190301,1906,1812,1912,200904,210300}, - python-framework_sanic-{py36,py37,py38,py310,pypy36}-saniclatest, - python-framework_starlette-{py36,py310,pypy36}-starlette{0014,0015,0019}, - python-framework_starlette-{py36,py37,py38,py39,py310,pypy36}-starlettelatest, + python-framework_pyramid-{pypy,py27,pypy37,py36,py37,py38,py39,py310}-Pyramid0110-cornice, + python-framework_pyramid-{py37,py38,py39,py310,pypy37}-Pyramidmaster, + python-framework_sanic-{py38,pypy37}-sanic{190301,1906,1812,1912,200904,210300}, + python-framework_sanic-{py36,py37,py38,py310,pypy37}-saniclatest, + python-framework_starlette-{py36,py310,pypy37}-starlette{0014,0015,0019}, + python-framework_starlette-{py36,py37,py38,py39,py310,pypy37}-starlettelatest, python-framework_strawberry-{py37,py38,py39,py310}-strawberrylatest, - python-logger_logging-{py27,py36,py37,py38,py39,py310,pypy,pypy36}, - python-logger_loguru-{py36,py37,py38,py39,py310,pypy36}-logurulatest, + python-logger_logging-{py27,py36,py37,py38,py39,py310,pypy,pypy37}, + python-logger_loguru-{py36,py37,py38,py39,py310,pypy37}-logurulatest, python-logger_loguru-py39-loguru{06,05,04,03}, - libcurl-framework_tornado-{py36,py37,py38,py39,py310,pypy36}-tornado0600, + libcurl-framework_tornado-{py36,py37,py38,py39,py310,pypy37}-tornado0600, libcurl-framework_tornado-{py37,py38,py39,py310}-tornadomaster, - rabbitmq-messagebroker_pika-{py27,py36,py37,py38,py39,pypy,pypy36}-pika0.13, - rabbitmq-messagebroker_pika-{py36,py37,py38,py39,py310,pypy36}-pikalatest, + rabbitmq-messagebroker_pika-{py27,py36,py37,py38,py39,pypy,pypy37}-pika0.13, + rabbitmq-messagebroker_pika-{py36,py37,py38,py39,py310,pypy37}-pikalatest, python-template_mako-{py27,py36,py37,py38,py39,py310} [pytest] @@ -159,7 +158,7 @@ usefixtures = [testenv] deps = # Base Dependencies - {py36,py37,py38,py39,py310,pypy36}: pytest==6.2.5 + {py36,py37,py38,py39,py310,pypy37}: pytest==6.2.5 {py27,pypy}: pytest==4.6.11 iniconfig pytest-cov @@ -197,7 +196,7 @@ deps = component_graphqlserver: jinja2<3.1 component_tastypie-tastypie0143: django-tastypie<0.14.4 component_tastypie-{py27,pypy}-tastypie0143: django<1.12 - component_tastypie-{py36,py37,py38,py39,py310,pypy36,pypy37}-tastypie0143: django<3.0.1 + 
component_tastypie-{py36,py37,py38,py39,py310,pypy37}-tastypie0143: django<3.0.1 component_tastypie-tastypielatest: django-tastypie component_tastypie-tastypielatest: django<4.1 coroutines_asyncio-{py36,py37,py38,py39,py310}: uvloop From 9da52c322b5a50ad3c61a9103c862a8860018ddc Mon Sep 17 00:00:00 2001 From: Nyenty Ayuk Date: Thu, 4 Aug 2022 15:48:48 -0400 Subject: [PATCH 11/49] Deprecated Python 3.6 (#602) * Cleanup of unsupported python 3.6 * Removed comment and added pypy37 base dependency * fixing typo * Update tox.ini * removal of pypy 3.6 and python 3.6 from action.yml --- .../actions/setup-python-matrix/action.yml | 10 -- .github/workflows/deploy-python.yml | 2 +- setup.py | 7 +- tox.ini | 145 +++++++++--------- 4 files changed, 75 insertions(+), 89 deletions(-) diff --git a/.github/actions/setup-python-matrix/action.yml b/.github/actions/setup-python-matrix/action.yml index 99c7f4d59..344cf686c 100644 --- a/.github/actions/setup-python-matrix/action.yml +++ b/.github/actions/setup-python-matrix/action.yml @@ -3,11 +3,6 @@ description: "Sets up all versions of python required for matrix testing in this runs: using: "composite" steps: - - uses: actions/setup-python@v3 - with: - python-version: "pypy-3.6" - architecture: x64 - - uses: actions/setup-python@v3 with: python-version: "pypy-3.7" @@ -18,11 +13,6 @@ runs: python-version: "pypy-2.7" architecture: x64 - - uses: actions/setup-python@v3 - with: - python-version: "3.6" - architecture: x64 - - uses: actions/setup-python@v3 with: python-version: "3.7" diff --git a/.github/workflows/deploy-python.yml b/.github/workflows/deploy-python.yml index d238f0720..e8fbd4f7f 100644 --- a/.github/workflows/deploy-python.yml +++ b/.github/workflows/deploy-python.yml @@ -57,7 +57,7 @@ jobs: uses: pypa/cibuildwheel@v2.1.3 env: CIBW_PLATFORM: linux - CIBW_BUILD: cp36-manylinux_aarch64 cp37-manylinux_aarch64 cp38-manylinux_aarch64 cp39-manylinux_aarch64 cp310-manylinux_aarch64 cp36-manylinux_x86_64 cp37-manylinux_x86_64 cp38-manylinux_x86_64 cp39-manylinux_x86_64 cp310-manylinux_x86_64 + CIBW_BUILD: cp37-manylinux_aarch64 cp38-manylinux_aarch64 cp39-manylinux_aarch64 cp310-manylinux_aarch64 cp37-manylinux_x86_64 cp38-manylinux_x86_64 cp39-manylinux_x86_64 cp310-manylinux_x86_64 CIBW_ARCHS: x86_64 aarch64 CIBW_ENVIRONMENT: "LD_LIBRARY_PATH=/opt/rh/devtoolset-8/root/usr/lib64:/opt/rh/devtoolset-8/root/usr/lib:/opt/rh/devtoolset-8/root/usr/lib64/dyninst:/opt/rh/devtoolset-8/root/usr/lib/dyninst:/usr/local/lib64:/usr/local/lib" diff --git a/setup.py b/setup.py index 9dc1b97c2..5fdf2005f 100644 --- a/setup.py +++ b/setup.py @@ -21,8 +21,8 @@ assert python_version in ((2, 7),) or python_version >= ( 3, - 6, -), "The New Relic Python agent only supports Python 2.7 and 3.6+." + 7, +), "The New Relic Python agent only supports Python 2.7 and 3.7+." 
with_setuptools = False @@ -120,7 +120,6 @@ def build_extension(self, ext): "Development Status :: 5 - Production/Stable", "License :: OSI Approved :: Apache Software License", "Programming Language :: Python :: 2.7", - "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", @@ -151,7 +150,7 @@ def build_extension(self, ext): zip_safe=False, classifiers=classifiers, packages=packages, - python_requires=">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*", + python_requires=">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*", package_data={ "newrelic": ["newrelic.ini", "version.txt", "packages/urllib3/LICENSE.txt", "common/cacert.pem"], }, diff --git a/tox.ini b/tox.ini index 3fc7acac9..2f7ee4bcc 100644 --- a/tox.ini +++ b/tox.ini @@ -16,7 +16,7 @@ ; framework_aiohttp-aiohttp01: aiohttp<2 ; framework_aiohttp-aiohttp0202: aiohttp<2.3 ; 3. Python version required. Uses the standard tox definitions. (https://tox.readthedocs.io/en/latest/config.html#tox-environments) -; Examples: py27,py36,py37,py38,py39,pypy,pypy37 +; Examples: py27,py37,py38,py39,pypy,pypy37 ; 4. Library and version (Optional). Used when testing multiple versions of the library, and may be omitted when only testing a single version. ; Versions should be specified with 2 digits per version number, so <3 becomes 02 and <3.5 becomes 0304. latest and master are also acceptable versions. ; Examples: uvicorn03, CherryPy0302, uvicornlatest @@ -42,112 +42,109 @@ [tox] setupdir = {toxinidir} envlist = - python-adapter_cheroot-{py27,py36,py37,py38,py39,py310}, - python-adapter_gevent-{py27,py36,py37,py38,py310}, - python-adapter_gunicorn-{py36}-aiohttp1-gunicorn{19,latest}, - python-adapter_gunicorn-{py36,py37,py38,py39,py310}-aiohttp3-gunicornlatest, - python-adapter_uvicorn-{py36,py37}-uvicorn03, - ; Temporarily testing py36 on the uvicorn version preceeding v0.15 - python-adapter_uvicorn-{py36}-uvicorn014 + python-adapter_cheroot-{py27,py37,py38,py39,py310}, + python-adapter_gevent-{py27,py37,py38,py310}, + python-adapter_gunicorn-{py37,py38,py39,py310}-aiohttp3-gunicornlatest, + python-adapter_uvicorn-py37-uvicorn03, python-adapter_uvicorn-{py37,py38,py39,py310}-uvicornlatest, - python-agent_features-{py27,py36,py37,py38,py39,py310}-{with,without}_extensions, + python-agent_features-{py27,py37,py38,py39,py310}-{with,without}_extensions, python-agent_features-{pypy,pypy37}-without_extensions, python-agent_streaming-py27-grpc0125-{with,without}_extensions, - python-agent_streaming-{py36,py37,py38,py39,py310}-{with,without}_extensions, - python-agent_unittests-{py27,py36,py37,py38,py39,py310}-{with,without}_extensions, + python-agent_streaming-{py37,py38,py39,py310}-{with,without}_extensions, + python-agent_unittests-{py27,py37,py38,py39,py310}-{with,without}_extensions, python-agent_unittests-{pypy,pypy37}-without_extensions, - python-application_celery-{py27,py36,py37,py38,py39,py310,pypy,pypy37}, + python-application_celery-{py27,py37,py38,py39,py310,pypy,pypy37}, gearman-application_gearman-{py27,pypy}, python-component_djangorestframework-py27-djangorestframework0300, - python-component_djangorestframework-{py36,py37,py38,py39,py310}-djangorestframeworklatest, - python-component_flask_rest-{py27,py36,py37,py38,py39,pypy,pypy37}, - python-component_graphqlserver-{py36,py37,py38,py39,py310}, + python-component_djangorestframework-{py37,py38,py39,py310}-djangorestframeworklatest, + 
python-component_flask_rest-{py27,py37,py38,py39,pypy,pypy37}, + python-component_graphqlserver-{py37,py38,py39,py310}, python-component_tastypie-{py27,pypy}-tastypie0143, - python-component_tastypie-{py36,py37,py38,py39,pypy37}-tastypie{0143,latest}, - python-coroutines_asyncio-{py36,py37,py38,py39,py310,pypy37}, - python-cross_agent-{py27,py36,py37,py38,py39,py310}-{with,without}_extensions, + python-component_tastypie-{py37,py38,py39,pypy37}-tastypie{0143,latest}, + python-coroutines_asyncio-{py37,py38,py39,py310,pypy37}, + python-cross_agent-{py27,py37,py38,py39,py310}-{with,without}_extensions, python-cross_agent-pypy-without_extensions, - postgres-datastore_asyncpg-{py36,py37,py38,py39,py310}, - memcached-datastore_bmemcached-{pypy,py27,py36,py37,py38,py39,py310}-memcached030, - elasticsearchserver01-datastore_pyelasticsearch-{py27,py36,pypy}, + postgres-datastore_asyncpg-{py37,py38,py39,py310}, + memcached-datastore_bmemcached-{pypy,py27,py37,py38,py39,py310}-memcached030, + elasticsearchserver01-datastore_pyelasticsearch-{py27,pypy}, elasticsearchserver01-datastore_elasticsearch-py27-elasticsearch{00,01,02,05}, - elasticsearchserver07-datastore_elasticsearch-{py27,py36,py37,py38,py39,py310,pypy,pypy37}-elasticsearch{07}, - memcached-datastore_memcache-{py27,py36,py37,py38,py39,py310,pypy,pypy37}-memcached01, + elasticsearchserver07-datastore_elasticsearch-{py27,py37,py38,py39,py310,pypy,pypy37}-elasticsearch{07}, + memcached-datastore_memcache-{py27,py37,py38,py39,py310,pypy,pypy37}-memcached01, mysql-datastore_mysql-mysql080023-py27, mysql-datastore_mysql-mysqllatest-{py37,py38,py39,py310}, - postgres-datastore_postgresql-{py36,py37,py38,py39}, - postgres-datastore_psycopg2-{py27,py36,py37,py38,py39,py310}-psycopg20208, - postgres-datastore_psycopg2cffi-{py27,py36,pypy}-psycopg2cffi{0207,0208}, + postgres-datastore_postgresql-{py37,py38,py39}, + postgres-datastore_psycopg2-{py27,py37,py38,py39,py310}-psycopg20208, + postgres-datastore_psycopg2cffi-{py27,pypy}-psycopg2cffi{0207,0208}, postgres-datastore_psycopg2cffi-{py37,py38,py39,py310}-psycopg2cffi0208, - memcached-datastore_pylibmc-{py27,py36,py37}, - memcached-datastore_pymemcache-{py27,py36,py37,py38,py39,py310,pypy,pypy37}, - mongodb-datastore_pymongo-{py27,py36,py37,py38,py39,py310,pypy}-pymongo{03}, + memcached-datastore_pylibmc-{py27,py37}, + memcached-datastore_pymemcache-{py27,py37,py38,py39,py310,pypy,pypy37}, + mongodb-datastore_pymongo-{py27,py37,py38,py39,py310,pypy}-pymongo{03}, mongodb-datastore_pymongo-{py37,py38,py39,py310,pypy,pypy37}-pymongo04, - mysql-datastore_pymysql-{py27,py36,py37,py38,py39,py310,pypy,pypy37}, - solr-datastore_pysolr-{py27,py36,py37,py38,py39,py310,pypy,pypy37}, - redis-datastore_redis-{py27,py36,py37,py38,pypy,pypy37}-redis03, - redis-datastore_redis-{py36,py37,py38,py39,py310,pypy37}-redis{0400,latest}, - redis-datastore_aioredis-{py36,py37,py38,py39,py310,pypy37}-aioredislatest, + mysql-datastore_pymysql-{py27,py37,py38,py39,py310,pypy,pypy37}, + solr-datastore_pysolr-{py27,py37,py38,py39,py310,pypy,pypy37}, + redis-datastore_redis-{py27,py37,py38,pypy,pypy37}-redis03, + redis-datastore_redis-{py37,py38,py39,py310,pypy37}-redis{0400,latest}, + redis-datastore_aioredis-{py37,py38,py39,py310,pypy37}-aioredislatest, redis-datastore_aioredis-py39-aioredis01, - redis-datastore_aredis-{py36,py37,py38,py39,pypy37}-aredislatest, + redis-datastore_aredis-{py37,py38,py39,pypy37}-aredislatest, solr-datastore_solrpy-{py27,pypy}-solrpy{00,01}, - 
python-datastore_sqlite-{py27,py36,py37,py38,py39,py310,pypy,pypy37}, + python-datastore_sqlite-{py27,py37,py38,py39,py310,pypy,pypy37}, memcached-datastore_umemcache-{py27,pypy}, - python-external_boto3-{py27,py36,py37,py38,py39,py310}-boto01, - python-external_botocore-{py27,py36,py37,py38,py39,py310}, + python-external_boto3-{py27,py37,py38,py39,py310}-boto01, + python-external_botocore-{py27,py37,py38,py39,py310}, python-external_feedparser-py27-feedparser{05,06}, - python-external_http-{py27,py36,py37,py38,py39,py310,pypy}, - python-external_httplib-{py27,py36,py37,py38,py39,py310,pypy,pypy37}, - python-external_httplib2-{py27,py36,py37,py38,py39,py310,pypy,pypy37}, - python-external_httpx-{py36,py37,py38,py39,py310}, - python-external_requests-{py27,py36,py37,py38,py39,py310,pypy,pypy37}, + python-external_http-{py27,py37,py38,py39,py310,pypy}, + python-external_httplib-{py27,py37,py38,py39,py310,pypy,pypy37}, + python-external_httplib2-{py27,py37,py38,py39,py310,pypy,pypy37}, + python-external_httpx-{py37,py38,py39,py310}, + python-external_requests-{py27,py37,py38,py39,py310,pypy,pypy37}, python-external_urllib3-{py27,py37,pypy}-urllib3{0109}, - python-external_urllib3-{py27,py36,py37,py38,py39,py310,pypy,pypy37}-urllib3latest, - python-framework_aiohttp-{py36,py37,py38,py39,py310,pypy37}-aiohttp03, - python-framework_ariadne-{py36,py37,py38,py39,py310}-ariadnelatest, + python-external_urllib3-{py27,py37,py38,py39,py310,pypy,pypy37}-urllib3latest, + python-framework_aiohttp-{py37,py38,py39,py310,pypy37}-aiohttp03, + python-framework_ariadne-{py37,py38,py39,py310}-ariadnelatest, python-framework_ariadne-py37-ariadne{0011,0012,0013}, python-framework_bottle-py27-bottle{0008,0009,0010}, - python-framework_bottle-{py27,py36,py37,py38,py39,pypy37}-bottle{0011,0012}, + python-framework_bottle-{py27,py37,py38,py39,pypy37}-bottle{0011,0012}, python-framework_bottle-py310-bottle0012, python-framework_bottle-pypy-bottle{0008,0009,0010,0011,0012}, - python-framework_cherrypy-{py36,py37,py38,py39,py310,pypy37}-CherryPy18, - python-framework_cherrypy-{py36,py37}-CherryPy0302, + python-framework_cherrypy-{py37,py38,py39,py310,pypy37}-CherryPy18, + python-framework_cherrypy-{py37}-CherryPy0302, python-framework_cherrypy-pypy37-CherryPy0303, python-framework_django-{pypy,py27}-Django0103, python-framework_django-{pypy,py27,py37}-Django0108, python-framework_django-{py39}-Django{0200,0201,0202,0300,0301,latest}, - python-framework_django-{py36,py37,py38,py39,py310}-Django0302, - python-framework_falcon-{py27,py36,py37,py38,py39,pypy,pypy37}-falcon0103, - python-framework_falcon-{py36,py37,py38,py39,py310,pypy37}-falcon{0200,master}, - python-framework_fastapi-{py36,py37,py38,py39,py310}, + python-framework_django-{py37,py38,py39,py310}-Django0302, + python-framework_falcon-{py27,py37,py38,py39,pypy,pypy37}-falcon0103, + python-framework_falcon-{py37,py38,py39,py310,pypy37}-falcon{0200,master}, + python-framework_fastapi-{py37,py38,py39,py310}, python-framework_flask-{pypy,py27}-flask0012, - python-framework_flask-{pypy,py27,py36,py37,py38,py39,py310,pypy37}-flask0101, + python-framework_flask-{pypy,py27,py37,py38,py39,py310,pypy37}-flask0101, ; temporarily disabling flaskmaster tests python-framework_flask-{py37,py38,py39,py310,pypy37}-flask{latest}, - python-framework_graphene-{py36,py37,py38,py39,py310}-graphenelatest, - python-framework_graphene-{py27,py36,py37,py38,py39,pypy,pypy37}-graphene{0200,0201}, + python-framework_graphene-{py37,py38,py39,py310}-graphenelatest, + 
python-framework_graphene-{py27,py37,py38,py39,pypy,pypy37}-graphene{0200,0201}, python-framework_graphene-py310-graphene0201, - python-framework_graphql-{py27,py36,py37,py38,py39,py310,pypy,pypy37}-graphql02, - python-framework_graphql-{py36,py37,py38,py39,py310,pypy37}-graphql03, + python-framework_graphql-{py27,py37,py38,py39,py310,pypy,pypy37}-graphql02, + python-framework_graphql-{py37,py38,py39,py310,pypy37}-graphql03, python-framework_graphql-py37-graphql{0202,0203,0300,0301,0302,master}, - grpc-framework_grpc-{py27,py36}-grpc0125, - grpc-framework_grpc-{py36,py37,py38,py39,py310}-grpclatest, + grpc-framework_grpc-{py27}-grpc0125, + grpc-framework_grpc-{py37,py38,py39,py310}-grpclatest, python-framework_pyramid-{pypy,py27,py38}-Pyramid0104, - python-framework_pyramid-{pypy,py27,pypy37,py36,py37,py38,py39,py310}-Pyramid0110-cornice, + python-framework_pyramid-{pypy,py27,pypy37,py37,py38,py39,py310}-Pyramid0110-cornice, python-framework_pyramid-{py37,py38,py39,py310,pypy37}-Pyramidmaster, python-framework_sanic-{py38,pypy37}-sanic{190301,1906,1812,1912,200904,210300}, - python-framework_sanic-{py36,py37,py38,py310,pypy37}-saniclatest, - python-framework_starlette-{py36,py310,pypy37}-starlette{0014,0015,0019}, - python-framework_starlette-{py36,py37,py38,py39,py310,pypy37}-starlettelatest, + python-framework_sanic-{py37,py38,py310,pypy37}-saniclatest, + python-framework_starlette-{py310,pypy37}-starlette{0014,0015,0019}, + python-framework_starlette-{py37,py38,py39,py310,pypy37}-starlettelatest, python-framework_strawberry-{py37,py38,py39,py310}-strawberrylatest, - python-logger_logging-{py27,py36,py37,py38,py39,py310,pypy,pypy37}, - python-logger_loguru-{py36,py37,py38,py39,py310,pypy37}-logurulatest, + python-logger_logging-{py27,py37,py38,py39,py310,pypy,pypy37}, + python-logger_loguru-{py37,py38,py39,py310,pypy37}-logurulatest, python-logger_loguru-py39-loguru{06,05,04,03}, - libcurl-framework_tornado-{py36,py37,py38,py39,py310,pypy37}-tornado0600, + libcurl-framework_tornado-{py37,py38,py39,py310,pypy37}-tornado0600, libcurl-framework_tornado-{py37,py38,py39,py310}-tornadomaster, - rabbitmq-messagebroker_pika-{py27,py36,py37,py38,py39,pypy,pypy37}-pika0.13, - rabbitmq-messagebroker_pika-{py36,py37,py38,py39,py310,pypy37}-pikalatest, - python-template_mako-{py27,py36,py37,py38,py39,py310} + rabbitmq-messagebroker_pika-{py27,py37,py38,py39,pypy,pypy37}-pika0.13, + rabbitmq-messagebroker_pika-{py37,py38,py39,py310,pypy37}-pikalatest, + python-template_mako-{py27,py37,py38,py39,py310} [pytest] usefixtures = @@ -158,7 +155,7 @@ usefixtures = [testenv] deps = # Base Dependencies - {py36,py37,py38,py39,py310,pypy37}: pytest==6.2.5 + {py37,py38,py39,py310,pypy37}: pytest==6.2.5 {py27,pypy}: pytest==4.6.11 iniconfig pytest-cov @@ -196,10 +193,10 @@ deps = component_graphqlserver: jinja2<3.1 component_tastypie-tastypie0143: django-tastypie<0.14.4 component_tastypie-{py27,pypy}-tastypie0143: django<1.12 - component_tastypie-{py36,py37,py38,py39,py310,pypy37}-tastypie0143: django<3.0.1 + component_tastypie-{py37,py38,py39,py310,pypy37}-tastypie0143: django<3.0.1 component_tastypie-tastypielatest: django-tastypie component_tastypie-tastypielatest: django<4.1 - coroutines_asyncio-{py36,py37,py38,py39,py310}: uvloop + coroutines_asyncio-{py37,py38,py39,py310}: uvloop cross_agent: mock==1.0.1 cross_agent: requests datastore_asyncpg: asyncpg @@ -240,7 +237,7 @@ deps = external_boto3-boto01: moto<2.0 external_boto3-py27: rsa<4.7.1 external_botocore: botocore - 
external_botocore-{py36,py37,py38,py39,py310}: moto[awslambda,ec2,iam]<3.0 + external_botocore-{py37,py38,py39,py310}: moto[awslambda,ec2,iam]<3.0 external_botocore-py27: rsa<4.7.1 external_botocore-py27: moto[awslambda,ec2,iam]<2.0 external_feedparser-feedparser05: feedparser<6 From a81513dc150eceebedf91be3c38c91d2b65d2518 Mon Sep 17 00:00:00 2001 From: Rafael Perrella Date: Thu, 4 Aug 2022 21:08:03 -0300 Subject: [PATCH 12/49] Fix loguru detection of module/function name (#604) * Fix loguru detection of module/function name * Add regression test * Add PyPy wrapper to loguru depth Co-authored-by: Tim Pansino --- newrelic/hooks/logger_loguru.py | 12 +++++ tests/logger_loguru/test_stack_inspection.py | 56 ++++++++++++++++++++ 2 files changed, 68 insertions(+) create mode 100644 tests/logger_loguru/test_stack_inspection.py diff --git a/newrelic/hooks/logger_loguru.py b/newrelic/hooks/logger_loguru.py index 65eadb1c4..801a1c8cd 100644 --- a/newrelic/hooks/logger_loguru.py +++ b/newrelic/hooks/logger_loguru.py @@ -13,6 +13,7 @@ # limitations under the License. import logging +import sys from newrelic.api.application import application_instance from newrelic.api.transaction import current_transaction, record_log_event @@ -22,6 +23,7 @@ from newrelic.packages import six _logger = logging.getLogger(__name__) +is_pypy = hasattr(sys, "pypy_version_info") def loguru_version(): from loguru import __version__ @@ -71,6 +73,16 @@ def wrap_log(wrapped, instance, args, kwargs): try: level_id, static_level_no, from_decorator, options, message, subargs, subkwargs = bind_log(*args, **kwargs) options[-2] = nr_log_patcher(options[-2]) + # Loguru looks into the stack trace to find the caller's module and function names. + # options[1] tells loguru how far up to look in the stack trace to find the caller. + # Because wrap_log is an extra call in the stack trace, loguru needs to look 1 level higher. + if not is_pypy: + options[1] += 1 + else: + # PyPy inspection requires an additional frame of offset, as the wrapt internals seem to + # add another frame on PyPy but not on CPython. + options[1] += 2 + except Exception as e: _logger.debug("Exception in loguru handling: %s" % str(e)) return wrapped(*args, **kwargs) diff --git a/tests/logger_loguru/test_stack_inspection.py b/tests/logger_loguru/test_stack_inspection.py new file mode 100644 index 000000000..fb2738ac2 --- /dev/null +++ b/tests/logger_loguru/test_stack_inspection.py @@ -0,0 +1,56 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
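+
+# Regression test for the stack-inspection fix above: the agent's wrap_log
+# wrapper adds an extra frame to the call stack (two on PyPy), so loguru must
+# be given an adjusted depth to report the caller's real file and function.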
+
+import pytest
+
+from conftest import CaplogHandler
+
+from newrelic.api.background_task import background_task
+from testing_support.fixtures import reset_core_stats_engine
+from testing_support.validators.validate_log_event_count import validate_log_event_count
+from testing_support.validators.validate_log_events import validate_log_events
+from testing_support.fixtures import override_application_settings
+
+
+@pytest.fixture(scope="function")
+def filepath_logger():
+    import loguru
+    _logger = loguru.logger
+    caplog = CaplogHandler()
+    handler_id = _logger.add(caplog, level="WARNING", format="{file}:{function} - {message}")
+    _logger.caplog = caplog
+    yield _logger
+    del caplog.records[:]
+    _logger.remove(handler_id)
+
+
+@override_application_settings({
+    "application_logging.local_decorating.enabled": False,
+})
+@reset_core_stats_engine()
+def test_filepath_inspection(filepath_logger):
+    # Test for a regression in stack inspection that caused log messages to
+    # report the wrong file and function name.
+    # See https://github.com/newrelic/newrelic-python-agent/issues/603
+
+    @validate_log_events([{"message": "A", "level": "ERROR"}])
+    @validate_log_event_count(1)
+    @background_task()
+    def test():
+        filepath_logger.error("A")
+        assert len(filepath_logger.caplog.records) == 1
+        record = filepath_logger.caplog.records[0]
+        assert record == "test_stack_inspection.py:test - A", record
+
+    test()

From e89152830f8ce7b58721022e87b8519e1f16646e Mon Sep 17 00:00:00 2001
From: Hannah Stepanek
Date: Fri, 5 Aug 2022 15:07:39 -0700
Subject: [PATCH 13/49] Capture module versions (#588)

* Change n is None to not n

None types are falsey, so we can shorten this expression to `if not module`.

* Use in instead of .find

`in` is more performant than find for searching a string, so use this instead.

* Simplify and combine sub path module logic

Do not include module.sub_paths as separate modules. Skip these except for `newrelic.hooks`.

* Exclude standard lib/built-in modules

Previously, we were capturing standard library and built-in Python modules as plugins. These are included with the version of Python the user installed and are not packages that need to be captured, so exclude these from the list.

* Capture module versions

* Fixup: remove pkg_resources check

* Ignore pylint function-redefined

* Check plugin version info in tests
---
 newrelic/core/environment.py              | 52 ++++++++++++++---------
 tests/agent_unittests/test_environment.py | 14 +++---
 2 files changed, 39 insertions(+), 27 deletions(-)

diff --git a/newrelic/core/environment.py b/newrelic/core/environment.py
index f198155a2..f63047ab5 100644
--- a/newrelic/core/environment.py
+++ b/newrelic/core/environment.py
@@ -20,6 +20,7 @@
 import os
 import platform
 import sys
+import sysconfig
 
 import newrelic
 from newrelic.common.system_info import (
@@ -178,41 +179,50 @@ def environment_settings():
     env.extend(dispatcher)
 
     # Module information.
+    purelib = sysconfig.get_path("purelib")
+    platlib = sysconfig.get_path("platlib")
+
     plugins = []
 
+    get_version = None
+    # importlib.metadata was introduced into the standard library in Python 3.8.
+    if "importlib" in sys.modules and hasattr(sys.modules["importlib"], "metadata"):
+        get_version = sys.modules["importlib"].metadata.version
+    elif "pkg_resources" in sys.modules:
+
+        def get_version(name):  # pylint: disable=function-redefined
+            return sys.modules["pkg_resources"].get_distribution(name).version
+
     # Using any iterable to create a snapshot of sys.modules can occasionally
     # fail in a rare case when modules are imported in parallel by different
     # threads.
     #
     # TL;DR: Do NOT use an iterable on the original sys.modules to generate the
     # list
     for name, module in sys.modules.copy().items():
+        # Exclude lib.sub_paths as independent modules except for newrelic.hooks.
+        if "." in name and not name.startswith("newrelic.hooks."):
+            continue
         # If the module isn't actually loaded (such as failed relative imports
         # in Python 2.7), the module will be None and should not be reported.
-        if module is None:
+        if not module:
+            continue
+        # Exclude standard library/built-in modules.
+        # Third-party modules can be installed in either purelib or platlib directories.
+        # See https://docs.python.org/3/library/sysconfig.html#installation-paths.
+        if (
+            not hasattr(module, "__file__")
+            or not module.__file__
+            or not (module.__file__.startswith(purelib) or module.__file__.startswith(platlib))
+        ):
             continue
 
-        if name.startswith("newrelic.hooks."):
-            plugins.append(name)
-
-        elif name.find(".") == -1 and hasattr(module, "__file__"):
-            # XXX This is disabled as it can cause notable overhead in
-            # pathalogical cases. Will be replaced with a new system
-            # where have a allowlist of packages we really want version
-            # information for and will work out on case by case basis
-            # how to extract that from the modules themselves.
-
-            # try:
-            #     if 'pkg_resources' in sys.modules:
-            #         version = pkg_resources.get_distribution(name).version
-            #         if version:
-            #             name = '%s (%s)' % (name, version)
-            # except Exception:
-            #     pass
-
-            plugins.append(name)
+        try:
+            version = get_version(name)
+            plugins.append("%s (%s)" % (name, version))
+        except Exception:
+            pass
 
     env.append(("Plugin List", plugins))
 
diff --git a/tests/agent_unittests/test_environment.py b/tests/agent_unittests/test_environment.py
index ef5c5e448..b2c639adc 100644
--- a/tests/agent_unittests/test_environment.py
+++ b/tests/agent_unittests/test_environment.py
@@ -12,8 +12,10 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import pytest
 import sys
+
+import pytest
+
 from newrelic.core.environment import environment_settings
 
 
@@ -29,7 +31,7 @@ class Module(object):
 
 def test_plugin_list():
     # Let's pretend we fired an import hook
-    import newrelic.hooks.adapter_gunicorn
+    import newrelic.hooks.adapter_gunicorn  # noqa: F401
 
     environment_info = environment_settings()
 
@@ -41,6 +43,8 @@
 
     # Check that bogus plugins don't get reported
     assert "newrelic.hooks.newrelic" not in plugin_list
+    # Check that a plugin that should get reported has version info.
+ assert "pytest (%s)" % (pytest.__version__) in plugin_list class NoIteratorDict(object): @@ -62,7 +66,7 @@ def __contains__(self, *args, **kwargs): def test_plugin_list_uses_no_sys_modules_iterator(monkeypatch): modules = NoIteratorDict(sys.modules) - monkeypatch.setattr(sys, 'modules', modules) + monkeypatch.setattr(sys, "modules", modules) # If environment_settings iterates over sys.modules, an attribute error will be generated environment_info = environment_settings() @@ -113,9 +117,7 @@ def test_plugin_list_uses_no_sys_modules_iterator(monkeypatch): ), ), ) -def test_uvicorn_dispatcher( - monkeypatch, loaded_modules, dispatcher, dispatcher_version, worker_version -): +def test_uvicorn_dispatcher(monkeypatch, loaded_modules, dispatcher, dispatcher_version, worker_version): # Let's pretend we load some modules for name, module in loaded_modules.items(): monkeypatch.setitem(sys.modules, name, module) From 2633a4d5c29288cbe799da55a61f3f0a31b72a12 Mon Sep 17 00:00:00 2001 From: Timothy Pansino <11214426+TimPansino@users.noreply.github.com> Date: Mon, 15 Aug 2022 11:09:49 -0700 Subject: [PATCH 14/49] Daphne ASGI Server Instrumentation (#597) * Daphne instrumentation * Daphne Testing * Add Daphne ASGI v2 testing * Fix flake8 errors * Apply linter fixes * Remove py36 testing --- newrelic/config.py | 6 +- newrelic/core/environment.py | 7 ++ newrelic/hooks/adapter_daphne.py | 33 +++++++ tests/adapter_daphne/conftest.py | 37 +++++++ tests/adapter_daphne/test_daphne.py | 136 ++++++++++++++++++++++++++ tests/adapter_uvicorn/test_uvicorn.py | 2 +- tox.ini | 6 ++ 7 files changed, 225 insertions(+), 2 deletions(-) create mode 100644 newrelic/hooks/adapter_daphne.py create mode 100644 tests/adapter_daphne/conftest.py create mode 100644 tests/adapter_daphne/test_daphne.py diff --git a/newrelic/config.py b/newrelic/config.py index 1c3571a54..a447e20bf 100644 --- a/newrelic/config.py +++ b/newrelic/config.py @@ -2540,6 +2540,8 @@ def _process_module_builtin_defaults(): _process_module_definition("uvicorn.config", "newrelic.hooks.adapter_uvicorn", "instrument_uvicorn_config") + _process_module_definition("daphne.server", "newrelic.hooks.adapter_daphne", "instrument_daphne_server") + _process_module_definition("sanic.app", "newrelic.hooks.framework_sanic", "instrument_sanic_app") _process_module_definition("sanic.response", "newrelic.hooks.framework_sanic", "instrument_sanic_response") @@ -2712,7 +2714,9 @@ def _process_module_builtin_defaults(): ) _process_module_definition( - "redis.commands.timeseries.commands", "newrelic.hooks.datastore_redis", "instrument_redis_commands_timeseries_commands" + "redis.commands.timeseries.commands", + "newrelic.hooks.datastore_redis", + "instrument_redis_commands_timeseries_commands", ) _process_module_definition( diff --git a/newrelic/core/environment.py b/newrelic/core/environment.py index f63047ab5..17b03813c 100644 --- a/newrelic/core/environment.py +++ b/newrelic/core/environment.py @@ -170,6 +170,13 @@ def environment_settings(): if hasattr(uvicorn, "__version__"): dispatcher.append(("Dispatcher Version", uvicorn.__version__)) + if not dispatcher and "daphne" in sys.modules: + dispatcher.append(("Dispatcher", "daphne")) + daphne = sys.modules["daphne"] + + if hasattr(daphne, "__version__"): + dispatcher.append(("Dispatcher Version", daphne.__version__)) + if not dispatcher and "tornado" in sys.modules: dispatcher.append(("Dispatcher", "tornado")) tornado = sys.modules["tornado"] diff --git a/newrelic/hooks/adapter_daphne.py b/newrelic/hooks/adapter_daphne.py new 
file mode 100644 index 000000000..430d9c4b3 --- /dev/null +++ b/newrelic/hooks/adapter_daphne.py @@ -0,0 +1,33 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from newrelic.api.asgi_application import ASGIApplicationWrapper + + +@property +def application(self): + return getattr(self, "_nr_application", vars(self).get("application", None)) + + +@application.setter +def application(self, value): + # Wrap app only once + if value and not getattr(value, "_nr_wrapped", False): + value = ASGIApplicationWrapper(value) + value._nr_wrapped = True + self._nr_application = value + + +def instrument_daphne_server(module): + module.Server.application = application diff --git a/tests/adapter_daphne/conftest.py b/tests/adapter_daphne/conftest.py new file mode 100644 index 000000000..cda62f22e --- /dev/null +++ b/tests/adapter_daphne/conftest.py @@ -0,0 +1,37 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from testing_support.fixtures import ( # noqa: F401; pylint: disable=W0611 + code_coverage_fixture, + collector_agent_registration_fixture, + collector_available_fixture, +) + +_coverage_source = [ + "newrelic.hooks.adapter_daphne", +] + +code_coverage = code_coverage_fixture(source=_coverage_source) + +_default_settings = { + "transaction_tracer.explain_threshold": 0.0, + "transaction_tracer.transaction_threshold": 0.0, + "transaction_tracer.stack_trace_threshold": 0.0, + "debug.log_data_collector_payloads": True, + "debug.record_transaction_failure": True, +} + +collector_agent_registration = collector_agent_registration_fixture( + app_name="Python Agent Test (adapter_daphne)", default_settings=_default_settings +) diff --git a/tests/adapter_daphne/test_daphne.py b/tests/adapter_daphne/test_daphne.py new file mode 100644 index 000000000..4953e9a9f --- /dev/null +++ b/tests/adapter_daphne/test_daphne.py @@ -0,0 +1,136 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
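+
+# The tests below boot a real daphne Server in a daemon thread against a
+# placeholder ASGI app, use the ready callback to capture the server and its
+# event loop, and then swap each parametrized application onto the server
+# (exercising the wrapped `application` setter) before issuing requests.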
+ +import asyncio +import threading +from urllib.request import HTTPError, urlopen + +import daphne.server +import pytest +from testing_support.fixtures import ( + override_application_settings, + raise_background_exceptions, + validate_transaction_errors, + validate_transaction_metrics, + wait_for_background_threads, +) +from testing_support.sample_asgi_applications import ( + AppWithCall, + AppWithCallRaw, + simple_app_v2_raw, +) +from testing_support.util import get_open_port + +from newrelic.common.object_names import callable_name + +DAPHNE_VERSION = tuple(int(v) for v in daphne.__version__.split(".")[:2]) +skip_asgi_3_unsupported = pytest.mark.skipif(DAPHNE_VERSION < (3, 0), reason="ASGI3 unsupported") +skip_asgi_2_unsupported = pytest.mark.skipif(DAPHNE_VERSION >= (3, 0), reason="ASGI2 unsupported") + + +@pytest.fixture( + params=( + pytest.param( + simple_app_v2_raw, + marks=skip_asgi_2_unsupported, + ), + pytest.param( + AppWithCallRaw(), + marks=skip_asgi_3_unsupported, + ), + pytest.param( + AppWithCall(), + marks=skip_asgi_3_unsupported, + ), + ), + ids=("raw", "class_with_call", "class_with_call_double_wrapped"), +) +def app(request, server_and_port): + app = request.param + server, _ = server_and_port + server.application = app + return app + + +@pytest.fixture(scope="session") +def port(server_and_port): + _, port = server_and_port + return port + + +@pytest.fixture(scope="session") +def server_and_port(): + port = get_open_port() + + servers = [] + loops = [] + ready = threading.Event() + + def server_run(): + def on_ready(): + if not ready.is_set(): + loops.append(asyncio.get_event_loop()) + servers.append(server) + ready.set() + + async def fake_app(*args, **kwargs): + raise RuntimeError("Failed to swap out app.") + + server = daphne.server.Server( + fake_app, + endpoints=["tcp:%d:interface=127.0.0.1" % port], + ready_callable=on_ready, + signal_handlers=False, + verbosity=9, + ) + + server.run() + + thread = threading.Thread(target=server_run, daemon=True) + thread.start() + assert ready.wait(timeout=10) + yield servers[0], port + + reactor = daphne.server.reactor + _ = [loop.call_soon_threadsafe(reactor.stop) for loop in loops] # Stop all loops + thread.join(timeout=10) + + if thread.is_alive(): + raise RuntimeError("Thread failed to exit in time.") + + +@override_application_settings({"transaction_name.naming_scheme": "framework"}) +def test_daphne_200(port, app): + @validate_transaction_metrics(callable_name(app)) + @raise_background_exceptions() + @wait_for_background_threads() + def response(): + return urlopen("http://localhost:%d" % port, timeout=10) + + assert response().status == 200 + + +@override_application_settings({"transaction_name.naming_scheme": "framework"}) +@validate_transaction_errors(["builtins:ValueError"]) +def test_daphne_500(port, app): + @validate_transaction_metrics(callable_name(app)) + @raise_background_exceptions() + @wait_for_background_threads() + def _test(): + try: + urlopen("http://localhost:%d/exc" % port) + except HTTPError: + pass + + _test() diff --git a/tests/adapter_uvicorn/test_uvicorn.py b/tests/adapter_uvicorn/test_uvicorn.py index c93e719e8..e3261f4e8 100644 --- a/tests/adapter_uvicorn/test_uvicorn.py +++ b/tests/adapter_uvicorn/test_uvicorn.py @@ -97,7 +97,7 @@ async def on_tick(): thread = threading.Thread(target=server_run, daemon=True) thread.start() - ready.wait() + assert ready.wait(timeout=10) yield port _ = [loop.stop() for loop in loops] # Stop all loops thread.join(timeout=1) diff --git a/tox.ini b/tox.ini index 
2f7ee4bcc..d054034d2 100644 --- a/tox.ini +++ b/tox.ini @@ -43,6 +43,8 @@ setupdir = {toxinidir} envlist = python-adapter_cheroot-{py27,py37,py38,py39,py310}, + python-adapter_daphne-{py37,py38,py39,py310}-daphnelatest, + python-adapter_daphne-py38-daphne{0204,0205}, python-adapter_gevent-{py27,py37,py38,py310}, python-adapter_gunicorn-{py37,py38,py39,py310}-aiohttp3-gunicornlatest, python-adapter_uvicorn-py37-uvicorn03, @@ -163,6 +165,9 @@ deps = # Test Suite Dependencies adapter_cheroot: cheroot + adapter_daphne-daphnelatest: daphne + adapter_daphne-daphne0205: daphne<2.6 + adapter_daphne-daphne0204: daphne<2.5 adapter_gevent: WSGIProxy2 adapter_gevent: gevent adapter_gevent: urllib3 @@ -372,6 +377,7 @@ extras = changedir = adapter_cheroot: tests/adapter_cheroot + adapter_daphne: tests/adapter_daphne adapter_gevent: tests/adapter_gevent adapter_gunicorn: tests/adapter_gunicorn adapter_uvicorn: tests/adapter_uvicorn From bde2b52b95e34acaf15202dc1426221776e13130 Mon Sep 17 00:00:00 2001 From: Hannah Stepanek Date: Fri, 19 Aug 2022 11:06:06 -0700 Subject: [PATCH 15/49] Add support for Sanic.TouchUp metaclass (#607) * Add support for Sanic.TouchUp metaclass Starting in version 21.9.0 sanic added a TouchUp metaclass that rewrites methods on the Sanic class effectively undoing our instrumentation wrapping. This adds back our instrumentation wrapping. Co-authored-by: TimPansino * Change Sanic "test app" name to "test-app" Sanic app names cannot have spaces. * Fix sanic warning * Fix sanic testing setup * Fix sanic response streaming * Fix sanic testing init logic * Add missing py39 tests for sanic Co-authored-by: TimPansino Co-authored-by: Tim Pansino --- newrelic/config.py | 3 + newrelic/hooks/framework_sanic.py | 115 +++++++++++-------- tests/framework_sanic/_target_application.py | 41 ++++--- tests/framework_sanic/conftest.py | 14 ++- tox.ini | 10 +- 5 files changed, 114 insertions(+), 69 deletions(-) diff --git a/newrelic/config.py b/newrelic/config.py index a447e20bf..90566e113 100644 --- a/newrelic/config.py +++ b/newrelic/config.py @@ -2544,6 +2544,9 @@ def _process_module_builtin_defaults(): _process_module_definition("sanic.app", "newrelic.hooks.framework_sanic", "instrument_sanic_app") _process_module_definition("sanic.response", "newrelic.hooks.framework_sanic", "instrument_sanic_response") + _process_module_definition( + "sanic.touchup.service", "newrelic.hooks.framework_sanic", "instrument_sanic_touchup_service" + ) _process_module_definition("aiohttp.wsgi", "newrelic.hooks.framework_aiohttp", "instrument_aiohttp_wsgi") _process_module_definition("aiohttp.web", "newrelic.hooks.framework_aiohttp", "instrument_aiohttp_web") diff --git a/newrelic/hooks/framework_sanic.py b/newrelic/hooks/framework_sanic.py index aabeb9b57..ab5a22eac 100644 --- a/newrelic/hooks/framework_sanic.py +++ b/newrelic/hooks/framework_sanic.py @@ -15,13 +15,12 @@ import sys from inspect import isawaitable -from newrelic.api.web_transaction import web_transaction -from newrelic.api.transaction import current_transaction -from newrelic.api.function_trace import function_trace, FunctionTrace +from newrelic.api.function_trace import FunctionTrace, function_trace from newrelic.api.time_trace import notice_error -from newrelic.common.object_wrapper import (wrap_function_wrapper, - function_wrapper) +from newrelic.api.transaction import current_transaction +from newrelic.api.web_transaction import web_transaction from newrelic.common.object_names import callable_name +from newrelic.common.object_wrapper import 
function_wrapper, wrap_function_wrapper def _bind_add(uri, methods, handler, *args, **kwargs): @@ -36,19 +35,20 @@ def _nr_wrapper_handler_(wrapped, instance, args, kwargs): return wrapped(*args, **kwargs) name = callable_name(wrapped) - view_class = getattr(wrapped, 'view_class', None) + view_class = getattr(wrapped, "view_class", None) view = view_class or wrapped if view_class: try: method = args[0].method.lower() - name = callable_name(view_class) + '.' + method + name = callable_name(view_class) + "." + method view = getattr(view_class, method) except: pass - + transaction.set_transaction_name(name, priority=3) import sanic - transaction.add_framework_info(name='Sanic', version=sanic.__version__) + + transaction.add_framework_info(name="Sanic", version=sanic.__version__) with FunctionTrace(name=name, source=view): return wrapped(*args, **kwargs) @@ -60,7 +60,7 @@ def _nr_sanic_router_add(wrapped, instance, args, kwargs): # Cache the callable_name on the handler object callable_name(handler) - if hasattr(wrapped, 'view_class'): + if hasattr(wrapped, "view_class"): callable_name(wrapped.view_class) wrapped_handler = _nr_wrapper_handler_(handler) @@ -131,7 +131,7 @@ def error_response(wrapped, instance, args, kwargs): raise else: # response can be a response object or a coroutine - if hasattr(response, 'status'): + if hasattr(response, "status"): notice_error(error=exc_info, status_code=response.status) else: notice_error(exc_info) @@ -144,18 +144,16 @@ def error_response(wrapped, instance, args, kwargs): def _sanic_app_init(wrapped, instance, args, kwargs): result = wrapped(*args, **kwargs) - error_handler = getattr(instance, 'error_handler') - if hasattr(error_handler, 'response'): - instance.error_handler.response = error_response( - error_handler.response) - if hasattr(error_handler, 'add'): - error_handler.add = _nr_sanic_error_handlers( - error_handler.add) + error_handler = getattr(instance, "error_handler") + if hasattr(error_handler, "response"): + instance.error_handler.response = error_response(error_handler.response) + if hasattr(error_handler, "add"): + error_handler.add = _nr_sanic_error_handlers(error_handler.add) - router = getattr(instance, 'router') - if hasattr(router, 'add'): + router = getattr(instance, "router") + if hasattr(router, "add"): router.add = _nr_sanic_router_add(router.add) - if hasattr(router, 'get'): + if hasattr(router, "get"): # Cache the callable_name on the router.get callable_name(router.get) router.get = _nr_sanic_router_get(router.get) @@ -172,8 +170,7 @@ def _nr_sanic_response_get_headers(wrapped, instance, args, kwargs): return result # instance is the response object - cat_headers = transaction.process_response(str(instance.status), - instance.headers.items()) + cat_headers = transaction.process_response(str(instance.status), instance.headers.items()) for header_name, header_value in cat_headers: if header_name not in instance.headers: @@ -189,11 +186,10 @@ async def _nr_sanic_response_send(wrapped, instance, args, kwargs): await result if transaction is None: - return wrapped(*args, **kwargs) + return result # instance is the response object - cat_headers = transaction.process_response(str(instance.status), - instance.headers.items()) + cat_headers = transaction.process_response(str(instance.status), instance.headers.items()) for header_name, header_value in cat_headers: if header_name not in instance.headers: @@ -201,6 +197,7 @@ async def _nr_sanic_response_send(wrapped, instance, args, kwargs): return result + def 
_nr_sanic_response_parse_headers(wrapped, instance, args, kwargs): transaction = current_transaction() @@ -208,8 +205,7 @@ def _nr_sanic_response_parse_headers(wrapped, instance, args, kwargs): return wrapped(*args, **kwargs) # instance is the response object - cat_headers = transaction.process_response(str(instance.status), - instance.headers.items()) + cat_headers = transaction.process_response(str(instance.status), instance.headers.items()) for header_name, header_value in cat_headers: if header_name not in instance.headers: @@ -219,7 +215,7 @@ def _nr_sanic_response_parse_headers(wrapped, instance, args, kwargs): def _nr_wrapper_middleware_(attach_to): - is_request_middleware = attach_to == 'request' + is_request_middleware = attach_to == "request" @function_wrapper def _wrapper(wrapped, instance, args, kwargs): @@ -238,7 +234,7 @@ def _wrapper(wrapped, instance, args, kwargs): return _wrapper -def _bind_middleware(middleware, attach_to='request', *args, **kwargs): +def _bind_middleware(middleware, attach_to="request", *args, **kwargs): return middleware, attach_to @@ -259,36 +255,55 @@ def _bind_request(request, *args, **kwargs): def _nr_sanic_transaction_wrapper_(wrapped, instance, args, kwargs): request = _bind_request(*args, **kwargs) # If the request is a websocket request do not wrap it - if request.headers.get('upgrade', '').lower() == 'websocket': + if request.headers.get("upgrade", "").lower() == "websocket": return wrapped(*args, **kwargs) return web_transaction( request_method=request.method, request_path=request.path, query_string=request.query_string, - headers=request.headers)(wrapped)(*args, **kwargs) + headers=request.headers, + )(wrapped)(*args, **kwargs) + + +def _nr_wrap_touchup_run(wrapped, instance, args, kwargs): + # TouchUp uses metaprogramming to rewrite methods of classes on startup. + # To properly wrap them we need to catch the call to TouchUp.run and + # reinstrument any methods that were replaced with uninstrumented versions. + + result = wrapped(*args, **kwargs) + + if "sanic.app" in sys.modules: + module = sys.modules["sanic.app"] + target = args[0] + + if isinstance(target, module.Sanic): + # Reinstrument class after metaclass "TouchUp" has finished rewriting methods on the class. 
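+            # The __wrapped__ attribute set by the agent's wrapt-based wrappers
+            # doubles as the marker here: if handle_request no longer carries
+            # it, TouchUp replaced the method with an uninstrumented copy, so
+            # the transaction wrapper is applied again below.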
+ target_cls = module.Sanic + if hasattr(target_cls, "handle_request") and not hasattr(target_cls.handle_request, "__wrapped__"): + wrap_function_wrapper(module, "Sanic.handle_request", _nr_sanic_transaction_wrapper_) + + return result def instrument_sanic_app(module): - wrap_function_wrapper(module, 'Sanic.handle_request', - _nr_sanic_transaction_wrapper_) - wrap_function_wrapper(module, 'Sanic.__init__', - _sanic_app_init) - wrap_function_wrapper(module, 'Sanic.register_middleware', - _nr_sanic_register_middleware_) - if hasattr(module.Sanic, 'register_named_middleware'): - wrap_function_wrapper(module, 'Sanic.register_named_middleware', - _nr_sanic_register_middleware_) + wrap_function_wrapper(module, "Sanic.handle_request", _nr_sanic_transaction_wrapper_) + wrap_function_wrapper(module, "Sanic.__init__", _sanic_app_init) + wrap_function_wrapper(module, "Sanic.register_middleware", _nr_sanic_register_middleware_) + if hasattr(module.Sanic, "register_named_middleware"): + wrap_function_wrapper(module, "Sanic.register_named_middleware", _nr_sanic_register_middleware_) def instrument_sanic_response(module): - if hasattr(module.BaseHTTPResponse, 'send'): - wrap_function_wrapper(module, 'BaseHTTPResponse.send', - _nr_sanic_response_send) + if hasattr(module.BaseHTTPResponse, "send"): + wrap_function_wrapper(module, "BaseHTTPResponse.send", _nr_sanic_response_send) else: - if hasattr(module.BaseHTTPResponse, 'get_headers'): - wrap_function_wrapper(module, 'BaseHTTPResponse.get_headers', - _nr_sanic_response_get_headers) - if hasattr(module.BaseHTTPResponse, '_parse_headers'): - wrap_function_wrapper(module, 'BaseHTTPResponse._parse_headers', - _nr_sanic_response_parse_headers) + if hasattr(module.BaseHTTPResponse, "get_headers"): + wrap_function_wrapper(module, "BaseHTTPResponse.get_headers", _nr_sanic_response_get_headers) + if hasattr(module.BaseHTTPResponse, "_parse_headers"): + wrap_function_wrapper(module, "BaseHTTPResponse._parse_headers", _nr_sanic_response_parse_headers) + + +def instrument_sanic_touchup_service(module): + if hasattr(module, "TouchUp") and hasattr(module.TouchUp, "run"): + wrap_function_wrapper(module.TouchUp, "run", _nr_wrap_touchup_run) diff --git a/tests/framework_sanic/_target_application.py b/tests/framework_sanic/_target_application.py index 03f3e4771..001ff9b23 100644 --- a/tests/framework_sanic/_target_application.py +++ b/tests/framework_sanic/_target_application.py @@ -15,11 +15,18 @@ from sanic import Blueprint, Sanic from sanic.exceptions import NotFound, SanicException, ServerError from sanic.handlers import ErrorHandler -from sanic.response import json, stream +from sanic.response import json from sanic.router import Router from sanic.views import HTTPMethodView +try: + # Old style response streaming + from sanic.response import stream +except ImportError: + stream = None + + class MethodView(HTTPMethodView): async def get(self, request): return json({"hello": "world"}) @@ -93,7 +100,7 @@ def get(self, *args): error_handler = CustomErrorHandler() router = CustomRouter() -app = Sanic(name="test app", error_handler=error_handler, router=router) +app = Sanic(name="test-app", error_handler=error_handler, router=router) router.app = app blueprint = Blueprint("test_bp") @@ -139,13 +146,25 @@ async def blueprint_middleware(request): app.register_middleware(request_middleware) +async def do_streaming(request): + if stream is not None: + # Old style response streaming + async def streaming_fn(response): + response.write("foo") + response.write("bar") + + return 
stream(streaming_fn) + else: + # New style response streaming + response = await request.respond(content_type="text/plain") + await response.send("foo") + await response.send("bar") + await response.eof() + + @app.route("/streaming") async def streaming(request): - async def streaming_fn(response): - response.write("foo") - response.write("bar") - - return stream(streaming_fn) + return await do_streaming(request) # Fake websocket endpoint to enable websockets on the server @@ -200,17 +219,11 @@ async def async_error(request): @blueprint.route("/blueprint") async def blueprint_route(request): - async def streaming_fn(response): - response.write("foo") - - return stream(streaming_fn) - + return await do_streaming(request) app.blueprint(blueprint) app.add_route(MethodView.as_view(), "/method_view") -if not getattr(router, "finalized", True): - router.finalize() if __name__ == "__main__": app.run(host="127.0.0.1", port=8000) diff --git a/tests/framework_sanic/conftest.py b/tests/framework_sanic/conftest.py index f8b5dac37..4880bf116 100644 --- a/tests/framework_sanic/conftest.py +++ b/tests/framework_sanic/conftest.py @@ -80,10 +80,13 @@ async def send(*args, **kwargs): proto = MockProtocol(loop=loop, app=app) proto.recv_buffer = bytearray() http = Http(proto) + + if hasattr(http, "init_for_request"): + http.init_for_request() + http.stage = Stage.HANDLER http.response_func = http.http1_response_header _request.stream = http - pass except ImportError: pass @@ -123,6 +126,15 @@ def request(app, method, url, headers=None): if loop is None: loop = asyncio.new_event_loop() + if not getattr(app.router, "finalized", True): + # Handle startup if the router hasn't been finalized. + # Older versions don't have this requirement or variable so + # the default should be True. 
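+        # Prefer the app's own async _startup() when it exists; in the Sanic
+        # versions that define it, startup is also what triggers the TouchUp
+        # rewrites these tests need to exercise, while router.finalize() alone
+        # is only enough for versions without _startup.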
+ if hasattr(app, "_startup"): + loop.run_until_complete(app._startup()) + else: + app.router.finalize() + coro = create_request_coroutine(app, method, url, headers, loop) loop.run_until_complete(coro) return RESPONSES.pop() diff --git a/tox.ini b/tox.ini index d054034d2..8491cd0b6 100644 --- a/tox.ini +++ b/tox.ini @@ -134,8 +134,8 @@ envlist = python-framework_pyramid-{pypy,py27,py38}-Pyramid0104, python-framework_pyramid-{pypy,py27,pypy37,py37,py38,py39,py310}-Pyramid0110-cornice, python-framework_pyramid-{py37,py38,py39,py310,pypy37}-Pyramidmaster, - python-framework_sanic-{py38,pypy37}-sanic{190301,1906,1812,1912,200904,210300}, - python-framework_sanic-{py37,py38,py310,pypy37}-saniclatest, + python-framework_sanic-{py38,pypy37}-sanic{190301,1906,1812,1912,200904,210300,2109,2112,2203}, + python-framework_sanic-{py37,py38,py39,py310,pypy37}-saniclatest, python-framework_starlette-{py310,pypy37}-starlette{0014,0015,0019}, python-framework_starlette-{py37,py38,py39,py310,pypy37}-starlettelatest, python-framework_strawberry-{py37,py38,py39,py310}-strawberrylatest, @@ -319,8 +319,10 @@ deps = framework_sanic-sanic1912: sanic<19.13 framework_sanic-sanic200904: sanic<20.9.5 framework_sanic-sanic210300: sanic<21.3.1 - ; Temporarily test older sanic version until issues are resolved - framework_sanic-saniclatest: sanic<21.9.0 + framework_sanic-sanic2109: sanic<21.10 + framework_sanic-sanic2112: sanic<21.13 + framework_sanic-sanic2203: sanic<22.4 + framework_sanic-saniclatest: sanic framework_sanic-sanic{1812,190301,1906}: aiohttp framework_starlette: graphene<3 framework_starlette-starlette0014: starlette<0.15 From 996178ce849498f7aa813117d13ec241ff15a158 Mon Sep 17 00:00:00 2001 From: Timothy Pansino <11214426+TimPansino@users.noreply.github.com> Date: Sun, 28 Aug 2022 23:12:19 -0700 Subject: [PATCH 16/49] Fix pid in memory sampler (#606) --- newrelic/samplers/memory_usage.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/newrelic/samplers/memory_usage.py b/newrelic/samplers/memory_usage.py index 15bdbaef1..11b75eef9 100644 --- a/newrelic/samplers/memory_usage.py +++ b/newrelic/samplers/memory_usage.py @@ -21,19 +21,18 @@ from newrelic.common.system_info import physical_memory_used, total_physical_memory from newrelic.samplers.decorators import data_source_generator -PID = os.getpid() - @data_source_generator(name="Memory Usage") def memory_usage_data_source(): memory = physical_memory_used() total_memory = total_physical_memory() + pid = os.getpid() # Calculate memory utilization without 0 division errors memory_utilization = (memory / total_memory) if total_memory != 0 else 0 yield ("Memory/Physical", memory) - yield ("Memory/Physical/%d" % (PID), memory) + yield ("Memory/Physical/%d" % (pid), memory) yield ("Memory/Physical/Utilization", memory_utilization) - yield ("Memory/Physical/Utilization/%d" % (PID), memory_utilization) + yield ("Memory/Physical/Utilization/%d" % (pid), memory_utilization) From 7626e3b9c576288790cbdb7b65cfc8ac5df4906e Mon Sep 17 00:00:00 2001 From: Uma Annamalai Date: Tue, 30 Aug 2022 22:58:27 +0530 Subject: [PATCH 17/49] Fix sql_format error on database_trace (#610) * Add check for sql_format in database_trace.py * Fix typo. 
* [Mega-Linter] Apply linters fixes Co-authored-by: umaannamalai Co-authored-by: Timothy Pansino <11214426+TimPansino@users.noreply.github.com> --- newrelic/api/database_trace.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/newrelic/api/database_trace.py b/newrelic/api/database_trace.py index 2f58e9268..09dfa1e11 100644 --- a/newrelic/api/database_trace.py +++ b/newrelic/api/database_trace.py @@ -205,8 +205,9 @@ def finalize_data(self, transaction, exc=None, value=None, tb=None): execute_params = self.execute_params transaction._explain_plan_count += 1 - self.sql_format = tt.record_sql - + self.sql_format = ( + tt.record_sql if tt.record_sql else "" + ) # If tt.record_sql is None, then the empty string will default to sql being obfuscated self.connect_params = connect_params self.cursor_params = cursor_params self.sql_parameters = sql_parameters From ba1d93408665d810d9a7ac2581f94b76fdb5f418 Mon Sep 17 00:00:00 2001 From: Timothy Pansino <11214426+TimPansino@users.noreply.github.com> Date: Thu, 1 Sep 2022 13:09:59 -0700 Subject: [PATCH 18/49] Disable GraphQL Master tests (#617) --- tox.ini | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tox.ini b/tox.ini index 8491cd0b6..7c90f2a40 100644 --- a/tox.ini +++ b/tox.ini @@ -128,7 +128,8 @@ envlist = python-framework_graphene-py310-graphene0201, python-framework_graphql-{py27,py37,py38,py39,py310,pypy,pypy37}-graphql02, python-framework_graphql-{py37,py38,py39,py310,pypy37}-graphql03, - python-framework_graphql-py37-graphql{0202,0203,0300,0301,0302,master}, + ; temporarily disabling graphqlmaster tests + python-framework_graphql-py37-graphql{0202,0203,0300,0301,0302}, grpc-framework_grpc-{py27}-grpc0125, grpc-framework_grpc-{py37,py38,py39,py310}-grpclatest, python-framework_pyramid-{pypy,py27,py38}-Pyramid0104, From 9040fbde82d530c9693f44e6644515e35a1c58ab Mon Sep 17 00:00:00 2001 From: Hannah Stepanek Date: Thu, 1 Sep 2022 13:16:37 -0700 Subject: [PATCH 19/49] Exclude all tests from bandit (#616) Co-authored-by: Timothy Pansino <11214426+TimPansino@users.noreply.github.com> --- .mega-linter.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.mega-linter.yml b/.mega-linter.yml index 66dc1c74c..0d64b4d24 100644 --- a/.mega-linter.yml +++ b/.mega-linter.yml @@ -30,6 +30,6 @@ PYTHON_BLACK_CONFIG_FILE: pyproject.toml PYTHON_PYLINT_CONFIG_FILE: pyproject.toml PYTHON_ISORT_CONFIG_FILE: pyproject.toml PYTHON_BANDIT_CONFIG_FILE: pyproject.toml -PYTHON_BANDIT_FILTER_REGEX_EXCLUDE: ./tests +PYTHON_BANDIT_FILTER_REGEX_EXCLUDE: "tests" PYTHON_PYLINT_ARGUMENTS: "--fail-under=0 --fail-on=E" From d0896ae5f4e10ee197fb935d922404faef7c6107 Mon Sep 17 00:00:00 2001 From: Timothy Pansino <11214426+TimPansino@users.noreply.github.com> Date: Thu, 1 Sep 2022 13:52:10 -0700 Subject: [PATCH 20/49] Hypercorn ASGI Server Instrumentation (#598) * Hypercorn instrumentation * Fix hypercorn ASGI2/3 detection * Add hypercorn to tox * Hypercorn testing * Fix flake8 errors * Apply linter fixes * Fix lifespan support for hypercorn 0.10 * More explicit timeout errors. 
* [Mega-Linter] Apply linters fixes * Bump tests * Add ignored txn endpoints to sample apps Co-authored-by: Lalleh Rafeei Co-authored-by: Hannah Stepanek Co-authored-by: Uma Annamalai * Fix ASGI sample app transaction assertions * [Mega-Linter] Apply linters fixes * Bump Tests * Fix issues from code review * Fix testing for hypercorn after asgi2 removal * Add hypercorn WSGI instrumentation * Fix exact patch version for hypercorn updates * Formatting Co-authored-by: TimPansino Co-authored-by: Lalleh Rafeei Co-authored-by: Hannah Stepanek Co-authored-by: Uma Annamalai --- newrelic/config.py | 8 + newrelic/core/environment.py | 34 ++-- newrelic/hooks/adapter_hypercorn.py | 79 +++++++++ tests/adapter_hypercorn/conftest.py | 40 +++++ tests/adapter_hypercorn/test_hypercorn.py | 150 ++++++++++++++++++ .../sample_asgi_applications.py | 47 ++++-- tox.ini | 8 + 7 files changed, 345 insertions(+), 21 deletions(-) create mode 100644 newrelic/hooks/adapter_hypercorn.py create mode 100644 tests/adapter_hypercorn/conftest.py create mode 100644 tests/adapter_hypercorn/test_hypercorn.py diff --git a/newrelic/config.py b/newrelic/config.py index 90566e113..456cd722d 100644 --- a/newrelic/config.py +++ b/newrelic/config.py @@ -2540,6 +2540,14 @@ def _process_module_builtin_defaults(): _process_module_definition("uvicorn.config", "newrelic.hooks.adapter_uvicorn", "instrument_uvicorn_config") + _process_module_definition( + "hypercorn.asyncio.run", "newrelic.hooks.adapter_hypercorn", "instrument_hypercorn_asyncio_run" + ) + _process_module_definition( + "hypercorn.trio.run", "newrelic.hooks.adapter_hypercorn", "instrument_hypercorn_trio_run" + ) + _process_module_definition("hypercorn.utils", "newrelic.hooks.adapter_hypercorn", "instrument_hypercorn_utils") + _process_module_definition("daphne.server", "newrelic.hooks.adapter_daphne", "instrument_daphne_server") _process_module_definition("sanic.app", "newrelic.hooks.framework_sanic", "instrument_sanic_app") diff --git a/newrelic/core/environment.py b/newrelic/core/environment.py index 17b03813c..799b266a8 100644 --- a/newrelic/core/environment.py +++ b/newrelic/core/environment.py @@ -43,6 +43,17 @@ def environment_settings(): """Returns an array of arrays of environment settings""" + # Find version resolver. + + get_version = None + # importlib was introduced into the standard library starting in Python3.8. + if "importlib" in sys.modules and hasattr(sys.modules["importlib"], "metadata"): + get_version = sys.modules["importlib"].metadata.version + elif "pkg_resources" in sys.modules: + + def get_version(name): # pylint: disable=function-redefined + return sys.modules["pkg_resources"].get_distribution(name).version + env = [] # Agent information. @@ -104,6 +115,8 @@ def environment_settings(): dispatcher = [] + # Find the first dispatcher module that's been loaded and report that as the dispatcher. + # If possible, also report the dispatcher's version and any other environment information. 
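+    # Every check below is guarded by "not dispatcher", so detection stops at
+    # the first server module found in sys.modules even when several are
+    # importable.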
if not dispatcher and "mod_wsgi" in sys.modules: mod_wsgi = sys.modules["mod_wsgi"] if hasattr(mod_wsgi, "process_group"): @@ -170,6 +183,18 @@ def environment_settings(): if hasattr(uvicorn, "__version__"): dispatcher.append(("Dispatcher Version", uvicorn.__version__)) + if not dispatcher and "hypercorn" in sys.modules: + dispatcher.append(("Dispatcher", "hypercorn")) + hypercorn = sys.modules["hypercorn"] + + if hasattr(hypercorn, "__version__"): + dispatcher.append(("Dispatcher Version", hypercorn.__version__)) + else: + try: + dispatcher.append(("Dispatcher Version", get_version("hypercorn"))) + except Exception: + pass + if not dispatcher and "daphne" in sys.modules: dispatcher.append(("Dispatcher", "daphne")) daphne = sys.modules["daphne"] @@ -191,15 +216,6 @@ def environment_settings(): plugins = [] - get_version = None - # importlib was introduced into the standard library starting in Python3.8. - if "importlib" in sys.modules and hasattr(sys.modules["importlib"], "metadata"): - get_version = sys.modules["importlib"].metadata.version - elif "pkg_resources" in sys.modules: - - def get_version(name): # pylint: disable=function-redefined - return sys.modules["pkg_resources"].get_distribution(name).version - # Using any iterable to create a snapshot of sys.modules can occassionally # fail in a rare case when modules are imported in parallel by different # threads. diff --git a/newrelic/hooks/adapter_hypercorn.py b/newrelic/hooks/adapter_hypercorn.py new file mode 100644 index 000000000..f22dc74f1 --- /dev/null +++ b/newrelic/hooks/adapter_hypercorn.py @@ -0,0 +1,79 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from newrelic.api.asgi_application import ASGIApplicationWrapper +from newrelic.api.wsgi_application import WSGIApplicationWrapper +from newrelic.common.object_wrapper import wrap_function_wrapper + + +def bind_worker_serve(app, *args, **kwargs): + return app, args, kwargs + + +async def wrap_worker_serve(wrapped, instance, args, kwargs): + import hypercorn + + wrapper_module = getattr(hypercorn, "app_wrappers", None) + asgi_wrapper_class = getattr(wrapper_module, "ASGIWrapper", None) + wsgi_wrapper_class = getattr(wrapper_module, "WSGIWrapper", None) + + app, args, kwargs = bind_worker_serve(*args, **kwargs) + + # Hypercorn 0.14.1 introduced wrappers for ASGI and WSGI apps that need to be above our instrumentation. + if asgi_wrapper_class is not None and isinstance(app, asgi_wrapper_class): + app.app = ASGIApplicationWrapper(app.app) + elif wsgi_wrapper_class is not None and isinstance(app, wsgi_wrapper_class): + app.app = WSGIApplicationWrapper(app.app) + else: + app = ASGIApplicationWrapper(app) + + app._nr_wrapped = True + return await wrapped(app, *args, **kwargs) + + +def bind_is_asgi(app): + return app + + +def wrap_is_asgi(wrapped, instance, args, kwargs): + # Wrapper is identical and reused for the functions is_asgi and _is_asgi_2. + app = bind_is_asgi(*args, **kwargs) + + # Unwrap apps wrapped by our instrumentation. 
+ # ASGI 2/3 detection for hypercorn is unable to process + # our wrappers and will return incorrect results. This + # should be sufficient to allow hypercorn to run detection + # on an application that was not wrapped by this instrumentation. + while getattr(app, "_nr_wrapped", False): + app = app.__wrapped__ + + return wrapped(app) + + +def instrument_hypercorn_asyncio_run(module): + if hasattr(module, "worker_serve"): + wrap_function_wrapper(module, "worker_serve", wrap_worker_serve) + + +def instrument_hypercorn_trio_run(module): + if hasattr(module, "worker_serve"): + wrap_function_wrapper(module, "worker_serve", wrap_worker_serve) + + +def instrument_hypercorn_utils(module): + if hasattr(module, "_is_asgi_2"): + wrap_function_wrapper(module, "_is_asgi_2", wrap_is_asgi) + + if hasattr(module, "is_asgi"): + wrap_function_wrapper(module, "is_asgi", wrap_is_asgi) diff --git a/tests/adapter_hypercorn/conftest.py b/tests/adapter_hypercorn/conftest.py new file mode 100644 index 000000000..50e8bad10 --- /dev/null +++ b/tests/adapter_hypercorn/conftest.py @@ -0,0 +1,40 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from testing_support.fixture.event_loop import ( # noqa: F401; pylint: disable=W0611 + event_loop as loop, +) +from testing_support.fixtures import ( # noqa: F401; pylint: disable=W0611 + code_coverage_fixture, + collector_agent_registration_fixture, + collector_available_fixture, +) + +_coverage_source = [ + "newrelic.hooks.adapter_hypercorn", +] + +code_coverage = code_coverage_fixture(source=_coverage_source) + +_default_settings = { + "transaction_tracer.explain_threshold": 0.0, + "transaction_tracer.transaction_threshold": 0.0, + "transaction_tracer.stack_trace_threshold": 0.0, + "debug.log_data_collector_payloads": True, + "debug.record_transaction_failure": True, +} + +collector_agent_registration = collector_agent_registration_fixture( + app_name="Python Agent Test (adapter_hypercorn)", default_settings=_default_settings +) diff --git a/tests/adapter_hypercorn/test_hypercorn.py b/tests/adapter_hypercorn/test_hypercorn.py new file mode 100644 index 000000000..05bf9fdc5 --- /dev/null +++ b/tests/adapter_hypercorn/test_hypercorn.py @@ -0,0 +1,150 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
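+
+# Note on the adapter hooks above: hypercorn's is_asgi()/_is_asgi_2() helpers
+# inspect the application callable itself, which the agent's wrappers would
+# confuse, so wrap_is_asgi walks the __wrapped__ chain back to the original
+# app before detection runs.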
+ +import asyncio +import threading +import time +from urllib.request import HTTPError, urlopen + +import pkg_resources +import pytest +from testing_support.fixtures import ( + override_application_settings, + raise_background_exceptions, + validate_transaction_errors, + validate_transaction_metrics, + wait_for_background_threads, +) +from testing_support.sample_asgi_applications import ( + AppWithCall, + AppWithCallRaw, + simple_app_v2_raw, +) +from testing_support.util import get_open_port + +from newrelic.api.transaction import ignore_transaction +from newrelic.common.object_names import callable_name + +HYPERCORN_VERSION = tuple(int(v) for v in pkg_resources.get_distribution("hypercorn").version.split(".")) +asgi_2_unsupported = HYPERCORN_VERSION >= (0, 14, 1) +wsgi_unsupported = HYPERCORN_VERSION < (0, 14, 1) + + +def wsgi_app(environ, start_response): + path = environ["PATH_INFO"] + + if path == "/": + start_response("200 OK", response_headers=[]) + elif path == "/ignored": + ignore_transaction() + start_response("200 OK", response_headers=[]) + elif path == "/exc": + raise ValueError("whoopsies") + + return [] + + +@pytest.fixture( + params=( + pytest.param( + simple_app_v2_raw, + marks=pytest.mark.skipif(asgi_2_unsupported, reason="ASGI2 unsupported"), + ), + AppWithCallRaw(), + AppWithCall(), + pytest.param( + wsgi_app, + marks=pytest.mark.skipif(wsgi_unsupported, reason="WSGI unsupported"), + ), + ), + ids=("raw", "class_with_call", "class_with_call_double_wrapped", "wsgi"), +) +def app(request): + return request.param + + +@pytest.fixture() +def port(loop, app): + import hypercorn.asyncio + import hypercorn.config + + port = get_open_port() + shutdown = asyncio.Event() + + def server_run(): + async def shutdown_trigger(): + await shutdown.wait() + return True + + config = hypercorn.config.Config.from_mapping( + { + "bind": ["127.0.0.1:%d" % port], + } + ) + + try: + loop.run_until_complete(hypercorn.asyncio.serve(app, config, shutdown_trigger=shutdown_trigger)) + except Exception: + pass + + thread = threading.Thread(target=server_run, daemon=True) + thread.start() + wait_for_port(port) + yield port + + shutdown.set() + loop.call_soon_threadsafe(loop.stop) + thread.join(timeout=10) + + if thread.is_alive(): + raise RuntimeError("Thread failed to exit in time.") + + +def wait_for_port(port, retries=10): + status = None + for _ in range(retries): + try: + status = urlopen("http://localhost:%d/ignored" % port, timeout=1).status + assert status == 200 + return + except Exception as e: + status = e + + time.sleep(1) + + raise RuntimeError("Failed to wait for port %d. 
Got status %s" % (port, status)) + + +@override_application_settings({"transaction_name.naming_scheme": "framework"}) +def test_hypercorn_200(port, app): + @validate_transaction_metrics(callable_name(app)) + @raise_background_exceptions() + @wait_for_background_threads() + def response(): + return urlopen("http://localhost:%d" % port, timeout=10) + + assert response().status == 200 + + +@override_application_settings({"transaction_name.naming_scheme": "framework"}) +def test_hypercorn_500(port, app): + @validate_transaction_errors(["builtins:ValueError"]) + @validate_transaction_metrics(callable_name(app)) + @raise_background_exceptions() + @wait_for_background_threads() + def _test(): + with pytest.raises(HTTPError): + urlopen("http://localhost:%d/exc" % port) + + _test() diff --git a/tests/testing_support/sample_asgi_applications.py b/tests/testing_support/sample_asgi_applications.py index e43ba36d4..53bf40d33 100644 --- a/tests/testing_support/sample_asgi_applications.py +++ b/tests/testing_support/sample_asgi_applications.py @@ -12,9 +12,13 @@ # See the License for the specific language governing permissions and # limitations under the License. -from newrelic.api.time_trace import notice_error -from newrelic.api.transaction import add_custom_parameter, current_transaction from newrelic.api.asgi_application import ASGIApplicationWrapper +from newrelic.api.time_trace import notice_error +from newrelic.api.transaction import ( + add_custom_parameter, + current_transaction, + ignore_transaction, +) class simple_app_v2_raw: @@ -22,17 +26,21 @@ def __init__(self, scope): self.scope = scope async def __call__(self, receive, send): + if self.scope["type"] == "lifespan": + return await handle_lifespan(self.scope, receive, send) + if self.scope["type"] != "http": raise ValueError("unsupported") if self.scope["path"] == "/exc": raise ValueError("whoopsies") + elif self.scope["path"] == "/ignored": + ignore_transaction() + await send({"type": "http.response.start", "status": 200}) await send({"type": "http.response.body"}) - txn = current_transaction() - - assert txn is None + assert current_transaction() is None class simple_app_v2_init_exc(simple_app_v2_raw): @@ -41,19 +49,21 @@ def __init__(self, scope): async def simple_app_v3_raw(scope, receive, send): + if scope["type"] == "lifespan": + return await handle_lifespan(scope, receive, send) + if scope["type"] != "http": raise ValueError("unsupported") if scope["path"] == "/exc": raise ValueError("whoopsies") + elif scope["path"] == "/ignored": + ignore_transaction() await send({"type": "http.response.start", "status": 200}) await send({"type": "http.response.body"}) - txn = current_transaction() - - assert txn is None - + assert current_transaction() is None class AppWithDescriptor: @@ -104,7 +114,20 @@ async def normal_asgi_application(scope, receive, send): except ValueError: notice_error(attributes={"ohnoes": "param-value"}) - await send( - {"type": "http.response.start", "status": 200, "headers": response_headers} - ) + await send({"type": "http.response.start", "status": 200, "headers": response_headers}) await send({"type": "http.response.body", "body": output}) + + +async def handle_lifespan(scope, receive, send): + """Handle lifespan protocol with no-ops to allow more compatibility.""" + while True: + txn = current_transaction() + if txn: + txn.ignore_transaction = True + + message = await receive() + if message["type"] == "lifespan.startup": + await send({"type": "lifespan.startup.complete"}) + elif message["type"] == 
"lifespan.shutdown": + await send({"type": "lifespan.shutdown.complete"}) + return diff --git a/tox.ini b/tox.ini index 7c90f2a40..04a9b6219 100644 --- a/tox.ini +++ b/tox.ini @@ -47,6 +47,8 @@ envlist = python-adapter_daphne-py38-daphne{0204,0205}, python-adapter_gevent-{py27,py37,py38,py310}, python-adapter_gunicorn-{py37,py38,py39,py310}-aiohttp3-gunicornlatest, + python-adapter_hypercorn-{py37,py38,py39,py310}-hypercornlatest, + python-adapter_hypercorn-py38-hypercorn{0010,0011,0012,0013}, python-adapter_uvicorn-py37-uvicorn03, python-adapter_uvicorn-{py37,py38,py39,py310}-uvicornlatest, python-agent_features-{py27,py37,py38,py39,py310}-{with,without}_extensions, @@ -176,6 +178,11 @@ deps = adapter_gunicorn-aiohttp3: aiohttp<4.0 adapter_gunicorn-gunicorn19: gunicorn<20 adapter_gunicorn-gunicornlatest: gunicorn + adapter_hypercorn-hypercornlatest: hypercorn + adapter_hypercorn-hypercorn0013: hypercorn<0.14 + adapter_hypercorn-hypercorn0012: hypercorn<0.13 + adapter_hypercorn-hypercorn0011: hypercorn<0.12 + adapter_hypercorn-hypercorn0010: hypercorn<0.11 adapter_uvicorn-uvicorn03: uvicorn<0.4 adapter_uvicorn-uvicorn014: uvicorn<0.15 adapter_uvicorn-uvicornlatest: uvicorn @@ -383,6 +390,7 @@ changedir = adapter_daphne: tests/adapter_daphne adapter_gevent: tests/adapter_gevent adapter_gunicorn: tests/adapter_gunicorn + adapter_hypercorn: tests/adapter_hypercorn adapter_uvicorn: tests/adapter_uvicorn agent_features: tests/agent_features agent_streaming: tests/agent_streaming From 6ef9bce98c116526eb4dd38f66c2328d30979b79 Mon Sep 17 00:00:00 2001 From: Timothy Pansino <11214426+TimPansino@users.noreply.github.com> Date: Thu, 1 Sep 2022 14:33:00 -0700 Subject: [PATCH 21/49] Custom Event Limit Increase (#591) * Update reservoir size for custom events. * [Mega-Linter] Apply linters fixes Co-authored-by: TimPansino --- newrelic/api/transaction.py | 38 ++++++++++++++-------- newrelic/core/config.py | 3 +- tests/agent_features/test_configuration.py | 8 ++--- 3 files changed, 30 insertions(+), 19 deletions(-) diff --git a/newrelic/api/transaction.py b/newrelic/api/transaction.py index f486989b4..c963e7392 100644 --- a/newrelic/api/transaction.py +++ b/newrelic/api/transaction.py @@ -25,13 +25,12 @@ import weakref from collections import OrderedDict -from newrelic.api.application import application_instance import newrelic.core.database_node import newrelic.core.error_node -from newrelic.core.log_event_node import LogEventNode import newrelic.core.root_node import newrelic.core.transaction_node import newrelic.packages.six as six +from newrelic.api.application import application_instance from newrelic.api.time_trace import TimeTrace, get_linking_metadata from newrelic.common.encoding_utils import ( DistributedTracePayload, @@ -61,8 +60,9 @@ DST_NONE, DST_TRANSACTION_TRACER, ) -from newrelic.core.config import DEFAULT_RESERVOIR_SIZE, LOG_EVENT_RESERVOIR_SIZE +from newrelic.core.config import CUSTOM_EVENT_RESERVOIR_SIZE, LOG_EVENT_RESERVOIR_SIZE from newrelic.core.custom_event import create_custom_event +from newrelic.core.log_event_node import LogEventNode from newrelic.core.stack_trace import exception_stack from newrelic.core.stats_engine import CustomMetrics, SampledDataSet from newrelic.core.thread_utilization import utilization_tracker @@ -324,10 +324,14 @@ def __init__(self, application, enabled=None, source=None): self.enabled = True if self._settings: - self._custom_events = SampledDataSet(capacity=self._settings.event_harvest_config.harvest_limits.custom_event_data) - self._log_events = 
SampledDataSet(capacity=self._settings.event_harvest_config.harvest_limits.log_event_data) + self._custom_events = SampledDataSet( + capacity=self._settings.event_harvest_config.harvest_limits.custom_event_data + ) + self._log_events = SampledDataSet( + capacity=self._settings.event_harvest_config.harvest_limits.log_event_data + ) else: - self._custom_events = SampledDataSet(capacity=DEFAULT_RESERVOIR_SIZE) + self._custom_events = SampledDataSet(capacity=CUSTOM_EVENT_RESERVOIR_SIZE) self._log_events = SampledDataSet(capacity=LOG_EVENT_RESERVOIR_SIZE) def __del__(self): @@ -1473,31 +1477,35 @@ def set_transaction_name(self, name, group=None, priority=None): self._group = group self._name = name - def record_log_event(self, message, level=None, timestamp=None, priority=None): settings = self.settings - if not (settings and settings.application_logging and settings.application_logging.enabled and settings.application_logging.forwarding and settings.application_logging.forwarding.enabled): + if not ( + settings + and settings.application_logging + and settings.application_logging.enabled + and settings.application_logging.forwarding + and settings.application_logging.forwarding.enabled + ): return - + timestamp = timestamp if timestamp is not None else time.time() level = str(level) if level is not None else "UNKNOWN" - + if not message or message.isspace(): _logger.debug("record_log_event called where message was missing. No log event will be sent.") return - + message = truncate(message, MAX_LOG_MESSAGE_LENGTH) event = LogEventNode( timestamp=timestamp, level=level, message=message, - attributes=get_linking_metadata(), + attributes=get_linking_metadata(), ) self._log_events.add(event, priority=priority) - def record_exception(self, exc=None, value=None, tb=None, params=None, ignore_errors=None): # Deprecation Warning warnings.warn( @@ -1869,7 +1877,9 @@ def record_log_event(message, level=None, timestamp=None, application=None, prio "record_log_event has been called but no transaction or application was running. As a result, " "the following event has not been recorded. message: %r level: %r timestamp %r. To correct " "this problem, supply an application object as a parameter to this record_log_event call.", - message, level, timestamp, + message, + level, + timestamp, ) elif application.enabled: application.record_log_event(message, level, timestamp, priority=priority) diff --git a/newrelic/core/config.py b/newrelic/core/config.py index 60520c113..2e4db97c7 100644 --- a/newrelic/core/config.py +++ b/newrelic/core/config.py @@ -52,6 +52,7 @@ # reservoir. Error Events have a different default size. 
DEFAULT_RESERVOIR_SIZE = 1200 +CUSTOM_EVENT_RESERVOIR_SIZE = 30000 ERROR_EVENT_RESERVOIR_SIZE = 100 SPAN_EVENT_RESERVOIR_SIZE = 2000 LOG_EVENT_RESERVOIR_SIZE = 10000 @@ -738,7 +739,7 @@ def default_host(license_key): ) _settings.event_harvest_config.harvest_limits.custom_event_data = _environ_as_int( - "NEW_RELIC_CUSTOM_INSIGHTS_EVENTS_MAX_SAMPLES_STORED", DEFAULT_RESERVOIR_SIZE + "NEW_RELIC_CUSTOM_INSIGHTS_EVENTS_MAX_SAMPLES_STORED", CUSTOM_EVENT_RESERVOIR_SIZE ) _settings.event_harvest_config.harvest_limits.span_event_data = _environ_as_int( diff --git a/tests/agent_features/test_configuration.py b/tests/agent_features/test_configuration.py index 5846e3808..0b5203ad8 100644 --- a/tests/agent_features/test_configuration.py +++ b/tests/agent_features/test_configuration.py @@ -438,12 +438,12 @@ def test_delete_setting_parent(): TSetting("event_harvest_config.harvest_limits.error_event_data", 100, 100), ), ( - TSetting("custom_insights_events.max_samples_stored", 1200, 1200), - TSetting("event_harvest_config.harvest_limits.custom_event_data", 9999, 1200), + TSetting("custom_insights_events.max_samples_stored", 30000, 30000), + TSetting("event_harvest_config.harvest_limits.custom_event_data", 9999, 30000), ), ( - TSetting("custom_insights_events.max_samples_stored", 9999, 1200), - TSetting("event_harvest_config.harvest_limits.custom_event_data", 1200, 1200), + TSetting("custom_insights_events.max_samples_stored", 9999, 30000), + TSetting("event_harvest_config.harvest_limits.custom_event_data", 30000, 30000), ), ( TSetting("application_logging.forwarding.max_samples_stored", 10000, 10000), From aa23eaa53a4ab8757cb97ad79e58acdbed414a38 Mon Sep 17 00:00:00 2001 From: Timothy Pansino <11214426+TimPansino@users.noreply.github.com> Date: Mon, 12 Sep 2022 10:42:11 -0700 Subject: [PATCH 22/49] Revert "Custom Event Limit Increase (#591)" (#622) This reverts commit 6ef9bce98c116526eb4dd38f66c2328d30979b79. 
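The limit being reverted here is the capacity of the agent's sampled event
reservoir. For context, a minimal reservoir-sampling sketch is shown below;
it is illustrative only (the Reservoir class is hypothetical, and the agent's
actual SampledDataSet implementation is not part of this patch):

    import random

    class Reservoir:
        """Keep a uniform random sample of at most `capacity` stream items."""

        def __init__(self, capacity):
            self.capacity = capacity
            self.num_seen = 0
            self.samples = []

        def add(self, item):
            self.num_seen += 1
            if len(self.samples) < self.capacity:
                self.samples.append(item)
            else:
                # Each arriving item replaces a stored one with probability
                # capacity / num_seen, which keeps the sample uniform over
                # everything seen so far.
                slot = random.randrange(self.num_seen)
                if slot < self.capacity:
                    self.samples[slot] = item

With a 1200-item capacity, an application producing far more custom events per
harvest keeps only a small uniform sample of them; the reverted commit had
raised the default to 30000 to reduce that sampling loss.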
--- newrelic/api/transaction.py | 38 ++++++++-------------- newrelic/core/config.py | 3 +- tests/agent_features/test_configuration.py | 8 ++--- 3 files changed, 19 insertions(+), 30 deletions(-) diff --git a/newrelic/api/transaction.py b/newrelic/api/transaction.py index c963e7392..f486989b4 100644 --- a/newrelic/api/transaction.py +++ b/newrelic/api/transaction.py @@ -25,12 +25,13 @@ import weakref from collections import OrderedDict +from newrelic.api.application import application_instance import newrelic.core.database_node import newrelic.core.error_node +from newrelic.core.log_event_node import LogEventNode import newrelic.core.root_node import newrelic.core.transaction_node import newrelic.packages.six as six -from newrelic.api.application import application_instance from newrelic.api.time_trace import TimeTrace, get_linking_metadata from newrelic.common.encoding_utils import ( DistributedTracePayload, @@ -60,9 +61,8 @@ DST_NONE, DST_TRANSACTION_TRACER, ) -from newrelic.core.config import CUSTOM_EVENT_RESERVOIR_SIZE, LOG_EVENT_RESERVOIR_SIZE +from newrelic.core.config import DEFAULT_RESERVOIR_SIZE, LOG_EVENT_RESERVOIR_SIZE from newrelic.core.custom_event import create_custom_event -from newrelic.core.log_event_node import LogEventNode from newrelic.core.stack_trace import exception_stack from newrelic.core.stats_engine import CustomMetrics, SampledDataSet from newrelic.core.thread_utilization import utilization_tracker @@ -324,14 +324,10 @@ def __init__(self, application, enabled=None, source=None): self.enabled = True if self._settings: - self._custom_events = SampledDataSet( - capacity=self._settings.event_harvest_config.harvest_limits.custom_event_data - ) - self._log_events = SampledDataSet( - capacity=self._settings.event_harvest_config.harvest_limits.log_event_data - ) + self._custom_events = SampledDataSet(capacity=self._settings.event_harvest_config.harvest_limits.custom_event_data) + self._log_events = SampledDataSet(capacity=self._settings.event_harvest_config.harvest_limits.log_event_data) else: - self._custom_events = SampledDataSet(capacity=CUSTOM_EVENT_RESERVOIR_SIZE) + self._custom_events = SampledDataSet(capacity=DEFAULT_RESERVOIR_SIZE) self._log_events = SampledDataSet(capacity=LOG_EVENT_RESERVOIR_SIZE) def __del__(self): @@ -1477,35 +1473,31 @@ def set_transaction_name(self, name, group=None, priority=None): self._group = group self._name = name + def record_log_event(self, message, level=None, timestamp=None, priority=None): settings = self.settings - if not ( - settings - and settings.application_logging - and settings.application_logging.enabled - and settings.application_logging.forwarding - and settings.application_logging.forwarding.enabled - ): + if not (settings and settings.application_logging and settings.application_logging.enabled and settings.application_logging.forwarding and settings.application_logging.forwarding.enabled): return - + timestamp = timestamp if timestamp is not None else time.time() level = str(level) if level is not None else "UNKNOWN" - + if not message or message.isspace(): _logger.debug("record_log_event called where message was missing. 
No log event will be sent.") return - + message = truncate(message, MAX_LOG_MESSAGE_LENGTH) event = LogEventNode( timestamp=timestamp, level=level, message=message, - attributes=get_linking_metadata(), + attributes=get_linking_metadata(), ) self._log_events.add(event, priority=priority) + def record_exception(self, exc=None, value=None, tb=None, params=None, ignore_errors=None): # Deprecation Warning warnings.warn( @@ -1877,9 +1869,7 @@ def record_log_event(message, level=None, timestamp=None, application=None, prio "record_log_event has been called but no transaction or application was running. As a result, " "the following event has not been recorded. message: %r level: %r timestamp %r. To correct " "this problem, supply an application object as a parameter to this record_log_event call.", - message, - level, - timestamp, + message, level, timestamp, ) elif application.enabled: application.record_log_event(message, level, timestamp, priority=priority) diff --git a/newrelic/core/config.py b/newrelic/core/config.py index 2e4db97c7..60520c113 100644 --- a/newrelic/core/config.py +++ b/newrelic/core/config.py @@ -52,7 +52,6 @@ # reservoir. Error Events have a different default size. DEFAULT_RESERVOIR_SIZE = 1200 -CUSTOM_EVENT_RESERVOIR_SIZE = 30000 ERROR_EVENT_RESERVOIR_SIZE = 100 SPAN_EVENT_RESERVOIR_SIZE = 2000 LOG_EVENT_RESERVOIR_SIZE = 10000 @@ -739,7 +738,7 @@ def default_host(license_key): ) _settings.event_harvest_config.harvest_limits.custom_event_data = _environ_as_int( - "NEW_RELIC_CUSTOM_INSIGHTS_EVENTS_MAX_SAMPLES_STORED", CUSTOM_EVENT_RESERVOIR_SIZE + "NEW_RELIC_CUSTOM_INSIGHTS_EVENTS_MAX_SAMPLES_STORED", DEFAULT_RESERVOIR_SIZE ) _settings.event_harvest_config.harvest_limits.span_event_data = _environ_as_int( diff --git a/tests/agent_features/test_configuration.py b/tests/agent_features/test_configuration.py index 0b5203ad8..5846e3808 100644 --- a/tests/agent_features/test_configuration.py +++ b/tests/agent_features/test_configuration.py @@ -438,12 +438,12 @@ def test_delete_setting_parent(): TSetting("event_harvest_config.harvest_limits.error_event_data", 100, 100), ), ( - TSetting("custom_insights_events.max_samples_stored", 30000, 30000), - TSetting("event_harvest_config.harvest_limits.custom_event_data", 9999, 30000), + TSetting("custom_insights_events.max_samples_stored", 1200, 1200), + TSetting("event_harvest_config.harvest_limits.custom_event_data", 9999, 1200), ), ( - TSetting("custom_insights_events.max_samples_stored", 9999, 30000), - TSetting("event_harvest_config.harvest_limits.custom_event_data", 30000, 30000), + TSetting("custom_insights_events.max_samples_stored", 9999, 1200), + TSetting("event_harvest_config.harvest_limits.custom_event_data", 1200, 1200), ), ( TSetting("application_logging.forwarding.max_samples_stored", 10000, 10000), From 0508f58eb24c0d03bbde9f1908e283a136feb48d Mon Sep 17 00:00:00 2001 From: Timothy Pansino <11214426+TimPansino@users.noreply.github.com> Date: Thu, 22 Sep 2022 13:18:03 -0700 Subject: [PATCH 23/49] Add bandit toml support to megalinter (#625) --- .mega-linter.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.mega-linter.yml b/.mega-linter.yml index 0d64b4d24..f8456d059 100644 --- a/.mega-linter.yml +++ b/.mega-linter.yml @@ -31,5 +31,6 @@ PYTHON_PYLINT_CONFIG_FILE: pyproject.toml PYTHON_ISORT_CONFIG_FILE: pyproject.toml PYTHON_BANDIT_CONFIG_FILE: pyproject.toml PYTHON_BANDIT_FILTER_REGEX_EXCLUDE: "tests" +PYTHON_BANDIT_PRE_COMMANDS: ["pip install bandit[toml]"] PYTHON_PYLINT_ARGUMENTS: "--fail-under=0 --fail-on=E" From 
77d44461bb7e9092659ff67af6b13a90088b7ab3 Mon Sep 17 00:00:00 2001 From: Hannah Stepanek Date: Fri, 23 Sep 2022 10:03:18 -0700 Subject: [PATCH 24/49] Fix silent failure in validator fixture (#627) * Fix silent failure in validator fixture Previously, if notice_error did not record an error then the validate_error_event_attributes_outside_transaction fixture would silently pass. This is because the validation would not ever run because it was decorated in a stats_engine notice error block. By moving the validation outside of this decorator, now the validation will run even if no error is recorded in the stats_engine. This has been fixed so now the fixture raises if no error is recorded at all. * Fix pylint errors Fix the following pylint errors: * tests/testing_support/fixtures.py:2400:46: E0601: Using variable 'backup' before assignment (used-before-assignment) * tests/testing_support/fixtures.py:2423:37: E0601: Using variable 'backup' before assignment (used-before-assignment) * tests/testing_support/fixtures.py:2447:59: E0601: Using variable 'original' before assignment (used-before-assignment) * tests/testing_support/fixtures.py:2452:0: W0102: Dangerous default value [] as argument (dangerous-default-value) --- .../test_ignore_expected_errors.py | 26 ++--- tests/testing_support/fixtures.py | 103 +++++++++++------- 2 files changed, 68 insertions(+), 61 deletions(-) diff --git a/tests/agent_features/test_ignore_expected_errors.py b/tests/agent_features/test_ignore_expected_errors.py index 7adc07e1c..d685c39c0 100644 --- a/tests/agent_features/test_ignore_expected_errors.py +++ b/tests/agent_features/test_ignore_expected_errors.py @@ -37,12 +37,8 @@ # Settings presets # Error classes settings -expected_runtime_error_settings = { - "error_collector.expected_classes": [_runtime_error_name] -} -ignore_runtime_error_settings = { - "error_collector.ignore_classes": [_runtime_error_name] -} +expected_runtime_error_settings = {"error_collector.expected_classes": [_runtime_error_name]} +ignore_runtime_error_settings = {"error_collector.ignore_classes": [_runtime_error_name]} # Status code settings expected_status_code_settings = {"error_collector.expected_status_codes": [418]} @@ -141,9 +137,7 @@ def _test(): @pytest.mark.parametrize("settings,expected", error_trace_settings_matrix) @pytest.mark.parametrize("override_expected", override_expected_matrix) -def test_error_trace_attributes_inside_transaction( - settings, expected, override_expected -): +def test_error_trace_attributes_inside_transaction(settings, expected, override_expected): expected = override_expected if override_expected is not None else expected error_trace_attributes = { @@ -165,9 +159,7 @@ def _test(): @pytest.mark.parametrize("settings,expected", error_trace_settings_matrix) @pytest.mark.parametrize("override_expected", override_expected_matrix) -def test_error_trace_attributes_outside_transaction( - settings, expected, override_expected -): +def test_error_trace_attributes_outside_transaction(settings, expected, override_expected): expected = override_expected if override_expected is not None else expected error_trace_attributes = { @@ -182,9 +174,7 @@ def test_error_trace_attributes_outside_transaction( } @reset_core_stats_engine() - @validate_error_trace_attributes_outside_transaction( - _runtime_error_name, exact_attrs=error_trace_attributes - ) + @validate_error_trace_attributes_outside_transaction(_runtime_error_name, exact_attrs=error_trace_attributes) @override_application_settings(settings) def _test(): 
exercise(override_expected) @@ -206,9 +196,7 @@ def test_error_metrics_inside_transaction(expected): ("ErrorsExpected/all", expected_metrics_count), ] - @validate_transaction_metrics( - "test", background_task=True, rollup_metrics=metrics_payload - ) + @validate_transaction_metrics("test", background_task=True, rollup_metrics=metrics_payload) @background_task(name="test") def _test(): exercise(expected) @@ -316,7 +304,7 @@ def _test(): try: raise TeapotError(_error_message) except: - notice_error(status_code=status_code) + notice_error(status_code=status_code, application=application_instance(activate=False)) _test() diff --git a/tests/testing_support/fixtures.py b/tests/testing_support/fixtures.py index a89730377..2df593abc 100644 --- a/tests/testing_support/fixtures.py +++ b/tests/testing_support/fixtures.py @@ -603,8 +603,12 @@ def no_op(wrapped, instance, args, kwargs): @function_wrapper def _validate_wrapper(wrapped, instance, args, kwargs): # Apply no-op wrappers to prevent new internal trace contexts from being started, preventing capture - wrapped = transient_function_wrapper("newrelic.core.internal_metrics", "InternalTraceContext.__enter__")(no_op)(wrapped) - wrapped = transient_function_wrapper("newrelic.core.internal_metrics", "InternalTraceContext.__exit__")(no_op)(wrapped) + wrapped = transient_function_wrapper("newrelic.core.internal_metrics", "InternalTraceContext.__enter__")(no_op)( + wrapped + ) + wrapped = transient_function_wrapper("newrelic.core.internal_metrics", "InternalTraceContext.__exit__")(no_op)( + wrapped + ) captured_metrics = CustomMetrics() with InternalTraceContext(captured_metrics): @@ -1736,16 +1740,27 @@ def validate_error_event_attributes_outside_transaction( required_params = required_params or {} forgone_params = forgone_params or {} + event_data = [] + @transient_function_wrapper("newrelic.core.stats_engine", "StatsEngine.notice_error") def _validate_error_event_attributes_outside_transaction(wrapped, instance, args, kwargs): - try: result = wrapped(*args, **kwargs) except: raise else: - event_data = list(instance.error_events) + for event in instance.error_events: + event_data.append(event) + return result + + @function_wrapper + def wrapper(wrapped, instance, args, kwargs): + try: + result = _validate_error_event_attributes_outside_transaction(wrapped)(*args, **kwargs) + except: + raise + else: if num_errors is not None: exc_message = ( "Expected: %d, Got: %d. Verify StatsEngine is being reset before using this validator." @@ -1758,7 +1773,7 @@ def _validate_error_event_attributes_outside_transaction(wrapped, instance, args return result - return _validate_error_event_attributes_outside_transaction + return wrapper def validate_request_params_omitted(): @@ -2362,14 +2377,14 @@ def cat_enabled(wrapped, instance, args, kwargs): def override_application_settings(overrides): @function_wrapper def _override_application_settings(wrapped, instance, args, kwargs): - try: - # The settings object has references from a number of - # different places. We have to create a copy, overlay - # the temporary settings and then when done clear the - # top level settings object and rebuild it when done. + # The settings object has references from a number of + # different places. We have to create a copy, overlay + # the temporary settings and then when done clear the + # top level settings object and rebuild it when done. 
+ original_settings = application_settings() + backup = copy.deepcopy(original_settings.__dict__) - original_settings = application_settings() - backup = copy.deepcopy(original_settings.__dict__) + try: for name, value in overrides.items(): apply_config_setting(original_settings, name, value) @@ -2390,16 +2405,15 @@ def _override_application_settings(wrapped, instance, args, kwargs): def override_generic_settings(settings_object, overrides): @function_wrapper def _override_generic_settings(wrapped, instance, args, kwargs): - try: - # In some cases, a settings object may have references - # from a number of different places. We have to create - # a copy, overlay the temporary settings and then when - # done, clear the top level settings object and rebuild - # it when done. + # In some cases, a settings object may have references + # from a number of different places. We have to create + # a copy, overlay the temporary settings and then when + # done, clear the top level settings object and rebuild + # it when done. + original = settings_object + backup = copy.deepcopy(original.__dict__) - original = settings_object - - backup = copy.deepcopy(original.__dict__) + try: for name, value in overrides.items(): apply_config_setting(original, name, value) return wrapped(*args, **kwargs) @@ -2413,19 +2427,20 @@ def _override_generic_settings(wrapped, instance, args, kwargs): def override_ignore_status_codes(status_codes): @function_wrapper def _override_ignore_status_codes(wrapped, instance, args, kwargs): - try: - # Updates can be made to ignored status codes in server - # side configs. Changes will be applied to application - # settings so we first check there and if they don't - # exist, we default to global settings + # Updates can be made to ignored status codes in server + # side configs. 
Changes will be applied to application + # settings so we first check there and if they don't + # exist, we default to global settings + + application = application_instance() + settings = application and application.settings - application = application_instance() - settings = application and application.settings + if not settings: + settings = global_settings() - if not settings: - settings = global_settings() + original = settings.error_collector.ignore_status_codes - original = settings.error_collector.ignore_status_codes + try: settings.error_collector.ignore_status_codes = status_codes return wrapped(*args, **kwargs) finally: @@ -2434,25 +2449,28 @@ def _override_ignore_status_codes(wrapped, instance, args, kwargs): return _override_ignore_status_codes -def code_coverage_fixture(source=['newrelic']): - @pytest.fixture(scope='session') +def code_coverage_fixture(source=None): + if source is None: + source = ["newrelic"] + + @pytest.fixture(scope="session") def _code_coverage_fixture(request): if not source: return - if os.environ.get('GITHUB_ACTIONS') is not None: + if os.environ.get("GITHUB_ACTIONS") is not None: return from coverage import coverage - env_directory = os.environ.get('TOX_ENVDIR', None) + env_directory = os.environ.get("TOX_ENVDIR", None) if env_directory is not None: - coverage_directory = os.path.join(env_directory, 'htmlcov') - xml_report = os.path.join(env_directory, 'coverage.xml') + coverage_directory = os.path.join(env_directory, "htmlcov") + xml_report = os.path.join(env_directory, "coverage.xml") else: - coverage_directory = 'htmlcov' - xml_report = 'coverage.xml' + coverage_directory = "htmlcov" + xml_report = "coverage.xml" def finalize(): cov.stop() @@ -2469,18 +2487,19 @@ def finalize(): def reset_core_stats_engine(): """Reset the StatsEngine and custom StatsEngine of the core application.""" + @function_wrapper def _reset_core_stats_engine(wrapped, instance, args, kwargs): api_application = application_instance() api_name = api_application.name core_application = api_application._agent.application(api_name) - + stats = core_application._stats_engine stats.reset_stats(stats.settings) - + custom_stats = core_application._stats_custom_engine custom_stats.reset_stats(custom_stats.settings) - + return wrapped(*args, **kwargs) return _reset_core_stats_engine From e184662f695f6272d131a4d69a4a8daed8e5950d Mon Sep 17 00:00:00 2001 From: Uma Annamalai Date: Fri, 26 Aug 2022 21:07:30 +0530 Subject: [PATCH 25/49] Add Kafka test infra. (#609) * Add Kafka test infra. * Update tests.yml. 
* Finalizing testing infrastructure setup * Fix healthcheck for kafka * Add kafka to aggregate check depends * Removing kafka healthcheck completely * Fix db settings wiring for py27 * Linting and formatting Co-authored-by: Timothy Pansino <11214426+TimPansino@users.noreply.github.com> Co-authored-by: Tim Pansino --- .github/workflows/tests.yml | 68 +++++++++++-- tests/messagebroker_kafkapython/conftest.py | 95 +++++++++++++++++++ .../test_kafka_produce.py | 30 ++++++ tests/messagebroker_pika/conftest.py | 71 ++++++-------- tests/testing_support/db_settings.py | 36 +++++-- tox.ini | 3 + 6 files changed, 250 insertions(+), 53 deletions(-) create mode 100644 tests/messagebroker_kafkapython/conftest.py create mode 100644 tests/messagebroker_kafkapython/test_kafka_produce.py diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index b9abc6745..57993a515 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -29,18 +29,19 @@ jobs: runs-on: ubuntu-latest needs: - python + - elasticsearchserver01 + - elasticsearchserver07 + - gearman - grpc + - kafka - libcurl - - postgres + - memcached + - mongodb - mysql + - postgres + - rabbitmq - redis - solr - - memcached - - rabbitmq - - mongodb - - elasticsearchserver01 - - elasticsearchserver07 - - gearman steps: - name: Success @@ -432,6 +433,59 @@ jobs: TOX_PARALLEL_NO_SPINNER: 1 PY_COLORS: 0 + kafka: + env: + TOTAL_GROUPS: 1 + + strategy: + fail-fast: false + matrix: + group-number: [1] + + runs-on: ubuntu-latest + timeout-minutes: 30 + + services: + zookeeper: + image: bitnami/zookeeper:latest + env: + ALLOW_ANONYMOUS_LOGIN: yes + + ports: + - 2181:2181 + + kafka: + image: bitnami/kafka:latest + ports: + - 8080:8080 + - 8081:8081 + env: + ALLOW_PLAINTEXT_LISTENER: yes + KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181 + KAFKA_CFG_AUTO_CREATE_TOPICS_ENABLE: true + KAFKA_CFG_LISTENERS: L1://:8080,L2://:8081 + KAFKA_CFG_ADVERTISED_LISTENERS: L1://127.0.0.1:8080,L2://kafka:8081, + KAFKA_CFG_LISTENER_SECURITY_PROTOCOL_MAP: L1:PLAINTEXT,L2:PLAINTEXT + KAFKA_CFG_INTER_BROKER_LISTENER_NAME: L2 + + steps: + - uses: actions/checkout@v3 + - uses: ./.github/actions/setup-python-matrix + + - name: Get Environments + id: get-envs + run: | + echo "::set-output name=envs::$(tox -l | grep "^${{ github.job }}\-" | ./.github/workflows/get-envs.py)" + env: + GROUP_NUMBER: ${{ matrix.group-number }} + + - name: Test + run: | + tox -vv -e ${{ steps.get-envs.outputs.envs }} -p auto + env: + TOX_PARALLEL_NO_SPINNER: 1 + PY_COLORS: 0 + mongodb: env: TOTAL_GROUPS: 1 diff --git a/tests/messagebroker_kafkapython/conftest.py b/tests/messagebroker_kafkapython/conftest.py new file mode 100644 index 000000000..bcaf4c2d1 --- /dev/null +++ b/tests/messagebroker_kafkapython/conftest.py @@ -0,0 +1,95 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
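+
+# Shared fixtures for the kafka-python test suite. The broker address comes
+# from testing_support.db_settings.kafka_settings(), so the same tests run
+# against a local broker or the one provisioned in CI. An illustrative use of
+# the producer/consumer fixtures (a sketch, not a test in this suite):
+#
+#     def test_roundtrip(topic, producer, consumer):
+#         producer.send(topic, value={"key": "value"})
+#         producer.flush()
+#         assert next(iter(consumer)).topic == topic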
+ +import json +import uuid + +import kafka +import pytest +from testing_support.db_settings import kafka_settings +from testing_support.fixtures import ( # noqa: F401 + code_coverage_fixture, + collector_agent_registration_fixture, + collector_available_fixture, +) + +from newrelic.api.transaction import current_transaction +from newrelic.common.object_wrapper import transient_function_wrapper + +DB_SETTINGS = kafka_settings()[0] + +BOOTSTRAP_SERVER = "%s:%s" % (DB_SETTINGS["host"], DB_SETTINGS["port"]) +BROKER = [BOOTSTRAP_SERVER] + +_coverage_source = [ + "newrelic.hooks.messagebroker_kafkapython", +] + +code_coverage = code_coverage_fixture(source=_coverage_source) + +_default_settings = { + "transaction_tracer.explain_threshold": 0.0, + "transaction_tracer.transaction_threshold": 0.0, + "transaction_tracer.stack_trace_threshold": 0.0, + "debug.log_data_collector_payloads": True, + "debug.record_transaction_failure": True, +} + +collector_agent_registration = collector_agent_registration_fixture( + app_name="Python Agent Test (messagebroker_kafkapython)", + default_settings=_default_settings, + linked_applications=["Python Agent Test (messagebroker_kafkapython)"], +) + + +@pytest.fixture(scope="function") +def producer(): + producer = kafka.KafkaProducer( + bootstrap_servers=BROKER, api_version=(2, 0, 2), value_serializer=lambda v: json.dumps(v).encode("utf-8") + ) + yield producer + producer.close() + + +@pytest.fixture(scope="function") +def consumer(topic): + consumer = kafka.KafkaConsumer( + topic, + bootstrap_servers=BROKER, + value_deserializer=lambda v: json.loads(v.decode("utf-8")), + auto_offset_reset="earliest", + consumer_timeout_ms=5000, + ) + yield consumer + consumer.close() + + +@pytest.fixture(scope="function") +def topic(): + yield "test-topic-%s" % str(uuid.uuid4()) + + +@transient_function_wrapper(kafka.producer.kafka, "KafkaProducer.send.__wrapped__") +# Place transient wrapper underneath instrumentation +def cache_kafka_headers(wrapped, instance, args, kwargs): + transaction = current_transaction() + + if transaction is None: + return wrapped(*args, **kwargs) + + ret = wrapped(*args, **kwargs) + headers = kwargs.get("headers", []) + headers = dict(headers) + transaction._test_request_headers = headers + return ret diff --git a/tests/messagebroker_kafkapython/test_kafka_produce.py b/tests/messagebroker_kafkapython/test_kafka_produce.py new file mode 100644 index 000000000..e2982bc93 --- /dev/null +++ b/tests/messagebroker_kafkapython/test_kafka_produce.py @@ -0,0 +1,30 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
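+
+# Round-trip smoke test: produce a few JSON messages to a uniquely named
+# topic and read them back, verifying the agent does no harm to the client.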
+ +import time + + +def test_no_harm(topic, producer, consumer): + MESSAGES = [ + {"foo": "bar"}, + {"baz": "bat"}, + ] + + for msg in MESSAGES: + time.sleep(1) + producer.send(topic, value=msg) + producer.flush() + + for msg in consumer: + assert msg.topic == topic diff --git a/tests/messagebroker_pika/conftest.py b/tests/messagebroker_pika/conftest.py index a64f9e8cd..9849ee014 100644 --- a/tests/messagebroker_pika/conftest.py +++ b/tests/messagebroker_pika/conftest.py @@ -12,53 +12,55 @@ # See the License for the specific language governing permissions and # limitations under the License. -import pika -import pytest import uuid +import pika +import pytest from testing_support.db_settings import rabbitmq_settings -from testing_support.fixtures import (code_coverage_fixture, # NOQA - collector_agent_registration_fixture, collector_available_fixture) - +from testing_support.fixtures import ( # noqa: F401 + code_coverage_fixture, + collector_agent_registration_fixture, + collector_available_fixture, +) -QUEUE = 'test_pika-%s' % uuid.uuid4() -QUEUE_2 = 'test_pika-%s' % uuid.uuid4() +QUEUE = "test_pika-%s" % uuid.uuid4() +QUEUE_2 = "test_pika-%s" % uuid.uuid4() -EXCHANGE = 'exchange-%s' % uuid.uuid4() -EXCHANGE_2 = 'exchange-%s' % uuid.uuid4() +EXCHANGE = "exchange-%s" % uuid.uuid4() +EXCHANGE_2 = "exchange-%s" % uuid.uuid4() -CORRELATION_ID = 'test-correlation-id' -REPLY_TO = 'test-reply-to' -HEADERS = {'TestHeader': 'my test header value'} -BODY = b'test_body' +CORRELATION_ID = "test-correlation-id" +REPLY_TO = "test-reply-to" +HEADERS = {"TestHeader": "my test header value"} +BODY = b"test_body" DB_SETTINGS = rabbitmq_settings()[0] _coverage_source = [ - 'newrelic.hooks.messagebroker_pika', + "newrelic.hooks.messagebroker_pika", ] code_coverage = code_coverage_fixture(source=_coverage_source) _default_settings = { - 'transaction_tracer.explain_threshold': 0.0, - 'transaction_tracer.transaction_threshold': 0.0, - 'transaction_tracer.stack_trace_threshold': 0.0, - 'debug.log_data_collector_payloads': True, - 'debug.record_transaction_failure': True + "transaction_tracer.explain_threshold": 0.0, + "transaction_tracer.transaction_threshold": 0.0, + "transaction_tracer.stack_trace_threshold": 0.0, + "debug.log_data_collector_payloads": True, + "debug.record_transaction_failure": True, } collector_agent_registration = collector_agent_registration_fixture( - app_name='Python Agent Test (messagebroker_pika)', - default_settings=_default_settings, - linked_applications=['Python Agent Test (messagebroker)']) + app_name="Python Agent Test (messagebroker_pika)", + default_settings=_default_settings, + linked_applications=["Python Agent Test (messagebroker_pika)"], +) @pytest.fixture() def producer(): # put something into the queue so it can be consumed - with pika.BlockingConnection( - pika.ConnectionParameters(DB_SETTINGS['host'])) as connection: + with pika.BlockingConnection(pika.ConnectionParameters(DB_SETTINGS["host"])) as connection: channel = connection.channel() channel.queue_declare(queue=QUEUE, durable=False) @@ -69,10 +71,7 @@ def producer(): exchange=EXCHANGE, routing_key=QUEUE, body=BODY, - properties=pika.spec.BasicProperties( - correlation_id=CORRELATION_ID, - reply_to=REPLY_TO, - headers=HEADERS), + properties=pika.spec.BasicProperties(correlation_id=CORRELATION_ID, reply_to=REPLY_TO, headers=HEADERS), ) yield channel.queue_delete(queue=QUEUE) @@ -82,8 +81,7 @@ def producer(): @pytest.fixture() def producer_2(): # put something into the queue so it can be consumed - with 
pika.BlockingConnection( - pika.ConnectionParameters(DB_SETTINGS['host'])) as connection: + with pika.BlockingConnection(pika.ConnectionParameters(DB_SETTINGS["host"])) as connection: channel = connection.channel() channel.queue_declare(queue=QUEUE_2, durable=False) @@ -94,10 +92,7 @@ def producer_2(): exchange=EXCHANGE_2, routing_key=QUEUE_2, body=BODY, - properties=pika.spec.BasicProperties( - correlation_id=CORRELATION_ID, - reply_to=REPLY_TO, - headers=HEADERS), + properties=pika.spec.BasicProperties(correlation_id=CORRELATION_ID, reply_to=REPLY_TO, headers=HEADERS), ) yield channel.queue_delete(queue=QUEUE_2) @@ -107,8 +102,7 @@ def producer_2(): @pytest.fixture() def produce_five(): # put something into the queue so it can be consumed - with pika.BlockingConnection( - pika.ConnectionParameters(DB_SETTINGS['host'])) as connection: + with pika.BlockingConnection(pika.ConnectionParameters(DB_SETTINGS["host"])) as connection: channel = connection.channel() channel.queue_declare(queue=QUEUE, durable=False) @@ -120,10 +114,7 @@ def produce_five(): exchange=EXCHANGE, routing_key=QUEUE, body=BODY, - properties=pika.spec.BasicProperties( - correlation_id=CORRELATION_ID, - reply_to=REPLY_TO, - headers=HEADERS), + properties=pika.spec.BasicProperties(correlation_id=CORRELATION_ID, reply_to=REPLY_TO, headers=HEADERS), ) yield diff --git a/tests/testing_support/db_settings.py b/tests/testing_support/db_settings.py index 8f4c7b49a..c7c35935f 100644 --- a/tests/testing_support/db_settings.py +++ b/tests/testing_support/db_settings.py @@ -12,8 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. -import pwd import os +import pwd USER = pwd.getpwuid(os.getuid()).pw_name @@ -168,11 +168,7 @@ def mongodb_settings(): base_port = 27017 settings = [ - { - "host": "127.0.0.1", - "port": base_port + instance_num, - "collection": "mongodb_collection_" + str(os.getpid()) - } + {"host": "127.0.0.1", "port": base_port + instance_num, "collection": "mongodb_collection_" + str(os.getpid())} for instance_num in range(instances) ] return settings @@ -258,3 +254,31 @@ def rabbitmq_settings(): for instance_num in range(instances) ] return settings + + +def kafka_settings(): + """Return a list of dict of settings for connecting to kafka. + + Will return the correct settings, depending on which of the environments it + is running in. It attempts to set variables in the following order, where + later environments override earlier ones. + + 1. Local + 2. 
Github Actions + """ + + if "GITHUB_ACTIONS" in os.environ: + instances = 2 + base_port = 8080 + else: + instances = 1 + base_port = 9092 + + settings = [ + { + "host": "localhost", + "port": base_port + instance_num, + } + for instance_num in range(instances) + ] + return settings diff --git a/tox.ini b/tox.ini index 04a9b6219..2caee17cf 100644 --- a/tox.ini +++ b/tox.ini @@ -149,6 +149,7 @@ envlist = libcurl-framework_tornado-{py37,py38,py39,py310}-tornadomaster, rabbitmq-messagebroker_pika-{py27,py37,py38,py39,pypy,pypy37}-pika0.13, rabbitmq-messagebroker_pika-{py37,py38,py39,py310,pypy37}-pikalatest, + kafka-messagebroker_kafkapython-{pypy,py27,py37,py38,pypy37}-kafkapythonlatest python-template_mako-{py27,py37,py38,py39,py310} [pytest] @@ -353,6 +354,7 @@ deps = messagebroker_pika-pikalatest: pika messagebroker_pika: tornado<5 messagebroker_pika-{py27,pypy}: enum34 + messagebroker_kafkapython: kafka-python template_mako: mako<1.2 setenv = @@ -451,4 +453,5 @@ changedir = logger_logging: tests/logger_logging logger_loguru: tests/logger_loguru messagebroker_pika: tests/messagebroker_pika + messagebroker_kafkapython: tests/messagebroker_kafkapython template_mako: tests/template_mako From ec91e03f61066724e8fdb05ebb6c961a0d6754ee Mon Sep 17 00:00:00 2001 From: Timothy Pansino <11214426+TimPansino@users.noreply.github.com> Date: Thu, 1 Sep 2022 13:46:32 -0700 Subject: [PATCH 26/49] Kafka Heartbeat (#614) * Add Kafka test infra. * Update tests.yml. * Basic kafka producer instrumentation. Co-authored-by: Lalleh Rafeei * Add testing for kafka producer. Co-authored-by: Lalleh Rafeei * Kafka Heartbeat instrumentation * Remove changes not related to heartbeat Co-authored-by: Uma Annamalai Co-authored-by: Hannah Stepanek Co-authored-by: Lalleh Rafeei <84813886+lrafeei@users.noreply.github.com> --- newrelic/config.py | 6 ++ newrelic/hooks/messagebroker_kafkapython.py | 66 +++++++++++++++++++ tests/messagebroker_kafkapython/conftest.py | 2 + .../test_heartbeat.py | 60 +++++++++++++++++ 4 files changed, 134 insertions(+) create mode 100644 newrelic/hooks/messagebroker_kafkapython.py create mode 100644 tests/messagebroker_kafkapython/test_heartbeat.py diff --git a/newrelic/config.py b/newrelic/config.py index 456cd722d..058195eb1 100644 --- a/newrelic/config.py +++ b/newrelic/config.py @@ -2307,6 +2307,12 @@ def _process_module_builtin_defaults(): "instrument_cherrypy__cptree", ) + _process_module_definition( + "kafka.coordinator.heartbeat", + "newrelic.hooks.messagebroker_kafkapython", + "instrument_kafka_heartbeat", + ) + _process_module_definition( "logging", "newrelic.hooks.logger_logging", diff --git a/newrelic/hooks/messagebroker_kafkapython.py b/newrelic/hooks/messagebroker_kafkapython.py new file mode 100644 index 000000000..c6b1fb07a --- /dev/null +++ b/newrelic/hooks/messagebroker_kafkapython.py @@ -0,0 +1,66 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
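+
+# Instrumentation hooks for the kafka-python client library. Each heartbeat
+# event (poll, send, failure, receive, and the two timeout checks) is counted
+# with a custom metric, recorded outside of any transaction via the
+# application object.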
+ +from newrelic.api.application import application_instance +from newrelic.common.object_wrapper import wrap_function_wrapper + +HEARTBEAT_POLL = "MessageBroker/Kafka/Heartbeat/Poll" +HEARTBEAT_SENT = "MessageBroker/Kafka/Heartbeat/Sent" +HEARTBEAT_FAIL = "MessageBroker/Kafka/Heartbeat/Fail" +HEARTBEAT_RECEIVE = "MessageBroker/Kafka/Heartbeat/Receive" +HEARTBEAT_SESSION_TIMEOUT = "MessageBroker/Kafka/Heartbeat/SessionTimeout" +HEARTBEAT_POLL_TIMEOUT = "MessageBroker/Kafka/Heartbeat/PollTimeout" + + +def metric_wrapper(metric_name, check_result=False): + def _metric_wrapper(wrapped, instance, args, kwargs): + result = wrapped(*args, **kwargs) + + application = application_instance(activate=False) + if application: + if not check_result or check_result and result: + # If the result does not need validated, send metric. + # If the result does need validated, ensure it is True. + application.record_custom_metric(metric_name, 1) + + return result + + return _metric_wrapper + + +def instrument_kafka_heartbeat(module): + if hasattr(module, "Heartbeat"): + if hasattr(module.Heartbeat, "poll"): + wrap_function_wrapper(module, "Heartbeat.poll", metric_wrapper(HEARTBEAT_POLL)) + + if hasattr(module.Heartbeat, "fail_heartbeat"): + wrap_function_wrapper(module, "Heartbeat.fail_heartbeat", metric_wrapper(HEARTBEAT_FAIL)) + + if hasattr(module.Heartbeat, "sent_heartbeat"): + wrap_function_wrapper(module, "Heartbeat.sent_heartbeat", metric_wrapper(HEARTBEAT_SENT)) + + if hasattr(module.Heartbeat, "received_heartbeat"): + wrap_function_wrapper(module, "Heartbeat.received_heartbeat", metric_wrapper(HEARTBEAT_RECEIVE)) + + if hasattr(module.Heartbeat, "session_timeout_expired"): + wrap_function_wrapper( + module, + "Heartbeat.session_timeout_expired", + metric_wrapper(HEARTBEAT_SESSION_TIMEOUT, check_result=True), + ) + + if hasattr(module.Heartbeat, "poll_timeout_expired"): + wrap_function_wrapper( + module, "Heartbeat.poll_timeout_expired", metric_wrapper(HEARTBEAT_POLL_TIMEOUT, check_result=True) + ) diff --git a/tests/messagebroker_kafkapython/conftest.py b/tests/messagebroker_kafkapython/conftest.py index bcaf4c2d1..45981cd47 100644 --- a/tests/messagebroker_kafkapython/conftest.py +++ b/tests/messagebroker_kafkapython/conftest.py @@ -70,6 +70,8 @@ def consumer(topic): value_deserializer=lambda v: json.loads(v.decode("utf-8")), auto_offset_reset="earliest", consumer_timeout_ms=5000, + heartbeat_interval_ms=1000, + group_id="test", ) yield consumer consumer.close() diff --git a/tests/messagebroker_kafkapython/test_heartbeat.py b/tests/messagebroker_kafkapython/test_heartbeat.py new file mode 100644 index 000000000..1b6c79a2e --- /dev/null +++ b/tests/messagebroker_kafkapython/test_heartbeat.py @@ -0,0 +1,60 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
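+
+# Heartbeat metrics are exercised two ways: the success-path metrics come from
+# a live consumer polling the broker, while the failure and timeout metrics
+# are forced by driving a Heartbeat object directly with zeroed timeouts.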
+ +import time + +import kafka +from testing_support.validators.validate_custom_metrics_outside_transaction import ( + validate_custom_metrics_outside_transaction, +) + + +@validate_custom_metrics_outside_transaction( + [ + ("MessageBroker/Kafka/Heartbeat/Poll", "present"), + ("MessageBroker/Kafka/Heartbeat/Sent", "present"), + ("MessageBroker/Kafka/Heartbeat/Receive", "present"), + ("MessageBroker/Kafka/Heartbeat/Fail", None), + ("MessageBroker/Kafka/Heartbeat/SessionTimeout", None), + ("MessageBroker/Kafka/Heartbeat/PollTimeout", None), + ] +) +def test_successful_heartbeat_metrics_recorded(topic, producer, consumer): + producer.send(topic, value=1) + producer.flush() + + next(iter(consumer)) + time.sleep(1.5) + + +@validate_custom_metrics_outside_transaction( + [ + ("MessageBroker/Kafka/Heartbeat/Poll", "present"), + ("MessageBroker/Kafka/Heartbeat/Sent", "present"), + ("MessageBroker/Kafka/Heartbeat/Fail", "present"), + ("MessageBroker/Kafka/Heartbeat/Receive", "present"), + ("MessageBroker/Kafka/Heartbeat/SessionTimeout", "present"), + ("MessageBroker/Kafka/Heartbeat/PollTimeout", "present"), + ] +) +def test_fail_timeout_heartbeat_metrics_recorded(): + heartbeat = kafka.coordinator.heartbeat.Heartbeat(session_timeout_ms=0, max_poll_interval_ms=0) + + heartbeat.poll() + heartbeat.sent_heartbeat() + heartbeat.received_heartbeat() + heartbeat.fail_heartbeat() + + assert heartbeat.session_timeout_expired(), "Failed to force heartbeat to timeout." + assert heartbeat.poll_timeout_expired(), "Failed to force heartbeat to timeout." From ef56ff70018cbe285a094104c41aad7e7deb2d09 Mon Sep 17 00:00:00 2001 From: Timothy Pansino <11214426+TimPansino@users.noreply.github.com> Date: Thu, 1 Sep 2022 14:27:55 -0700 Subject: [PATCH 27/49] Kafka Producer Instrumentation (#612) * Basic kafka producer instrumentation. Co-authored-by: Lalleh Rafeei * Add testing for kafka producer. Co-authored-by: Lalleh Rafeei * Add producer test for notice_error & cleanup Co-authored-by: Lalleh Rafeei * Fix py2 naming * Fix lint error * Kafka Heartbeat (#614) * Add Kafka test infra. * Update tests.yml. * Basic kafka producer instrumentation. Co-authored-by: Lalleh Rafeei * Add testing for kafka producer. 
Co-authored-by: Lalleh Rafeei * Kafka Heartbeat instrumentation * Remove changes not related to heartbeat Co-authored-by: Uma Annamalai Co-authored-by: Hannah Stepanek Co-authored-by: Lalleh Rafeei <84813886+lrafeei@users.noreply.github.com> Co-authored-by: Hannah Stepanek Co-authored-by: Lalleh Rafeei Co-authored-by: Uma Annamalai Co-authored-by: Lalleh Rafeei <84813886+lrafeei@users.noreply.github.com> --- newrelic/config.py | 5 ++ newrelic/hooks/messagebroker_kafkapython.py | 37 ++++++++++ .../test_kafka_produce.py | 69 +++++++++++++++---- 3 files changed, 99 insertions(+), 12 deletions(-) diff --git a/newrelic/config.py b/newrelic/config.py index 058195eb1..bfd3e6611 100644 --- a/newrelic/config.py +++ b/newrelic/config.py @@ -2307,6 +2307,11 @@ def _process_module_builtin_defaults(): "instrument_cherrypy__cptree", ) + _process_module_definition( + "kafka.producer.kafka", + "newrelic.hooks.messagebroker_kafkapython", + "instrument_kafka_producer", + ) _process_module_definition( "kafka.coordinator.heartbeat", "newrelic.hooks.messagebroker_kafkapython", diff --git a/newrelic/hooks/messagebroker_kafkapython.py b/newrelic/hooks/messagebroker_kafkapython.py index c6b1fb07a..31d71794f 100644 --- a/newrelic/hooks/messagebroker_kafkapython.py +++ b/newrelic/hooks/messagebroker_kafkapython.py @@ -13,6 +13,9 @@ # limitations under the License. from newrelic.api.application import application_instance +from newrelic.api.message_trace import MessageTrace +from newrelic.api.time_trace import notice_error +from newrelic.api.transaction import current_transaction from newrelic.common.object_wrapper import wrap_function_wrapper HEARTBEAT_POLL = "MessageBroker/Kafka/Heartbeat/Poll" @@ -23,6 +26,40 @@ HEARTBEAT_POLL_TIMEOUT = "MessageBroker/Kafka/Heartbeat/PollTimeout" +def _bind_send(topic, value=None, key=None, headers=None, partition=None, timestamp_ms=None): + return topic, value, key, headers, partition, timestamp_ms + + +def wrap_KafkaProducer_send(wrapped, instance, args, kwargs): + transaction = current_transaction() + + if transaction is None: + return wrapped(*args, **kwargs) + + topic, value, key, headers, partition, timestamp_ms = _bind_send(*args, **kwargs) + headers = list(headers) if headers else [] + + with MessageTrace( + library="Kafka", + operation="Produce", + destination_type="Topic", + destination_name=topic or "Default", + source=wrapped, + ) as trace: + dt_headers = [(k, v.encode("utf-8")) for k, v in trace.generate_request_headers(transaction)] + headers.extend(dt_headers) + try: + return wrapped(topic, value=value, key=key, headers=headers, partition=partition, timestamp_ms=timestamp_ms) + except Exception: + notice_error() + raise + + +def instrument_kafka_producer(module): + if hasattr(module, "KafkaProducer"): + wrap_function_wrapper(module, "KafkaProducer.send", wrap_KafkaProducer_send) + + def metric_wrapper(metric_name, check_result=False): def _metric_wrapper(wrapped, instance, args, kwargs): result = wrapped(*args, **kwargs) diff --git a/tests/messagebroker_kafkapython/test_kafka_produce.py b/tests/messagebroker_kafkapython/test_kafka_produce.py index e2982bc93..43b1c2a0e 100644 --- a/tests/messagebroker_kafkapython/test_kafka_produce.py +++ b/tests/messagebroker_kafkapython/test_kafka_produce.py @@ -12,19 +12,64 @@ # See the License for the specific language governing permissions and # limitations under the License. 
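+# These tests cover the producer instrumentation added in this patch:
+# message trace metrics, distributed tracing headers on outgoing messages,
+# and error events when a send raises.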
-import time
+import pytest
+from conftest import cache_kafka_headers
+from testing_support.fixtures import (
+    validate_non_transaction_error_event,
+    validate_transaction_metrics,
+)
+from testing_support.validators.validate_messagebroker_headers import (
+    validate_messagebroker_headers,
+)
+
+from newrelic.api.background_task import background_task
+from newrelic.packages import six
 
 
-def test_no_harm(topic, producer, consumer):
-    MESSAGES = [
-        {"foo": "bar"},
-        {"baz": "bat"},
-    ]
+def test_producer_records_trace(topic, send_producer_messages):
+    scoped_metrics = [("MessageBroker/Kafka/Topic/Produce/Named/%s" % topic, 3)]
+    unscoped_metrics = scoped_metrics
+    txn_name = "test_kafka_produce:test_producer_records_trace.<locals>.test" if six.PY3 else "test_kafka_produce:test"
 
-    for msg in MESSAGES:
-        time.sleep(1)
-        producer.send(topic, value=msg)
-    producer.flush()
+    @validate_transaction_metrics(
+        txn_name,
+        scoped_metrics=scoped_metrics,
+        rollup_metrics=unscoped_metrics,
+        background_task=True,
+    )
+    @background_task()
+    @cache_kafka_headers
+    @validate_messagebroker_headers
+    def test():
+        send_producer_messages()
+
+    test()
+
 
-    for msg in consumer:
-        assert msg.topic == topic
+def test_producer_records_error_if_raised(topic, producer):
+    _intrinsic_attributes = {
+        "error.class": "AssertionError",
+        "error.message": "Need at least one: key or value",
+        "error.expected": False,
+    }
+
+    @validate_non_transaction_error_event(_intrinsic_attributes)
+    @background_task()
+    def test():
+        producer.send(topic, None)
+        producer.flush()
+
+    with pytest.raises(AssertionError):
+        test()
+
+
+@pytest.fixture
+def send_producer_messages(topic, producer):
+    def _test():
+        messages = [1, 2, 3]
+        for message in messages:
+            producer.send(topic, message)
+
+        producer.flush()
+
+    return _test

From 3b89bbf3084797bc906eff91b3bc4e57a504105a Mon Sep 17 00:00:00 2001
From: Timothy Pansino <11214426+TimPansino@users.noreply.github.com>
Date: Fri, 2 Sep 2022 13:39:02 -0700
Subject: [PATCH 28/49] Kafka Internal Metrics Instrumentation (#613)

* Kafka Metrics data source

Co-authored-by: Lalleh Rafeei
Co-authored-by: Hannah Stepanek

* [Mega-Linter] Apply linters fixes

* Remove breakpoint

* Run linter

* Fix black issues

* Swap @classmethod decorator order.

Co-authored-by: Hannah Stepanek * Remove variable named variable Co-authored-by: Lalleh Rafeei Co-authored-by: Hannah Stepanek Co-authored-by: TimPansino Co-authored-by: Lalleh Rafeei Co-authored-by: Hannah Stepanek --- newrelic/config.py | 6 + newrelic/hooks/messagebroker_kafkapython.py | 157 +++++++++++++++++- tests/messagebroker_kafkapython/conftest.py | 18 +- .../messagebroker_kafkapython/test_metrics.py | 123 ++++++++++++++ 4 files changed, 296 insertions(+), 8 deletions(-) create mode 100644 tests/messagebroker_kafkapython/test_metrics.py diff --git a/newrelic/config.py b/newrelic/config.py index bfd3e6611..01ac7bfae 100644 --- a/newrelic/config.py +++ b/newrelic/config.py @@ -2318,6 +2318,12 @@ def _process_module_builtin_defaults(): "instrument_kafka_heartbeat", ) + _process_module_definition( + "kafka.consumer.group", + "newrelic.hooks.messagebroker_kafkapython", + "instrument_kafka_consumer_group", + ) + _process_module_definition( "logging", "newrelic.hooks.logger_logging", diff --git a/newrelic/hooks/messagebroker_kafkapython.py b/newrelic/hooks/messagebroker_kafkapython.py index 31d71794f..4182ed536 100644 --- a/newrelic/hooks/messagebroker_kafkapython.py +++ b/newrelic/hooks/messagebroker_kafkapython.py @@ -12,11 +12,22 @@ # See the License for the specific language governing permissions and # limitations under the License. +import logging +import math +import threading + +from kafka.metrics.metrics_reporter import AbstractMetricsReporter + +import newrelic.core.agent from newrelic.api.application import application_instance from newrelic.api.message_trace import MessageTrace from newrelic.api.time_trace import notice_error from newrelic.api.transaction import current_transaction from newrelic.common.object_wrapper import wrap_function_wrapper +from newrelic.packages import six +from newrelic.samplers.decorators import data_source_factory + +_logger = logging.getLogger(__name__) HEARTBEAT_POLL = "MessageBroker/Kafka/Heartbeat/Poll" HEARTBEAT_SENT = "MessageBroker/Kafka/Heartbeat/Sent" @@ -55,11 +66,6 @@ def wrap_KafkaProducer_send(wrapped, instance, args, kwargs): raise -def instrument_kafka_producer(module): - if hasattr(module, "KafkaProducer"): - wrap_function_wrapper(module, "KafkaProducer.send", wrap_KafkaProducer_send) - - def metric_wrapper(metric_name, check_result=False): def _metric_wrapper(wrapped, instance, args, kwargs): result = wrapped(*args, **kwargs) @@ -101,3 +107,144 @@ def instrument_kafka_heartbeat(module): wrap_function_wrapper( module, "Heartbeat.poll_timeout_expired", metric_wrapper(HEARTBEAT_POLL_TIMEOUT, check_result=True) ) + + +class KafkaMetricsDataSource(object): + _instance = None + + def __init__(self): + self.reporters = [] + + @classmethod + @data_source_factory(name="Kafka Metrics Reporter") + def factory(cls, settings=None, environ=None): + return cls.singleton() + + @classmethod + def singleton(cls, register=True): + # If already initialized, exit early + if cls._instance: + return cls._instance + + # Init and register instance on class + instance = cls() + cls._instance = instance + + # register_data_source takes a callable so let it rerun singleton to retrieve the instance + if register: + try: + _logger.debug("Registering kafka metrics data source.") + newrelic.core.agent.agent_instance().register_data_source(cls.factory) + except Exception: + _logger.exception( + "Attempt to register kafka metrics data source has failed. Data source will be skipped." 
+                )
+
+        return instance
+
+    def register(self, reporter):
+        self.reporters.append(reporter)
+
+    def unregister(self, reporter):
+        if reporter in self.reporters:
+            self.reporters.remove(reporter)
+
+    def start(self):
+        return
+
+    def stop(self):
+        # Clear references to reporters to prevent them from participating in a reference cycle.
+        self.reporters = []
+
+    def __call__(self):
+        for reporter in self.reporters:
+            for name, metric in six.iteritems(reporter.snapshot()):
+                yield name, metric
+
+
+class NewRelicMetricsReporter(AbstractMetricsReporter):
+    def __init__(self, *args, **kwargs):
+        super(NewRelicMetricsReporter, self).__init__(*args, **kwargs)
+
+        # Register with data source for harvesting
+        self.data_source = KafkaMetricsDataSource.singleton()
+        self.data_source.register(self)
+
+        self._metrics = {}
+        self._lock = threading.Lock()
+
+    def close(self, *args, **kwargs):
+        self.data_source.unregister(self)
+        with self._lock:
+            self._metrics = {}
+
+    def init(self, metrics):
+        for metric in metrics:
+            self.metric_change(metric)
+
+    @staticmethod
+    def invalid_metric_value(metric):
+        # Predicate used with filter() below: returns True for values worth
+        # reporting (finite and nonzero), False for inf/NaN/zero values.
+        name, value = metric
+        return not any((math.isinf(value), math.isnan(value), value == 0))
+
+    def snapshot(self):
+        with self._lock:
+            # metric.value can only be called once, so care must be taken when filtering
+            metrics = ((name, metric.value()) for name, metric in six.iteritems(self._metrics))
+            return {
+                "MessageBroker/Kafka/Internal/%s" % name: {"count": value}
+                for name, value in filter(self.invalid_metric_value, metrics)
+            }
+
+    def get_metric_name(self, metric):
+        metric_name = metric.metric_name  # Get MetricName object to work with
+
+        name = metric_name.name
+        group = metric_name.group
+
+        if "topic" in metric_name.tags:
+            topic = metric_name.tags["topic"]
+            return "/".join((group, topic, name))
+        else:
+            return "/".join((group, name))
+
+    def metric_change(self, metric):
+        name = self.get_metric_name(metric)
+        with self._lock:
+            self._metrics[name] = metric
+
+    def metric_removal(self, metric):
+        name = self.get_metric_name(metric)
+        with self._lock:
+            if name in self._metrics:
+                self._metrics.pop(name)
+
+    def configure(self, configs):
+        return
+
+
+def wrap_KafkaProducerConsumer_init(wrapped, instance, args, kwargs):
+    try:
+        if "metric_reporters" in kwargs:
+            metric_reporters = list(kwargs.get("metric_reporters", []))
+            metric_reporters.append(NewRelicMetricsReporter)
+            # Pass the flat list of reporter classes through; wrapping it in
+            # another list would hand kafka-python a nested, uncallable list.
+            kwargs["metric_reporters"] = metric_reporters
+        else:
+            kwargs["metric_reporters"] = [NewRelicMetricsReporter]
+    except Exception:
+        pass
+
+    return wrapped(*args, **kwargs)
+
+
+def instrument_kafka_producer(module):
+    if hasattr(module, "KafkaProducer"):
+        wrap_function_wrapper(module, "KafkaProducer.__init__", wrap_KafkaProducerConsumer_init)
+
+    if hasattr(module, "KafkaProducer"):
+        wrap_function_wrapper(module, "KafkaProducer.send", wrap_KafkaProducer_send)
+
+
+def instrument_kafka_consumer_group(module):
+    if hasattr(module, "KafkaConsumer"):
+        wrap_function_wrapper(module, "KafkaConsumer.__init__", wrap_KafkaProducerConsumer_init)
diff --git a/tests/messagebroker_kafkapython/conftest.py b/tests/messagebroker_kafkapython/conftest.py
index 45981cd47..39ea88dc3 100644
--- a/tests/messagebroker_kafkapython/conftest.py
+++ b/tests/messagebroker_kafkapython/conftest.py
@@ -18,7 +18,7 @@
 import kafka
 import pytest
 from testing_support.db_settings import kafka_settings
-from testing_support.fixtures import (  # noqa: F401
+from testing_support.fixtures import (  # noqa: F401, W0611
     code_coverage_fixture,
collector_agent_registration_fixture, collector_available_fixture, @@ -26,6 +26,7 @@ from newrelic.api.transaction import current_transaction from newrelic.common.object_wrapper import transient_function_wrapper +from newrelic.hooks.messagebroker_kafkapython import KafkaMetricsDataSource DB_SETTINGS = kafka_settings()[0] @@ -54,7 +55,7 @@ @pytest.fixture(scope="function") -def producer(): +def producer(data_source): producer = kafka.KafkaProducer( bootstrap_servers=BROKER, api_version=(2, 0, 2), value_serializer=lambda v: json.dumps(v).encode("utf-8") ) @@ -63,7 +64,7 @@ def producer(): @pytest.fixture(scope="function") -def consumer(topic): +def consumer(topic, data_source): consumer = kafka.KafkaConsumer( topic, bootstrap_servers=BROKER, @@ -82,6 +83,17 @@ def topic(): yield "test-topic-%s" % str(uuid.uuid4()) +@pytest.fixture(scope="session") +def data_source(): + """ + Must be required by consumer and producer fixtures, or the first one of them to be + instantiated will create and register the singleton. We rely on the singleton to + not be registered to properly test the output of it without interference from the + harvest thread. + """ + return KafkaMetricsDataSource.singleton(register=False) + + @transient_function_wrapper(kafka.producer.kafka, "KafkaProducer.send.__wrapped__") # Place transient wrapper underneath instrumentation def cache_kafka_headers(wrapped, instance, args, kwargs): diff --git a/tests/messagebroker_kafkapython/test_metrics.py b/tests/messagebroker_kafkapython/test_metrics.py new file mode 100644 index 000000000..94c7c224a --- /dev/null +++ b/tests/messagebroker_kafkapython/test_metrics.py @@ -0,0 +1,123 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
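+
+# The data_source fixture is the unregistered KafkaMetricsDataSource
+# singleton, so calling it yields the kafka client's internal metrics as
+# gathered by NewRelicMetricsReporter, without the harvest thread consuming
+# them first. A sample of the metric names produced is listed below.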
+ + +def test_data_source_metrics(data_source, topic, producer, consumer): + producer.send(topic, value=1) + producer.flush() + next(iter(consumer)) + + metrics = dict(data_source()) + metric_names = list(metrics.keys()) + assert metrics + + +# Example metrics + +# MessageBroker/Kafka/Internal/kafka-metrics-count/count +# MessageBroker/Kafka/Internal/producer-metrics/connection-close-rate +# MessageBroker/Kafka/Internal/producer-metrics/connection-creation-rate +# MessageBroker/Kafka/Internal/producer-metrics/select-rate +# MessageBroker/Kafka/Internal/producer-metrics/io-wait-time-ns-avg +# MessageBroker/Kafka/Internal/producer-metrics/io-wait-ratio +# MessageBroker/Kafka/Internal/producer-metrics/io-time-ns-avg +# MessageBroker/Kafka/Internal/producer-metrics/io-ratio +# MessageBroker/Kafka/Internal/producer-metrics/connection-count +# MessageBroker/Kafka/Internal/producer-metrics/batch-size-avg +# MessageBroker/Kafka/Internal/producer-metrics/batch-size-max +# MessageBroker/Kafka/Internal/producer-metrics/compression-rate-avg +# MessageBroker/Kafka/Internal/producer-metrics/record-queue-time-avg +# MessageBroker/Kafka/Internal/producer-metrics/record-queue-time-max +# MessageBroker/Kafka/Internal/producer-metrics/record-send-rate +# MessageBroker/Kafka/Internal/producer-metrics/records-per-request-avg +# MessageBroker/Kafka/Internal/producer-metrics/byte-rate +# MessageBroker/Kafka/Internal/producer-metrics/record-size-max +# MessageBroker/Kafka/Internal/producer-metrics/record-size-avg +# MessageBroker/Kafka/Internal/producer-metrics/metadata-age +# MessageBroker/Kafka/Internal/producer-metrics/network-io-rate +# MessageBroker/Kafka/Internal/producer-metrics/outgoing-byte-rate +# MessageBroker/Kafka/Internal/producer-metrics/request-rate +# MessageBroker/Kafka/Internal/producer-metrics/request-size-avg +# MessageBroker/Kafka/Internal/producer-metrics/request-size-max +# MessageBroker/Kafka/Internal/producer-metrics/incoming-byte-rate +# MessageBroker/Kafka/Internal/producer-metrics/response-rate +# MessageBroker/Kafka/Internal/producer-metrics/request-latency-avg +# MessageBroker/Kafka/Internal/producer-metrics/request-latency-max +# MessageBroker/Kafka/Internal/producer-node-metrics.node-bootstrap-0/outgoing-byte-rate +# MessageBroker/Kafka/Internal/producer-node-metrics.node-bootstrap-0/request-rate +# MessageBroker/Kafka/Internal/producer-node-metrics.node-bootstrap-0/request-size-avg +# MessageBroker/Kafka/Internal/producer-node-metrics.node-bootstrap-0/request-size-max +# MessageBroker/Kafka/Internal/producer-node-metrics.node-bootstrap-0/incoming-byte-rate +# MessageBroker/Kafka/Internal/producer-node-metrics.node-bootstrap-0/response-rate +# MessageBroker/Kafka/Internal/producer-node-metrics.node-bootstrap-0/request-latency-avg +# MessageBroker/Kafka/Internal/producer-node-metrics.node-bootstrap-0/request-latency-max +# MessageBroker/Kafka/Internal/producer-node-metrics.node-1001/outgoing-byte-rate +# MessageBroker/Kafka/Internal/producer-node-metrics.node-1001/request-rate +# MessageBroker/Kafka/Internal/producer-node-metrics.node-1001/request-size-avg +# MessageBroker/Kafka/Internal/producer-node-metrics.node-1001/request-size-max +# MessageBroker/Kafka/Internal/producer-node-metrics.node-1001/incoming-byte-rate +# MessageBroker/Kafka/Internal/producer-node-metrics.node-1001/response-rate +# MessageBroker/Kafka/Internal/producer-node-metrics.node-1001/request-latency-avg +# MessageBroker/Kafka/Internal/producer-node-metrics.node-1001/request-latency-max +# 
MessageBroker/Kafka/Internal/producer-topic-metrics.test-topic-c962647a-f6cf-4a24-a90b-40ab2364dc55/record-send-rate +# MessageBroker/Kafka/Internal/producer-topic-metrics.test-topic-c962647a-f6cf-4a24-a90b-40ab2364dc55/byte-rate +# MessageBroker/Kafka/Internal/producer-topic-metrics.test-topic-c962647a-f6cf-4a24-a90b-40ab2364dc55/compression-rate +# MessageBroker/Kafka/Internal/consumer-metrics/connection-close-rate +# MessageBroker/Kafka/Internal/consumer-metrics/connection-creation-rate +# MessageBroker/Kafka/Internal/consumer-metrics/select-rate +# MessageBroker/Kafka/Internal/consumer-metrics/io-wait-time-ns-avg +# MessageBroker/Kafka/Internal/consumer-metrics/io-wait-ratio +# MessageBroker/Kafka/Internal/consumer-metrics/io-time-ns-avg +# MessageBroker/Kafka/Internal/consumer-metrics/io-ratio +# MessageBroker/Kafka/Internal/consumer-metrics/connection-count +# MessageBroker/Kafka/Internal/consumer-metrics/network-io-rate +# MessageBroker/Kafka/Internal/consumer-metrics/outgoing-byte-rate +# MessageBroker/Kafka/Internal/consumer-metrics/request-rate +# MessageBroker/Kafka/Internal/consumer-metrics/request-size-avg +# MessageBroker/Kafka/Internal/consumer-metrics/request-size-max +# MessageBroker/Kafka/Internal/consumer-metrics/incoming-byte-rate +# MessageBroker/Kafka/Internal/consumer-metrics/response-rate +# MessageBroker/Kafka/Internal/consumer-metrics/request-latency-avg +# MessageBroker/Kafka/Internal/consumer-metrics/request-latency-max +# MessageBroker/Kafka/Internal/consumer-node-metrics.node-bootstrap-0/outgoing-byte-rate +# MessageBroker/Kafka/Internal/consumer-node-metrics.node-bootstrap-0/request-rate +# MessageBroker/Kafka/Internal/consumer-node-metrics.node-bootstrap-0/request-size-avg +# MessageBroker/Kafka/Internal/consumer-node-metrics.node-bootstrap-0/request-size-max +# MessageBroker/Kafka/Internal/consumer-node-metrics.node-bootstrap-0/incoming-byte-rate +# MessageBroker/Kafka/Internal/consumer-node-metrics.node-bootstrap-0/response-rate +# MessageBroker/Kafka/Internal/consumer-node-metrics.node-bootstrap-0/request-latency-avg +# MessageBroker/Kafka/Internal/consumer-node-metrics.node-bootstrap-0/request-latency-max +# MessageBroker/Kafka/Internal/consumer-fetch-manager-metrics/fetch-size-avg +# MessageBroker/Kafka/Internal/consumer-fetch-manager-metrics/fetch-size-max +# MessageBroker/Kafka/Internal/consumer-fetch-manager-metrics/bytes-consumed-rate +# MessageBroker/Kafka/Internal/consumer-fetch-manager-metrics/records-per-request-avg +# MessageBroker/Kafka/Internal/consumer-fetch-manager-metrics/records-consumed-rate +# MessageBroker/Kafka/Internal/consumer-fetch-manager-metrics/fetch-latency-avg +# MessageBroker/Kafka/Internal/consumer-fetch-manager-metrics/fetch-latency-max +# MessageBroker/Kafka/Internal/consumer-fetch-manager-metrics/fetch-rate +# MessageBroker/Kafka/Internal/consumer-fetch-manager-metrics/records-lag-max +# MessageBroker/Kafka/Internal/consumer-coordinator-metrics/assigned-partitions +# MessageBroker/Kafka/Internal/consumer-node-metrics.node-1001/outgoing-byte-rate +# MessageBroker/Kafka/Internal/consumer-node-metrics.node-1001/request-rate +# MessageBroker/Kafka/Internal/consumer-node-metrics.node-1001/request-size-avg +# MessageBroker/Kafka/Internal/consumer-node-metrics.node-1001/request-size-max +# MessageBroker/Kafka/Internal/consumer-node-metrics.node-1001/incoming-byte-rate +# MessageBroker/Kafka/Internal/consumer-node-metrics.node-1001/response-rate +# MessageBroker/Kafka/Internal/consumer-node-metrics.node-1001/request-latency-avg 
+# MessageBroker/Kafka/Internal/consumer-node-metrics.node-1001/request-latency-max +# MessageBroker/Kafka/Internal/consumer-fetch-manager-metrics/test-topic-c962647a-f6cf-4a24-a90b-40ab2364dc55/fetch-size-avg +# MessageBroker/Kafka/Internal/consumer-fetch-manager-metrics/test-topic-c962647a-f6cf-4a24-a90b-40ab2364dc55/fetch-size-max +# MessageBroker/Kafka/Internal/consumer-fetch-manager-metrics/test-topic-c962647a-f6cf-4a24-a90b-40ab2364dc55/bytes-consumed-rate +# MessageBroker/Kafka/Internal/consumer-fetch-manager-metrics/test-topic-c962647a-f6cf-4a24-a90b-40ab2364dc55/records-per-request-avg +# MessageBroker/Kafka/Internal/consumer-fetch-manager-metrics/test-topic-c962647a-f6cf-4a24-a90b-40ab2364dc55/records-consumed-rate From 411b980a726c447e8be71d901dfe843f815a4276 Mon Sep 17 00:00:00 2001 From: Hannah Stepanek Date: Fri, 2 Sep 2022 15:00:40 -0700 Subject: [PATCH 29/49] Add kafka python consumer (#611) * Add kafka-python consumer __next__ instrumentation Co-authored-by: UmaAnnamalai Co-authored-by: TimPansino Co-authored-by: LallehRafeei * Replace iter(next()) with for loop The consumer instrumentation of __next__ relies on __next__ getting called until StopIteration is raised. If this does not happen, the current transaction is not ended. Because the heartbeat tests were not calling __next__ a second time to end the current transaction, this caused failures in the consumer tests where the previous transaction was not getting closed. Because the newrelic application is per session rather than per function, the unclosed transaction inside the heartbeat tests were impacting the consumer tests which ran right after. Co-authored-by: LallehRafeei * Remove duplicated module has attr Co-authored-by: LallehRafeei Co-authored-by: UmaAnnamalai Co-authored-by: TimPansino Co-authored-by: LallehRafeei --- newrelic/api/message_transaction.py | 154 ++++++++++++------ newrelic/config.py | 6 + newrelic/hooks/messagebroker_kafkapython.py | 83 +++++++++- .../test_heartbeat.py | 3 +- .../test_kafka_consumer.py | 135 +++++++++++++++ 5 files changed, 326 insertions(+), 55 deletions(-) create mode 100644 tests/messagebroker_kafkapython/test_kafka_consumer.py diff --git a/newrelic/api/message_transaction.py b/newrelic/api/message_transaction.py index 1ce0025e9..291a3897e 100644 --- a/newrelic/api/message_transaction.py +++ b/newrelic/api/message_transaction.py @@ -19,33 +19,39 @@ from newrelic.api.background_task import BackgroundTask from newrelic.api.message_trace import MessageTrace from newrelic.api.transaction import current_transaction +from newrelic.common.async_proxy import TransactionContext, async_proxy from newrelic.common.object_wrapper import FunctionWrapper, wrap_object -from newrelic.common.async_proxy import async_proxy, TransactionContext class MessageTransaction(BackgroundTask): - - def __init__(self, library, destination_type, - destination_name, application, routing_key=None, - exchange_type=None, headers=None, queue_name=None, reply_to=None, - correlation_id=None, source=None): - - name, group = self.get_transaction_name(library, destination_type, - destination_name) - - super(MessageTransaction, self).__init__(application, name, - group=group, source=source) + def __init__( + self, + library, + destination_type, + destination_name, + application, + routing_key=None, + exchange_type=None, + headers=None, + queue_name=None, + reply_to=None, + correlation_id=None, + transport_type="AMQP", + source=None, + ): + + name, group = self.get_transaction_name(library, destination_type, 
destination_name) + + super(MessageTransaction, self).__init__(application, name, group=group, source=source) self.headers = headers if headers is not None and self.settings is not None: if self.settings.distributed_tracing.enabled: - self.accept_distributed_trace_headers( - headers, transport_type='AMQP') + self.accept_distributed_trace_headers(headers, transport_type=transport_type) elif self.settings.cross_application_tracer.enabled: self._process_incoming_cat_headers( - headers.pop(MessageTrace.cat_id_key, None), - headers.pop(MessageTrace.cat_transaction_key, None) + headers.pop(MessageTrace.cat_id_key, None), headers.pop(MessageTrace.cat_transaction_key, None) ) self.routing_key = routing_key @@ -56,37 +62,45 @@ def __init__(self, library, destination_type, @staticmethod def get_transaction_name(library, destination_type, destination_name): - group = 'Message/%s/%s' % (library, destination_type) - name = 'Named/%s' % destination_name + group = "Message/%s/%s" % (library, destination_type) + name = "Named/%s" % destination_name return name, group def _update_agent_attributes(self): ms_attrs = self._agent_attributes if self.exchange_type is not None: - ms_attrs['message.exchangeType'] = self.exchange_type + ms_attrs["message.exchangeType"] = self.exchange_type if self.queue_name is not None: - ms_attrs['message.queueName'] = self.queue_name + ms_attrs["message.queueName"] = self.queue_name if self.reply_to is not None: - ms_attrs['message.replyTo'] = self.reply_to + ms_attrs["message.replyTo"] = self.reply_to if self.correlation_id is not None: - ms_attrs['message.correlationId'] = self.correlation_id + ms_attrs["message.correlationId"] = self.correlation_id if self.headers: for k, v in self.headers.items(): - new_key = 'message.headers.%s' % k + new_key = "message.headers.%s" % k new_val = str(v) ms_attrs[new_key] = new_val if self.routing_key is not None: - ms_attrs['message.routingKey'] = self.routing_key + ms_attrs["message.routingKey"] = self.routing_key super(MessageTransaction, self)._update_agent_attributes() -def MessageTransactionWrapper(wrapped, library, destination_type, - destination_name, application=None, routing_key=None, - exchange_type=None, headers=None, queue_name=None, reply_to=None, - correlation_id=None): - +def MessageTransactionWrapper( + wrapped, + library, + destination_type, + destination_name, + application=None, + routing_key=None, + exchange_type=None, + headers=None, + queue_name=None, + reply_to=None, + correlation_id=None, +): def wrapper(wrapped, instance, args, kwargs): if callable(library): if instance is not None: @@ -173,9 +187,8 @@ def create_transaction(transaction): if not transaction.background_task: transaction.background_task = True transaction.set_transaction_name( - *MessageTransaction.get_transaction_name( - _library, _destination_type, - _destination_name)) + *MessageTransaction.get_transaction_name(_library, _destination_type, _destination_name) + ) return None @@ -233,22 +246,61 @@ def create_transaction(transaction): return FunctionWrapper(wrapped, wrapper) -def message_transaction(library, destination_type, destination_name, - application=None, routing_key=None, exchange_type=None, headers=None, - queue_name=None, reply_to=None, correlation_id=None): - return functools.partial(MessageTransactionWrapper, - library=library, destination_type=destination_type, - destination_name=destination_name, application=application, - routing_key=routing_key, exchange_type=exchange_type, - headers=headers, queue_name=queue_name, reply_to=reply_to, - 
correlation_id=correlation_id) - - -def wrap_message_transaction(module, object_path, library, destination_type, - destination_name, application=None, routing_key=None, - exchange_type=None, headers=None, queue_name=None, reply_to=None, - correlation_id=None): - wrap_object(module, object_path, MessageTransactionWrapper, - (library, destination_type, destination_name, application, - routing_key, exchange_type, headers, queue_name, reply_to, - correlation_id)) +def message_transaction( + library, + destination_type, + destination_name, + application=None, + routing_key=None, + exchange_type=None, + headers=None, + queue_name=None, + reply_to=None, + correlation_id=None, +): + return functools.partial( + MessageTransactionWrapper, + library=library, + destination_type=destination_type, + destination_name=destination_name, + application=application, + routing_key=routing_key, + exchange_type=exchange_type, + headers=headers, + queue_name=queue_name, + reply_to=reply_to, + correlation_id=correlation_id, + ) + + +def wrap_message_transaction( + module, + object_path, + library, + destination_type, + destination_name, + application=None, + routing_key=None, + exchange_type=None, + headers=None, + queue_name=None, + reply_to=None, + correlation_id=None, +): + wrap_object( + module, + object_path, + MessageTransactionWrapper, + ( + library, + destination_type, + destination_name, + application, + routing_key, + exchange_type, + headers, + queue_name, + reply_to, + correlation_id, + ), + ) diff --git a/newrelic/config.py b/newrelic/config.py index 01ac7bfae..fde8ee402 100644 --- a/newrelic/config.py +++ b/newrelic/config.py @@ -2307,6 +2307,12 @@ def _process_module_builtin_defaults(): "instrument_cherrypy__cptree", ) + _process_module_definition( + "kafka.consumer.group", + "newrelic.hooks.messagebroker_kafkapython", + "instrument_kafka_consumer_group", + ) + _process_module_definition( "kafka.producer.kafka", "newrelic.hooks.messagebroker_kafkapython", diff --git a/newrelic/hooks/messagebroker_kafkapython.py b/newrelic/hooks/messagebroker_kafkapython.py index 4182ed536..fbb450a56 100644 --- a/newrelic/hooks/messagebroker_kafkapython.py +++ b/newrelic/hooks/messagebroker_kafkapython.py @@ -11,9 +11,9 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. - import logging import math +import sys import threading from kafka.metrics.metrics_reporter import AbstractMetricsReporter @@ -21,6 +21,7 @@ import newrelic.core.agent from newrelic.api.application import application_instance from newrelic.api.message_trace import MessageTrace +from newrelic.api.message_transaction import MessageTransaction from newrelic.api.time_trace import notice_error from newrelic.api.transaction import current_transaction from newrelic.common.object_wrapper import wrap_function_wrapper @@ -109,6 +110,83 @@ def instrument_kafka_heartbeat(module): ) +def wrap_kafkaconsumer_next(wrapped, instance, args, kwargs): + if hasattr(instance, "_nr_transaction") and not instance._nr_transaction.stopped: + instance._nr_transaction.__exit__(*sys.exc_info()) + + try: + record = wrapped(*args, **kwargs) + except Exception as e: + # StopIteration is an expected error, indicating the end of an iterable, + # that should not be captured. 
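+        # (kafka-python's KafkaConsumer.__next__ raises StopIteration once
+        # consumer_timeout_ms elapses with no records; for-loops over the
+        # consumer rely on it to terminate, so recording it would create
+        # false error events.)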
+ if not isinstance(e, StopIteration): + notice_error() + raise + + if record: + # This iterator can be called either outside of a transaction, or + # within the context of an existing transaction. There are 3 + # possibilities we need to handle: (Note that this is similar to + # our Pika and Celery instrumentation) + # + # 1. In an inactive transaction + # + # If the end_of_transaction() or ignore_transaction() API + # calls have been invoked, this iterator may be called in the + # context of an inactive transaction. In this case, don't wrap + # the iterator in any way. Just run the original iterator. + # + # 2. In an active transaction + # + # Do nothing. + # + # 3. Outside of a transaction + # + # Since it's not running inside of an existing transaction, we + # want to create a new background transaction for it. + + library = "Kafka" + destination_type = "Topic" + destination_name = record.topic + received_bytes = len(str(record.value).encode("utf-8")) + message_count = 1 + + transaction = current_transaction(active_only=False) + if not transaction: + transaction = MessageTransaction( + application=application_instance(), + library=library, + destination_type=destination_type, + destination_name=destination_name, + headers=record.headers, + transport_type="Kafka", + routing_key=record.key, + source=wrapped, + ) + instance._nr_transaction = transaction + transaction.__enter__() + + # Obtain consumer client_id to send up as agent attribute + if hasattr(instance, "config") and "client_id" in instance.config: + client_id = instance.config["client_id"] + transaction._add_agent_attribute("kafka.consume.client_id", client_id) + + transaction._add_agent_attribute("kafka.consume.byteCount", received_bytes) + + transaction = current_transaction() + if transaction: # If there is an active transaction now. + # Add metrics whether or not a transaction was already active, or one was just started. + # Don't add metrics if there was an inactive transaction. + # Name the metrics using the same format as the transaction, but in case the active transaction + # was an existing one and not a message transaction, reproduce the naming logic here. 
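+            # For example (illustrative topic name), a record consumed from a
+            # topic named "orders" yields the custom metrics
+            #     Message/Kafka/Topic/Named/orders/Received/Bytes
+            #     Message/Kafka/Topic/Named/orders/Received/Messages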
+ group = "Message/%s/%s" % (library, destination_type) + name = "Named/%s" % destination_name + transaction.record_custom_metric("%s/%s/Received/Bytes" % (group, name), received_bytes) + transaction.record_custom_metric("%s/%s/Received/Messages" % (group, name), message_count) + + return record + + class KafkaMetricsDataSource(object): _instance = None @@ -240,11 +318,10 @@ def wrap_KafkaProducerConsumer_init(wrapped, instance, args, kwargs): def instrument_kafka_producer(module): if hasattr(module, "KafkaProducer"): wrap_function_wrapper(module, "KafkaProducer.__init__", wrap_KafkaProducerConsumer_init) - - if hasattr(module, "KafkaProducer"): wrap_function_wrapper(module, "KafkaProducer.send", wrap_KafkaProducer_send) def instrument_kafka_consumer_group(module): if hasattr(module, "KafkaConsumer"): wrap_function_wrapper(module, "KafkaConsumer.__init__", wrap_KafkaProducerConsumer_init) + wrap_function_wrapper(module.KafkaConsumer, "__next__", wrap_kafkaconsumer_next) diff --git a/tests/messagebroker_kafkapython/test_heartbeat.py b/tests/messagebroker_kafkapython/test_heartbeat.py index 1b6c79a2e..b2ae58f9c 100644 --- a/tests/messagebroker_kafkapython/test_heartbeat.py +++ b/tests/messagebroker_kafkapython/test_heartbeat.py @@ -34,7 +34,8 @@ def test_successful_heartbeat_metrics_recorded(topic, producer, consumer): producer.send(topic, value=1) producer.flush() - next(iter(consumer)) + for record in consumer: + pass time.sleep(1.5) diff --git a/tests/messagebroker_kafkapython/test_kafka_consumer.py b/tests/messagebroker_kafkapython/test_kafka_consumer.py new file mode 100644 index 000000000..22e9962f1 --- /dev/null +++ b/tests/messagebroker_kafkapython/test_kafka_consumer.py @@ -0,0 +1,135 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
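+
+# These tests exercise the KafkaConsumer.__next__ instrumentation added above:
+# the Received/Bytes and Received/Messages custom metrics, the kafka.consume
+# agent attributes, and error handling around the expected StopIteration.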
+
+import kafka.errors as Errors
+import pytest
+from testing_support.fixtures import (
+    validate_attributes,
+    validate_error_event_attributes_outside_transaction,
+    validate_transaction_errors,
+    validate_transaction_metrics,
+)
+
+from newrelic.api.background_task import background_task
+from newrelic.api.transaction import end_of_transaction
+from newrelic.packages import six
+
+
+def test_custom_metrics_are_recorded(get_consumer_records, topic):
+    @validate_transaction_metrics(
+        "Named/%s" % topic,
+        group="Message/Kafka/Topic",
+        custom_metrics=[
+            ("Message/Kafka/Topic/Named/%s/Received/Bytes" % topic, 1),
+            ("Message/Kafka/Topic/Named/%s/Received/Messages" % topic, 1),
+        ],
+        background_task=True,
+    )
+    def _test():
+        get_consumer_records()
+
+    _test()
+
+
+def test_custom_metrics_are_recorded_on_already_active_transaction(get_consumer_records, topic):
+    transaction_name = (
+        "test_kafka_consumer:test_custom_metrics_are_recorded_on_already_active_transaction.<locals>._test"
+        if six.PY3
+        else "test_kafka_consumer:_test"
+    )
+
+    @validate_transaction_metrics(
+        transaction_name,
+        custom_metrics=[
+            ("Message/Kafka/Topic/Named/%s/Received/Bytes" % topic, 1),
+            ("Message/Kafka/Topic/Named/%s/Received/Messages" % topic, 1),
+        ],
+        background_task=True,
+    )
+    @background_task()
+    def _test():
+        get_consumer_records()
+
+    _test()
+
+
+def test_custom_metrics_are_not_recorded_on_inactive_transaction(get_consumer_records, topic):
+    transaction_name = (
+        "test_kafka_consumer:test_custom_metrics_are_not_recorded_on_inactive_transaction.<locals>._test"
+        if six.PY3
+        else "test_kafka_consumer:_test"
+    )
+
+    @validate_transaction_metrics(
+        transaction_name,
+        custom_metrics=[
+            ("Message/Kafka/Topic/Named/%s/Received/Bytes" % topic, None),
+            ("Message/Kafka/Topic/Named/%s/Received/Messages" % topic, None),
+        ],
+        background_task=True,
+    )
+    @background_task()
+    def _test():
+        end_of_transaction()
+        get_consumer_records()
+
+    _test()
+
+
+def test_agent_attributes_are_recorded(get_consumer_records):
+    @validate_attributes("agent", ["kafka.consume.client_id", "kafka.consume.byteCount"])
+    def _test():
+        get_consumer_records()
+
+    _test()
+
+
+def test_agent_records_error_if_raised(get_consumer_records, consumer_next_raises):
+    @validate_error_event_attributes_outside_transaction(
+        exact_attrs={"intrinsic": {"error.class": "kafka.errors:KafkaError"}}
+    )
+    def _test():
+        with pytest.raises(Errors.KafkaError):
+            get_consumer_records()
+
+    _test()
+
+
+def test_agent_does_not_record_error_if_not_raised(get_consumer_records):
+    # It's important to check that we do not notice the StopIteration error.
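+    # (The get_consumer_records fixture iterates the consumer with a for-loop,
+    # which only terminates via StopIteration raised from the wrapped __next__
+    # once consumer_timeout_ms elapses; an error recorded here would therefore
+    # point at the instrumentation itself.)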
+ @validate_transaction_errors([]) + def _test(): + get_consumer_records() + + _test() + + +@pytest.fixture() +def get_consumer_records(topic, producer, consumer): + def _test(): + producer.send(topic, value={"foo": "bar"}) + producer.flush() + for record in consumer: + assert record.value == {"foo": "bar"} + + return _test + + +@pytest.fixture() +def consumer_next_raises(consumer): + def _poll(*args, **kwargs): + raise Errors.KafkaError() + + consumer.poll = _poll + consumer From 6d71a66e7eaf17d07906f65a9fc471d118e8223c Mon Sep 17 00:00:00 2001 From: Lalleh Rafeei <84813886+lrafeei@users.noreply.github.com> Date: Mon, 19 Sep 2022 16:13:32 -0700 Subject: [PATCH 30/49] Add more metrics, consumer, and producer tests (#619) * Add more metrics, consumer, and producer tests Co-authored-by: Timothy Pansino Co-authored-by: Hannah Stepanek * Remove breakpoint * Add DT accepted validator * Fix lint error * Fix issue with holdover transaction Co-authored-by: Lalleh Rafeei Co-authored-by: Hannah Stepanek * [Mega-Linter] Apply linters fixes * Fix broken producer error test * [Mega-Linter] Apply linters fixes * Bump Tests * Fix flakey test * Fix flakey test * fixup * Fix metrics tests for Python 2.7 Co-authored-by: Timothy Pansino Co-authored-by: Hannah Stepanek Co-authored-by: Uma Annamalai Co-authored-by: Timothy Pansino Co-authored-by: Hannah Stepanek Co-authored-by: Tim Pansino Co-authored-by: Hannah Stepanek Co-authored-by: Lalleh Rafeei Co-authored-by: Uma Annamalai --- .github/workflows/tests.yml | 4 +- newrelic/hooks/messagebroker_kafkapython.py | 2 +- tests/messagebroker_kafkapython/conftest.py | 52 +++++++- ...est_kafka_consumer.py => test_consumer.py} | 78 ++++++++++-- .../messagebroker_kafkapython/test_metrics.py | 117 +++--------------- ...test_kafka_produce.py => test_producer.py} | 55 +++++--- .../validate_distributed_trace_accepted.py | 39 ++++++ 7 files changed, 208 insertions(+), 139 deletions(-) rename tests/messagebroker_kafkapython/{test_kafka_consumer.py => test_consumer.py} (56%) rename tests/messagebroker_kafkapython/{test_kafka_produce.py => test_producer.py} (53%) create mode 100644 tests/testing_support/validators/validate_distributed_trace_accepted.py diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 57993a515..55cd4b3d3 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -447,7 +447,7 @@ jobs: services: zookeeper: - image: bitnami/zookeeper:latest + image: bitnami/zookeeper:3.7 env: ALLOW_ANONYMOUS_LOGIN: yes @@ -455,7 +455,7 @@ jobs: - 2181:2181 kafka: - image: bitnami/kafka:latest + image: bitnami/kafka:3.2 ports: - 8080:8080 - 8081:8081 diff --git a/newrelic/hooks/messagebroker_kafkapython.py b/newrelic/hooks/messagebroker_kafkapython.py index fbb450a56..c83c39ce0 100644 --- a/newrelic/hooks/messagebroker_kafkapython.py +++ b/newrelic/hooks/messagebroker_kafkapython.py @@ -158,7 +158,7 @@ def wrap_kafkaconsumer_next(wrapped, instance, args, kwargs): library=library, destination_type=destination_type, destination_name=destination_name, - headers=record.headers, + headers=dict(record.headers), transport_type="Kafka", routing_key=record.key, source=wrapped, diff --git a/tests/messagebroker_kafkapython/conftest.py b/tests/messagebroker_kafkapython/conftest.py index 39ea88dc3..7f8ebbae0 100644 --- a/tests/messagebroker_kafkapython/conftest.py +++ b/tests/messagebroker_kafkapython/conftest.py @@ -55,7 +55,7 @@ @pytest.fixture(scope="function") -def producer(data_source): +def producer(topic, data_source): producer = 
kafka.KafkaProducer(
        bootstrap_servers=BROKER, api_version=(2, 0, 2), value_serializer=lambda v: json.dumps(v).encode("utf-8")
    )
@@ -64,23 +64,39 @@


 @pytest.fixture(scope="function")
-def consumer(topic, data_source):
+def consumer(topic, data_source, producer):
     consumer = kafka.KafkaConsumer(
         topic,
         bootstrap_servers=BROKER,
         value_deserializer=lambda v: json.loads(v.decode("utf-8")),
         auto_offset_reset="earliest",
-        consumer_timeout_ms=5000,
+        consumer_timeout_ms=500,
         heartbeat_interval_ms=1000,
         group_id="test",
    )
+    # The first time the kafka consumer is created and polled, it raises a StopIteration
+    # exception. To bypass this, loop over the consumer before using it.
+    # NOTE: This seems to only happen in Python 2.7.
+    for record in consumer:
+        pass
     yield consumer
     consumer.close()


 @pytest.fixture(scope="function")
 def topic():
-    yield "test-topic-%s" % str(uuid.uuid4())
+    # from kafka.admin.client import KafkaAdminClient
+    # from kafka.admin.new_topic import NewTopic
+
+    topic = "test-topic-%s" % str(uuid.uuid4())
+
+    # admin = KafkaAdminClient(bootstrap_servers=BROKER)
+    # new_topics = [NewTopic(topic, num_partitions=1, replication_factor=1)]
+    # topics = admin.create_topics(new_topics)
+
+    yield topic
+
+    # admin.delete_topics([topic])


 @pytest.fixture(scope="session")
@@ -96,7 +112,7 @@ def data_source():

 @transient_function_wrapper(kafka.producer.kafka, "KafkaProducer.send.__wrapped__")
 # Place transient wrapper underneath instrumentation
-def cache_kafka_headers(wrapped, instance, args, kwargs):
+def cache_kafka_producer_headers(wrapped, instance, args, kwargs):
     transaction = current_transaction()

     if transaction is None:
@@ -107,3 +123,29 @@ def cache_kafka_headers(wrapped, instance, args, kwargs):
     headers = dict(headers)
     transaction._test_request_headers = headers
     return ret
+
+
+@transient_function_wrapper(kafka.consumer.group, "KafkaConsumer.__next__")
+# Place transient wrapper underneath instrumentation
+def cache_kafka_consumer_headers(wrapped, instance, args, kwargs):
+    record = wrapped(*args, **kwargs)
+    transaction = current_transaction()
+
+    if transaction is None:
+        return record
+
+    headers = record.headers
+    headers = dict(headers)
+    transaction._test_request_headers = headers
+    return record
+
+
+@pytest.fixture(autouse=True)
+def assert_no_active_transaction():
+    # Run before test
+    assert not current_transaction(active_only=False), "Transaction exists before test run."
+
+    yield  # Run test
+
+    # Run after test
+    assert not current_transaction(active_only=False), "Transaction was not properly exited."
diff --git a/tests/messagebroker_kafkapython/test_kafka_consumer.py b/tests/messagebroker_kafkapython/test_consumer.py
similarity index 56%
rename from tests/messagebroker_kafkapython/test_kafka_consumer.py
rename to tests/messagebroker_kafkapython/test_consumer.py
index 22e9962f1..3b985838b 100644
--- a/tests/messagebroker_kafkapython/test_kafka_consumer.py
+++ b/tests/messagebroker_kafkapython/test_consumer.py
@@ -12,21 +12,26 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
+import kafka
 import kafka.errors as Errors
 import pytest
+from conftest import BROKER, cache_kafka_consumer_headers
 from testing_support.fixtures import (
     validate_attributes,
     validate_error_event_attributes_outside_transaction,
     validate_transaction_errors,
     validate_transaction_metrics,
 )
+from testing_support.validators.validate_distributed_trace_accepted import (
+    validate_distributed_trace_accepted,
+)

 from newrelic.api.background_task import background_task
 from newrelic.api.transaction import end_of_transaction
 from newrelic.packages import six


-def test_custom_metrics_are_recorded(get_consumer_records, topic):
+def test_custom_metrics(get_consumer_records, topic):
     @validate_transaction_metrics(
         "Named/%s" % topic,
         group="Message/Kafka/Topic",
@@ -42,11 +47,9 @@ def _test():
     _test()


-def test_custom_metrics_are_recorded_on_already_active_transaction(get_consumer_records, topic):
+def test_custom_metrics_on_existing_transaction(get_consumer_records, topic):
     transaction_name = (
-        "test_kafka_consumer:test_custom_metrics_are_recorded_on_already_active_transaction.<locals>._test"
-        if six.PY3
-        else "test_kafka_consumer:_test"
+        "test_consumer:test_custom_metrics_on_existing_transaction.<locals>._test" if six.PY3 else "test_consumer:_test"
     )

     @validate_transaction_metrics(
@@ -64,11 +67,9 @@ def _test():
     _test()


-def test_custom_metrics_are_not_recorded_on_inactive_transaction(get_consumer_records, topic):
+def test_custom_metrics_inactive_transaction(get_consumer_records, topic):
     transaction_name = (
-        "test_kafka_consumer:test_custom_metrics_are_not_recorded_on_inactive_transaction.<locals>._test"
-        if six.PY3
-        else "test_kafka_consumer:_test"
+        "test_consumer:test_custom_metrics_inactive_transaction.<locals>._test" if six.PY3 else "test_consumer:_test"
     )

     @validate_transaction_metrics(
@@ -87,7 +88,7 @@ def _test():
     _test()


-def test_agent_attributes_are_recorded(get_consumer_records):
+def test_agent_attributes(get_consumer_records):
     @validate_attributes("agent", ["kafka.consume.client_id", "kafka.consume.byteCount"])
     def _test():
         get_consumer_records()
@@ -95,7 +96,7 @@ def _test():
     _test()


-def test_agent_records_error_if_raised(get_consumer_records, consumer_next_raises):
+def test_consumer_errors(get_consumer_records, consumer_next_raises):
     @validate_error_event_attributes_outside_transaction(
         exact_attrs={"intrinsic": {"error.class": "kafka.errors:KafkaError"}}
     )
@@ -106,7 +107,23 @@ def _test():
     _test()


-def test_agent_does_not_record_error_if_not_raised(get_consumer_records):
+def test_consumer_deserialization_errors(topic, consumer):
+    producer = kafka.KafkaProducer(
+        bootstrap_servers=BROKER, api_version=(2, 0, 2), value_serializer=lambda v: str(v).encode("utf-8")
+    )  # Producer that allows us to upload invalid JSON.
+
+    @validate_error_event_attributes_outside_transaction(exact_attrs={"intrinsic": {"error.class": "ValueError"}})
+    def _test():
+        with pytest.raises(ValueError):
+            producer.send(topic, value="%")  # Invalid JSON
+            producer.flush()
+            for _ in consumer:
+                pass
+
+    _test()
+
+
+def test_consumer_handled_errors_not_recorded(get_consumer_records):
     # It's important to check that we do not notice the StopIteration error.
     @validate_transaction_errors([])
     def _test():
@@ -115,6 +132,41 @@ def _test():
     _test()


+def test_distributed_tracing_headers(topic, producer, consumer):
+    # Send the messages inside a transaction, making sure to close it.
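+    # (Producing inside a transaction is what makes the instrumentation create
+    # and inject the trace context headers; closing it before consuming lets
+    # the consumer start its own message transaction, which must then accept
+    # those headers.)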
+ @background_task() + def _produce(): + producer.send(topic, value={"foo": "bar"}) + producer.flush() + + consumer_iter = iter(consumer) + + @validate_transaction_metrics( + "Named/%s" % topic, + group="Message/Kafka/Topic", + rollup_metrics=[ + ("Supportability/DistributedTrace/AcceptPayload/Success", None), + ("Supportability/TraceContext/Accept/Success", 1), + ], + background_task=True, + ) + def _consume(): + @validate_distributed_trace_accepted(transport_type="Kafka") + @cache_kafka_consumer_headers + def _test(): + # Start the transaction but don't exit it. + next(consumer_iter) + + _test() + + # Exit the transaction. + with pytest.raises(StopIteration): + next(consumer_iter) + + _produce() + _consume() + + @pytest.fixture() def get_consumer_records(topic, producer, consumer): def _test(): @@ -132,4 +184,4 @@ def _poll(*args, **kwargs): raise Errors.KafkaError() consumer.poll = _poll - consumer + return consumer diff --git a/tests/messagebroker_kafkapython/test_metrics.py b/tests/messagebroker_kafkapython/test_metrics.py index 94c7c224a..dc715815e 100644 --- a/tests/messagebroker_kafkapython/test_metrics.py +++ b/tests/messagebroker_kafkapython/test_metrics.py @@ -12,112 +12,27 @@ # See the License for the specific language governing permissions and # limitations under the License. +from newrelic.packages import six + def test_data_source_metrics(data_source, topic, producer, consumer): + _data_source_metrics = { + "MessageBroker/Kafka/Internal/kafka-metrics-count/count": "present", + "MessageBroker/Kafka/Internal/producer-metrics/request-rate": "present", + "MessageBroker/Kafka/Internal/producer-topic-metrics.%s/record-send-rate" % topic: "present", + "MessageBroker/Kafka/Internal/consumer-metrics/request-rate": "present", + } + producer.send(topic, value=1) producer.flush() - next(iter(consumer)) + for _ in consumer: + pass metrics = dict(data_source()) - metric_names = list(metrics.keys()) assert metrics - -# Example metrics - -# MessageBroker/Kafka/Internal/kafka-metrics-count/count -# MessageBroker/Kafka/Internal/producer-metrics/connection-close-rate -# MessageBroker/Kafka/Internal/producer-metrics/connection-creation-rate -# MessageBroker/Kafka/Internal/producer-metrics/select-rate -# MessageBroker/Kafka/Internal/producer-metrics/io-wait-time-ns-avg -# MessageBroker/Kafka/Internal/producer-metrics/io-wait-ratio -# MessageBroker/Kafka/Internal/producer-metrics/io-time-ns-avg -# MessageBroker/Kafka/Internal/producer-metrics/io-ratio -# MessageBroker/Kafka/Internal/producer-metrics/connection-count -# MessageBroker/Kafka/Internal/producer-metrics/batch-size-avg -# MessageBroker/Kafka/Internal/producer-metrics/batch-size-max -# MessageBroker/Kafka/Internal/producer-metrics/compression-rate-avg -# MessageBroker/Kafka/Internal/producer-metrics/record-queue-time-avg -# MessageBroker/Kafka/Internal/producer-metrics/record-queue-time-max -# MessageBroker/Kafka/Internal/producer-metrics/record-send-rate -# MessageBroker/Kafka/Internal/producer-metrics/records-per-request-avg -# MessageBroker/Kafka/Internal/producer-metrics/byte-rate -# MessageBroker/Kafka/Internal/producer-metrics/record-size-max -# MessageBroker/Kafka/Internal/producer-metrics/record-size-avg -# MessageBroker/Kafka/Internal/producer-metrics/metadata-age -# MessageBroker/Kafka/Internal/producer-metrics/network-io-rate -# MessageBroker/Kafka/Internal/producer-metrics/outgoing-byte-rate -# MessageBroker/Kafka/Internal/producer-metrics/request-rate -# MessageBroker/Kafka/Internal/producer-metrics/request-size-avg -# 
MessageBroker/Kafka/Internal/producer-metrics/request-size-max -# MessageBroker/Kafka/Internal/producer-metrics/incoming-byte-rate -# MessageBroker/Kafka/Internal/producer-metrics/response-rate -# MessageBroker/Kafka/Internal/producer-metrics/request-latency-avg -# MessageBroker/Kafka/Internal/producer-metrics/request-latency-max -# MessageBroker/Kafka/Internal/producer-node-metrics.node-bootstrap-0/outgoing-byte-rate -# MessageBroker/Kafka/Internal/producer-node-metrics.node-bootstrap-0/request-rate -# MessageBroker/Kafka/Internal/producer-node-metrics.node-bootstrap-0/request-size-avg -# MessageBroker/Kafka/Internal/producer-node-metrics.node-bootstrap-0/request-size-max -# MessageBroker/Kafka/Internal/producer-node-metrics.node-bootstrap-0/incoming-byte-rate -# MessageBroker/Kafka/Internal/producer-node-metrics.node-bootstrap-0/response-rate -# MessageBroker/Kafka/Internal/producer-node-metrics.node-bootstrap-0/request-latency-avg -# MessageBroker/Kafka/Internal/producer-node-metrics.node-bootstrap-0/request-latency-max -# MessageBroker/Kafka/Internal/producer-node-metrics.node-1001/outgoing-byte-rate -# MessageBroker/Kafka/Internal/producer-node-metrics.node-1001/request-rate -# MessageBroker/Kafka/Internal/producer-node-metrics.node-1001/request-size-avg -# MessageBroker/Kafka/Internal/producer-node-metrics.node-1001/request-size-max -# MessageBroker/Kafka/Internal/producer-node-metrics.node-1001/incoming-byte-rate -# MessageBroker/Kafka/Internal/producer-node-metrics.node-1001/response-rate -# MessageBroker/Kafka/Internal/producer-node-metrics.node-1001/request-latency-avg -# MessageBroker/Kafka/Internal/producer-node-metrics.node-1001/request-latency-max -# MessageBroker/Kafka/Internal/producer-topic-metrics.test-topic-c962647a-f6cf-4a24-a90b-40ab2364dc55/record-send-rate -# MessageBroker/Kafka/Internal/producer-topic-metrics.test-topic-c962647a-f6cf-4a24-a90b-40ab2364dc55/byte-rate -# MessageBroker/Kafka/Internal/producer-topic-metrics.test-topic-c962647a-f6cf-4a24-a90b-40ab2364dc55/compression-rate -# MessageBroker/Kafka/Internal/consumer-metrics/connection-close-rate -# MessageBroker/Kafka/Internal/consumer-metrics/connection-creation-rate -# MessageBroker/Kafka/Internal/consumer-metrics/select-rate -# MessageBroker/Kafka/Internal/consumer-metrics/io-wait-time-ns-avg -# MessageBroker/Kafka/Internal/consumer-metrics/io-wait-ratio -# MessageBroker/Kafka/Internal/consumer-metrics/io-time-ns-avg -# MessageBroker/Kafka/Internal/consumer-metrics/io-ratio -# MessageBroker/Kafka/Internal/consumer-metrics/connection-count -# MessageBroker/Kafka/Internal/consumer-metrics/network-io-rate -# MessageBroker/Kafka/Internal/consumer-metrics/outgoing-byte-rate -# MessageBroker/Kafka/Internal/consumer-metrics/request-rate -# MessageBroker/Kafka/Internal/consumer-metrics/request-size-avg -# MessageBroker/Kafka/Internal/consumer-metrics/request-size-max -# MessageBroker/Kafka/Internal/consumer-metrics/incoming-byte-rate -# MessageBroker/Kafka/Internal/consumer-metrics/response-rate -# MessageBroker/Kafka/Internal/consumer-metrics/request-latency-avg -# MessageBroker/Kafka/Internal/consumer-metrics/request-latency-max -# MessageBroker/Kafka/Internal/consumer-node-metrics.node-bootstrap-0/outgoing-byte-rate -# MessageBroker/Kafka/Internal/consumer-node-metrics.node-bootstrap-0/request-rate -# MessageBroker/Kafka/Internal/consumer-node-metrics.node-bootstrap-0/request-size-avg -# MessageBroker/Kafka/Internal/consumer-node-metrics.node-bootstrap-0/request-size-max -# 
MessageBroker/Kafka/Internal/consumer-node-metrics.node-bootstrap-0/incoming-byte-rate -# MessageBroker/Kafka/Internal/consumer-node-metrics.node-bootstrap-0/response-rate -# MessageBroker/Kafka/Internal/consumer-node-metrics.node-bootstrap-0/request-latency-avg -# MessageBroker/Kafka/Internal/consumer-node-metrics.node-bootstrap-0/request-latency-max -# MessageBroker/Kafka/Internal/consumer-fetch-manager-metrics/fetch-size-avg -# MessageBroker/Kafka/Internal/consumer-fetch-manager-metrics/fetch-size-max -# MessageBroker/Kafka/Internal/consumer-fetch-manager-metrics/bytes-consumed-rate -# MessageBroker/Kafka/Internal/consumer-fetch-manager-metrics/records-per-request-avg -# MessageBroker/Kafka/Internal/consumer-fetch-manager-metrics/records-consumed-rate -# MessageBroker/Kafka/Internal/consumer-fetch-manager-metrics/fetch-latency-avg -# MessageBroker/Kafka/Internal/consumer-fetch-manager-metrics/fetch-latency-max -# MessageBroker/Kafka/Internal/consumer-fetch-manager-metrics/fetch-rate -# MessageBroker/Kafka/Internal/consumer-fetch-manager-metrics/records-lag-max -# MessageBroker/Kafka/Internal/consumer-coordinator-metrics/assigned-partitions -# MessageBroker/Kafka/Internal/consumer-node-metrics.node-1001/outgoing-byte-rate -# MessageBroker/Kafka/Internal/consumer-node-metrics.node-1001/request-rate -# MessageBroker/Kafka/Internal/consumer-node-metrics.node-1001/request-size-avg -# MessageBroker/Kafka/Internal/consumer-node-metrics.node-1001/request-size-max -# MessageBroker/Kafka/Internal/consumer-node-metrics.node-1001/incoming-byte-rate -# MessageBroker/Kafka/Internal/consumer-node-metrics.node-1001/response-rate -# MessageBroker/Kafka/Internal/consumer-node-metrics.node-1001/request-latency-avg -# MessageBroker/Kafka/Internal/consumer-node-metrics.node-1001/request-latency-max -# MessageBroker/Kafka/Internal/consumer-fetch-manager-metrics/test-topic-c962647a-f6cf-4a24-a90b-40ab2364dc55/fetch-size-avg -# MessageBroker/Kafka/Internal/consumer-fetch-manager-metrics/test-topic-c962647a-f6cf-4a24-a90b-40ab2364dc55/fetch-size-max -# MessageBroker/Kafka/Internal/consumer-fetch-manager-metrics/test-topic-c962647a-f6cf-4a24-a90b-40ab2364dc55/bytes-consumed-rate -# MessageBroker/Kafka/Internal/consumer-fetch-manager-metrics/test-topic-c962647a-f6cf-4a24-a90b-40ab2364dc55/records-per-request-avg -# MessageBroker/Kafka/Internal/consumer-fetch-manager-metrics/test-topic-c962647a-f6cf-4a24-a90b-40ab2364dc55/records-consumed-rate + for metric_name, count in six.iteritems(_data_source_metrics): + if count == "present": + assert metric_name in metrics + else: + assert metrics[metric_name]["count"] == count, "%s:%d" % (metric_name, count) diff --git a/tests/messagebroker_kafkapython/test_kafka_produce.py b/tests/messagebroker_kafkapython/test_producer.py similarity index 53% rename from tests/messagebroker_kafkapython/test_kafka_produce.py rename to tests/messagebroker_kafkapython/test_producer.py index 43b1c2a0e..43ff991b3 100644 --- a/tests/messagebroker_kafkapython/test_kafka_produce.py +++ b/tests/messagebroker_kafkapython/test_producer.py @@ -13,9 +13,9 @@ # limitations under the License. 
 import pytest
-from conftest import cache_kafka_headers
+from conftest import cache_kafka_producer_headers
 from testing_support.fixtures import (
-    validate_non_transaction_error_event,
+    validate_transaction_errors,
     validate_transaction_metrics,
 )
 from testing_support.validators.validate_messagebroker_headers import (
@@ -23,13 +23,14 @@
 )

 from newrelic.api.background_task import background_task
+from newrelic.common.object_names import callable_name
 from newrelic.packages import six


-def test_producer_records_trace(topic, send_producer_messages):
+def test_trace_metrics(topic, send_producer_messages):
     scoped_metrics = [("MessageBroker/Kafka/Topic/Produce/Named/%s" % topic, 3)]
     unscoped_metrics = scoped_metrics
-    txn_name = "test_kafka_produce:test_producer_records_trace.<locals>.test" if six.PY3 else "test_kafka_produce:test"
+    txn_name = "test_producer:test_trace_metrics.<locals>.test" if six.PY3 else "test_producer:test"

     @validate_transaction_metrics(
         txn_name,
@@ -38,29 +39,49 @@
         background_task=True,
     )
     @background_task()
-    @cache_kafka_headers
-    @validate_messagebroker_headers
     def test():
         send_producer_messages()

     test()


-def test_producer_records_error_if_raised(topic, producer):
-    _intrinsic_attributes = {
-        "error.class": "AssertionError",
-        "error.message": "Need at least one: key or value",
-        "error.expected": False,
-    }
+def test_distributed_tracing_headers(topic, send_producer_messages):
+    txn_name = "test_producer:test_distributed_tracing_headers.<locals>.test" if six.PY3 else "test_producer:test"

-    @validate_non_transaction_error_event(_intrinsic_attributes)
+    @validate_transaction_metrics(
+        txn_name,
+        rollup_metrics=[
+            ("Supportability/TraceContext/Create/Success", 3),
+            ("Supportability/DistributedTrace/CreatePayload/Success", 3),
+        ],
+        background_task=True,
+    )
     @background_task()
+    @cache_kafka_producer_headers
+    @validate_messagebroker_headers
     def test():
-        producer.send(topic, None)
-        producer.flush()
+        send_producer_messages()

-    with pytest.raises(AssertionError):
-        test()
+    test()
+
+
+@pytest.mark.parametrize(
+    "input,error,message",
+    (
+        (None, AssertionError, "Need at least one: key or value"),
+        (object(), TypeError, r".* is not JSON serializable"),
+    ),
+    ids=("None Value", "Serialization Error"),
+)
+def test_producer_errors(topic, producer, input, error, message):
+    @validate_transaction_errors([callable_name(error)])
+    @background_task()
+    def test():
+        with pytest.raises(error, match=message):
+            producer.send(topic, input)
+            producer.flush()
+
+    test()


 @pytest.fixture
diff --git a/tests/testing_support/validators/validate_distributed_trace_accepted.py b/tests/testing_support/validators/validate_distributed_trace_accepted.py
new file mode 100644
index 000000000..0898f5f72
--- /dev/null
+++ b/tests/testing_support/validators/validate_distributed_trace_accepted.py
@@ -0,0 +1,39 @@
+# Copyright 2010 New Relic, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
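+
+# Usage sketch (mirroring test_consumer.py): decorate the function that pulls a
+# record inside the consumer's message transaction, then the wrapper asserts
+# the inbound trace context was accepted:
+#
+#     @validate_distributed_trace_accepted(transport_type="Kafka")
+#     def _test():
+#         next(consumer_iter)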
+ + +from newrelic.api.transaction import current_transaction +from newrelic.common.object_wrapper import function_wrapper + + +def validate_distributed_trace_accepted(header="newrelic", transport_type="HTTP"): + @function_wrapper + def _validate_distributed_trace_accepted(wrapped, instance, args, kwargs): + result = wrapped(*args, **kwargs) + + txn = current_transaction() + + assert txn + assert txn._distributed_trace_state + assert txn.parent_type == "App" + assert txn._trace_id.startswith(txn.parent_tx) + assert txn.parent_span is not None + assert txn.parent_account == txn.settings.account_id + assert txn.parent_transport_type == transport_type + assert txn._priority is not None + assert txn._sampled is not None + + return result + + return _validate_distributed_trace_accepted From a32cc2956b922523de838cd85309ef2e4487981f Mon Sep 17 00:00:00 2001 From: Uma Annamalai Date: Thu, 22 Sep 2022 12:56:25 -0700 Subject: [PATCH 31/49] Remove kafka metric instrumentation. (#626) --- newrelic/hooks/messagebroker_kafkapython.py | 140 ------------------ tests/messagebroker_kafkapython/conftest.py | 16 +- .../messagebroker_kafkapython/test_metrics.py | 38 ----- 3 files changed, 2 insertions(+), 192 deletions(-) delete mode 100644 tests/messagebroker_kafkapython/test_metrics.py diff --git a/newrelic/hooks/messagebroker_kafkapython.py b/newrelic/hooks/messagebroker_kafkapython.py index c83c39ce0..c4ec922e4 100644 --- a/newrelic/hooks/messagebroker_kafkapython.py +++ b/newrelic/hooks/messagebroker_kafkapython.py @@ -11,24 +11,14 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -import logging -import math import sys -import threading -from kafka.metrics.metrics_reporter import AbstractMetricsReporter - -import newrelic.core.agent from newrelic.api.application import application_instance from newrelic.api.message_trace import MessageTrace from newrelic.api.message_transaction import MessageTransaction from newrelic.api.time_trace import notice_error from newrelic.api.transaction import current_transaction from newrelic.common.object_wrapper import wrap_function_wrapper -from newrelic.packages import six -from newrelic.samplers.decorators import data_source_factory - -_logger = logging.getLogger(__name__) HEARTBEAT_POLL = "MessageBroker/Kafka/Heartbeat/Poll" HEARTBEAT_SENT = "MessageBroker/Kafka/Heartbeat/Sent" @@ -187,141 +177,11 @@ def wrap_kafkaconsumer_next(wrapped, instance, args, kwargs): return record -class KafkaMetricsDataSource(object): - _instance = None - - def __init__(self): - self.reporters = [] - - @classmethod - @data_source_factory(name="Kafka Metrics Reporter") - def factory(cls, settings=None, environ=None): - return cls.singleton() - - @classmethod - def singleton(cls, register=True): - # If already initialized, exit early - if cls._instance: - return cls._instance - - # Init and register instance on class - instance = cls() - cls._instance = instance - - # register_data_source takes a callable so let it rerun singleton to retrieve the instance - if register: - try: - _logger.debug("Registering kafka metrics data source.") - newrelic.core.agent.agent_instance().register_data_source(cls.factory) - except Exception: - _logger.exception( - "Attempt to register kafka metrics data source has failed. Data source will be skipped." 
- ) - - return instance - - def register(self, reporter): - self.reporters.append(reporter) - - def unregister(self, reporter): - if reporter in self.reporters: - self.reporters.remove(reporter) - - def start(self): - return - - def stop(self): - # Clear references to reporters to prevent them from participating in a reference cycle. - self.reporters = [] - - def __call__(self): - for reporter in self.reporters: - for name, metric in six.iteritems(reporter.snapshot()): - yield name, metric - - -class NewRelicMetricsReporter(AbstractMetricsReporter): - def __init__(self, *args, **kwargs): - super(NewRelicMetricsReporter, self).__init__(*args, **kwargs) - - # Register with data source for harvesting - self.data_source = KafkaMetricsDataSource.singleton() - self.data_source.register(self) - - self._metrics = {} - self._lock = threading.Lock() - - def close(self, *args, **kwargs): - self.data_source.unregister(self) - with self._lock: - self._metrics = {} - - def init(self, metrics): - for metric in metrics: - self.metric_change(metric) - - @staticmethod - def invalid_metric_value(metric): - name, value = metric - return not any((math.isinf(value), math.isnan(value), value == 0)) - - def snapshot(self): - with self._lock: - # metric.value can only be called once, so care must be taken when filtering - metrics = ((name, metric.value()) for name, metric in six.iteritems(self._metrics)) - return { - "MessageBroker/Kafka/Internal/%s" % name: {"count": value} - for name, value in filter(self.invalid_metric_value, metrics) - } - - def get_metric_name(self, metric): - metric_name = metric.metric_name # Get MetricName object to work with - - name = metric_name.name - group = metric_name.group - - if "topic" in metric_name.tags: - topic = metric_name.tags["topic"] - return "/".join((group, topic, name)) - else: - return "/".join((group, name)) - - def metric_change(self, metric): - name = self.get_metric_name(metric) - with self._lock: - self._metrics[name] = metric - - def metric_removal(self, metric): - name = self.get_metric_name(metric) - with self._lock: - if name in self._metrics: - self._metrics.pop(name) - - def configure(self, configs): - return - - -def wrap_KafkaProducerConsumer_init(wrapped, instance, args, kwargs): - try: - if "metric_reporters" in kwargs: - metric_reporters = list(kwargs.get("metric_reporters", [])) - metric_reporters.append(NewRelicMetricsReporter) - kwargs["metric_reporters"] = [metric_reporters] - else: - kwargs["metric_reporters"] = [NewRelicMetricsReporter] - except Exception: - pass - - return wrapped(*args, **kwargs) - - def instrument_kafka_producer(module): if hasattr(module, "KafkaProducer"): - wrap_function_wrapper(module, "KafkaProducer.__init__", wrap_KafkaProducerConsumer_init) wrap_function_wrapper(module, "KafkaProducer.send", wrap_KafkaProducer_send) def instrument_kafka_consumer_group(module): if hasattr(module, "KafkaConsumer"): - wrap_function_wrapper(module, "KafkaConsumer.__init__", wrap_KafkaProducerConsumer_init) wrap_function_wrapper(module.KafkaConsumer, "__next__", wrap_kafkaconsumer_next) diff --git a/tests/messagebroker_kafkapython/conftest.py b/tests/messagebroker_kafkapython/conftest.py index 7f8ebbae0..ba0fc2271 100644 --- a/tests/messagebroker_kafkapython/conftest.py +++ b/tests/messagebroker_kafkapython/conftest.py @@ -26,7 +26,6 @@ from newrelic.api.transaction import current_transaction from newrelic.common.object_wrapper import transient_function_wrapper -from newrelic.hooks.messagebroker_kafkapython import KafkaMetricsDataSource 
DB_SETTINGS = kafka_settings()[0] @@ -55,7 +54,7 @@ @pytest.fixture(scope="function") -def producer(topic, data_source): +def producer(topic): producer = kafka.KafkaProducer( bootstrap_servers=BROKER, api_version=(2, 0, 2), value_serializer=lambda v: json.dumps(v).encode("utf-8") ) @@ -64,7 +63,7 @@ def producer(topic, data_source): @pytest.fixture(scope="function") -def consumer(topic, data_source, producer): +def consumer(topic, producer): consumer = kafka.KafkaConsumer( topic, bootstrap_servers=BROKER, @@ -99,17 +98,6 @@ def topic(): # admin.delete_topics([topic]) -@pytest.fixture(scope="session") -def data_source(): - """ - Must be required by consumer and producer fixtures, or the first one of them to be - instantiated will create and register the singleton. We rely on the singleton to - not be registered to properly test the output of it without interference from the - harvest thread. - """ - return KafkaMetricsDataSource.singleton(register=False) - - @transient_function_wrapper(kafka.producer.kafka, "KafkaProducer.send.__wrapped__") # Place transient wrapper underneath instrumentation def cache_kafka_producer_headers(wrapped, instance, args, kwargs): diff --git a/tests/messagebroker_kafkapython/test_metrics.py b/tests/messagebroker_kafkapython/test_metrics.py deleted file mode 100644 index dc715815e..000000000 --- a/tests/messagebroker_kafkapython/test_metrics.py +++ /dev/null @@ -1,38 +0,0 @@ -# Copyright 2010 New Relic, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from newrelic.packages import six - - -def test_data_source_metrics(data_source, topic, producer, consumer): - _data_source_metrics = { - "MessageBroker/Kafka/Internal/kafka-metrics-count/count": "present", - "MessageBroker/Kafka/Internal/producer-metrics/request-rate": "present", - "MessageBroker/Kafka/Internal/producer-topic-metrics.%s/record-send-rate" % topic: "present", - "MessageBroker/Kafka/Internal/consumer-metrics/request-rate": "present", - } - - producer.send(topic, value=1) - producer.flush() - for _ in consumer: - pass - - metrics = dict(data_source()) - assert metrics - - for metric_name, count in six.iteritems(_data_source_metrics): - if count == "present": - assert metric_name in metrics - else: - assert metrics[metric_name]["count"] == count, "%s:%d" % (metric_name, count) From 75a8c6eea79d3701af6bb46a095c732da3d5558e Mon Sep 17 00:00:00 2001 From: Hannah Stepanek Date: Fri, 23 Sep 2022 12:53:42 -0700 Subject: [PATCH 32/49] Confluent Kafka Instrumentation (#620) * Confluent-Kafka requires librdkafka-dev to compile Co-authored-by: Timothy Pansino Co-authored-by: Lalleh Rafeei Co-authored-by: Uma Annamalai * Add confluent-kafka test env Co-authored-by: Timothy Pansino Co-authored-by: Lalleh Rafeei Co-authored-by: Uma Annamalai * Add support for confluent-kafka Co-authored-by: Timothy Pansino Co-authored-by: Lalleh Rafeei Co-authored-by: Uma Annamalai * Uncomment admin client in kafka-python fixture Co-authored-by: Timothy Pansino Co-authored-by: Lalleh Rafeei Co-authored-by: Uma Annamalai * Run kafka tests serially * Attempt to fix flakey test failures Co-authored-by: Timothy Pansino Co-authored-by: Lalleh Rafeei Co-authored-by: Uma Annamalai --- .github/workflows/tests.yml | 18 +- newrelic/config.py | 18 +- .../hooks/messagebroker_confluentkafka.py | 238 +++++++++++++++ .../messagebroker_confluentkafka/conftest.py | 272 ++++++++++++++++++ .../test_consumer.py | 176 ++++++++++++ .../test_producer.py | 80 ++++++ .../test_serialization.py | 142 +++++++++ tests/messagebroker_kafkapython/conftest.py | 14 +- tox.ini | 5 +- 9 files changed, 950 insertions(+), 13 deletions(-) create mode 100644 newrelic/hooks/messagebroker_confluentkafka.py create mode 100644 tests/messagebroker_confluentkafka/conftest.py create mode 100644 tests/messagebroker_confluentkafka/test_consumer.py create mode 100644 tests/messagebroker_confluentkafka/test_producer.py create mode 100644 tests/messagebroker_confluentkafka/test_serialization.py diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 55cd4b3d3..892bfce9a 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -435,12 +435,12 @@ jobs: kafka: env: - TOTAL_GROUPS: 1 + TOTAL_GROUPS: 2 strategy: fail-fast: false matrix: - group-number: [1] + group-number: [1, 2] runs-on: ubuntu-latest timeout-minutes: 30 @@ -472,6 +472,18 @@ jobs: - uses: actions/checkout@v3 - uses: ./.github/actions/setup-python-matrix + # Special case packages + - name: Install librdkafka-dev + run: | + # Use lsb-release to find the codename of Ubuntu to use to install the correct library name + sudo apt-get update + sudo ln -fs /usr/share/zoneinfo/America/Los_Angeles /etc/localtime + sudo apt-get install -y wget gnupg2 software-properties-common + sudo wget -qO - https://packages.confluent.io/deb/7.2/archive.key | sudo apt-key add - + sudo add-apt-repository "deb https://packages.confluent.io/clients/deb $(lsb_release -cs) main" + sudo apt-get update + sudo apt-get install -y librdkafka-dev/$(lsb_release -c | cut -f 2) + - name: 
Get Environments id: get-envs run: | @@ -481,7 +493,7 @@ jobs: - name: Test run: | - tox -vv -e ${{ steps.get-envs.outputs.envs }} -p auto + tox -vv -e ${{ steps.get-envs.outputs.envs }} env: TOX_PARALLEL_NO_SPINNER: 1 PY_COLORS: 0 diff --git a/newrelic/config.py b/newrelic/config.py index fde8ee402..4e0912db8 100644 --- a/newrelic/config.py +++ b/newrelic/config.py @@ -2307,12 +2307,27 @@ def _process_module_builtin_defaults(): "instrument_cherrypy__cptree", ) + _process_module_definition( + "confluent_kafka.cimpl", + "newrelic.hooks.messagebroker_confluentkafka", + "instrument_confluentkafka_cimpl", + ) + _process_module_definition( + "confluent_kafka.serializing_producer", + "newrelic.hooks.messagebroker_confluentkafka", + "instrument_confluentkafka_serializing_producer", + ) + _process_module_definition( + "confluent_kafka.deserializing_consumer", + "newrelic.hooks.messagebroker_confluentkafka", + "instrument_confluentkafka_deserializing_consumer", + ) + _process_module_definition( "kafka.consumer.group", "newrelic.hooks.messagebroker_kafkapython", "instrument_kafka_consumer_group", ) - _process_module_definition( "kafka.producer.kafka", "newrelic.hooks.messagebroker_kafkapython", @@ -2323,7 +2338,6 @@ def _process_module_builtin_defaults(): "newrelic.hooks.messagebroker_kafkapython", "instrument_kafka_heartbeat", ) - _process_module_definition( "kafka.consumer.group", "newrelic.hooks.messagebroker_kafkapython", diff --git a/newrelic/hooks/messagebroker_confluentkafka.py b/newrelic/hooks/messagebroker_confluentkafka.py new file mode 100644 index 000000000..917ec1d5b --- /dev/null +++ b/newrelic/hooks/messagebroker_confluentkafka.py @@ -0,0 +1,238 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
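+
+# This module instruments the confluent-kafka client: Producer.produce gains a
+# MessageTrace that injects distributed tracing headers into outgoing records,
+# Consumer.poll starts and ends MessageTransactions around received records,
+# and the SerializingProducer/DeserializingConsumer variants get
+# FunctionTraces around their (de)serializers.
+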
+import logging +import sys + +from newrelic.api.application import application_instance +from newrelic.api.error_trace import wrap_error_trace +from newrelic.api.function_trace import FunctionTraceWrapper +from newrelic.api.message_trace import MessageTrace +from newrelic.api.message_transaction import MessageTransaction +from newrelic.api.time_trace import notice_error +from newrelic.api.transaction import current_transaction +from newrelic.common.object_wrapper import function_wrapper, wrap_function_wrapper + +_logger = logging.getLogger(__name__) + +HEARTBEAT_POLL = "MessageBroker/Kafka/Heartbeat/Poll" +HEARTBEAT_SENT = "MessageBroker/Kafka/Heartbeat/Sent" +HEARTBEAT_FAIL = "MessageBroker/Kafka/Heartbeat/Fail" +HEARTBEAT_RECEIVE = "MessageBroker/Kafka/Heartbeat/Receive" +HEARTBEAT_SESSION_TIMEOUT = "MessageBroker/Kafka/Heartbeat/SessionTimeout" +HEARTBEAT_POLL_TIMEOUT = "MessageBroker/Kafka/Heartbeat/PollTimeout" + + +def _bind_Producer_produce(topic, value=None, key=None, partition=-1, on_delivery=None, timestamp=0, headers=None): + return topic, value, key, partition, on_delivery, timestamp, headers + + +def wrap_Producer_produce(wrapped, instance, args, kwargs): + transaction = current_transaction() + if transaction is None: + return wrapped(*args, **kwargs) + + topic, value, key, partition, on_delivery, timestamp, headers = _bind_Producer_produce(*args, **kwargs) + headers = list(headers) if headers else [] + + with MessageTrace( + library="Kafka", + operation="Produce", + destination_type="Topic", + destination_name=topic or "Default", + source=wrapped, + ) as trace: + dt_headers = [(k, v.encode("utf-8")) for k, v in trace.generate_request_headers(transaction)] + headers.extend(dt_headers) + try: + return wrapped( + topic, + value=value, + key=key, + partition=partition, + on_delivery=on_delivery, + timestamp=timestamp, + headers=headers, + ) + except Exception as error: + # Unwrap kafka errors + while hasattr(error, "exception"): + error = error.exception # pylint: disable=E1101 + + _, _, tb = sys.exc_info() + notice_error((type(error), error, tb)) + tb = None # Clear reference to prevent reference cycles + raise + + +def wrap_Consumer_poll(wrapped, instance, args, kwargs): + # This wrapper can be called either outside of a transaction, or + # within the context of an existing transaction. There are 4 + # possibilities we need to handle: (Note that this is similar to + # our Pika, Celery, and Kafka-Python instrumentation) + # + # 1. Inside an inner wrapper in the DeserializingConsumer + # + # Do nothing. The DeserializingConsumer is double wrapped because + # the underlying C implementation is wrapped as well. We need to + # detect when the second wrapper is called and ignore it completely + # or transactions will be stopped early. + # + # 2. In an inactive transaction + # + # If the end_of_transaction() or ignore_transaction() API + # calls have been invoked, this iterator may be called in the + # context of an inactive transaction. In this case, don't wrap + # the iterator in any way. Just run the original iterator. + # + # 3. In an active transaction + # + # Do nothing. + # + # 4. Outside of a transaction + # + # Since it's not running inside of an existing transaction, we + # want to create a new background transaction for it. 
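+    #
+    # As an illustration (a hedged sketch; broker and topic names are
+    # arbitrary), a plain polling loop such as
+    #
+    #     consumer = Consumer({"bootstrap.servers": broker, "group.id": "g"})
+    #     consumer.subscribe([topic])
+    #     while True:
+    #         record = consumer.poll(1.0)
+    #         if record is None:
+    #             continue
+    #         process(record)
+    #
+    # run outside any transaction gets a MessageTransaction started by each
+    # poll() that returns a record and ended by the following poll() (case 4),
+    # while poll() calls made under an already active transaction only record
+    # the Received/Bytes and Received/Messages metrics (case 3).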
+ + # Step 1: Stop existing transactions + if hasattr(instance, "_nr_transaction") and not instance._nr_transaction.stopped: + instance._nr_transaction.__exit__(*sys.exc_info()) + + # Step 2: Poll for records + try: + record = wrapped(*args, **kwargs) + except Exception as e: + if current_transaction(): + notice_error() + else: + notice_error(application=application_instance(activate=False)) + raise + + # Step 3: Start new transaction for received record + if record: + library = "Kafka" + destination_type = "Topic" + destination_name = record.topic() + received_bytes = len(str(record.value()).encode("utf-8")) + message_count = 1 + + headers = record.headers() + headers = dict(headers) if headers else {} + + transaction = current_transaction(active_only=False) + if not transaction: + transaction = MessageTransaction( + application=application_instance(), + library=library, + destination_type=destination_type, + destination_name=destination_name, + headers=headers, + transport_type="Kafka", + routing_key=record.key(), + source=wrapped, + ) + instance._nr_transaction = transaction + transaction.__enter__() # pylint: disable=C2801 + + transaction._add_agent_attribute("kafka.consume.byteCount", received_bytes) + + transaction = current_transaction() + + if transaction: # If there is an active transaction now. + # Add metrics whether or not a transaction was already active, or one was just started. + # Don't add metrics if there was an inactive transaction. + # Name the metrics using the same format as the transaction, but in case the active transaction + # was an existing one and not a message transaction, reproduce the naming logic here. + group = "Message/%s/%s" % (library, destination_type) + name = "Named/%s" % destination_name + transaction.record_custom_metric("%s/%s/Received/Bytes" % (group, name), received_bytes) + transaction.record_custom_metric("%s/%s/Received/Messages" % (group, name), message_count) + + return record + + +def wrap_DeserializingConsumer_poll(wrapped, instance, args, kwargs): + try: + return wrapped(*args, **kwargs) + except Exception: + notice_error() + + # Stop existing transactions + if hasattr(instance, "_nr_transaction") and not instance._nr_transaction.stopped: + instance._nr_transaction.__exit__(*sys.exc_info()) + + raise + + +def wrap_serializer(serializer_name, group_prefix): + @function_wrapper + def _wrap_serializer(wrapped, instance, args, kwargs): + if not current_transaction(): + return wrapped(*args, **kwargs) + + topic = args[1].topic + group = "%s/Kafka/Topic" % group_prefix + name = "Named/%s/%s" % (topic, serializer_name) + + return FunctionTraceWrapper(wrapped, name=name, group=group)(*args, **kwargs) + + return _wrap_serializer + + +def wrap_SerializingProducer_init(wrapped, instance, args, kwargs): + wrapped(*args, **kwargs) + + if hasattr(instance, "_key_serializer") and callable(instance._key_serializer): + instance._key_serializer = wrap_serializer("Serialization/Key", "MessageBroker")(instance._key_serializer) + + if hasattr(instance, "_value_serializer") and callable(instance._value_serializer): + instance._value_serializer = wrap_serializer("Serialization/Value", "MessageBroker")(instance._value_serializer) + + +def wrap_DeserializingConsumer_init(wrapped, instance, args, kwargs): + wrapped(*args, **kwargs) + + if hasattr(instance, "_key_deserializer") and callable(instance._key_deserializer): + instance._key_deserializer = wrap_serializer("Deserialization/Key", "Message")(instance._key_deserializer) + + if hasattr(instance, 
"_value_deserializer") and callable(instance._value_deserializer): + instance._value_deserializer = wrap_serializer("Deserialization/Value", "Message")(instance._value_deserializer) + + +def wrap_immutable_class(module, class_name): + # Wrap immutable binary extension class with a mutable Python subclass + new_class = type(class_name, (getattr(module, class_name),), {}) + setattr(module, class_name, new_class) + return new_class + + +def instrument_confluentkafka_cimpl(module): + if hasattr(module, "Producer"): + wrap_immutable_class(module, "Producer") + wrap_function_wrapper(module, "Producer.produce", wrap_Producer_produce) + + if hasattr(module, "Consumer"): + wrap_immutable_class(module, "Consumer") + wrap_function_wrapper(module, "Consumer.poll", wrap_Consumer_poll) + + +def instrument_confluentkafka_serializing_producer(module): + if hasattr(module, "SerializingProducer"): + wrap_function_wrapper(module, "SerializingProducer.__init__", wrap_SerializingProducer_init) + wrap_error_trace(module, "SerializingProducer.produce") + + +def instrument_confluentkafka_deserializing_consumer(module): + if hasattr(module, "DeserializingConsumer"): + wrap_function_wrapper(module, "DeserializingConsumer.__init__", wrap_DeserializingConsumer_init) + wrap_function_wrapper(module, "DeserializingConsumer.poll", wrap_DeserializingConsumer_poll) diff --git a/tests/messagebroker_confluentkafka/conftest.py b/tests/messagebroker_confluentkafka/conftest.py new file mode 100644 index 000000000..c37831df0 --- /dev/null +++ b/tests/messagebroker_confluentkafka/conftest.py @@ -0,0 +1,272 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import json +import uuid + +import pytest +from testing_support.db_settings import kafka_settings +from testing_support.fixtures import ( # noqa: F401, pylint: disable=W0611 + code_coverage_fixture, + collector_agent_registration_fixture, + collector_available_fixture, +) + +from newrelic.api.transaction import current_transaction +from newrelic.common.object_wrapper import transient_function_wrapper + +DB_SETTINGS = kafka_settings()[0] + +BROKER = "%s:%s" % (DB_SETTINGS["host"], DB_SETTINGS["port"]) + +_coverage_source = [ + "newrelic.hooks.messagebroker_confluentkafka", +] + +code_coverage = code_coverage_fixture(source=_coverage_source) + +_default_settings = { + "transaction_tracer.explain_threshold": 0.0, + "transaction_tracer.transaction_threshold": 0.0, + "transaction_tracer.stack_trace_threshold": 0.0, + "debug.log_data_collector_payloads": True, + "debug.record_transaction_failure": True, +} + +collector_agent_registration = collector_agent_registration_fixture( + app_name="Python Agent Test (messagebroker_confluentkafka)", + default_settings=_default_settings, + linked_applications=["Python Agent Test (messagebroker_confluentkafka)"], +) + + +@pytest.fixture(scope="session", params=["cimpl", "serializer_function", "serializer_object"]) +def client_type(request): + return request.param + + +@pytest.fixture() +def skip_if_not_serializing(client_type): + if client_type == "cimpl": + pytest.skip("Only serializing clients supported.") + + +@pytest.fixture(scope="function") +def producer(client_type, json_serializer): + from confluent_kafka import Producer, SerializingProducer + + if client_type == "cimpl": + producer = Producer({"bootstrap.servers": BROKER}) + elif client_type == "serializer_function": + producer = SerializingProducer( + { + "bootstrap.servers": BROKER, + "value.serializer": lambda v, c: json.dumps(v).encode("utf-8"), + "key.serializer": lambda v, c: json.dumps(v).encode("utf-8") if v is not None else None, + } + ) + elif client_type == "serializer_object": + producer = SerializingProducer( + { + "bootstrap.servers": BROKER, + "value.serializer": json_serializer, + "key.serializer": json_serializer, + } + ) + + yield producer + producer.purge() + + +@pytest.fixture(scope="function") +def consumer(topic, producer, client_type, json_deserializer): + from confluent_kafka import Consumer, DeserializingConsumer + + if client_type == "cimpl": + consumer = Consumer( + { + "bootstrap.servers": BROKER, + "auto.offset.reset": "earliest", + "heartbeat.interval.ms": 1000, + "group.id": "test", + } + ) + elif client_type == "serializer_function": + consumer = DeserializingConsumer( + { + "bootstrap.servers": BROKER, + "auto.offset.reset": "earliest", + "heartbeat.interval.ms": 1000, + "group.id": "test", + "value.deserializer": lambda v, c: json.loads(v.decode("utf-8")), + "key.deserializer": lambda v, c: json.loads(v.decode("utf-8")) if v is not None else None, + } + ) + elif client_type == "serializer_object": + consumer = DeserializingConsumer( + { + "bootstrap.servers": BROKER, + "auto.offset.reset": "earliest", + "heartbeat.interval.ms": 1000, + "group.id": "test", + "value.deserializer": json_deserializer, + "key.deserializer": json_deserializer, + } + ) + + consumer.subscribe([topic]) + + yield consumer + + consumer.close() + + +@pytest.fixture(scope="session") +def serialize(client_type): + if client_type == "cimpl": + return lambda v: json.dumps(v).encode("utf-8") + else: + return lambda v: v + + +@pytest.fixture(scope="session") +def deserialize(client_type): + if 
client_type == "cimpl": + return lambda v: json.loads(v.decode("utf-8")) + else: + return lambda v: v + + +@pytest.fixture(scope="session") +def json_serializer(): + from confluent_kafka.serialization import Serializer + + class JSONSerializer(Serializer): + def __call__(self, obj, ctx): + return json.dumps(obj).encode("utf-8") if obj is not None else None + + return JSONSerializer() + + +@pytest.fixture(scope="session") +def json_deserializer(): + from confluent_kafka.serialization import Deserializer + + class JSONDeserializer(Deserializer): + def __call__(self, obj, ctx): + return json.loads(obj.decode("utf-8")) if obj is not None else None + + return JSONDeserializer() + + +@pytest.fixture(scope="function") +def topic(): + from confluent_kafka.admin import AdminClient, NewTopic + + topic = "test-topic-%s" % str(uuid.uuid4()) + + admin = AdminClient({"bootstrap.servers": BROKER}) + new_topics = [NewTopic(topic, num_partitions=1, replication_factor=1)] + topics = admin.create_topics(new_topics) + for _, f in topics.items(): + f.result() # Block until topic is created. + + yield topic + + admin.delete_topics(new_topics) + + +@pytest.fixture() +def send_producer_message(topic, producer, serialize): + def _test(): + producer.produce(topic, value=serialize({"foo": 1})) + producer.flush() + + return _test + + +@pytest.fixture() +def get_consumer_record(topic, send_producer_message, consumer, deserialize): + def _test(): + send_producer_message() + + record_count = 0 + + timeout = 10 + attempts = 0 + record = None + while not record and attempts < timeout: + record = consumer.poll(0.5) + if not record: + attempts += 1 + continue + assert not record.error() + + assert deserialize(record.value()) == {"foo": 1} + record_count += 1 + consumer.poll(0.5) # Exit the transaction. + + assert record_count == 1, "Incorrect count of records consumed: %d. Expected 1." % record_count + + return _test + + +def cache_kafka_producer_headers(): + import confluent_kafka.cimpl + + @transient_function_wrapper(confluent_kafka.cimpl, "Producer.produce.__wrapped__") + # Place transient wrapper underneath instrumentation + def _cache_kafka_producer_headers(wrapped, instance, args, kwargs): + transaction = current_transaction() + + if transaction is None: + return wrapped(*args, **kwargs) + + ret = wrapped(*args, **kwargs) + headers = kwargs.get("headers", []) + headers = dict(headers) + transaction._test_request_headers = headers + return ret + + return _cache_kafka_producer_headers + + +def cache_kafka_consumer_headers(): + import confluent_kafka.cimpl + + @transient_function_wrapper(confluent_kafka.cimpl, "Consumer.poll") + # Place transient wrapper underneath instrumentation + def _cache_kafka_consumer_headers(wrapped, instance, args, kwargs): + record = wrapped(*args, **kwargs) + transaction = current_transaction() + + if transaction is None: + return record + + headers = dict(record.headers()) + transaction._test_request_headers = headers + return record + + return _cache_kafka_consumer_headers + + +@pytest.fixture(autouse=True) +def assert_no_active_transaction(): + # Run before test + assert not current_transaction(active_only=False), "Transaction exists before test run." + + yield # Run test + + # Run after test + assert not current_transaction(active_only=False), "Transaction was not properly exited." 
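The fixtures above compose into a produce/consume round trip. For reference, the same flow without pytest looks roughly like this (a sketch: the broker address is an assumption, the tests read theirs from kafka_settings(), and poll() may return None until the broker catches up, which is why the fixtures poll in a loop):

    import json
    import uuid

    from confluent_kafka import Consumer, Producer

    BROKER = "localhost:9092"  # assumed broker address
    topic = "test-topic-%s" % uuid.uuid4()

    producer = Producer({"bootstrap.servers": BROKER})
    producer.produce(topic, value=json.dumps({"foo": 1}).encode("utf-8"))
    producer.flush()

    consumer = Consumer({
        "bootstrap.servers": BROKER,
        "auto.offset.reset": "earliest",
        "group.id": "test",
    })
    consumer.subscribe([topic])
    record = consumer.poll(10)  # may be None on a slow broker
    if record is not None and not record.error():
        assert json.loads(record.value().decode("utf-8")) == {"foo": 1}
    consumer.close()
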
diff --git a/tests/messagebroker_confluentkafka/test_consumer.py b/tests/messagebroker_confluentkafka/test_consumer.py new file mode 100644 index 000000000..0edcc73fd --- /dev/null +++ b/tests/messagebroker_confluentkafka/test_consumer.py @@ -0,0 +1,176 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import pytest +from conftest import cache_kafka_consumer_headers +from testing_support.fixtures import ( + reset_core_stats_engine, + validate_attributes, + validate_error_event_attributes_outside_transaction, + validate_transaction_errors, + validate_transaction_metrics, +) +from testing_support.validators.validate_distributed_trace_accepted import ( + validate_distributed_trace_accepted, +) +from testing_support.validators.validate_transaction_count import ( + validate_transaction_count, +) + +from newrelic.api.background_task import background_task +from newrelic.api.transaction import end_of_transaction +from newrelic.common.object_names import callable_name +from newrelic.packages import six + + +def test_custom_metrics(get_consumer_record, topic): + custom_metrics = [ + ("Message/Kafka/Topic/Named/%s/Received/Bytes" % topic, 1), + ("Message/Kafka/Topic/Named/%s/Received/Messages" % topic, 1), + ] + + @validate_transaction_metrics( + "Named/%s" % topic, + group="Message/Kafka/Topic", + custom_metrics=custom_metrics, + background_task=True, + ) + @validate_transaction_count(1) + def _test(): + get_consumer_record() + + _test() + + +def test_multiple_transactions(get_consumer_record, topic): + @validate_transaction_count(2) + def _test(): + get_consumer_record() + get_consumer_record() + + _test() + + +def test_custom_metrics_on_existing_transaction(get_consumer_record, topic): + transaction_name = ( + "test_consumer:test_custom_metrics_on_existing_transaction.._test" if six.PY3 else "test_consumer:_test" + ) + + @validate_transaction_metrics( + transaction_name, + custom_metrics=[ + ("Message/Kafka/Topic/Named/%s/Received/Bytes" % topic, 1), + ("Message/Kafka/Topic/Named/%s/Received/Messages" % topic, 1), + ], + background_task=True, + ) + @validate_transaction_count(1) + @background_task() + def _test(): + get_consumer_record() + + _test() + + +def test_custom_metrics_inactive_transaction(get_consumer_record, topic): + transaction_name = ( + "test_consumer:test_custom_metrics_inactive_transaction.._test" if six.PY3 else "test_consumer:_test" + ) + + @validate_transaction_metrics( + transaction_name, + custom_metrics=[ + ("Message/Kafka/Topic/Named/%s/Received/Bytes" % topic, None), + ("Message/Kafka/Topic/Named/%s/Received/Messages" % topic, None), + ], + background_task=True, + ) + @validate_transaction_count(1) + @background_task() + def _test(): + end_of_transaction() + get_consumer_record() + + _test() + + +def test_agent_attributes(get_consumer_record): + @validate_attributes("agent", ["kafka.consume.byteCount"]) + def _test(): + get_consumer_record() + + _test() + + +def test_consumer_errors(topic, consumer, producer): + # Close the consumer in 
order to force poll to raise an exception. + consumer.close() + + expected_error = RuntimeError + + @reset_core_stats_engine() + @validate_error_event_attributes_outside_transaction( + num_errors=1, exact_attrs={"intrinsic": {"error.class": callable_name(expected_error)}, "agent": {}, "user": {}} + ) + def _test(): + with pytest.raises(expected_error): + producer.produce(topic, value="A") + producer.flush() + while consumer.poll(0.5): + pass + + _test() + + +def test_consumer_handled_errors_not_recorded(get_consumer_record): + # It's important to check that we do not notice the StopIteration error. + @validate_transaction_errors([]) + def _test(): + get_consumer_record() + + _test() + + +def test_distributed_tracing_headers(topic, producer, consumer, serialize): + # Produce the messages inside a transaction, making sure to close it. + @validate_transaction_count(1) + @background_task() + def _produce(): + producer.produce(topic, key="bar", value=serialize({"foo": 1})) + producer.flush() + + @validate_transaction_metrics( + "Named/%s" % topic, + group="Message/Kafka/Topic", + rollup_metrics=[ + ("Supportability/DistributedTrace/AcceptPayload/Success", None), + ("Supportability/TraceContext/Accept/Success", 1), + ], + background_task=True, + ) + @validate_transaction_count(1) + def _consume(): + @validate_distributed_trace_accepted(transport_type="Kafka") + @cache_kafka_consumer_headers() + def _test(): + # Start the transaction but don't exit it. + consumer.poll(0.5) + + _test() + + # Exit the transaction. + consumer.poll(0.5) + + _produce() + _consume() diff --git a/tests/messagebroker_confluentkafka/test_producer.py b/tests/messagebroker_confluentkafka/test_producer.py new file mode 100644 index 000000000..fe91295c4 --- /dev/null +++ b/tests/messagebroker_confluentkafka/test_producer.py @@ -0,0 +1,80 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
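The distributed tracing consumer test above checks that trace context survives the broker hop. The same handshake can be sketched with the agent's public API (an initialized agent is assumed, and Kafka itself is elided: the header pairs the producer hook would attach to the outgoing message are handed to the consumer side directly):

    from newrelic.api.background_task import background_task
    from newrelic.api.transaction import current_transaction

    @background_task(name="produce_side")
    def produce_side():
        headers = []  # pairs the producer instrumentation appends to the message
        current_transaction().insert_distributed_trace_headers(headers)
        return dict(headers)

    @background_task(name="consume_side")
    def consume_side(headers):
        # transport_type matches what MessageTransaction reports on poll.
        current_transaction().accept_distributed_trace_headers(
            headers, transport_type="Kafka"
        )

    consume_side(produce_side())
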
+ +import pytest +from conftest import cache_kafka_producer_headers +from testing_support.fixtures import ( + validate_transaction_errors, + validate_transaction_metrics, +) +from testing_support.validators.validate_messagebroker_headers import ( + validate_messagebroker_headers, +) + +from newrelic.api.background_task import background_task +from newrelic.common.object_names import callable_name +from newrelic.packages import six + + +def test_trace_metrics(topic, send_producer_message): + scoped_metrics = [("MessageBroker/Kafka/Topic/Produce/Named/%s" % topic, 1)] + unscoped_metrics = scoped_metrics + txn_name = "test_producer:test_trace_metrics..test" if six.PY3 else "test_producer:test" + + @validate_transaction_metrics( + txn_name, + scoped_metrics=scoped_metrics, + rollup_metrics=unscoped_metrics, + background_task=True, + ) + @background_task() + def test(): + send_producer_message() + + test() + + +def test_distributed_tracing_headers(topic, send_producer_message): + txn_name = "test_producer:test_distributed_tracing_headers..test" if six.PY3 else "test_producer:test" + + @validate_transaction_metrics( + txn_name, + rollup_metrics=[ + ("Supportability/TraceContext/Create/Success", 1), + ("Supportability/DistributedTrace/CreatePayload/Success", 1), + ], + background_task=True, + ) + @background_task() + @cache_kafka_producer_headers() + @validate_messagebroker_headers + def test(): + send_producer_message() + + test() + + +def test_producer_errors(topic, producer, monkeypatch): + if hasattr(producer, "_value_serializer"): + # Remove serializer to intentionally cause a type error in underlying producer implementation + monkeypatch.setattr(producer, "_value_serializer", None) + + @validate_transaction_errors([callable_name(TypeError)]) + @background_task() + def test(): + with pytest.raises(TypeError): + producer.produce(topic, value=object()) + producer.flush() + + test() diff --git a/tests/messagebroker_confluentkafka/test_serialization.py b/tests/messagebroker_confluentkafka/test_serialization.py new file mode 100644 index 000000000..350b5ea35 --- /dev/null +++ b/tests/messagebroker_confluentkafka/test_serialization.py @@ -0,0 +1,142 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
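For reference, the serialization metric names asserted in the tests that follow are composed by wrap_serializer from a fixed group plus the topic and serializer names; spelled out, with "my-topic" as a placeholder:

    group = "MessageBroker/Kafka/Topic"  # "Message/Kafka/Topic" on the consume side
    name = "Named/%s/%s" % ("my-topic", "Serialization/Value")
    assert "%s/%s" % (group, name) == (
        "MessageBroker/Kafka/Topic/Named/my-topic/Serialization/Value"
    )
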
+ +import pytest +from testing_support.fixtures import ( + validate_transaction_errors, + validate_transaction_metrics, +) + +from newrelic.api.background_task import background_task +from newrelic.common.object_names import callable_name +from newrelic.packages import six + + +def test_serialization_metrics(skip_if_not_serializing, topic, send_producer_message): + txn_name = "test_serialization:test_serialization_metrics..test" if six.PY3 else "test_serialization:test" + + _metrics = [ + ("MessageBroker/Kafka/Topic/Named/%s/Serialization/Value" % topic, 1), + ("MessageBroker/Kafka/Topic/Named/%s/Serialization/Key" % topic, 1), + ] + + @validate_transaction_metrics( + txn_name, + scoped_metrics=_metrics, + rollup_metrics=_metrics, + background_task=True, + ) + @background_task() + def test(): + send_producer_message() + + test() + + +def test_deserialization_metrics(skip_if_not_serializing, topic, get_consumer_record): + _metrics = [ + ("Message/Kafka/Topic/Named/%s/Deserialization/Value" % topic, 1), + ("Message/Kafka/Topic/Named/%s/Deserialization/Key" % topic, 1), + ] + + @validate_transaction_metrics( + "Named/%s" % topic, + group="Message/Kafka/Topic", + scoped_metrics=_metrics, + rollup_metrics=_metrics, + background_task=True, + ) + def test(): + get_consumer_record() + + test() + + +@pytest.mark.parametrize( + "key,value,error", + ( + (object(), "A", "KeySerializationError"), + ("A", object(), "ValueSerializationError"), + ), +) +def test_serialization_errors(skip_if_not_serializing, topic, producer, key, value, error): + import confluent_kafka.error + + error_cls = getattr(confluent_kafka.error, error) + + @validate_transaction_errors([callable_name(error_cls)]) + @background_task() + def test(): + with pytest.raises(error_cls): + producer.produce(topic=topic, key=key, value=value) + + test() + + +@pytest.mark.parametrize( + "key,value,error", + ( + ("%", "{}", "KeyDeserializationError"), + ("{}", "%", "ValueDeserializationError"), + ), +) +def test_deserialization_errors(skip_if_not_serializing, monkeypatch, topic, producer, consumer, key, value, error): + import confluent_kafka.error + + error_cls = getattr(confluent_kafka.error, error) + + # Remove serializers to cause intentional issues + monkeypatch.setattr(producer, "_value_serializer", None) + monkeypatch.setattr(producer, "_key_serializer", None) + + producer.produce(topic=topic, key=key, value=value) + producer.flush() + + @validate_transaction_errors([callable_name(error_cls)]) + @background_task() + def test(): + with pytest.raises(error_cls): + record = consumer.poll(0.5) + assert record is not None, "No record consumed." + + test() + + +@pytest.fixture +def send_producer_message(topic, producer): + def _test(): + producer.produce(topic, value={"foo": 1}) + producer.flush() + + return _test + + +@pytest.fixture() +def get_consumer_record(topic, send_producer_message, consumer): + def _test(): + send_producer_message() + + record_count = 0 + while True: + record = consumer.poll(0.5) + if not record: + break + assert not record.error() + + assert record.value() == {"foo": 1} + record_count += 1 + + assert record_count == 1, "Incorrect count of records consumed: %d. Expected 1." 
% record_count + + return _test diff --git a/tests/messagebroker_kafkapython/conftest.py b/tests/messagebroker_kafkapython/conftest.py index ba0fc2271..35fde04d0 100644 --- a/tests/messagebroker_kafkapython/conftest.py +++ b/tests/messagebroker_kafkapython/conftest.py @@ -18,7 +18,7 @@ import kafka import pytest from testing_support.db_settings import kafka_settings -from testing_support.fixtures import ( # noqa: F401, W0611 +from testing_support.fixtures import ( # noqa: F401, pylint: disable=W0611 code_coverage_fixture, collector_agent_registration_fixture, collector_available_fixture, @@ -84,18 +84,18 @@ def consumer(topic, producer): @pytest.fixture(scope="function") def topic(): - # from kafka.admin.client import KafkaAdminClient - # from kafka.admin.new_topic import NewTopic + from kafka.admin.client import KafkaAdminClient + from kafka.admin.new_topic import NewTopic topic = "test-topic-%s" % str(uuid.uuid4()) - # admin = KafkaAdminClient(bootstrap_servers=BROKER) - # new_topics = [NewTopic(topic, num_partitions=1, replication_factor=1)] - # topics = admin.create_topics(new_topics) + admin = KafkaAdminClient(bootstrap_servers=BROKER) + new_topics = [NewTopic(topic, num_partitions=1, replication_factor=1)] + topics = admin.create_topics(new_topics) yield topic - # admin.delete_topics([topic]) + admin.delete_topics([topic]) @transient_function_wrapper(kafka.producer.kafka, "KafkaProducer.send.__wrapped__") diff --git a/tox.ini b/tox.ini index 2caee17cf..e2fc16886 100644 --- a/tox.ini +++ b/tox.ini @@ -149,7 +149,8 @@ envlist = libcurl-framework_tornado-{py37,py38,py39,py310}-tornadomaster, rabbitmq-messagebroker_pika-{py27,py37,py38,py39,pypy,pypy37}-pika0.13, rabbitmq-messagebroker_pika-{py37,py38,py39,py310,pypy37}-pikalatest, - kafka-messagebroker_kafkapython-{pypy,py27,py37,py38,pypy37}-kafkapythonlatest + kafka-messagebroker_confluentkafka-{py27,py37,py38,py39,py310}-confluentkafkalatest, + kafka-messagebroker_kafkapython-{pypy,py27,py37,py38,pypy37}-kafkapythonlatest, python-template_mako-{py27,py37,py38,py39,py310} [pytest] @@ -354,6 +355,7 @@ deps = messagebroker_pika-pikalatest: pika messagebroker_pika: tornado<5 messagebroker_pika-{py27,pypy}: enum34 + messagebroker_confluentkafka: confluent-kafka messagebroker_kafkapython: kafka-python template_mako: mako<1.2 @@ -453,5 +455,6 @@ changedir = logger_logging: tests/logger_logging logger_loguru: tests/logger_loguru messagebroker_pika: tests/messagebroker_pika + messagebroker_confluentkafka: tests/messagebroker_confluentkafka messagebroker_kafkapython: tests/messagebroker_kafkapython template_mako: tests/template_mako From 8beb0cc918462d911da52af30686a0a0f3b39f95 Mon Sep 17 00:00:00 2001 From: Timothy Pansino <11214426+TimPansino@users.noreply.github.com> Date: Fri, 23 Sep 2022 15:48:36 -0700 Subject: [PATCH 33/49] Kafka-Python Serialization Metrics (#628) * Add more metrics, consumer, and producer tests Co-authored-by: Timothy Pansino Co-authored-by: Hannah Stepanek * Remove breakpoint * Add DT accepted validator * Fix issue with holdover transaction Co-authored-by: Lalleh Rafeei Co-authored-by: Hannah Stepanek * [Mega-Linter] Apply linters fixes * Fix broken producer error test * [Mega-Linter] Apply linters fixes * Working on serializer instrumentation * Working on testing * Squashed commit of the following: commit 0b7b56b8c2a9ed4dcf54760b694058b5a24f32a6 Author: Tim Pansino Date: Tue Sep 20 15:26:49 2022 -0700 Assert records counts for consumer commit c0d32bb6a9c024480422550632d7360745b0c534 Author: Tim Pansino Date: Tue Sep 
20 15:05:20 2022 -0700 Add producer requirement to consumers Co-authored-by: Lalleh Rafeei Co-authored-by: Hannah Stepanek Co-authored-by: Uma Annamalai commit 9e949206f0fb7c1131595f19e7f34e535cd692b0 Author: Tim Pansino Date: Tue Sep 20 14:49:57 2022 -0700 Remove commented out code commit b2f1257ed2d523b79526f6acb00cc8e4240cb955 Author: Tim Pansino Date: Tue Sep 20 14:49:47 2022 -0700 Fix exception tracebacks for py27 commit 686c9ae077634109df1bb90a6c31f0bbbffc0625 Author: Tim Pansino Date: Tue Sep 20 14:27:40 2022 -0700 Fix errors in test and tox matrix Co-authored-by: Hannah Stepanek Co-authored-by: Uma Annamalai Co-authored-by: Lalleh Rafeei commit 7f92c6d10640a8ca6d3205e0db617663e1794b69 Author: Hannah Stepanek Date: Tue Sep 20 13:54:48 2022 -0700 Fix Py2.7 kafka consumer first access issue commit 4245201c2fb50de7a7745481dc25ad720a5e7917 Author: Hannah Stepanek Date: Tue Sep 20 12:34:35 2022 -0700 Add sudo to tz info commit 2251a0a302356397383f6619daf2407a0d56e287 Author: Hannah Stepanek Date: Tue Sep 20 12:26:36 2022 -0700 Use ubuntu-latest commit 1ca1175d27411558b30e74d057a95d35b8ba9d43 Author: Hannah Stepanek Date: Tue Sep 20 12:15:59 2022 -0700 Grab librdkafka from confluent commit bb0a192e0cb6a71706b60adb6370452065169b1f Author: Tim Pansino Date: Tue Sep 20 10:37:33 2022 -0700 Fixed cutting release from lsb commit 3cf38528a97f91510c3cbef8b6a246a77a330006 Author: Hannah Stepanek Date: Tue Sep 20 10:19:24 2022 -0700 Fixup: librdkafka installed from universe commit bf20359bfbea3b003742ecb2cdc45ff5be9592ee Author: Tim Pansino Date: Mon Sep 19 16:46:23 2022 -0700 Use lsb to install librdkafka-dev commit a85e3fd44efbeefc61b0dad3c136bec163a210d6 Merge: 7fc2b8912 d5cf9a058 Author: Timothy Pansino <11214426+TimPansino@users.noreply.github.com> Date: Mon Sep 19 16:34:19 2022 -0700 Merge branch 'develop-kafka' into feature-confluent-kafka commit 7fc2b8912bfdac54bdcf0f3fc04c68701d77c372 Author: Tim Pansino Date: Mon Sep 19 16:30:19 2022 -0700 Fix package name commit f25c59df4eae0217da11e991250b44c19103ac71 Author: Hannah Stepanek Date: Mon Sep 19 16:00:07 2022 -0700 Specify later version of librdkafka commit 6be9b43fb819843eb73cc02ea46d03efa38f8e87 Author: Hannah Stepanek Date: Mon Sep 19 09:26:39 2022 -0700 Fix removing client_id from incorrect kafka commit d658ef191fb108173428860949c2c1ffeb803145 Author: Hannah Stepanek Date: Fri Sep 16 10:50:04 2022 -0700 Add install of librdkafka-dev for kafka commit 940f9f429e5f3952bf85130d2c8d5142a255fbf6 Author: Tim Pansino Date: Fri Sep 9 16:09:21 2022 -0700 Clean up names commit 8bbed468535f810f77badcbe070f3c2530f7a901 Author: Tim Pansino Date: Fri Sep 9 14:58:21 2022 -0700 Serialization timing commit 761c75361bdb732e758771ea55ac7e4cb08815c9 Merge: bccb32129 7897a992c Author: Hannah Stepanek Date: Fri Sep 9 12:51:29 2022 -0700 Merge branch 'feature-confluent-kafka' of github.com:newrelic/newrelic-python-agent into feature-confluent-kafka commit bccb3212995fbaf2dce2e1e64d869bff67827ccc Author: Hannah Stepanek Date: Fri Sep 9 12:51:09 2022 -0700 Fix test_consumer_errors tests commit 7897a992c8aeca44552117f1ba95eda335596636 Author: hmstepanek Date: Fri Sep 9 19:04:58 2022 +0000 [Mega-Linter] Apply linters fixes commit 34c084c3fe19fe42c23c4ce668277f1bfc4ab7d3 Author: Hannah Stepanek Date: Fri Sep 9 12:01:18 2022 -0700 Fix consumer tests & reorg fixtures commit ff77d9025e122e48d14dc38c2413cdf125cffbdc Author: Tim Pansino Date: Thu Sep 8 14:53:28 2022 -0700 Consumer testing setup commit 9f1451e8df1466cbb8bb8c8beb1dca05e6250954 Author: Tim Pansino Date: 
Thu Sep 8 13:25:15 2022 -0700 Confluent kafka test setup commit 74c443cfb7ed5359f3d7a8672b8d8781a9191e1a Author: Tim Pansino Date: Thu Sep 8 11:49:03 2022 -0700 Starting work on confluent kafka * Clean up tests to refactor out fixtures * Refactor and test serialization tracing * Finish fixing testing for serialization. Co-authored-by: Lalleh Rafeei Co-authored-by: Hannah Stepanek Co-authored-by: Uma Annamalai * Add serializer object test * Remove unused test files * Starting merge of confluent kafka to kafka python * Make message trace terminal optional * Clean up confluent_kafka tests * Update kafkapython tests * Fix failing tests * Finish kafkapython serialization Co-authored-by: Lalleh Rafeei Co-authored-by: Hannah Stepanek Co-authored-by: Uma Annamalai * Clean up tests * Remove kafkapython deserialization metrics * Fix py2 testing differences * Add mutliple transaction test * Rename fixtures * Fix multiple transaction consumer failure Co-authored-by: Lalleh Rafeei Co-authored-by: Timothy Pansino Co-authored-by: Hannah Stepanek Co-authored-by: Lalleh Rafeei Co-authored-by: Uma Annamalai Co-authored-by: Hannah Stepanek --- newrelic/api/message_trace.py | 17 +- newrelic/core/message_node.py | 6 + newrelic/hooks/messagebroker_kafkapython.py | 179 +++++++++++++----- tests/messagebroker_kafkapython/conftest.py | 171 +++++++++++++++-- .../test_consumer.py | 94 +++++---- .../test_heartbeat.py | 8 +- .../test_producer.py | 45 ++--- .../test_serialization.py | 92 +++++++++ 8 files changed, 456 insertions(+), 156 deletions(-) create mode 100644 tests/messagebroker_kafkapython/test_serialization.py diff --git a/newrelic/api/message_trace.py b/newrelic/api/message_trace.py index a217ba6cc..be819d704 100644 --- a/newrelic/api/message_trace.py +++ b/newrelic/api/message_trace.py @@ -28,7 +28,7 @@ class MessageTrace(CatHeaderMixin, TimeTrace): cat_appdata_key = "NewRelicAppData" cat_synthetics_key = "NewRelicSynthetics" - def __init__(self, library, operation, destination_type, destination_name, params=None, **kwargs): + def __init__(self, library, operation, destination_type, destination_name, params=None, terminal=True, **kwargs): parent = kwargs.pop("parent", None) source = kwargs.pop("source", None) if kwargs: @@ -36,6 +36,8 @@ def __init__(self, library, operation, destination_type, destination_name, param super(MessageTrace, self).__init__(parent=parent, source=source) + self.terminal = terminal + self.library = library self.operation = operation @@ -69,7 +71,7 @@ def __repr__(self): ) def terminal_node(self): - return True + return self.terminal def create_node(self): return MessageNode( @@ -89,7 +91,7 @@ def create_node(self): ) -def MessageTraceWrapper(wrapped, library, operation, destination_type, destination_name, params={}): +def MessageTraceWrapper(wrapped, library, operation, destination_type, destination_name, params={}, terminal=True): def _nr_message_trace_wrapper_(wrapped, instance, args, kwargs): wrapper = async_wrapper(wrapped) if not wrapper: @@ -131,7 +133,7 @@ def _nr_message_trace_wrapper_(wrapped, instance, args, kwargs): else: _destination_name = destination_name - trace = MessageTrace(_library, _operation, _destination_type, _destination_name, params={}, parent=parent, source=wrapped) + trace = MessageTrace(_library, _operation, _destination_type, _destination_name, params={}, terminal=terminal, parent=parent, source=wrapped) if wrapper: # pylint: disable=W0125,W0126 return wrapper(wrapped, trace)(*args, **kwargs) @@ -142,7 +144,7 @@ def _nr_message_trace_wrapper_(wrapped, 
instance, args, kwargs): return FunctionWrapper(wrapped, _nr_message_trace_wrapper_) -def message_trace(library, operation, destination_type, destination_name, params={}): +def message_trace(library, operation, destination_type, destination_name, params={}, terminal=True): return functools.partial( MessageTraceWrapper, library=library, @@ -150,10 +152,11 @@ def message_trace(library, operation, destination_type, destination_name, params destination_type=destination_type, destination_name=destination_name, params=params, + terminal=terminal, ) -def wrap_message_trace(module, object_path, library, operation, destination_type, destination_name, params={}): +def wrap_message_trace(module, object_path, library, operation, destination_type, destination_name, params={}, terminal=True): wrap_object( - module, object_path, MessageTraceWrapper, (library, operation, destination_type, destination_name, params) + module, object_path, MessageTraceWrapper, (library, operation, destination_type, destination_name, params, terminal) ) diff --git a/newrelic/core/message_node.py b/newrelic/core/message_node.py index 8c0a334b7..02e431eb3 100644 --- a/newrelic/core/message_node.py +++ b/newrelic/core/message_node.py @@ -51,6 +51,12 @@ def time_metrics(self, stats, root, parent): yield TimeMetric(name=name, scope=root.path, duration=self.duration, exclusive=self.exclusive) + # Now for the children, if the trace is not terminal. + + for child in self.children: + for metric in child.time_metrics(stats, root, self): + yield metric + def trace_node(self, stats, root, connections): name = root.string_table.cache(self.name) diff --git a/newrelic/hooks/messagebroker_kafkapython.py b/newrelic/hooks/messagebroker_kafkapython.py index c4ec922e4..697b46349 100644 --- a/newrelic/hooks/messagebroker_kafkapython.py +++ b/newrelic/hooks/messagebroker_kafkapython.py @@ -13,12 +13,19 @@ # limitations under the License. import sys +from kafka.serializer import Serializer + from newrelic.api.application import application_instance +from newrelic.api.function_trace import FunctionTraceWrapper from newrelic.api.message_trace import MessageTrace from newrelic.api.message_transaction import MessageTransaction -from newrelic.api.time_trace import notice_error +from newrelic.api.time_trace import current_trace, notice_error from newrelic.api.transaction import current_transaction -from newrelic.common.object_wrapper import wrap_function_wrapper +from newrelic.common.object_wrapper import ( + ObjectProxy, + function_wrapper, + wrap_function_wrapper, +) HEARTBEAT_POLL = "MessageBroker/Kafka/Heartbeat/Poll" HEARTBEAT_SENT = "MessageBroker/Kafka/Heartbeat/Sent" @@ -47,6 +54,7 @@ def wrap_KafkaProducer_send(wrapped, instance, args, kwargs): destination_type="Topic", destination_name=topic or "Default", source=wrapped, + terminal=False, ) as trace: dt_headers = [(k, v.encode("utf-8")) for k, v in trace.generate_request_headers(transaction)] headers.extend(dt_headers) @@ -57,49 +65,6 @@ def wrap_KafkaProducer_send(wrapped, instance, args, kwargs): raise -def metric_wrapper(metric_name, check_result=False): - def _metric_wrapper(wrapped, instance, args, kwargs): - result = wrapped(*args, **kwargs) - - application = application_instance(activate=False) - if application: - if not check_result or check_result and result: - # If the result does not need validated, send metric. - # If the result does need validated, ensure it is True. 
- application.record_custom_metric(metric_name, 1) - - return result - - return _metric_wrapper - - -def instrument_kafka_heartbeat(module): - if hasattr(module, "Heartbeat"): - if hasattr(module.Heartbeat, "poll"): - wrap_function_wrapper(module, "Heartbeat.poll", metric_wrapper(HEARTBEAT_POLL)) - - if hasattr(module.Heartbeat, "fail_heartbeat"): - wrap_function_wrapper(module, "Heartbeat.fail_heartbeat", metric_wrapper(HEARTBEAT_FAIL)) - - if hasattr(module.Heartbeat, "sent_heartbeat"): - wrap_function_wrapper(module, "Heartbeat.sent_heartbeat", metric_wrapper(HEARTBEAT_SENT)) - - if hasattr(module.Heartbeat, "received_heartbeat"): - wrap_function_wrapper(module, "Heartbeat.received_heartbeat", metric_wrapper(HEARTBEAT_RECEIVE)) - - if hasattr(module.Heartbeat, "session_timeout_expired"): - wrap_function_wrapper( - module, - "Heartbeat.session_timeout_expired", - metric_wrapper(HEARTBEAT_SESSION_TIMEOUT, check_result=True), - ) - - if hasattr(module.Heartbeat, "poll_timeout_expired"): - wrap_function_wrapper( - module, "Heartbeat.poll_timeout_expired", metric_wrapper(HEARTBEAT_POLL_TIMEOUT, check_result=True) - ) - - def wrap_kafkaconsumer_next(wrapped, instance, args, kwargs): if hasattr(instance, "_nr_transaction") and not instance._nr_transaction.stopped: instance._nr_transaction.__exit__(*sys.exc_info()) @@ -110,7 +75,12 @@ def wrap_kafkaconsumer_next(wrapped, instance, args, kwargs): # StopIteration is an expected error, indicating the end of an iterable, # that should not be captured. if not isinstance(e, StopIteration): - notice_error() + if current_transaction(): + # Report error on existing transaction if there is one + notice_error() + else: + # Report error on application + notice_error(application=application_instance(activate=False)) raise if record: @@ -177,11 +147,126 @@ def wrap_kafkaconsumer_next(wrapped, instance, args, kwargs): return record +def wrap_KafkaProducer_init(wrapped, instance, args, kwargs): + get_config_key = lambda key: kwargs.get(key, instance.DEFAULT_CONFIG[key]) # noqa: E731 + + kwargs["key_serializer"] = wrap_serializer( + instance, "Serialization/Key", "MessageBroker", get_config_key("key_serializer") + ) + kwargs["value_serializer"] = wrap_serializer( + instance, "Serialization/Value", "MessageBroker", get_config_key("value_serializer") + ) + + return wrapped(*args, **kwargs) + + +class NewRelicSerializerWrapper(ObjectProxy): + def __init__(self, wrapped, serializer_name, group_prefix): + ObjectProxy.__init__.__get__(self)(wrapped) + + self._nr_serializer_name = serializer_name + self._nr_group_prefix = group_prefix + + def serialize(self, topic, object): + wrapped = self.__wrapped__.serialize + args = (topic, object) + kwargs = {} + + if not current_transaction(): + return wrapped(*args, **kwargs) + + group = "%s/Kafka/Topic" % self._nr_group_prefix + name = "Named/%s/%s" % (topic, self._nr_serializer_name) + + return FunctionTraceWrapper(wrapped, name=name, group=group)(*args, **kwargs) + + +def wrap_serializer(client, serializer_name, group_prefix, serializer): + @function_wrapper + def _wrap_serializer(wrapped, instance, args, kwargs): + transaction = current_transaction() + if not transaction: + return wrapped(*args, **kwargs) + + topic = "Unknown" + if isinstance(transaction, MessageTransaction): + topic = transaction.destination_name + else: + # Find parent message trace to retrieve topic + message_trace = current_trace() + while message_trace is not None and not isinstance(message_trace, MessageTrace): + message_trace = message_trace.parent 
+ if message_trace: + topic = message_trace.destination_name + + group = "%s/Kafka/Topic" % group_prefix + name = "Named/%s/%s" % (topic, serializer_name) + + return FunctionTraceWrapper(wrapped, name=name, group=group)(*args, **kwargs) + + try: + # Apply wrapper to serializer + if serializer is None: + # Do nothing + return serializer + elif isinstance(serializer, Serializer): + return NewRelicSerializerWrapper(serializer, group_prefix=group_prefix, serializer_name=serializer_name) + else: + # Wrap callable in wrapper + return _wrap_serializer(serializer) + except Exception: + return serializer # Avoid crashes from immutable serializers + + +def metric_wrapper(metric_name, check_result=False): + def _metric_wrapper(wrapped, instance, args, kwargs): + result = wrapped(*args, **kwargs) + + application = application_instance(activate=False) + if application: + if not check_result or check_result and result: + # If the result does not need validated, send metric. + # If the result does need validated, ensure it is True. + application.record_custom_metric(metric_name, 1) + + return result + + return _metric_wrapper + + def instrument_kafka_producer(module): if hasattr(module, "KafkaProducer"): + wrap_function_wrapper(module, "KafkaProducer.__init__", wrap_KafkaProducer_init) wrap_function_wrapper(module, "KafkaProducer.send", wrap_KafkaProducer_send) def instrument_kafka_consumer_group(module): if hasattr(module, "KafkaConsumer"): - wrap_function_wrapper(module.KafkaConsumer, "__next__", wrap_kafkaconsumer_next) + wrap_function_wrapper(module, "KafkaConsumer.__next__", wrap_kafkaconsumer_next) + + +def instrument_kafka_heartbeat(module): + if hasattr(module, "Heartbeat"): + if hasattr(module.Heartbeat, "poll"): + wrap_function_wrapper(module, "Heartbeat.poll", metric_wrapper(HEARTBEAT_POLL)) + + if hasattr(module.Heartbeat, "fail_heartbeat"): + wrap_function_wrapper(module, "Heartbeat.fail_heartbeat", metric_wrapper(HEARTBEAT_FAIL)) + + if hasattr(module.Heartbeat, "sent_heartbeat"): + wrap_function_wrapper(module, "Heartbeat.sent_heartbeat", metric_wrapper(HEARTBEAT_SENT)) + + if hasattr(module.Heartbeat, "received_heartbeat"): + wrap_function_wrapper(module, "Heartbeat.received_heartbeat", metric_wrapper(HEARTBEAT_RECEIVE)) + + if hasattr(module.Heartbeat, "session_timeout_expired"): + wrap_function_wrapper( + module, + "Heartbeat.session_timeout_expired", + metric_wrapper(HEARTBEAT_SESSION_TIMEOUT, check_result=True), + ) + + if hasattr(module.Heartbeat, "poll_timeout_expired"): + wrap_function_wrapper( + module, "Heartbeat.poll_timeout_expired", metric_wrapper(HEARTBEAT_POLL_TIMEOUT, check_result=True) + ) diff --git a/tests/messagebroker_kafkapython/conftest.py b/tests/messagebroker_kafkapython/conftest.py index 35fde04d0..1caccd6d6 100644 --- a/tests/messagebroker_kafkapython/conftest.py +++ b/tests/messagebroker_kafkapython/conftest.py @@ -53,26 +53,91 @@ ) +@pytest.fixture( + scope="session", params=["no_serializer", "serializer_function", "callable_object", "serializer_object"] +) +def client_type(request): + return request.param + + +@pytest.fixture() +def skip_if_not_serializing(client_type): + if client_type == "no_serializer": + pytest.skip("Only serializing clients supported.") + + @pytest.fixture(scope="function") -def producer(topic): - producer = kafka.KafkaProducer( - bootstrap_servers=BROKER, api_version=(2, 0, 2), value_serializer=lambda v: json.dumps(v).encode("utf-8") - ) +def producer(client_type, json_serializer, json_callable_serializer): + if client_type == 
"no_serializer": + producer = kafka.KafkaProducer(bootstrap_servers=BROKER) + elif client_type == "serializer_function": + producer = kafka.KafkaProducer( + bootstrap_servers=BROKER, + value_serializer=lambda v: json.dumps(v).encode("utf-8") if v else None, + key_serializer=lambda v: json.dumps(v).encode("utf-8") if v else None, + ) + elif client_type == "callable_object": + producer = kafka.KafkaProducer( + bootstrap_servers=BROKER, + value_serializer=json_callable_serializer, + key_serializer=json_callable_serializer, + ) + elif client_type == "serializer_object": + producer = kafka.KafkaProducer( + bootstrap_servers=BROKER, + value_serializer=json_serializer, + key_serializer=json_serializer, + ) + yield producer producer.close() @pytest.fixture(scope="function") -def consumer(topic, producer): - consumer = kafka.KafkaConsumer( - topic, - bootstrap_servers=BROKER, - value_deserializer=lambda v: json.loads(v.decode("utf-8")), - auto_offset_reset="earliest", - consumer_timeout_ms=500, - heartbeat_interval_ms=1000, - group_id="test", - ) +def consumer(topic, producer, client_type, json_deserializer, json_callable_deserializer): + if client_type == "no_serializer": + consumer = kafka.KafkaConsumer( + topic, + bootstrap_servers=BROKER, + auto_offset_reset="earliest", + consumer_timeout_ms=100, + heartbeat_interval_ms=1000, + group_id="test", + ) + elif client_type == "serializer_function": + consumer = kafka.KafkaConsumer( + topic, + bootstrap_servers=BROKER, + key_deserializer=lambda v: json.loads(v.decode("utf-8")) if v else None, + value_deserializer=lambda v: json.loads(v.decode("utf-8")) if v else None, + auto_offset_reset="earliest", + consumer_timeout_ms=100, + heartbeat_interval_ms=1000, + group_id="test", + ) + elif client_type == "callable_object": + consumer = kafka.KafkaConsumer( + topic, + bootstrap_servers=BROKER, + key_deserializer=json_callable_deserializer, + value_deserializer=json_callable_deserializer, + auto_offset_reset="earliest", + consumer_timeout_ms=100, + heartbeat_interval_ms=1000, + group_id="test", + ) + elif client_type == "serializer_object": + consumer = kafka.KafkaConsumer( + topic, + bootstrap_servers=BROKER, + key_deserializer=json_deserializer, + value_deserializer=json_deserializer, + auto_offset_reset="earliest", + consumer_timeout_ms=100, + heartbeat_interval_ms=1000, + group_id="test", + ) + # The first time the kafka consumer is created and polled, it returns a StopIterator # exception. To by-pass this, loop over the consumer before using it. # NOTE: This seems to only happen in Python2.7. 
@@ -82,6 +147,58 @@ def consumer(topic, producer): consumer.close() +@pytest.fixture(scope="session") +def serialize(client_type): + if client_type == "no_serializer": + return lambda v: json.dumps(v).encode("utf-8") + else: + return lambda v: v + + +@pytest.fixture(scope="session") +def deserialize(client_type): + if client_type == "no_serializer": + return lambda v: json.loads(v.decode("utf-8")) + else: + return lambda v: v + + +@pytest.fixture(scope="session") +def json_serializer(): + class JSONSerializer(kafka.serializer.Serializer): + def serialize(self, topic, obj): + return json.dumps(obj).encode("utf-8") if obj is not None else None + + return JSONSerializer() + + +@pytest.fixture(scope="session") +def json_deserializer(): + class JSONDeserializer(kafka.serializer.Deserializer): + def deserialize(self, topic, bytes_): + return json.loads(bytes_.decode("utf-8")) if bytes_ is not None else None + + return JSONDeserializer() + + +@pytest.fixture(scope="session") +def json_callable_serializer(): + class JSONCallableSerializer(object): + def __call__(self, obj): + return json.dumps(obj).encode("utf-8") if obj is not None else None + + return JSONCallableSerializer() + + +@pytest.fixture(scope="session") +def json_callable_deserializer(): + class JSONCallableDeserializer(object): + def __call__(self, obj): + return json.loads(obj.decode("utf-8")) if obj is not None else None + + return JSONCallableDeserializer() + + @pytest.fixture(scope="function") def topic(): from kafka.admin.client import KafkaAdminClient @@ -91,13 +208,37 @@ def topic(): admin = KafkaAdminClient(bootstrap_servers=BROKER) new_topics = [NewTopic(topic, num_partitions=1, replication_factor=1)] - topics = admin.create_topics(new_topics) + admin.create_topics(new_topics) yield topic admin.delete_topics([topic]) +@pytest.fixture() +def send_producer_message(topic, producer, serialize): + def _test(): + producer.send(topic, key=serialize("bar"), value=serialize({"foo": 1})) + producer.flush() + + return _test + + +@pytest.fixture() +def get_consumer_record(topic, send_producer_message, consumer, deserialize): + def _test(): + send_producer_message() + + record_count = 0 + for record in consumer: + assert deserialize(record.value) == {"foo": 1} + record_count += 1 + + assert record_count == 1, "Incorrect count of records consumed: %d. Expected 1." % record_count + + return _test + + @transient_function_wrapper(kafka.producer.kafka, "KafkaProducer.send.__wrapped__") # Place transient wrapper underneath instrumentation def cache_kafka_producer_headers(wrapped, instance, args, kwargs): diff --git a/tests/messagebroker_kafkapython/test_consumer.py b/tests/messagebroker_kafkapython/test_consumer.py index 3b985838b..bb736fb64 100644 --- a/tests/messagebroker_kafkapython/test_consumer.py +++ b/tests/messagebroker_kafkapython/test_consumer.py @@ -12,15 +12,20 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import kafka -import kafka.errors as Errors import pytest -from conftest import BROKER, cache_kafka_consumer_headers +from conftest import cache_kafka_consumer_headers + +from newrelic.common.object_names import callable_name + from testing_support.fixtures import ( validate_attributes, validate_error_event_attributes_outside_transaction, validate_transaction_errors, validate_transaction_metrics, + reset_core_stats_engine, +) +from testing_support.validators.validate_transaction_count import ( + validate_transaction_count, ) from testing_support.validators.validate_distributed_trace_accepted import ( validate_distributed_trace_accepted, @@ -31,7 +36,7 @@ from newrelic.packages import six -def test_custom_metrics(get_consumer_records, topic): +def test_custom_metrics(get_consumer_record, topic): @validate_transaction_metrics( "Named/%s" % topic, group="Message/Kafka/Topic", @@ -42,12 +47,21 @@ def test_custom_metrics(get_consumer_records, topic): background_task=True, ) def _test(): - get_consumer_records() + get_consumer_record() + + _test() + + +def test_multiple_transactions(get_consumer_record, topic): + @validate_transaction_count(2) + def _test(): + get_consumer_record() + get_consumer_record() _test() -def test_custom_metrics_on_existing_transaction(get_consumer_records, topic): +def test_custom_metrics_on_existing_transaction(get_consumer_record, topic): transaction_name = ( "test_consumer:test_custom_metrics_on_existing_transaction.._test" if six.PY3 else "test_consumer:_test" ) @@ -60,14 +74,15 @@ def test_custom_metrics_on_existing_transaction(get_consumer_records, topic): ], background_task=True, ) + @validate_transaction_count(1) @background_task() def _test(): - get_consumer_records() + get_consumer_record() _test() -def test_custom_metrics_inactive_transaction(get_consumer_records, topic): +def test_custom_metrics_inactive_transaction(get_consumer_record, topic): transaction_name = ( "test_consumer:test_custom_metrics_inactive_transaction.._test" if six.PY3 else "test_consumer:_test" ) @@ -80,67 +95,54 @@ def test_custom_metrics_inactive_transaction(get_consumer_records, topic): ], background_task=True, ) + @validate_transaction_count(1) @background_task() def _test(): end_of_transaction() - get_consumer_records() + get_consumer_record() _test() -def test_agent_attributes(get_consumer_records): +def test_agent_attributes(get_consumer_record): @validate_attributes("agent", ["kafka.consume.client_id", "kafka.consume.byteCount"]) def _test(): - get_consumer_records() + get_consumer_record() _test() -def test_consumer_errors(get_consumer_records, consumer_next_raises): +def test_consumer_errors(get_consumer_record, consumer_next_raises): + exc_class = RuntimeError + + @reset_core_stats_engine() @validate_error_event_attributes_outside_transaction( - exact_attrs={"intrinsic": {"error.class": "kafka.errors:KafkaError"}} + num_errors=1, + exact_attrs={"intrinsic": {"error.class": callable_name(exc_class)}, "agent": {}, "user": {}} ) def _test(): - with pytest.raises(Errors.KafkaError): - get_consumer_records() + with pytest.raises(exc_class): + get_consumer_record() _test() -def test_consumer_deserialization_errors(topic, consumer): - producer = kafka.KafkaProducer( - bootstrap_servers=BROKER, api_version=(2, 0, 2), value_serializer=lambda v: str(v).encode("utf-8") - ) # Producer that allows us to upload invalid JSON. 
- - @validate_error_event_attributes_outside_transaction(exact_attrs={"intrinsic": {"error.class": "ValueError"}}) - def _test(): - with pytest.raises(ValueError): - producer.send(topic, value="%") # Invalid JSON - producer.flush() - for _ in consumer: - pass - - _test() - - -def test_consumer_handled_errors_not_recorded(get_consumer_records): +def test_consumer_handled_errors_not_recorded(get_consumer_record): # It's important to check that we do not notice the StopIteration error. @validate_transaction_errors([]) def _test(): - get_consumer_records() + get_consumer_record() _test() -def test_distributed_tracing_headers(topic, producer, consumer): - # Send the messages inside a transaction, making sure to close it. +def test_distributed_tracing_headers(topic, producer, consumer, serialize): + # Produce the messages inside a transaction, making sure to close it. @background_task() def _produce(): - producer.send(topic, value={"foo": "bar"}) + producer.send(topic, key=serialize("bar"), value=serialize({"foo": 1})) producer.flush() - consumer_iter = iter(consumer) - @validate_transaction_metrics( "Named/%s" % topic, group="Message/Kafka/Topic", @@ -150,7 +152,10 @@ def _produce(): ], background_task=True, ) + @validate_transaction_count(1) def _consume(): + consumer_iter = iter(consumer) + @validate_distributed_trace_accepted(transport_type="Kafka") @cache_kafka_consumer_headers def _test(): @@ -167,21 +172,10 @@ def _test(): _consume() -@pytest.fixture() -def get_consumer_records(topic, producer, consumer): - def _test(): - producer.send(topic, value={"foo": "bar"}) - producer.flush() - for record in consumer: - assert record.value == {"foo": "bar"} - - return _test - - @pytest.fixture() def consumer_next_raises(consumer): def _poll(*args, **kwargs): - raise Errors.KafkaError() + raise RuntimeError() consumer.poll = _poll return consumer diff --git a/tests/messagebroker_kafkapython/test_heartbeat.py b/tests/messagebroker_kafkapython/test_heartbeat.py index b2ae58f9c..32ac9bb7f 100644 --- a/tests/messagebroker_kafkapython/test_heartbeat.py +++ b/tests/messagebroker_kafkapython/test_heartbeat.py @@ -30,12 +30,8 @@ ("MessageBroker/Kafka/Heartbeat/PollTimeout", None), ] ) -def test_successful_heartbeat_metrics_recorded(topic, producer, consumer): - producer.send(topic, value=1) - producer.flush() - - for record in consumer: - pass +def test_successful_heartbeat_metrics_recorded(topic, get_consumer_record): + get_consumer_record() time.sleep(1.5) diff --git a/tests/messagebroker_kafkapython/test_producer.py b/tests/messagebroker_kafkapython/test_producer.py index 43ff991b3..927956482 100644 --- a/tests/messagebroker_kafkapython/test_producer.py +++ b/tests/messagebroker_kafkapython/test_producer.py @@ -27,8 +27,8 @@ from newrelic.packages import six -def test_trace_metrics(topic, send_producer_messages): - scoped_metrics = [("MessageBroker/Kafka/Topic/Produce/Named/%s" % topic, 3)] +def test_trace_metrics(topic, send_producer_message): + scoped_metrics = [("MessageBroker/Kafka/Topic/Produce/Named/%s" % topic, 1)] unscoped_metrics = scoped_metrics txn_name = "test_producer:test_trace_metrics..test" if six.PY3 else "test_producer:test" @@ -40,19 +40,19 @@ def test_trace_metrics(topic, send_producer_messages): ) @background_task() def test(): - send_producer_messages() + send_producer_message() test() -def test_distributed_tracing_headers(topic, send_producer_messages): +def test_distributed_tracing_headers(topic, send_producer_message): txn_name = 
"test_producer:test_distributed_tracing_headers..test" if six.PY3 else "test_producer:test" @validate_transaction_metrics( txn_name, rollup_metrics=[ - ("Supportability/TraceContext/Create/Success", 3), - ("Supportability/DistributedTrace/CreatePayload/Success", 3), + ("Supportability/TraceContext/Create/Success", 1), + ("Supportability/DistributedTrace/CreatePayload/Success", 1), ], background_task=True, ) @@ -60,37 +60,20 @@ def test_distributed_tracing_headers(topic, send_producer_messages): @cache_kafka_producer_headers @validate_messagebroker_headers def test(): - send_producer_messages() + send_producer_message() test() -@pytest.mark.parametrize( - "input,error,message", - ( - (None, AssertionError, "Need at least one: key or value"), - (object(), TypeError, r".* is not JSON serializable"), - ), - ids=("None Value", "Serialization Error"), -) -def test_producer_errors(topic, producer, input, error, message): - @validate_transaction_errors([callable_name(error)]) +def test_producer_errors(topic, producer, monkeypatch): + monkeypatch.setitem(producer.config, "value_serializer", None) + monkeypatch.setitem(producer.config, "key_serializer", None) + + @validate_transaction_errors([callable_name(AssertionError)]) @background_task() def test(): - with pytest.raises(error, match=message): - producer.send(topic, input) + with pytest.raises(AssertionError): + producer.send(topic, value=object()) producer.flush() test() - - -@pytest.fixture -def send_producer_messages(topic, producer): - def _test(): - messages = [1, 2, 3] - for message in messages: - producer.send(topic, message) - - producer.flush() - - return _test diff --git a/tests/messagebroker_kafkapython/test_serialization.py b/tests/messagebroker_kafkapython/test_serialization.py new file mode 100644 index 000000000..c4af60f6d --- /dev/null +++ b/tests/messagebroker_kafkapython/test_serialization.py @@ -0,0 +1,92 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import pytest +from testing_support.fixtures import ( + validate_transaction_errors, + validate_transaction_metrics, + validate_error_event_attributes_outside_transaction, + reset_core_stats_engine, +) + +from newrelic.api.background_task import background_task +from newrelic.packages import six + +from newrelic.common.object_names import callable_name + +import json + +def test_serialization_metrics(skip_if_not_serializing, topic, send_producer_message): + txn_name = "test_serialization:test_serialization_metrics..test" if six.PY3 else "test_serialization:test" + + _metrics = [ + ("MessageBroker/Kafka/Topic/Named/%s/Serialization/Value" % topic, 1), + ("MessageBroker/Kafka/Topic/Named/%s/Serialization/Key" % topic, 1), + ] + + @validate_transaction_metrics( + txn_name, + scoped_metrics=_metrics, + rollup_metrics=_metrics, + background_task=True, + ) + @background_task() + def test(): + send_producer_message() + + test() + + +@pytest.mark.parametrize("key,value", ( + (object(), "A"), + ("A", object()), +)) +def test_serialization_errors(skip_if_not_serializing, topic, producer, key, value): + error_cls = TypeError + + @validate_transaction_errors([callable_name(error_cls)]) + @background_task() + def test(): + with pytest.raises(error_cls): + producer.send(topic=topic, key=key, value=value) + + test() + + +@pytest.mark.parametrize("key,value", ( + (b"%", b"{}"), + (b"{}", b"%"), +)) +def test_deserialization_errors(skip_if_not_serializing, monkeypatch, topic, producer, consumer, key, value): + error_cls = json.decoder.JSONDecodeError if six.PY3 else ValueError + + # Remove serializers to cause intentional issues + monkeypatch.setitem(producer.config, "value_serializer", None) + monkeypatch.setitem(producer.config, "key_serializer", None) + + producer.send(topic=topic, key=key, value=value) + producer.flush() + + @reset_core_stats_engine() + @validate_error_event_attributes_outside_transaction( + num_errors=1, + exact_attrs={"intrinsic": {"error.class": callable_name(error_cls)}, "agent": {}, "user": {}} + ) + def test(): + with pytest.raises(error_cls): + for record in consumer: + pass + assert record is not None, "No record consumed." 
+ + test() From 684ca893bdb3a3d736cc802ec8a2aded85e397fe Mon Sep 17 00:00:00 2001 From: Timothy Pansino <11214426+TimPansino@users.noreply.github.com> Date: Tue, 27 Sep 2022 09:26:02 -0700 Subject: [PATCH 34/49] Kafka Expanded Version Testing (#629) * Expand kafka testing range to older versions * Remove confluent-kafka 1.5 * Remove confluent-kafka 1.5 * Fix flakey confluent-kafka tests * Fixup: fix flakey tests * Fixup: fix kafka-python flakey tests * Fixup: fix kafka-python flakey tests * Remove confluent-kafka 1.8 tests The following is an unresolved issue occuring in the setup of confluent-kafka 1.8.2: https://github.com/asweigart/PyGetWindow/issues/9 Co-authored-by: Hannah Stepanek --- .../messagebroker_confluentkafka/conftest.py | 6 ++- .../test_consumer.py | 10 +++- .../test_serialization.py | 18 +++++-- tests/messagebroker_kafkapython/conftest.py | 17 +++---- .../test_consumer.py | 24 ++++++---- .../test_serialization.py | 47 +++++++++++-------- tox.ini | 11 ++++- 7 files changed, 87 insertions(+), 46 deletions(-) diff --git a/tests/messagebroker_confluentkafka/conftest.py b/tests/messagebroker_confluentkafka/conftest.py index c37831df0..6e19005d8 100644 --- a/tests/messagebroker_confluentkafka/conftest.py +++ b/tests/messagebroker_confluentkafka/conftest.py @@ -63,7 +63,7 @@ def skip_if_not_serializing(client_type): @pytest.fixture(scope="function") -def producer(client_type, json_serializer): +def producer(topic, client_type, json_serializer): from confluent_kafka import Producer, SerializingProducer if client_type == "cimpl": @@ -86,7 +86,9 @@ def producer(client_type, json_serializer): ) yield producer - producer.purge() + + if hasattr(producer, "purge"): + producer.purge() @pytest.fixture(scope="function") diff --git a/tests/messagebroker_confluentkafka/test_consumer.py b/tests/messagebroker_confluentkafka/test_consumer.py index 0edcc73fd..61f532a78 100644 --- a/tests/messagebroker_confluentkafka/test_consumer.py +++ b/tests/messagebroker_confluentkafka/test_consumer.py @@ -165,7 +165,15 @@ def _consume(): @cache_kafka_consumer_headers() def _test(): # Start the transaction but don't exit it. - consumer.poll(0.5) + # Keep polling until we get the record or the timeout is exceeded. + timeout = 10 + attempts = 0 + record = None + while not record and attempts < timeout: + record = consumer.poll(0.5) + if not record: + attempts += 1 + continue _test() diff --git a/tests/messagebroker_confluentkafka/test_serialization.py b/tests/messagebroker_confluentkafka/test_serialization.py index 350b5ea35..4d948713d 100644 --- a/tests/messagebroker_confluentkafka/test_serialization.py +++ b/tests/messagebroker_confluentkafka/test_serialization.py @@ -107,8 +107,12 @@ def test_deserialization_errors(skip_if_not_serializing, monkeypatch, topic, pro @background_task() def test(): with pytest.raises(error_cls): - record = consumer.poll(0.5) - assert record is not None, "No record consumed." + timeout = 10 + attempts = 0 + while attempts < timeout: + if not consumer.poll(0.5): + attempts += 1 + continue test() @@ -128,14 +132,20 @@ def _test(): send_producer_message() record_count = 0 - while True: + + timeout = 10 + attempts = 0 + record = None + while not record and attempts < timeout: record = consumer.poll(0.5) if not record: - break + attempts += 1 + continue assert not record.error() assert record.value() == {"foo": 1} record_count += 1 + consumer.poll(0.5) # Exit the transaction. assert record_count == 1, "Incorrect count of records consumed: %d. Expected 1." 
% record_count diff --git a/tests/messagebroker_kafkapython/conftest.py b/tests/messagebroker_kafkapython/conftest.py index 1caccd6d6..098486f34 100644 --- a/tests/messagebroker_kafkapython/conftest.py +++ b/tests/messagebroker_kafkapython/conftest.py @@ -138,11 +138,6 @@ def consumer(topic, producer, client_type, json_deserializer, json_callable_dese group_id="test", ) - # The first time the kafka consumer is created and polled, it returns a StopIterator - # exception. To by-pass this, loop over the consumer before using it. - # NOTE: This seems to only happen in Python2.7. - for record in consumer: - pass yield consumer consumer.close() @@ -230,9 +225,15 @@ def _test(): send_producer_message() record_count = 0 - for record in consumer: - assert deserialize(record.value) == {"foo": 1} - record_count += 1 + + timeout = 10 + attempts = 0 + record = None + while not record and attempts < timeout: + for record in consumer: + assert deserialize(record.value) == {"foo": 1} + record_count += 1 + attempts += 1 assert record_count == 1, "Incorrect count of records consumed: %d. Expected 1." % record_count diff --git a/tests/messagebroker_kafkapython/test_consumer.py b/tests/messagebroker_kafkapython/test_consumer.py index bb736fb64..f53b2acb3 100644 --- a/tests/messagebroker_kafkapython/test_consumer.py +++ b/tests/messagebroker_kafkapython/test_consumer.py @@ -14,25 +14,23 @@ import pytest from conftest import cache_kafka_consumer_headers - -from newrelic.common.object_names import callable_name - from testing_support.fixtures import ( + reset_core_stats_engine, validate_attributes, validate_error_event_attributes_outside_transaction, validate_transaction_errors, validate_transaction_metrics, - reset_core_stats_engine, -) -from testing_support.validators.validate_transaction_count import ( - validate_transaction_count, ) from testing_support.validators.validate_distributed_trace_accepted import ( validate_distributed_trace_accepted, ) +from testing_support.validators.validate_transaction_count import ( + validate_transaction_count, +) from newrelic.api.background_task import background_task from newrelic.api.transaction import end_of_transaction +from newrelic.common.object_names import callable_name from newrelic.packages import six @@ -117,8 +115,7 @@ def test_consumer_errors(get_consumer_record, consumer_next_raises): @reset_core_stats_engine() @validate_error_event_attributes_outside_transaction( - num_errors=1, - exact_attrs={"intrinsic": {"error.class": callable_name(exc_class)}, "agent": {}, "user": {}} + num_errors=1, exact_attrs={"intrinsic": {"error.class": callable_name(exc_class)}, "agent": {}, "user": {}} ) def _test(): with pytest.raises(exc_class): @@ -160,7 +157,14 @@ def _consume(): @cache_kafka_consumer_headers def _test(): # Start the transaction but don't exit it. - next(consumer_iter) + timeout = 10 + attempts = 0 + record = None + while not record and attempts < timeout: + try: + record = next(consumer_iter) + except StopIteration: + attempts += 1 _test() diff --git a/tests/messagebroker_kafkapython/test_serialization.py b/tests/messagebroker_kafkapython/test_serialization.py index c4af60f6d..b83b4e85c 100644 --- a/tests/messagebroker_kafkapython/test_serialization.py +++ b/tests/messagebroker_kafkapython/test_serialization.py @@ -12,20 +12,20 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import json + import pytest from testing_support.fixtures import ( + reset_core_stats_engine, + validate_error_event_attributes_outside_transaction, validate_transaction_errors, validate_transaction_metrics, - validate_error_event_attributes_outside_transaction, - reset_core_stats_engine, ) from newrelic.api.background_task import background_task -from newrelic.packages import six - from newrelic.common.object_names import callable_name +from newrelic.packages import six -import json def test_serialization_metrics(skip_if_not_serializing, topic, send_producer_message): txn_name = "test_serialization:test_serialization_metrics..test" if six.PY3 else "test_serialization:test" @@ -48,10 +48,13 @@ def test(): test() -@pytest.mark.parametrize("key,value", ( - (object(), "A"), - ("A", object()), -)) +@pytest.mark.parametrize( + "key,value", + ( + (object(), "A"), + ("A", object()), + ), +) def test_serialization_errors(skip_if_not_serializing, topic, producer, key, value): error_cls = TypeError @@ -64,13 +67,16 @@ def test(): test() -@pytest.mark.parametrize("key,value", ( - (b"%", b"{}"), - (b"{}", b"%"), -)) +@pytest.mark.parametrize( + "key,value", + ( + (b"%", b"{}"), + (b"{}", b"%"), + ), +) def test_deserialization_errors(skip_if_not_serializing, monkeypatch, topic, producer, consumer, key, value): error_cls = json.decoder.JSONDecodeError if six.PY3 else ValueError - + # Remove serializers to cause intentional issues monkeypatch.setitem(producer.config, "value_serializer", None) monkeypatch.setitem(producer.config, "key_serializer", None) @@ -80,13 +86,16 @@ def test_deserialization_errors(skip_if_not_serializing, monkeypatch, topic, pro @reset_core_stats_engine() @validate_error_event_attributes_outside_transaction( - num_errors=1, - exact_attrs={"intrinsic": {"error.class": callable_name(error_cls)}, "agent": {}, "user": {}} + num_errors=1, exact_attrs={"intrinsic": {"error.class": callable_name(error_cls)}, "agent": {}, "user": {}} ) def test(): with pytest.raises(error_cls): - for record in consumer: - pass - assert record is not None, "No record consumed." 
+ timeout = 10 + attempts = 0 + record = None + while not record and attempts < timeout: + for record in consumer: + pass + attempts += 1 test() diff --git a/tox.ini b/tox.ini index e2fc16886..f046721ae 100644 --- a/tox.ini +++ b/tox.ini @@ -150,7 +150,9 @@ envlist = rabbitmq-messagebroker_pika-{py27,py37,py38,py39,pypy,pypy37}-pika0.13, rabbitmq-messagebroker_pika-{py37,py38,py39,py310,pypy37}-pikalatest, kafka-messagebroker_confluentkafka-{py27,py37,py38,py39,py310}-confluentkafkalatest, + kafka-messagebroker_confluentkafka-{py27,py39}-confluentkafka{0107,0106}, kafka-messagebroker_kafkapython-{pypy,py27,py37,py38,pypy37}-kafkapythonlatest, + kafka-messagebroker_kafkapython-{py27,py38}-kafkapython{020001,020000,0104}, python-template_mako-{py27,py37,py38,py39,py310} [pytest] @@ -355,8 +357,13 @@ deps = messagebroker_pika-pikalatest: pika messagebroker_pika: tornado<5 messagebroker_pika-{py27,pypy}: enum34 - messagebroker_confluentkafka: confluent-kafka - messagebroker_kafkapython: kafka-python + messagebroker_confluentkafka-confluentkafkalatest: confluent-kafka + messagebroker_confluentkafka-confluentkafka0107: confluent-kafka<1.8 + messagebroker_confluentkafka-confluentkafka0106: confluent-kafka<1.7 + messagebroker_kafkapython-kafkapythonlatest: kafka-python + messagebroker_kafkapython-kafkapython020001: kafka-python<2.0.2 + messagebroker_kafkapython-kafkapython020000: kafka-python<2.0.1 + messagebroker_kafkapython-kafkapython0104: kafka-python<1.5 template_mako: mako<1.2 setenv = From 35c1bf699935b3ecb237eaece4610825df26370e Mon Sep 17 00:00:00 2001 From: Uma Annamalai Date: Tue, 27 Sep 2022 15:28:47 -0700 Subject: [PATCH 35/49] Pin incompatibility version dependencies in tests. (#635) * Pin aiohttp version in tests. * Pin grpc version * Pin starlette version * Fix tox syntax issue Co-authored-by: Timothy Pansino <11214426+TimPansino@users.noreply.github.com> Co-authored-by: Tim Pansino --- tox.ini | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/tox.ini b/tox.ini index f046721ae..6944f3322 100644 --- a/tox.ini +++ b/tox.ini @@ -265,7 +265,7 @@ deps = external_requests: requests external_urllib3-urllib30109: urllib3<1.10 external_urllib3-urllib3latest: urllib3 - framework_aiohttp-aiohttp03: aiohttp<4 + framework_aiohttp-aiohttp03: aiohttp==3.8.1 framework_ariadne-ariadnelatest: ariadne framework_ariadne-ariadne0011: ariadne<0.12 framework_ariadne-ariadne0012: ariadne<0.13 @@ -315,11 +315,12 @@ deps = framework_graphql-graphql0301: graphql-core<3.2 framework_graphql-graphql0302: graphql-core<3.3 framework_graphql-graphqlmaster: https://github.com/graphql-python/graphql-core/archive/main.zip + framework_grpc-grpclatest: protobuf<4 + framework_grpc-grpclatest: grpcio + framework_grpc-grpclatest: grpcio-tools framework_grpc-grpc0125: grpcio<1.26 framework_grpc-grpc0125: grpcio-tools<1.26 framework_grpc-grpc0125: protobuf<3.18.0 - framework_grpc-grpclatest: grpcio - framework_grpc-grpclatest: grpcio-tools framework_pyramid: routes framework_pyramid-cornice: cornice!=5.0.0 framework_pyramid-Pyramid0104: Pyramid<1.5 @@ -340,7 +341,7 @@ deps = framework_starlette-starlette0014: starlette<0.15 framework_starlette-starlette0015: starlette<0.16 framework_starlette-starlette0019: starlette<0.20 - framework_starlette-starlettelatest: starlette + framework_starlette-starlettelatest: starlette<0.21 ; Strawberry 0.95.0 is incompatible with Starlette 0.18.0, downgrade until future release framework_strawberry: starlette<0.18.0 framework_strawberry-strawberrylatest: 
strawberry-graphql From 2e8ba713a394a16197b9a665a7b94744edcaeb87 Mon Sep 17 00:00:00 2001 From: Timothy Pansino <11214426+TimPansino@users.noreply.github.com> Date: Thu, 29 Sep 2022 16:27:53 -0700 Subject: [PATCH 36/49] Update wrapt to v1.14.1 (#638) --- newrelic/packages/wrapt/__init__.py | 13 ++- newrelic/packages/wrapt/_wrappers.c | 124 ++++++++++++++++++++++++-- newrelic/packages/wrapt/arguments.py | 38 ++++++++ newrelic/packages/wrapt/decorators.py | 43 +++++++-- newrelic/packages/wrapt/importer.py | 76 +++++++++++++--- newrelic/packages/wrapt/wrappers.py | 57 ++++++++++-- 6 files changed, 316 insertions(+), 35 deletions(-) create mode 100644 newrelic/packages/wrapt/arguments.py diff --git a/newrelic/packages/wrapt/__init__.py b/newrelic/packages/wrapt/__init__.py index 7be739bf6..ee6539b77 100644 --- a/newrelic/packages/wrapt/__init__.py +++ b/newrelic/packages/wrapt/__init__.py @@ -1,4 +1,4 @@ -__version_info__ = ('1', '12', '1') +__version_info__ = ('1', '14', '1') __version__ = '.'.join(__version_info__) from .wrappers import (ObjectProxy, CallableObjectProxy, FunctionWrapper, @@ -13,4 +13,15 @@ from .importer import (register_post_import_hook, when_imported, notify_module_loaded, discover_post_import_hooks) +# Import of inspect.getcallargs() included for backward compatibility. An +# implementation of this was previously bundled and made available here for +# Python <2.7. Avoid using this in future. + from inspect import getcallargs + +# Variant of inspect.formatargspec() included here for forward compatibility. +# This is being done because Python 3.11 dropped inspect.formatargspec() but +# code for handling signature changing decorators relied on it. Exposing the +# bundled implementation here in case any user of wrapt was also needing it. 
+ +from .arguments import formatargspec diff --git a/newrelic/packages/wrapt/_wrappers.c b/newrelic/packages/wrapt/_wrappers.c index 660ad6b3b..67c5d5e1a 100644 --- a/newrelic/packages/wrapt/_wrappers.c +++ b/newrelic/packages/wrapt/_wrappers.c @@ -1961,13 +1961,13 @@ static int WraptPartialCallableObjectProxy_init( if (!PyObject_Length(args)) { PyErr_SetString(PyExc_TypeError, - "__init__ of partial needs an argument"); + "__init__ of partial needs an argument"); return -1; } if (PyObject_Length(args) < 1) { PyErr_SetString(PyExc_TypeError, - "partial type takes at least one argument"); + "partial type takes at least one argument"); return -1; } @@ -1975,7 +1975,7 @@ static int WraptPartialCallableObjectProxy_init( if (!PyCallable_Check(wrapped)) { PyErr_SetString(PyExc_TypeError, - "the first argument must be callable"); + "the first argument must be callable"); return -1; } @@ -1985,7 +1985,7 @@ static int WraptPartialCallableObjectProxy_init( return -1; result = WraptPartialCallableObjectProxy_raw_init(self, wrapped, - fnargs, kwds); + fnargs, kwds); Py_DECREF(fnargs); @@ -2299,12 +2299,15 @@ static PyObject *WraptFunctionWrapperBase_call( PyObject *result = NULL; static PyObject *function_str = NULL; + static PyObject *classmethod_str = NULL; if (!function_str) { #if PY_MAJOR_VERSION >= 3 function_str = PyUnicode_InternFromString("function"); + classmethod_str = PyUnicode_InternFromString("classmethod"); #else function_str = PyString_InternFromString("function"); + classmethod_str = PyString_InternFromString("classmethod"); #endif } @@ -2334,8 +2337,10 @@ static PyObject *WraptFunctionWrapperBase_call( kwds = param_kwds; } - if (self->instance == Py_None && (self->binding == function_str || + if ((self->instance == Py_None) && (self->binding == function_str || PyObject_RichCompareBool(self->binding, function_str, + Py_EQ) == 1 || self->binding == classmethod_str || + PyObject_RichCompareBool(self->binding, classmethod_str, Py_EQ) == 1)) { PyObject *instance = NULL; @@ -2510,6 +2515,101 @@ static PyObject *WraptFunctionWrapperBase_descr_get( /* ------------------------------------------------------------------------- */ +static PyObject *WraptFunctionWrapperBase_set_name( + WraptFunctionWrapperObject *self, PyObject *args, PyObject *kwds) +{ + PyObject *method = NULL; + PyObject *result = NULL; + + if (!self->object_proxy.wrapped) { + PyErr_SetString(PyExc_ValueError, "wrapper has not been initialized"); + return NULL; + } + + method = PyObject_GetAttrString(self->object_proxy.wrapped, + "__set_name__"); + + if (!method) { + PyErr_Clear(); + Py_INCREF(Py_None); + return Py_None; + } + + result = PyObject_Call(method, args, kwds); + + Py_DECREF(method); + + return result; +} + +/* ------------------------------------------------------------------------- */ + +static PyObject *WraptFunctionWrapperBase_instancecheck( + WraptFunctionWrapperObject *self, PyObject *instance) +{ + PyObject *object = NULL; + PyObject *result = NULL; + + int check = 0; + + if (!self->object_proxy.wrapped) { + PyErr_SetString(PyExc_ValueError, "wrapper has not been initialized"); + return NULL; + } + + check = PyObject_IsInstance(instance, self->object_proxy.wrapped); + + if (check < 0) { + return NULL; + } + + result = check ? 
Py_True : Py_False;
+
+    Py_INCREF(result);
+    return result;
+}
+
+/* ------------------------------------------------------------------------- */
+
+static PyObject *WraptFunctionWrapperBase_subclasscheck(
+        WraptFunctionWrapperObject *self, PyObject *args)
+{
+    PyObject *subclass = NULL;
+    PyObject *object = NULL;
+    PyObject *result = NULL;
+
+    int check = 0;
+
+    if (!self->object_proxy.wrapped) {
+        PyErr_SetString(PyExc_ValueError, "wrapper has not been initialized");
+        return NULL;
+    }
+
+    if (!PyArg_ParseTuple(args, "O", &subclass))
+        return NULL;
+
+    object = PyObject_GetAttrString(subclass, "__wrapped__");
+
+    if (!object)
+        PyErr_Clear();
+
+    check = PyObject_IsSubclass(object ? object: subclass,
+            self->object_proxy.wrapped);
+
+    Py_XDECREF(object);
+
+    if (check == -1)
+        return NULL;
+
+    result = check ? Py_True : Py_False;
+
+    Py_INCREF(result);
+
+    return result;
+}
+
+/* ------------------------------------------------------------------------- */
+
 static PyObject *WraptFunctionWrapperBase_get_self_instance(
         WraptFunctionWrapperObject *self, void *closure)
 {
@@ -2580,6 +2680,18 @@ static PyObject *WraptFunctionWrapperBase_get_self_parent(
 
 /* ------------------------------------------------------------------------- */;
 
+static PyMethodDef WraptFunctionWrapperBase_methods[] = {
+    { "__set_name__", (PyCFunction)WraptFunctionWrapperBase_set_name,
+                    METH_VARARGS | METH_KEYWORDS, 0 },
+    { "__instancecheck__", (PyCFunction)WraptFunctionWrapperBase_instancecheck,
+                    METH_O, 0},
+    { "__subclasscheck__", (PyCFunction)WraptFunctionWrapperBase_subclasscheck,
+                    METH_VARARGS, 0 },
+    { NULL, NULL },
+};
+
+/* ------------------------------------------------------------------------- */;
+
 static PyGetSetDef WraptFunctionWrapperBase_getset[] = {
     { "__module__",         (getter)WraptObjectProxy_get_module,
                             (setter)WraptObjectProxy_set_module, 0 },
@@ -2633,7 +2745,7 @@ PyTypeObject WraptFunctionWrapperBase_Type = {
     offsetof(WraptObjectProxyObject, weakreflist), /*tp_weaklistoffset*/
     0,                      /*tp_iter*/
     0,                      /*tp_iternext*/
-    0,                      /*tp_methods*/
+    WraptFunctionWrapperBase_methods, /*tp_methods*/
     0,                      /*tp_members*/
     WraptFunctionWrapperBase_getset, /*tp_getset*/
     0,                      /*tp_base*/
diff --git a/newrelic/packages/wrapt/arguments.py b/newrelic/packages/wrapt/arguments.py
new file mode 100644
index 000000000..032bc059e
--- /dev/null
+++ b/newrelic/packages/wrapt/arguments.py
@@ -0,0 +1,38 @@
+# The inspect.formatargspec() function was dropped in Python 3.11 but we need
+# it when constructing signature changing decorators based on the result of
+# inspect.getargspec() or inspect.getfullargspec(). The code here implements
+# inspect.formatargspec() based on Parameter and Signature from the inspect
+# module, added in Python 3.6. Thanks to Cyril Jouve for the implementation.
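+#
+# A usage sketch (based on the implementation below; "target" is a made-up
+# example function): given "def target(a, b=1, *args, c, **kwargs): pass",
+#
+#     spec = inspect.getfullargspec(target)
+#     formatargspec(spec.args, spec.varargs, spec.varkw, spec.defaults,
+#                   spec.kwonlyargs, spec.kwonlydefaults, spec.annotations)
+#
+# should return the string "(a, b=1, *args, c, **kwargs)", matching what the
+# removed inspect.formatargspec() produced for the same argspec.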
+ +try: + from inspect import Parameter, Signature +except ImportError: + from inspect import formatargspec +else: + def formatargspec(args, varargs=None, varkw=None, defaults=None, + kwonlyargs=(), kwonlydefaults={}, annotations={}): + if kwonlydefaults is None: + kwonlydefaults = {} + ndefaults = len(defaults) if defaults else 0 + parameters = [ + Parameter( + arg, + Parameter.POSITIONAL_OR_KEYWORD, + default=defaults[i] if i >= 0 else Parameter.empty, + annotation=annotations.get(arg, Parameter.empty), + ) for i, arg in enumerate(args, ndefaults - len(args)) + ] + if varargs: + parameters.append(Parameter(varargs, Parameter.VAR_POSITIONAL)) + parameters.extend( + Parameter( + kwonlyarg, + Parameter.KEYWORD_ONLY, + default=kwonlydefaults.get(kwonlyarg, Parameter.empty), + annotation=annotations.get(kwonlyarg, Parameter.empty), + ) for kwonlyarg in kwonlyargs + ) + if varkw: + parameters.append(Parameter(varkw, Parameter.VAR_KEYWORD)) + return_annotation = annotations.get('return', Signature.empty) + return str(Signature(parameters, return_annotation=return_annotation)) \ No newline at end of file diff --git a/newrelic/packages/wrapt/decorators.py b/newrelic/packages/wrapt/decorators.py index 506303d7a..c3f254729 100644 --- a/newrelic/packages/wrapt/decorators.py +++ b/newrelic/packages/wrapt/decorators.py @@ -31,10 +31,11 @@ def exec_(_code_, _globs_=None, _locs_=None): del builtins from functools import partial -from inspect import ismethod, isclass, formatargspec -from collections import namedtuple +from inspect import isclass from threading import Lock, RLock +from .arguments import formatargspec + try: from inspect import signature except ImportError: @@ -173,7 +174,7 @@ def __call__(self, wrapped): # function so the wrapper is effectively indistinguishable from the # original wrapped function. -def decorator(wrapper=None, enabled=None, adapter=None): +def decorator(wrapper=None, enabled=None, adapter=None, proxy=FunctionWrapper): # The decorator should be supplied with a single positional argument # which is the wrapper function to be used to implement the # decorator. This may be preceded by a step whereby the keyword @@ -183,7 +184,7 @@ def decorator(wrapper=None, enabled=None, adapter=None): # decorator. In that case parts of the function '__code__' and # '__defaults__' attributes are used from the adapter function # rather than those of the wrapped function. This allows for the - # argument specification from inspect.getargspec() and similar + # argument specification from inspect.getfullargspec() and similar # functions to be overridden with a prototype for a different # function than what was wrapped. The 'enabled' argument provides a # way to enable/disable the use of the decorator. If the type of @@ -194,6 +195,8 @@ def decorator(wrapper=None, enabled=None, adapter=None): # if 'enabled' is callable it will be called to obtain the value to # be checked. If False, the wrapper will not be called and instead # the original wrapped function will be called directly instead. + # The 'proxy' argument provides a way of passing a custom version of + # the FunctionWrapper class used in decorating the function. if wrapper is not None: # Helper function for creating wrapper of the appropriate @@ -206,16 +209,37 @@ def _build(wrapped, wrapper, enabled=None, adapter=None): if not callable(adapter): ns = {} + + # Check if the signature argument specification has + # annotations. 
If it does then we need to remember + # it but also drop it when attempting to manufacture + # a standin adapter function. This is necessary else + # it will try and look up any types referenced in + # the annotations in the empty namespace we use, + # which will fail. + + annotations = {} + if not isinstance(adapter, string_types): + if len(adapter) == 7: + annotations = adapter[-1] + adapter = adapter[:-1] adapter = formatargspec(*adapter) + exec_('def adapter{}: pass'.format(adapter), ns, ns) adapter = ns['adapter'] + # Override the annotations for the manufactured + # adapter function so they match the original + # adapter signature argument specification. + + if annotations: + adapter.__annotations__ = annotations + return AdapterWrapper(wrapped=wrapped, wrapper=wrapper, enabled=enabled, adapter=adapter) - return FunctionWrapper(wrapped=wrapped, wrapper=wrapper, - enabled=enabled) + return proxy(wrapped=wrapped, wrapper=wrapper, enabled=enabled) # The wrapper has been provided so return the final decorator. # The decorator is itself one of our function wrappers so we @@ -360,7 +384,7 @@ def _capture(target_wrapped): # This one is a bit strange because binding was actually # performed on the wrapper created by our decorator # factory. We need to apply that binding to the decorator - # wrapper function which which the decorator factory + # wrapper function that the decorator factory # was applied to. target_wrapper = wrapper.__get__(None, instance) @@ -384,7 +408,7 @@ def _capture(target_wrapped): # This one is a bit strange because binding was actually # performed on the wrapper created by our decorator # factory. We need to apply that binding to the decorator - # wrapper function which which the decorator factory + # wrapper function that the decorator factory # was applied to. target_wrapper = wrapper.__get__(instance, type(instance)) @@ -408,7 +432,8 @@ def _capture(target_wrapped): # decorator again wrapped in a partial using the collected # arguments. - return partial(decorator, enabled=enabled, adapter=adapter) + return partial(decorator, enabled=enabled, adapter=adapter, + proxy=proxy) # Decorator for implementing thread synchronization. It can be used as a # decorator, in which case the synchronization context is determined by diff --git a/newrelic/packages/wrapt/importer.py b/newrelic/packages/wrapt/importer.py index 4665f3865..5c4d4cc66 100644 --- a/newrelic/packages/wrapt/importer.py +++ b/newrelic/packages/wrapt/importer.py @@ -10,16 +10,17 @@ if PY2: string_types = basestring, + find_spec = None else: - import importlib string_types = str, + from importlib.util import find_spec from .decorators import synchronized # The dictionary registering any post import hooks to be triggered once # the target module has been imported. Once a module has been imported # and the hooks fired, the list of hooks recorded against the target -# module will be truncacted but the list left in the dictionary. This +# module will be truncated but the list left in the dictionary. This # acts as a flag to indicate that the module had already been imported. 
_post_import_hooks = {} @@ -152,12 +153,29 @@ class _ImportHookChainedLoader: def __init__(self, loader): self.loader = loader - def load_module(self, fullname): + if hasattr(loader, "load_module"): + self.load_module = self._load_module + if hasattr(loader, "create_module"): + self.create_module = self._create_module + if hasattr(loader, "exec_module"): + self.exec_module = self._exec_module + + def _load_module(self, fullname): module = self.loader.load_module(fullname) notify_module_loaded(module) return module + # Python 3.4 introduced create_module() and exec_module() instead of + # load_module() alone. Splitting the two steps. + + def _create_module(self, spec): + return self.loader.create_module(spec) + + def _exec_module(self, module): + self.loader.exec_module(module) + notify_module_loaded(module) + class ImportHookFinder: def __init__(self): @@ -187,7 +205,7 @@ def find_module(self, fullname, path=None): # Now call back into the import system again. try: - if PY2: + if not find_spec: # For Python 2 we don't have much choice but to # call back in to __import__(). This will # actually cause the module to be imported. If no @@ -208,14 +226,52 @@ def find_module(self, fullname, path=None): # our own loader which will then in turn call the # real loader to import the module and invoke the # post import hooks. - try: - import importlib.util - loader = importlib.util.find_spec(fullname).loader - except (ImportError, AttributeError): - loader = importlib.find_loader(fullname, path) - if loader: + + loader = getattr(find_spec(fullname), "loader", None) + + if loader and not isinstance(loader, _ImportHookChainedLoader): return _ImportHookChainedLoader(loader) + finally: + del self.in_progress[fullname] + + def find_spec(self, fullname, path=None, target=None): + # Since Python 3.4, you are meant to implement find_spec() method + # instead of find_module() and since Python 3.10 you get deprecation + # warnings if you don't define find_spec(). + + # If the module being imported is not one we have registered + # post import hooks for, we can return immediately. We will + # take no further part in the importing of this module. + + if not fullname in _post_import_hooks: + return None + + # When we are interested in a specific module, we will call back + # into the import system a second time to defer to the import + # finder that is supposed to handle the importing of the module. + # We set an in progress flag for the target module so that on + # the second time through we don't trigger another call back + # into the import system and cause a infinite loop. + + if fullname in self.in_progress: + return None + + self.in_progress[fullname] = True + + # Now call back into the import system again. + + try: + # This should only be Python 3 so find_spec() should always + # exist so don't need to check. + + spec = find_spec(fullname) + loader = getattr(spec, "loader", None) + + if loader and not isinstance(loader, _ImportHookChainedLoader): + spec.loader = _ImportHookChainedLoader(loader) + + return spec finally: del self.in_progress[fullname] diff --git a/newrelic/packages/wrapt/wrappers.py b/newrelic/packages/wrapt/wrappers.py index 18cf5e053..2716cd1da 100644 --- a/newrelic/packages/wrapt/wrappers.py +++ b/newrelic/packages/wrapt/wrappers.py @@ -86,6 +86,14 @@ def __init__(self, wrapped): except AttributeError: pass + # Python 3.10 onwards also does not allow itself to be overridden + # using a property and it must instead be set explicitly. 
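+        #
+        # As a sketch of what this preserves (illustrative only): for a
+        # wrapped "def f(x: int) -> int", ObjectProxy(f).__annotations__
+        # should equal f.__annotations__ immediately after construction,
+        # because the value is copied here eagerly instead of being
+        # served lazily through the removed __annotations__ property.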
+
+        try:
+            object.__setattr__(self, '__annotations__', wrapped.__annotations__)
+        except AttributeError:
+            pass
+
     @property
     def __name__(self):
         return self.__wrapped__.__name__
@@ -102,14 +110,6 @@ def __class__(self):
     def __class__(self, value):
         self.__wrapped__.__class__ = value
 
-    @property
-    def __annotations__(self):
-        return self.__wrapped__.__annotations__
-
-    @__annotations__.setter
-    def __annotations__(self, value):
-        self.__wrapped__.__annotations__ = value
-
     def __dir__(self):
         return dir(self.__wrapped__)
 
@@ -178,11 +178,23 @@ def __setattr__(self, name, value):
                 object.__setattr__(self, '__qualname__', value.__qualname__)
             except AttributeError:
                 pass
+            try:
+                object.__delattr__(self, '__annotations__')
+            except AttributeError:
+                pass
+            try:
+                object.__setattr__(self, '__annotations__', value.__annotations__)
+            except AttributeError:
+                pass
 
         elif name == '__qualname__':
             setattr(self.__wrapped__, name, value)
             object.__setattr__(self, name, value)
 
+        elif name == '__annotations__':
+            setattr(self.__wrapped__, name, value)
+            object.__setattr__(self, name, value)
+
         elif hasattr(type(self), name):
             object.__setattr__(self, name, value)
 
@@ -550,7 +562,7 @@ def __call__(self, *args, **kwargs):
         # a function that was already bound to an instance. In that case
         # we want to extract the instance from the function and use it.
 
-        if self._self_binding == 'function':
+        if self._self_binding in ('function', 'classmethod'):
            if self._self_instance is None:
                instance = getattr(self.__wrapped__, '__self__', None)
                if instance is not None:
@@ -566,6 +578,33 @@ def __call__(self, *args, **kwargs):
         return self._self_wrapper(self.__wrapped__, self._self_instance,
                 args, kwargs)
 
+    def __set_name__(self, owner, name):
+        # This is a special method used to supply information to
+        # descriptors about what the name of a variable in a class
+        # definition is. Not wanting to add this to ObjectProxy as not
+        # sure of broader implications of doing that. Thus restrict to
+        # FunctionWrapper used by decorators.
+
+        if hasattr(self.__wrapped__, "__set_name__"):
+            self.__wrapped__.__set_name__(owner, name)
+
+    def __instancecheck__(self, instance):
+        # This is a special method used by isinstance() to check whether
+        # an object is an instance of the `__wrapped__` object.
+        return isinstance(instance, self.__wrapped__)
+
+    def __subclasscheck__(self, subclass):
+        # This is a special method used by issubclass() to make checks
+        # about inheritance of classes. We need to unwrap any object
+        # proxy. Not wanting to add this to ObjectProxy as not sure of
+        # broader implications of doing that. Thus restrict to
+        # FunctionWrapper used by decorators.
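+        #
+        # A rough sketch of the transparency this enables (illustrative
+        # only; "passthrough" and "Widget" are made-up names):
+        #
+        #     @wrapt.decorator
+        #     def passthrough(wrapped, instance, args, kwargs):
+        #         return wrapped(*args, **kwargs)
+        #
+        #     @passthrough
+        #     class Widget(object): pass
+        #
+        #     isinstance(Widget(), Widget)            # True via __wrapped__
+        #     issubclass(Widget.__wrapped__, Widget)  # True after unwrapping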
+ + if hasattr(subclass, "__wrapped__"): + return issubclass(subclass.__wrapped__, self.__wrapped__) + else: + return issubclass(subclass, self.__wrapped__) + class BoundFunctionWrapper(_FunctionWrapperBase): def __call__(self, *args, **kwargs): From c1b857b565654a5ecb66196ec76501eaf63f5346 Mon Sep 17 00:00:00 2001 From: Lalleh Rafeei <84813886+lrafeei@users.noreply.github.com> Date: Thu, 29 Sep 2022 17:42:15 -0700 Subject: [PATCH 37/49] Update Starlette tests due to bug being fixed (#639) * Starlette BaseHTTPMiddleware testing fix with working BackgroundTask Co-authored-by: Timothy Pansino Co-authored-by: Hannah Stepanek Co-authored-by: Uma Annamalai * Temporarily pin saniclatest to v22.6.2 * Update tox.ini Co-authored-by: Hannah Stepanek * Update tests/framework_starlette/test_bg_tasks.py Co-authored-by: Hannah Stepanek * [Mega-Linter] Apply linters fixes * Trigger tests Co-authored-by: Timothy Pansino Co-authored-by: Hannah Stepanek Co-authored-by: Uma Annamalai Co-authored-by: Hannah Stepanek Co-authored-by: lrafeei --- tests/framework_starlette/test_bg_tasks.py | 77 +++++++++++----------- tox.ini | 10 +-- 2 files changed, 44 insertions(+), 43 deletions(-) diff --git a/tests/framework_starlette/test_bg_tasks.py b/tests/framework_starlette/test_bg_tasks.py index af929895f..308f67d10 100644 --- a/tests/framework_starlette/test_bg_tasks.py +++ b/tests/framework_starlette/test_bg_tasks.py @@ -1,4 +1,4 @@ - # Copyright 2010 New Relic, Inc. +# Copyright 2010 New Relic, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -12,26 +12,25 @@ # See the License for the specific language governing permissions and # limitations under the License. -import pytest import sys + +import pytest +from starlette import __version__ from testing_support.fixtures import validate_transaction_metrics from testing_support.validators.validate_transaction_count import ( validate_transaction_count, ) -from starlette import __version__ starlette_version = tuple(int(x) for x in __version__.split(".")) try: - from starlette.middleware import Middleware # Ignore Flake8 Error + from starlette.middleware import Middleware # noqa: F401 no_middleware = False except ImportError: no_middleware = True -skip_if_no_middleware = pytest.mark.skipif( - no_middleware, reason="These tests verify middleware functionality" -) +skip_if_no_middleware = pytest.mark.skipif(no_middleware, reason="These tests verify middleware functionality") @pytest.fixture(scope="session") @@ -45,12 +44,8 @@ def target_application(): def test_simple(target_application, route): route_metrics = [("Function/_test_bg_tasks:run_%s_bg_task" % route, 1)] - @validate_transaction_metrics( - "_test_bg_tasks:run_%s_bg_task" % route, index=-2, scoped_metrics=route_metrics - ) - @validate_transaction_metrics( - "_test_bg_tasks:%s_bg_task" % route, background_task=True - ) + @validate_transaction_metrics("_test_bg_tasks:run_%s_bg_task" % route, index=-2, scoped_metrics=route_metrics) + @validate_transaction_metrics("_test_bg_tasks:%s_bg_task" % route, background_task=True) @validate_transaction_count(2) def _test(): app = target_application["none"] @@ -65,12 +60,8 @@ def _test(): def test_asgi_style_middleware(target_application, route): route_metrics = [("Function/_test_bg_tasks:run_%s_bg_task" % route, 1)] - @validate_transaction_metrics( - "_test_bg_tasks:run_%s_bg_task" % route, index=-2, scoped_metrics=route_metrics - ) - @validate_transaction_metrics( - 
"_test_bg_tasks:%s_bg_task" % route, background_task=True - ) + @validate_transaction_metrics("_test_bg_tasks:run_%s_bg_task" % route, index=-2, scoped_metrics=route_metrics) + @validate_transaction_metrics("_test_bg_tasks:%s_bg_task" % route, background_task=True) @validate_transaction_count(2) def _test(): app = target_application["asgi"] @@ -83,34 +74,42 @@ def _test(): @skip_if_no_middleware @pytest.mark.parametrize("route", ["async", "sync"]) def test_basehttp_style_middleware(target_application, route): - route_metrics = [("Function/_test_bg_tasks:run_%s_bg_task" % route, 1)] - old_metrics = [ - ("Function/_test_bg_tasks:%s_bg_task" % route, 1), - ("Function/_test_bg_tasks:run_%s_bg_task" % route, 1), - ] + route_metric = ("Function/_test_bg_tasks:run_%s_bg_task" % route, 1) + # A function trace metric that appears only when the bug below is present, causing background tasks to be + # completed inside web transactions, requiring a function trace to be used for timing + # instead of a background task transaction. Should not be present at all when bug is fixed. + bg_task_metric = ("Function/_test_bg_tasks:%s_bg_task" % route, 1) def _test(): app = target_application["basehttp"] response = app.get("/" + route) assert response.status == 200 - if starlette_version >= (0, 20, 1): - if sys.version_info[:2] > (3, 7): - _test = validate_transaction_metrics( - "_test_bg_tasks:run_%s_bg_task" % route, index=-2, scoped_metrics=route_metrics - )(_test) - _test = validate_transaction_metrics( - "_test_bg_tasks:%s_bg_task" % route, background_task=True - )(_test) - _test = validate_transaction_count(2)(_test) - else: # Python <= 3.7 requires this specific configuration with starlette 0.20.1 - _test = validate_transaction_metrics( - "_test_bg_tasks:run_%s_bg_task" % route, scoped_metrics=route_metrics - )(_test) - _test = validate_transaction_count(1)(_test) + BUG_COMPLETELY_FIXED = (starlette_version >= (0, 21, 0)) or ( + starlette_version >= (0, 20, 1) and sys.version_info[:2] > (3, 7) + ) + BUG_PARTIALLY_FIXED = (0, 20, 1) <= starlette_version < (0, 21, 0) and sys.version_info[:2] <= (3, 7) + + if BUG_COMPLETELY_FIXED: + # Assert both web transaction and background task transactions are present. + _test = validate_transaction_metrics( + "_test_bg_tasks:run_%s_bg_task" % route, index=-2, scoped_metrics=[route_metric] + )(_test) + _test = validate_transaction_metrics("_test_bg_tasks:%s_bg_task" % route, background_task=True)(_test) + _test = validate_transaction_count(2)(_test) + elif BUG_PARTIALLY_FIXED: + # The background task no longer blocks the completion of the web request/web transaction. + # However, the BaseHTTPMiddleware causes the task to be cancelled when the web request disconnects, so there are no + # longer function traces or background task transactions. + _test = validate_transaction_metrics("_test_bg_tasks:run_%s_bg_task" % route, scoped_metrics=[route_metric])( + _test + ) + _test = validate_transaction_count(1)(_test) else: + # The BaseHTTPMiddleware causes the background task to execute within the web request + # with the web transaction still active. 
_test = validate_transaction_metrics( - "_test_bg_tasks:run_%s_bg_task" % route, scoped_metrics=old_metrics + "_test_bg_tasks:run_%s_bg_task" % route, scoped_metrics=[route_metric, bg_task_metric] )(_test) _test = validate_transaction_count(1)(_test) diff --git a/tox.ini b/tox.ini index 6944f3322..57d525a08 100644 --- a/tox.ini +++ b/tox.ini @@ -140,6 +140,7 @@ envlist = python-framework_sanic-{py38,pypy37}-sanic{190301,1906,1812,1912,200904,210300,2109,2112,2203}, python-framework_sanic-{py37,py38,py39,py310,pypy37}-saniclatest, python-framework_starlette-{py310,pypy37}-starlette{0014,0015,0019}, + python-framework_starlette-{py37,py38}-starlette{002001}, python-framework_starlette-{py37,py38,py39,py310,pypy37}-starlettelatest, python-framework_strawberry-{py37,py38,py39,py310}-strawberrylatest, python-logger_logging-{py27,py37,py38,py39,py310,pypy,pypy37}, @@ -335,15 +336,16 @@ deps = framework_sanic-sanic2109: sanic<21.10 framework_sanic-sanic2112: sanic<21.13 framework_sanic-sanic2203: sanic<22.4 - framework_sanic-saniclatest: sanic + ; Temporarily pin this to the second to last release + framework_sanic-saniclatest: sanic==22.6.2 framework_sanic-sanic{1812,190301,1906}: aiohttp framework_starlette: graphene<3 framework_starlette-starlette0014: starlette<0.15 framework_starlette-starlette0015: starlette<0.16 framework_starlette-starlette0019: starlette<0.20 - framework_starlette-starlettelatest: starlette<0.21 - ; Strawberry 0.95.0 is incompatible with Starlette 0.18.0, downgrade until future release - framework_strawberry: starlette<0.18.0 + framework_starlette-starlette002001: starlette==0.20.1 + framework_starlette-starlettelatest: starlette + framework_strawberry: starlette framework_strawberry-strawberrylatest: strawberry-graphql framework_tornado: pycurl framework_tornado-tornado0600: tornado<6.1 From dccecdd9668fb3572f20a22b8c6540ef6f1579cf Mon Sep 17 00:00:00 2001 From: Uma Annamalai Date: Mon, 3 Oct 2022 13:39:04 -0700 Subject: [PATCH 38/49] Fix celery py37 tests. (#642) * Pin importlib-metadata version for celery py37. * Add pinned importlib-metadata version for pypy37. 
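
A plausible root cause, noted here as an assumption rather than a fact taken
from the commit: importlib-metadata 5.0 removed the long-deprecated
dict-style interface of entry_points(), which older celery/kombu releases
still use on Python 3.7, e.g.

    # importlib-metadata < 5.0 only; the dict interface was removed in 5.0:
    scripts = entry_points()["console_scripts"]

    # selection API available since importlib-metadata 3.6:
    scripts = entry_points(group="console_scripts")

Pinning below 5.0 keeps the old interface available on py37/pypy37.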
--- tox.ini | 1 + 1 file changed, 1 insertion(+) diff --git a/tox.ini b/tox.ini index 57d525a08..da9b8db63 100644 --- a/tox.ini +++ b/tox.ini @@ -193,6 +193,7 @@ deps = adapter_uvicorn-uvicornlatest: uvicorn agent_features: beautifulsoup4 application_celery: celery<6.0 + application_celery-py{py37,37}: importlib-metadata<5.0 application_gearman: gearman<3.0.0 component_djangorestframework-djangorestframework0300: Django < 1.9 component_djangorestframework-djangorestframework0300: djangorestframework < 3.1 From cf306cea7eed4a6b4a954622438628fd34cd9824 Mon Sep 17 00:00:00 2001 From: Lalleh Rafeei <84813886+lrafeei@users.noreply.github.com> Date: Tue, 4 Oct 2022 14:58:10 -0700 Subject: [PATCH 39/49] Support confluent-kafka produce arguments (#645) * Add functionality for callback keyword Co-authored-by: Timothy Pansino Co-authored-by: Hannah Stepanek Co-authored-by: Uma Annamalai * Fixup: change arg order for py2.7 compatibility * Allow dictionary or list for headers * Fixup: don't run 1.8.2 on py2.7 Co-authored-by: Timothy Pansino Co-authored-by: Hannah Stepanek Co-authored-by: Uma Annamalai Co-authored-by: Hannah Stepanek --- .../hooks/messagebroker_confluentkafka.py | 38 ++++++++++--------- .../messagebroker_confluentkafka/conftest.py | 13 ++++++- .../test_producer.py | 37 ++++++++++++++++++ tox.ini | 3 ++ 4 files changed, 72 insertions(+), 19 deletions(-) diff --git a/newrelic/hooks/messagebroker_confluentkafka.py b/newrelic/hooks/messagebroker_confluentkafka.py index 917ec1d5b..965fd765b 100644 --- a/newrelic/hooks/messagebroker_confluentkafka.py +++ b/newrelic/hooks/messagebroker_confluentkafka.py @@ -33,17 +33,28 @@ HEARTBEAT_POLL_TIMEOUT = "MessageBroker/Kafka/Heartbeat/PollTimeout" -def _bind_Producer_produce(topic, value=None, key=None, partition=-1, on_delivery=None, timestamp=0, headers=None): - return topic, value, key, partition, on_delivery, timestamp, headers - - def wrap_Producer_produce(wrapped, instance, args, kwargs): transaction = current_transaction() if transaction is None: return wrapped(*args, **kwargs) - topic, value, key, partition, on_delivery, timestamp, headers = _bind_Producer_produce(*args, **kwargs) - headers = list(headers) if headers else [] + # Binding with a standard function signature does not work properly due to a bug in handling arguments + # in the underlying C code, where callback=None being specified causes on_delivery=callback to never run. + + # Bind out headers from end of args list + if len(args) == 8: + # Take headers off the end of the positional args + headers = args[7] + args = args[0:7] + else: + headers = kwargs.pop("headers", []) + + # Bind topic off of the beginning of the args list + if len(args) >= 1: + topic = args[0] + args = args[1:] + else: + topic = kwargs.get("topic", None) with MessageTrace( library="Kafka", @@ -52,18 +63,11 @@ def wrap_Producer_produce(wrapped, instance, args, kwargs): destination_name=topic or "Default", source=wrapped, ) as trace: - dt_headers = [(k, v.encode("utf-8")) for k, v in trace.generate_request_headers(transaction)] - headers.extend(dt_headers) + dt_headers = {k: v.encode("utf-8") for k, v in trace.generate_request_headers(transaction)} + # headers can be a list of tuples or a dict so convert to dict for consistency. 
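+        # For example, both caller-supplied header forms below end up merged
+        # with the distributed tracing headers the same way:
+        #     produce(topic, value=..., headers=[("my-header", b"value")])
+        #     produce(topic, value=..., headers={"my-header": b"value"})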
+ dt_headers.update(dict(headers) if headers else {}) try: - return wrapped( - topic, - value=value, - key=key, - partition=partition, - on_delivery=on_delivery, - timestamp=timestamp, - headers=headers, - ) + return wrapped(topic, headers=dt_headers, *args, **kwargs) except Exception as error: # Unwrap kafka errors while hasattr(error, "exception"): diff --git a/tests/messagebroker_confluentkafka/conftest.py b/tests/messagebroker_confluentkafka/conftest.py index 6e19005d8..a86af3ff9 100644 --- a/tests/messagebroker_confluentkafka/conftest.py +++ b/tests/messagebroker_confluentkafka/conftest.py @@ -190,10 +190,19 @@ def topic(): @pytest.fixture() -def send_producer_message(topic, producer, serialize): +def send_producer_message(topic, producer, serialize, client_type): + callback_called = [] + + def producer_callback(err, msg): + callback_called.append(True) + def _test(): - producer.produce(topic, value=serialize({"foo": 1})) + if client_type == "cimpl": + producer.produce(topic, value=serialize({"foo": 1}), callback=producer_callback) + else: + producer.produce(topic, value=serialize({"foo": 1}), on_delivery=producer_callback) producer.flush() + assert callback_called return _test diff --git a/tests/messagebroker_confluentkafka/test_producer.py b/tests/messagebroker_confluentkafka/test_producer.py index fe91295c4..71b674e80 100644 --- a/tests/messagebroker_confluentkafka/test_producer.py +++ b/tests/messagebroker_confluentkafka/test_producer.py @@ -12,6 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. +import threading + import pytest from conftest import cache_kafka_producer_headers from testing_support.fixtures import ( @@ -27,6 +29,41 @@ from newrelic.packages import six +@pytest.mark.parametrize( + "headers", [[("MY-HEADER", "nonsense")], {"MY-HEADER": "nonsense"}], ids=["list of tuples headers", "dict headers"] +) +@background_task() +def test_produce_arguments(topic, producer, client_type, serialize, headers): + callback_called = threading.Event() + + def producer_callback(err, msg): + callback_called.set() + + if client_type == "cimpl": + producer.produce( + topic, + value=serialize({"foo": 1}), + key=serialize("my-key"), + callback=producer_callback, + partition=1, + timestamp=1, + headers=headers, + ) + else: + producer.produce( + topic, + value=serialize({"foo": 1}), + key=serialize("my-key"), + partition=1, + on_delivery=producer_callback, + timestamp=1, + headers=headers, + ) + producer.flush() + + assert callback_called.wait(5), "Callback never called." + + def test_trace_metrics(topic, send_producer_message): scoped_metrics = [("MessageBroker/Kafka/Topic/Produce/Named/%s" % topic, 1)] unscoped_metrics = scoped_metrics diff --git a/tox.ini b/tox.ini index da9b8db63..663f50bb9 100644 --- a/tox.ini +++ b/tox.ini @@ -152,6 +152,8 @@ envlist = rabbitmq-messagebroker_pika-{py37,py38,py39,py310,pypy37}-pikalatest, kafka-messagebroker_confluentkafka-{py27,py37,py38,py39,py310}-confluentkafkalatest, kafka-messagebroker_confluentkafka-{py27,py39}-confluentkafka{0107,0106}, + ; confluent-kafka had a bug in 1.8.2's setup.py file which was incompatible with 2.7. 
kafka-messagebroker_confluentkafka-{py39}-confluentkafka{0108},
     kafka-messagebroker_kafkapython-{pypy,py27,py37,py38,pypy37}-kafkapythonlatest,
     kafka-messagebroker_kafkapython-{py27,py38}-kafkapython{020001,020000,0104},
     python-template_mako-{py27,py37,py38,py39,py310}
@@ -362,6 +364,7 @@ deps =
     messagebroker_pika: tornado<5
     messagebroker_pika-{py27,pypy}: enum34
     messagebroker_confluentkafka-confluentkafkalatest: confluent-kafka
+    messagebroker_confluentkafka-confluentkafka0108: confluent-kafka<1.9
     messagebroker_confluentkafka-confluentkafka0107: confluent-kafka<1.8
     messagebroker_confluentkafka-confluentkafka0106: confluent-kafka<1.7
     messagebroker_kafkapython-kafkapythonlatest: kafka-python

From 29abfdf905d08af5106e4e779d3b986fe93b8169 Mon Sep 17 00:00:00 2001
From: Mattias Loverot
Date: Wed, 5 Oct 2022 00:50:44 +0200
Subject: [PATCH 40/49] Bugfix: Avoiding "RuntimeError: dictionary changed size during iteration" error (#631)

Co-authored-by: Timothy Pansino <11214426+TimPansino@users.noreply.github.com>
---
 newrelic/core/trace_cache.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/newrelic/core/trace_cache.py b/newrelic/core/trace_cache.py
index 4a087c4fb..1634d0d0b 100644
--- a/newrelic/core/trace_cache.py
+++ b/newrelic/core/trace_cache.py
@@ -197,7 +197,7 @@ def active_threads(self):
         debug = global_settings().debug
 
         if debug.enable_coroutine_profiling:
-            for thread_id, trace in self._cache.items():
+            for thread_id, trace in list(self._cache.items()):
                 transaction = trace.transaction
                 if transaction and transaction._greenlet is not None:
                     gr = transaction._greenlet()
@@ -359,7 +359,7 @@ def record_event_loop_wait(self, start_time, end_time):
             task = getattr(transaction.root_span, "_task", None)
             loop = get_event_loop(task)
 
-        for trace in self._cache.values():
+        for trace in list(self._cache.values()):
             if trace in seen:
                 continue
 

From 534a0ce0b8cfb29e97b7c986af2311fc75005839 Mon Sep 17 00:00:00 2001
From: Hannah Stepanek
Date: Wed, 5 Oct 2022 10:59:54 -0700
Subject: [PATCH 41/49] Fix instrumentation for sanic 22.9.0 (#644)

* Refactor sanic version into fixture

* Fix broken sanic tests & impl on 22.9.0

* Fix pylint errors

* Test sanic 22.9.0 & sanic latest

Co-authored-by: Uma Annamalai
---
 newrelic/hooks/framework_sanic.py         |   7 +-
 tests/framework_sanic/conftest.py         |  16 +-
 tests/framework_sanic/test_application.py | 319 ++++++++++++----------
 tox.ini                                   |   6 +-
 4 files changed, 197 insertions(+), 151 deletions(-)

diff --git a/newrelic/hooks/framework_sanic.py b/newrelic/hooks/framework_sanic.py
index ab5a22eac..745cdbf70 100644
--- a/newrelic/hooks/framework_sanic.py
+++ b/newrelic/hooks/framework_sanic.py
@@ -243,7 +243,12 @@ def _nr_sanic_register_middleware_(wrapped, instance, args, kwargs):
     # Cache the callable_name on the middleware object
     callable_name(middleware)
 
-    wrapped_middleware = _nr_wrapper_middleware_(attach_to)(middleware)
+    middleware_func = middleware
+    if hasattr(middleware, "func"):
+        name = callable_name(middleware.func)
+        middleware_func = middleware.func
+
+    wrapped_middleware = _nr_wrapper_middleware_(attach_to)(middleware_func)
     wrapped(wrapped_middleware, attach_to)
     return middleware
 
diff --git a/tests/framework_sanic/conftest.py b/tests/framework_sanic/conftest.py
index 4880bf116..434528bac 100644
--- a/tests/framework_sanic/conftest.py
+++ b/tests/framework_sanic/conftest.py
@@ -15,13 +15,15 @@
 import asyncio
 
 import pytest
-from testing_support.fixtures import (
+from testing_support.fixtures import (  # noqa: F401 pylint: disable=W0611
code_coverage_fixture, collector_agent_registration_fixture, collector_available_fixture, ) -from newrelic.common.object_wrapper import transient_function_wrapper +from newrelic.common.object_wrapper import ( # noqa: F401 pylint: disable=W0611 + transient_function_wrapper, +) _coverage_source = [ "newrelic.hooks.framework_sanic", @@ -74,13 +76,13 @@ def create_request_class(app, method, url, headers=None, loop=None): from sanic.server import HttpProtocol class MockProtocol(HttpProtocol): - async def send(*args, **kwargs): + async def send(*args, **kwargs): # pylint: disable=E0211 return proto = MockProtocol(loop=loop, app=app) proto.recv_buffer = bytearray() http = Http(proto) - + if hasattr(http, "init_for_request"): http.init_for_request() @@ -134,6 +136,12 @@ def request(app, method, url, headers=None): loop.run_until_complete(app._startup()) else: app.router.finalize() + # Starting in 22.9.0 sanic introduced an API to control middleware ordering. + # This included a new method called finalize_middleware that sets the middleware + # to be used on the request.route during the app._startup. In order to register + # new middleware the finalize_middleware must be called. + elif hasattr(app, "finalize_middleware"): + app.finalize_middleware() coro = create_request_coroutine(app, method, url, headers, loop) loop.run_until_complete(coro) diff --git a/tests/framework_sanic/test_application.py b/tests/framework_sanic/test_application.py index ac2726bd9..eebbde003 100644 --- a/tests/framework_sanic/test_application.py +++ b/tests/framework_sanic/test_application.py @@ -12,142 +12,147 @@ # See the License for the specific language governing permissions and # limitations under the License. +from collections import deque + import pytest import sanic - -from newrelic.core.config import global_settings -from collections import deque +from testing_support.fixtures import ( + function_not_called, + override_application_settings, + override_generic_settings, + override_ignore_status_codes, + validate_transaction_errors, + validate_transaction_event_attributes, + validate_transaction_metrics, +) +from testing_support.validators.validate_code_level_metrics import ( + validate_code_level_metrics, +) from newrelic.api.application import application_instance -from newrelic.api.transaction import Transaction from newrelic.api.external_trace import ExternalTrace - -from testing_support.fixtures import (validate_transaction_metrics, - override_application_settings, validate_transaction_errors, - validate_transaction_event_attributes, - override_ignore_status_codes, override_generic_settings, - function_not_called) -from testing_support.validators.validate_code_level_metrics import validate_code_level_metrics - - -sanic_21 = int(sanic.__version__.split('.', 1)[0]) >= 21 - +from newrelic.api.transaction import Transaction +from newrelic.core.config import global_settings BASE_METRICS = [ - ('Function/_target_application:index', 1), - ('Function/_target_application:request_middleware', 1 if int(sanic.__version__.split('.', 1)[0]) > 18 else 2), + ("Function/_target_application:index", 1), + ("Function/_target_application:request_middleware", 1 if int(sanic.__version__.split(".", 1)[0]) > 18 else 2), ] FRAMEWORK_METRICS = [ - ('Python/Framework/Sanic/%s' % sanic.__version__, 1), + ("Python/Framework/Sanic/%s" % sanic.__version__, 1), ] -BASE_ATTRS = ['response.status', 'response.headers.contentType', - 'response.headers.contentLength'] +BASE_ATTRS = ["response.status", "response.headers.contentType", 
"response.headers.contentLength"] validate_base_transaction_event_attr = validate_transaction_event_attributes( - required_params={'agent': BASE_ATTRS, 'user': [], 'intrinsic': []}, + required_params={"agent": BASE_ATTRS, "user": [], "intrinsic": []}, ) @validate_code_level_metrics("_target_application", "index") @validate_transaction_metrics( - '_target_application:index', + "_target_application:index", scoped_metrics=BASE_METRICS, rollup_metrics=BASE_METRICS + FRAMEWORK_METRICS, ) @validate_base_transaction_event_attr def test_simple_request(app): - response = app.fetch('get', '/') + response = app.fetch("get", "/") assert response.status == 200 -@function_not_called('newrelic.core.stats_engine', - 'StatsEngine.record_transaction') +@function_not_called("newrelic.core.stats_engine", "StatsEngine.record_transaction") def test_websocket(app): - headers = {'upgrade': 'WebSocket'} - response = app.fetch('get', '/', headers=headers) + headers = {"upgrade": "WebSocket"} + response = app.fetch("get", "/", headers=headers) assert response.status == 200 -@pytest.mark.parametrize('method', ( - 'get', - 'post', - 'put', - 'patch', - 'delete', -)) +@pytest.mark.parametrize( + "method", + ( + "get", + "post", + "put", + "patch", + "delete", + ), +) def test_method_view(app, method): - metric_name = 'Function/_target_application:MethodView.' + method + metric_name = "Function/_target_application:MethodView." + method @validate_code_level_metrics("_target_application.MethodView", method) @validate_transaction_metrics( - '_target_application:MethodView.' + method, + "_target_application:MethodView." + method, scoped_metrics=[(metric_name, 1)], rollup_metrics=[(metric_name, 1)], ) @validate_base_transaction_event_attr def _test(): - response = app.fetch(method, '/method_view') + response = app.fetch(method, "/method_view") assert response.status == 200 _test() DT_METRICS = [ - ('Supportability/DistributedTrace/AcceptPayload/Success', None), - ('Supportability/TraceContext/TraceParent/Accept/Success', 1), + ("Supportability/DistributedTrace/AcceptPayload/Success", None), + ("Supportability/TraceContext/TraceParent/Accept/Success", 1), ] @validate_transaction_metrics( - '_target_application:index', + "_target_application:index", scoped_metrics=BASE_METRICS, rollup_metrics=BASE_METRICS + DT_METRICS + FRAMEWORK_METRICS, ) @validate_base_transaction_event_attr -@override_application_settings({ - 'distributed_tracing.enabled': True, -}) +@override_application_settings( + { + "distributed_tracing.enabled": True, + } +) def test_inbound_distributed_trace(app): transaction = Transaction(application_instance()) dt_headers = ExternalTrace.generate_request_headers(transaction) - response = app.fetch('get', '/', headers=dict(dt_headers)) + response = app.fetch("get", "/", headers=dict(dt_headers)) assert response.status == 200 -_params = ["error"] -if not sanic_21: - _params.append('write_response_error') -@pytest.mark.parametrize('endpoint', _params) -def test_recorded_error(app, endpoint): + +@pytest.mark.parametrize("endpoint", ["error", "write_response_error"]) +def test_recorded_error(app, endpoint, sanic_version): + if sanic_version >= (21, 0, 0) and endpoint == "write_response_error": + pytest.skip() + ERROR_METRICS = [ - ('Function/_target_application:%s' % endpoint, 1), + ("Function/_target_application:%s" % endpoint, 1), ] - @validate_transaction_errors(errors=['builtins:ValueError']) + @validate_transaction_errors(errors=["builtins:ValueError"]) @validate_base_transaction_event_attr 
@validate_transaction_metrics( - '_target_application:%s' % endpoint, + "_target_application:%s" % endpoint, scoped_metrics=ERROR_METRICS, rollup_metrics=ERROR_METRICS + FRAMEWORK_METRICS, ) def _test(): - if endpoint == 'write_response_error': + if endpoint == "write_response_error": with pytest.raises(ValueError): - response = app.fetch('get', '/' + endpoint) + response = app.fetch("get", "/" + endpoint) else: - response = app.fetch('get', '/' + endpoint) + response = app.fetch("get", "/" + endpoint) assert response.status == 500 _test() NOT_FOUND_METRICS = [ - ('Function/_target_application:not_found', 1), + ("Function/_target_application:not_found", 1), ] @validate_transaction_metrics( - '_target_application:not_found', + "_target_application:not_found", scoped_metrics=NOT_FOUND_METRICS, rollup_metrics=NOT_FOUND_METRICS + FRAMEWORK_METRICS, ) @@ -155,88 +160,90 @@ def _test(): @override_ignore_status_codes([404]) @validate_transaction_errors(errors=[]) def test_ignored_by_status_error(app): - response = app.fetch('get', '/404') + response = app.fetch("get", "/404") assert response.status == 404 DOUBLE_ERROR_METRICS = [ - ('Function/_target_application:zero_division_error', 1), + ("Function/_target_application:zero_division_error", 1), ] @validate_transaction_metrics( - '_target_application:zero_division_error', + "_target_application:zero_division_error", scoped_metrics=DOUBLE_ERROR_METRICS, rollup_metrics=DOUBLE_ERROR_METRICS, ) -@validate_transaction_errors( - errors=['builtins:ValueError', 'builtins:ZeroDivisionError']) +@validate_transaction_errors(errors=["builtins:ValueError", "builtins:ZeroDivisionError"]) def test_error_raised_in_error_handler(app): # Because of a bug in Sanic versions <0.8.0, the response.status value is # inconsistent. Rather than assert the status value, we rely on the # transaction errors validator to confirm the application acted as we'd # expect it to. 
- app.fetch('get', '/zero') + app.fetch("get", "/zero") -STREAMING_ATTRS = ['response.status', 'response.headers.contentType'] +STREAMING_ATTRS = ["response.status", "response.headers.contentType"] STREAMING_METRICS = [ - ('Function/_target_application:streaming', 1), + ("Function/_target_application:streaming", 1), ] @validate_code_level_metrics("_target_application", "streaming") @validate_transaction_metrics( - '_target_application:streaming', + "_target_application:streaming", scoped_metrics=STREAMING_METRICS, rollup_metrics=STREAMING_METRICS, ) @validate_transaction_event_attributes( - required_params={'agent': STREAMING_ATTRS, 'user': [], 'intrinsic': []}, + required_params={"agent": STREAMING_ATTRS, "user": [], "intrinsic": []}, ) def test_streaming_response(app): # streaming responses do not have content-length headers - response = app.fetch('get', '/streaming') + response = app.fetch("get", "/streaming") assert response.status == 200 ERROR_IN_ERROR_TESTS = [ - ('/sync-error', '_target_application:sync_error', - [('Function/_target_application:sync_error', 1), - ('Function/_target_application:handle_custom_exception_sync', 1)], - ['_target_application:CustomExceptionSync', - 'sanic.exceptions:SanicException']), - - ('/async-error', '_target_application:async_error', - [('Function/_target_application:async_error', 1), - ('Function/_target_application:handle_custom_exception_async', 1)], - ['_target_application:CustomExceptionAsync']), + ( + "/sync-error", + "_target_application:sync_error", + [ + ("Function/_target_application:sync_error", 1), + ("Function/_target_application:handle_custom_exception_sync", 1), + ], + ["_target_application:CustomExceptionSync", "sanic.exceptions:SanicException"], + ), + ( + "/async-error", + "_target_application:async_error", + [ + ("Function/_target_application:async_error", 1), + ("Function/_target_application:handle_custom_exception_async", 1), + ], + ["_target_application:CustomExceptionAsync"], + ), ] -@pytest.mark.parametrize('url,metric_name,metrics,errors', - ERROR_IN_ERROR_TESTS) -@pytest.mark.parametrize('nr_enabled', (True, False)) -def test_errors_in_error_handlers( - nr_enabled, app, url, metric_name, metrics, errors): +@pytest.mark.parametrize("url,metric_name,metrics,errors", ERROR_IN_ERROR_TESTS) +@pytest.mark.parametrize("nr_enabled", (True, False)) +def test_errors_in_error_handlers(nr_enabled, app, url, metric_name, metrics, errors): settings = global_settings() - @override_generic_settings(settings, {'enabled': nr_enabled}) + @override_generic_settings(settings, {"enabled": nr_enabled}) def _test(): # Because of a bug in Sanic versions <0.8.0, the response.status value # is inconsistent. Rather than assert the status value, we rely on the # transaction errors validator to confirm the application acted as we'd # expect it to. 
- app.fetch('get', url) + app.fetch("get", url) if nr_enabled: _test = validate_transaction_errors(errors=errors)(_test) - _test = validate_transaction_metrics(metric_name, - scoped_metrics=metrics, - rollup_metrics=metrics)(_test) + _test = validate_transaction_metrics(metric_name, scoped_metrics=metrics, rollup_metrics=metrics)(_test) else: - _test = function_not_called('newrelic.core.stats_engine', - 'StatsEngine.record_transaction')(_test) + _test = function_not_called("newrelic.core.stats_engine", "StatsEngine.record_transaction")(_test) _test() @@ -244,63 +251,82 @@ def _test(): def test_no_transaction_when_nr_disabled(app): settings = global_settings() - @function_not_called('newrelic.core.stats_engine', - 'StatsEngine.record_transaction') - @override_generic_settings(settings, {'enabled': False}) + @function_not_called("newrelic.core.stats_engine", "StatsEngine.record_transaction") + @override_generic_settings(settings, {"enabled": False}) def _test(): - app.fetch('GET', '/') + app.fetch("GET", "/") _test() async def async_returning_middleware(*args, **kwargs): from sanic.response import json - return json({'oops': 'I returned it again'}) + + return json({"oops": "I returned it again"}) def sync_returning_middleware(*args, **kwargs): from sanic.response import json - return json({'oops': 'I returned it again'}) + + return json({"oops": "I returned it again"}) def sync_failing_middleware(*args, **kwargs): from sanic.exceptions import SanicException - raise SanicException('Everything is ok', status_code=200) - - -@pytest.mark.parametrize('middleware,attach_to,metric_name,transaction_name', [ - (async_returning_middleware, 'request', - 'test_application:async_returning_middleware', - 'test_application:async_returning_middleware'), - (sync_returning_middleware, 'request', - 'test_application:sync_returning_middleware', - 'test_application:sync_returning_middleware'), - (sync_failing_middleware, 'request', - 'test_application:sync_failing_middleware', - 'test_application:sync_failing_middleware'), - (async_returning_middleware, 'response', - 'test_application:async_returning_middleware', - '_target_application:index'), - (sync_returning_middleware, 'response', - 'test_application:sync_returning_middleware', - '_target_application:index'), -]) -def test_returning_middleware(app, middleware, attach_to, metric_name, - transaction_name): + + raise SanicException("Everything is ok", status_code=200) + + +@pytest.mark.parametrize( + "middleware,attach_to,metric_name,transaction_name", + [ + ( + async_returning_middleware, + "request", + "test_application:async_returning_middleware", + "test_application:async_returning_middleware", + ), + ( + sync_returning_middleware, + "request", + "test_application:sync_returning_middleware", + "test_application:sync_returning_middleware", + ), + ( + sync_failing_middleware, + "request", + "test_application:sync_failing_middleware", + "test_application:sync_failing_middleware", + ), + ( + async_returning_middleware, + "response", + "test_application:async_returning_middleware", + "_target_application:index", + ), + ( + sync_returning_middleware, + "response", + "test_application:sync_returning_middleware", + "_target_application:index", + ), + ], +) +def test_returning_middleware(app, middleware, attach_to, metric_name, transaction_name): metrics = [ - ('Function/%s' % metric_name, 1), + ("Function/%s" % metric_name, 1), ] @validate_code_level_metrics(*metric_name.split(":")) @validate_transaction_metrics( - transaction_name, - scoped_metrics=metrics, - 
rollup_metrics=metrics, + transaction_name, + scoped_metrics=metrics, + rollup_metrics=metrics, ) @validate_base_transaction_event_attr def _test(): - response = app.fetch('get', '/') + response = app.fetch("get", "/") assert response.status == 200 original_request_middleware = deque(app.app.request_middleware) @@ -319,17 +345,17 @@ def error_middleware(*args, **kwargs): def test_errors_in_middleware(app): - metrics = [('Function/test_application:error_middleware', 1)] + metrics = [("Function/test_application:error_middleware", 1)] @validate_transaction_metrics( - 'test_application:error_middleware', - scoped_metrics=metrics, - rollup_metrics=metrics, + "test_application:error_middleware", + scoped_metrics=metrics, + rollup_metrics=metrics, ) @validate_base_transaction_event_attr - @validate_transaction_errors(errors=['builtins:ValueError']) + @validate_transaction_errors(errors=["builtins:ValueError"]) def _test(): - response = app.fetch('get', '/') + response = app.fetch("get", "/") assert response.status == 500 original_request_middleware = deque(app.app.request_middleware) @@ -358,31 +384,38 @@ def _test(): ) @validate_transaction_errors(errors=[]) def test_blueprint_middleware(app): - response = app.fetch('get', '/blueprint') + response = app.fetch("get", "/blueprint") assert response.status == 200 -def test_unknown_route(app): - import sanic - sanic_version = [int(x) for x in sanic.__version__.split(".")] - _tx_name = "_target_application:CustomRouter.get" if sanic_version[0] < 21 else "_target_application:request_middleware" - +def test_unknown_route(app, sanic_version): + _tx_name = ( + "_target_application:CustomRouter.get" if sanic_version[0] < 21 else "_target_application:request_middleware" + ) + @validate_transaction_metrics(_tx_name) def _test(): - response = app.fetch('get', '/what-route') + response = app.fetch("get", "/what-route") assert response.status == 404 - - _test() -def test_bad_method(app): - import sanic - sanic_version = [int(x) for x in sanic.__version__.split(".")] - _tx_name = "_target_application:CustomRouter.get" if sanic_version[0] < 21 else "_target_application:request_middleware" + _test() + + +def test_bad_method(app, sanic_version): + _tx_name = ( + "_target_application:CustomRouter.get" if sanic_version[0] < 21 else "_target_application:request_middleware" + ) @validate_transaction_metrics(_tx_name) @override_ignore_status_codes([405]) @validate_transaction_errors(errors=[]) def _test(): - response = app.fetch('post', '/') + response = app.fetch("post", "/") assert response.status == 405 + _test() + + +@pytest.fixture +def sanic_version(): + return tuple(int(v) for v in sanic.__version__.split(".")) diff --git a/tox.ini b/tox.ini index 663f50bb9..88e4880c1 100644 --- a/tox.ini +++ b/tox.ini @@ -137,7 +137,7 @@ envlist = python-framework_pyramid-{pypy,py27,py38}-Pyramid0104, python-framework_pyramid-{pypy,py27,pypy37,py37,py38,py39,py310}-Pyramid0110-cornice, python-framework_pyramid-{py37,py38,py39,py310,pypy37}-Pyramidmaster, - python-framework_sanic-{py38,pypy37}-sanic{190301,1906,1812,1912,200904,210300,2109,2112,2203}, + python-framework_sanic-{py38,pypy37}-sanic{190301,1906,1812,1912,200904,210300,2109,2112,2203,2290}, python-framework_sanic-{py37,py38,py39,py310,pypy37}-saniclatest, python-framework_starlette-{py310,pypy37}-starlette{0014,0015,0019}, python-framework_starlette-{py37,py38}-starlette{002001}, @@ -339,8 +339,8 @@ deps = framework_sanic-sanic2109: sanic<21.10 framework_sanic-sanic2112: sanic<21.13 framework_sanic-sanic2203: 
sanic<22.4 - ; Temporarily pin this to the second to last release - framework_sanic-saniclatest: sanic==22.6.2 + framework_sanic-sanic2290: sanic<22.9.1 + framework_sanic-saniclatest: sanic framework_sanic-sanic{1812,190301,1906}: aiohttp framework_starlette: graphene<3 framework_starlette-starlette0014: starlette<0.15 From 83ed0fc60931776dbb0e1d4997d64d78d46dc786 Mon Sep 17 00:00:00 2001 From: Uma Annamalai Date: Wed, 5 Oct 2022 12:06:59 -0700 Subject: [PATCH 42/49] Update aiohttp tests for v3.8.3. (#641) * Update aiohttp tests. * Fix linting warnings. * Add back event_loop import. * Fix pylint unused import * Trigger tests Co-authored-by: Hannah Stepanek --- .../framework_aiohttp/_target_application.py | 4 ++-- tests/framework_aiohttp/conftest.py | 20 ++++++++++++------- tox.ini | 2 +- 3 files changed, 16 insertions(+), 10 deletions(-) diff --git a/tests/framework_aiohttp/_target_application.py b/tests/framework_aiohttp/_target_application.py index 207c75486..895260798 100644 --- a/tests/framework_aiohttp/_target_application.py +++ b/tests/framework_aiohttp/_target_application.py @@ -174,8 +174,8 @@ def multi_fetch_handler(request): return web.Response(text=responses, content_type='text/html') -def make_app(middlewares=None, loop=None): - app = web.Application(middlewares=middlewares, loop=loop) +def make_app(middlewares=None): + app = web.Application(middlewares=middlewares) app.router.add_route('*', '/coro', index) app.router.add_route('*', '/class', HelloWorldView) app.router.add_route('*', '/error', error) diff --git a/tests/framework_aiohttp/conftest.py b/tests/framework_aiohttp/conftest.py index eccf71a72..b4a31d7e2 100644 --- a/tests/framework_aiohttp/conftest.py +++ b/tests/framework_aiohttp/conftest.py @@ -19,8 +19,10 @@ from _target_application import make_app from aiohttp.test_utils import AioHTTPTestCase from aiohttp.test_utils import TestClient as _TestClient -from testing_support.fixture.event_loop import event_loop -from testing_support.fixtures import ( +from testing_support.fixture.event_loop import ( # noqa: F401 pylint: disable=W0611 + event_loop, +) +from testing_support.fixtures import ( # noqa: F401 pylint: disable=W0611 code_coverage_fixture, collector_agent_registration_fixture, collector_available_fixture, @@ -62,10 +64,17 @@ def __init__(self, server_cls, middleware, *args, **kwargs): def setUp(self): super(SimpleAiohttpApp, self).setUp() + if hasattr(self, "asyncSetUp"): + asyncio.get_event_loop().run_until_complete(self.asyncSetUp()) asyncio.set_event_loop(self.loop) def get_app(self, *args, **kwargs): - return make_app(self.middleware, loop=self.loop) + return make_app(self.middleware) + + def tearDown(self): + super(SimpleAiohttpApp, self).tearDown() + if hasattr(self, "asyncTearDown"): + asyncio.get_event_loop().run_until_complete(self.asyncTearDown()) @asyncio.coroutine def _get_client(self, app_or_server): @@ -79,10 +88,7 @@ def _get_client(self, app_or_server): test_server = self.server_cls(app_or_server, scheme=scheme, host=host, **server_kwargs) client_constructor_arg = test_server - try: - return _TestClient(client_constructor_arg, loop=self.loop) - except TypeError: - return _TestClient(client_constructor_arg) + return _TestClient(client_constructor_arg) get_client = _get_client diff --git a/tox.ini b/tox.ini index 88e4880c1..175c12ada 100644 --- a/tox.ini +++ b/tox.ini @@ -269,7 +269,7 @@ deps = external_requests: requests external_urllib3-urllib30109: urllib3<1.10 external_urllib3-urllib3latest: urllib3 - framework_aiohttp-aiohttp03: aiohttp==3.8.1 
+ framework_aiohttp-aiohttp03: aiohttp framework_ariadne-ariadnelatest: ariadne framework_ariadne-ariadne0011: ariadne<0.12 framework_ariadne-ariadne0012: ariadne<0.13 From 20203eb5a3033fe8599460aa256c261f94e3d12a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C5=82=20Borkowski?= Date: Wed, 5 Oct 2022 22:37:33 +0200 Subject: [PATCH 43/49] Fix app id fetch in newrelic-admin record-deploy (#640) Co-authored-by: Timothy Pansino <11214426+TimPansino@users.noreply.github.com> --- newrelic/admin/record_deploy.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/newrelic/admin/record_deploy.py b/newrelic/admin/record_deploy.py index 3a6229de6..65748cc2a 100644 --- a/newrelic/admin/record_deploy.py +++ b/newrelic/admin/record_deploy.py @@ -39,7 +39,7 @@ def fetch_app_id(app_name, client, headers): return for application in response_json["applications"]: - if application["name"] == app_name: + if application["name"].lower() == app_name.lower(): return application["id"] From 0c0a7f89a4af1ed18ebf4975f4309a04c0146286 Mon Sep 17 00:00:00 2001 From: Timothy Pansino <11214426+TimPansino@users.noreply.github.com> Date: Wed, 5 Oct 2022 16:19:01 -0700 Subject: [PATCH 44/49] Add protobuf v4 pb2 file (#646) --- newrelic/core/infinite_tracing_pb2.py | 379 +--------------------- newrelic/core/infinite_tracing_v3_pb2.py | 386 +++++++++++++++++++++++ newrelic/core/infinite_tracing_v4_pb2.py | 57 ++++ setup.py | 2 +- tox.ini | 12 +- 5 files changed, 461 insertions(+), 375 deletions(-) create mode 100644 newrelic/core/infinite_tracing_v3_pb2.py create mode 100644 newrelic/core/infinite_tracing_v4_pb2.py diff --git a/newrelic/core/infinite_tracing_pb2.py b/newrelic/core/infinite_tracing_pb2.py index 987c96303..a0fa9dc54 100644 --- a/newrelic/core/infinite_tracing_pb2.py +++ b/newrelic/core/infinite_tracing_pb2.py @@ -13,374 +13,13 @@ # limitations under the License. 
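Either branch of the version gate added just below re-exports the same generated message classes (Span, AttributeValue, RecordStatus), so callers keep importing from the stable module name whichever protobuf major version is installed. A quick sanity-check sketch, assuming only that the agent package and some protobuf release are importable:

from newrelic.core.infinite_tracing_pb2 import AttributeValue, Span

# Map fields accept plain dicts of message values at construction time.
span = Span(
    trace_id="abc123",
    intrinsics={"name": AttributeValue(string_value="WebTransaction")},
)
# Round-trip through the wire format to confirm the selected descriptors work.
assert Span.FromString(span.SerializeToString()).trace_id == "abc123"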
try: - from google.protobuf import descriptor as _descriptor - from google.protobuf import message as _message - from google.protobuf import reflection as _reflection - from google.protobuf import symbol_database as _symbol_database - # @@protoc_insertion_point(imports) -except ImportError: - pass + from google.protobuf import __version__ + PROTOBUF_VERSION = tuple(int(v) for v in __version__.split(".")) +except Exception: + PROTOBUF_VERSION = (0, 0, 0) + +# Import appropriate generated pb2 file for protobuf version +if PROTOBUF_VERSION >= (4,): + from newrelic.core.infinite_tracing_v4_pb2 import * else: - _sym_db = _symbol_database.Default() - - - DESCRIPTOR = _descriptor.FileDescriptor( - name='infinite_tracing.proto', - package='com.newrelic.trace.v1', - syntax='proto3', - serialized_options=None, - serialized_pb=b'\n\x16infinite_tracing.proto\x12\x15\x63om.newrelic.trace.v1\"\x86\x04\n\x04Span\x12\x10\n\x08trace_id\x18\x01 \x01(\t\x12?\n\nintrinsics\x18\x02 \x03(\x0b\x32+.com.newrelic.trace.v1.Span.IntrinsicsEntry\x12H\n\x0fuser_attributes\x18\x03 \x03(\x0b\x32/.com.newrelic.trace.v1.Span.UserAttributesEntry\x12J\n\x10\x61gent_attributes\x18\x04 \x03(\x0b\x32\x30.com.newrelic.trace.v1.Span.AgentAttributesEntry\x1aX\n\x0fIntrinsicsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x34\n\x05value\x18\x02 \x01(\x0b\x32%.com.newrelic.trace.v1.AttributeValue:\x02\x38\x01\x1a\\\n\x13UserAttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x34\n\x05value\x18\x02 \x01(\x0b\x32%.com.newrelic.trace.v1.AttributeValue:\x02\x38\x01\x1a]\n\x14\x41gentAttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x34\n\x05value\x18\x02 \x01(\x0b\x32%.com.newrelic.trace.v1.AttributeValue:\x02\x38\x01\"t\n\x0e\x41ttributeValue\x12\x16\n\x0cstring_value\x18\x01 \x01(\tH\x00\x12\x14\n\nbool_value\x18\x02 \x01(\x08H\x00\x12\x13\n\tint_value\x18\x03 \x01(\x03H\x00\x12\x16\n\x0c\x64ouble_value\x18\x04 \x01(\x01H\x00\x42\x07\n\x05value\"%\n\x0cRecordStatus\x12\x15\n\rmessages_seen\x18\x01 \x01(\x04\x32\x65\n\rIngestService\x12T\n\nRecordSpan\x12\x1b.com.newrelic.trace.v1.Span\x1a#.com.newrelic.trace.v1.RecordStatus\"\x00(\x01\x30\x01\x62\x06proto3' - ) - - - - - _SPAN_INTRINSICSENTRY = _descriptor.Descriptor( - name='IntrinsicsEntry', - full_name='com.newrelic.trace.v1.Span.IntrinsicsEntry', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='key', full_name='com.newrelic.trace.v1.Span.IntrinsicsEntry.key', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='value', full_name='com.newrelic.trace.v1.Span.IntrinsicsEntry.value', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=b'8\001', - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=291, - serialized_end=379, - ) - - _SPAN_USERATTRIBUTESENTRY = _descriptor.Descriptor( - name='UserAttributesEntry', - full_name='com.newrelic.trace.v1.Span.UserAttributesEntry', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - 
_descriptor.FieldDescriptor( - name='key', full_name='com.newrelic.trace.v1.Span.UserAttributesEntry.key', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='value', full_name='com.newrelic.trace.v1.Span.UserAttributesEntry.value', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=b'8\001', - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=381, - serialized_end=473, - ) - - _SPAN_AGENTATTRIBUTESENTRY = _descriptor.Descriptor( - name='AgentAttributesEntry', - full_name='com.newrelic.trace.v1.Span.AgentAttributesEntry', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='key', full_name='com.newrelic.trace.v1.Span.AgentAttributesEntry.key', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='value', full_name='com.newrelic.trace.v1.Span.AgentAttributesEntry.value', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=b'8\001', - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=475, - serialized_end=568, - ) - - _SPAN = _descriptor.Descriptor( - name='Span', - full_name='com.newrelic.trace.v1.Span', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='trace_id', full_name='com.newrelic.trace.v1.Span.trace_id', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='intrinsics', full_name='com.newrelic.trace.v1.Span.intrinsics', index=1, - number=2, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='user_attributes', full_name='com.newrelic.trace.v1.Span.user_attributes', index=2, - number=3, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='agent_attributes', full_name='com.newrelic.trace.v1.Span.agent_attributes', index=3, - number=4, type=11, cpp_type=10, label=3, - has_default_value=False, 
default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[_SPAN_INTRINSICSENTRY, _SPAN_USERATTRIBUTESENTRY, _SPAN_AGENTATTRIBUTESENTRY, ], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=50, - serialized_end=568, - ) - - - _ATTRIBUTEVALUE = _descriptor.Descriptor( - name='AttributeValue', - full_name='com.newrelic.trace.v1.AttributeValue', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='string_value', full_name='com.newrelic.trace.v1.AttributeValue.string_value', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='bool_value', full_name='com.newrelic.trace.v1.AttributeValue.bool_value', index=1, - number=2, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='int_value', full_name='com.newrelic.trace.v1.AttributeValue.int_value', index=2, - number=3, type=3, cpp_type=2, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='double_value', full_name='com.newrelic.trace.v1.AttributeValue.double_value', index=3, - number=4, type=1, cpp_type=5, label=1, - has_default_value=False, default_value=float(0), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name='value', full_name='com.newrelic.trace.v1.AttributeValue.value', - index=0, containing_type=None, fields=[]), - ], - serialized_start=570, - serialized_end=686, - ) - - - _RECORDSTATUS = _descriptor.Descriptor( - name='RecordStatus', - full_name='com.newrelic.trace.v1.RecordStatus', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='messages_seen', full_name='com.newrelic.trace.v1.RecordStatus.messages_seen', index=0, - number=1, type=4, cpp_type=4, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=688, - serialized_end=725, - ) - - _SPAN_INTRINSICSENTRY.fields_by_name['value'].message_type = _ATTRIBUTEVALUE - _SPAN_INTRINSICSENTRY.containing_type = _SPAN - _SPAN_USERATTRIBUTESENTRY.fields_by_name['value'].message_type = _ATTRIBUTEVALUE - _SPAN_USERATTRIBUTESENTRY.containing_type = _SPAN - 
_SPAN_AGENTATTRIBUTESENTRY.fields_by_name['value'].message_type = _ATTRIBUTEVALUE - _SPAN_AGENTATTRIBUTESENTRY.containing_type = _SPAN - _SPAN.fields_by_name['intrinsics'].message_type = _SPAN_INTRINSICSENTRY - _SPAN.fields_by_name['user_attributes'].message_type = _SPAN_USERATTRIBUTESENTRY - _SPAN.fields_by_name['agent_attributes'].message_type = _SPAN_AGENTATTRIBUTESENTRY - _ATTRIBUTEVALUE.oneofs_by_name['value'].fields.append( - _ATTRIBUTEVALUE.fields_by_name['string_value']) - _ATTRIBUTEVALUE.fields_by_name['string_value'].containing_oneof = _ATTRIBUTEVALUE.oneofs_by_name['value'] - _ATTRIBUTEVALUE.oneofs_by_name['value'].fields.append( - _ATTRIBUTEVALUE.fields_by_name['bool_value']) - _ATTRIBUTEVALUE.fields_by_name['bool_value'].containing_oneof = _ATTRIBUTEVALUE.oneofs_by_name['value'] - _ATTRIBUTEVALUE.oneofs_by_name['value'].fields.append( - _ATTRIBUTEVALUE.fields_by_name['int_value']) - _ATTRIBUTEVALUE.fields_by_name['int_value'].containing_oneof = _ATTRIBUTEVALUE.oneofs_by_name['value'] - _ATTRIBUTEVALUE.oneofs_by_name['value'].fields.append( - _ATTRIBUTEVALUE.fields_by_name['double_value']) - _ATTRIBUTEVALUE.fields_by_name['double_value'].containing_oneof = _ATTRIBUTEVALUE.oneofs_by_name['value'] - DESCRIPTOR.message_types_by_name['Span'] = _SPAN - DESCRIPTOR.message_types_by_name['AttributeValue'] = _ATTRIBUTEVALUE - DESCRIPTOR.message_types_by_name['RecordStatus'] = _RECORDSTATUS - _sym_db.RegisterFileDescriptor(DESCRIPTOR) - - Span = _reflection.GeneratedProtocolMessageType('Span', (_message.Message,), { - - 'IntrinsicsEntry' : _reflection.GeneratedProtocolMessageType('IntrinsicsEntry', (_message.Message,), { - 'DESCRIPTOR' : _SPAN_INTRINSICSENTRY, - '__module__' : 'infinite_tracing_pb2' - # @@protoc_insertion_point(class_scope:com.newrelic.trace.v1.Span.IntrinsicsEntry) - }) - , - - 'UserAttributesEntry' : _reflection.GeneratedProtocolMessageType('UserAttributesEntry', (_message.Message,), { - 'DESCRIPTOR' : _SPAN_USERATTRIBUTESENTRY, - '__module__' : 'infinite_tracing_pb2' - # @@protoc_insertion_point(class_scope:com.newrelic.trace.v1.Span.UserAttributesEntry) - }) - , - - 'AgentAttributesEntry' : _reflection.GeneratedProtocolMessageType('AgentAttributesEntry', (_message.Message,), { - 'DESCRIPTOR' : _SPAN_AGENTATTRIBUTESENTRY, - '__module__' : 'infinite_tracing_pb2' - # @@protoc_insertion_point(class_scope:com.newrelic.trace.v1.Span.AgentAttributesEntry) - }) - , - 'DESCRIPTOR' : _SPAN, - '__module__' : 'infinite_tracing_pb2' - # @@protoc_insertion_point(class_scope:com.newrelic.trace.v1.Span) - }) - _sym_db.RegisterMessage(Span) - _sym_db.RegisterMessage(Span.IntrinsicsEntry) - _sym_db.RegisterMessage(Span.UserAttributesEntry) - _sym_db.RegisterMessage(Span.AgentAttributesEntry) - - AttributeValue = _reflection.GeneratedProtocolMessageType('AttributeValue', (_message.Message,), { - 'DESCRIPTOR' : _ATTRIBUTEVALUE, - '__module__' : 'infinite_tracing_pb2' - # @@protoc_insertion_point(class_scope:com.newrelic.trace.v1.AttributeValue) - }) - _sym_db.RegisterMessage(AttributeValue) - - RecordStatus = _reflection.GeneratedProtocolMessageType('RecordStatus', (_message.Message,), { - 'DESCRIPTOR' : _RECORDSTATUS, - '__module__' : 'infinite_tracing_pb2' - # @@protoc_insertion_point(class_scope:com.newrelic.trace.v1.RecordStatus) - }) - _sym_db.RegisterMessage(RecordStatus) - - - _SPAN_INTRINSICSENTRY._options = None - _SPAN_USERATTRIBUTESENTRY._options = None - _SPAN_AGENTATTRIBUTESENTRY._options = None - - _INGESTSERVICE = _descriptor.ServiceDescriptor( - name='IngestService', - 
full_name='com.newrelic.trace.v1.IngestService', - file=DESCRIPTOR, - index=0, - serialized_options=None, - serialized_start=727, - serialized_end=828, - methods=[ - _descriptor.MethodDescriptor( - name='RecordSpan', - full_name='com.newrelic.trace.v1.IngestService.RecordSpan', - index=0, - containing_service=None, - input_type=_SPAN, - output_type=_RECORDSTATUS, - serialized_options=None, - ), - ]) - _sym_db.RegisterServiceDescriptor(_INGESTSERVICE) - - DESCRIPTOR.services_by_name['IngestService'] = _INGESTSERVICE - - # @@protoc_insertion_point(module_scope) - + from newrelic.core.infinite_tracing_v3_pb2 import * diff --git a/newrelic/core/infinite_tracing_v3_pb2.py b/newrelic/core/infinite_tracing_v3_pb2.py new file mode 100644 index 000000000..987c96303 --- /dev/null +++ b/newrelic/core/infinite_tracing_v3_pb2.py @@ -0,0 +1,386 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +try: + from google.protobuf import descriptor as _descriptor + from google.protobuf import message as _message + from google.protobuf import reflection as _reflection + from google.protobuf import symbol_database as _symbol_database + # @@protoc_insertion_point(imports) +except ImportError: + pass +else: + _sym_db = _symbol_database.Default() + + + DESCRIPTOR = _descriptor.FileDescriptor( + name='infinite_tracing.proto', + package='com.newrelic.trace.v1', + syntax='proto3', + serialized_options=None, + serialized_pb=b'\n\x16infinite_tracing.proto\x12\x15\x63om.newrelic.trace.v1\"\x86\x04\n\x04Span\x12\x10\n\x08trace_id\x18\x01 \x01(\t\x12?\n\nintrinsics\x18\x02 \x03(\x0b\x32+.com.newrelic.trace.v1.Span.IntrinsicsEntry\x12H\n\x0fuser_attributes\x18\x03 \x03(\x0b\x32/.com.newrelic.trace.v1.Span.UserAttributesEntry\x12J\n\x10\x61gent_attributes\x18\x04 \x03(\x0b\x32\x30.com.newrelic.trace.v1.Span.AgentAttributesEntry\x1aX\n\x0fIntrinsicsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x34\n\x05value\x18\x02 \x01(\x0b\x32%.com.newrelic.trace.v1.AttributeValue:\x02\x38\x01\x1a\\\n\x13UserAttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x34\n\x05value\x18\x02 \x01(\x0b\x32%.com.newrelic.trace.v1.AttributeValue:\x02\x38\x01\x1a]\n\x14\x41gentAttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x34\n\x05value\x18\x02 \x01(\x0b\x32%.com.newrelic.trace.v1.AttributeValue:\x02\x38\x01\"t\n\x0e\x41ttributeValue\x12\x16\n\x0cstring_value\x18\x01 \x01(\tH\x00\x12\x14\n\nbool_value\x18\x02 \x01(\x08H\x00\x12\x13\n\tint_value\x18\x03 \x01(\x03H\x00\x12\x16\n\x0c\x64ouble_value\x18\x04 \x01(\x01H\x00\x42\x07\n\x05value\"%\n\x0cRecordStatus\x12\x15\n\rmessages_seen\x18\x01 \x01(\x04\x32\x65\n\rIngestService\x12T\n\nRecordSpan\x12\x1b.com.newrelic.trace.v1.Span\x1a#.com.newrelic.trace.v1.RecordStatus\"\x00(\x01\x30\x01\x62\x06proto3' + ) + + + + + _SPAN_INTRINSICSENTRY = _descriptor.Descriptor( + name='IntrinsicsEntry', + full_name='com.newrelic.trace.v1.Span.IntrinsicsEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', 
full_name='com.newrelic.trace.v1.Span.IntrinsicsEntry.key', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='value', full_name='com.newrelic.trace.v1.Span.IntrinsicsEntry.value', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=b'8\001', + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=291, + serialized_end=379, + ) + + _SPAN_USERATTRIBUTESENTRY = _descriptor.Descriptor( + name='UserAttributesEntry', + full_name='com.newrelic.trace.v1.Span.UserAttributesEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='com.newrelic.trace.v1.Span.UserAttributesEntry.key', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='value', full_name='com.newrelic.trace.v1.Span.UserAttributesEntry.value', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=b'8\001', + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=381, + serialized_end=473, + ) + + _SPAN_AGENTATTRIBUTESENTRY = _descriptor.Descriptor( + name='AgentAttributesEntry', + full_name='com.newrelic.trace.v1.Span.AgentAttributesEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='com.newrelic.trace.v1.Span.AgentAttributesEntry.key', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='value', full_name='com.newrelic.trace.v1.Span.AgentAttributesEntry.value', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=b'8\001', + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=475, + serialized_end=568, + ) + + _SPAN = _descriptor.Descriptor( + name='Span', + full_name='com.newrelic.trace.v1.Span', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='trace_id', full_name='com.newrelic.trace.v1.Span.trace_id', index=0, + number=1, type=9, 
cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='intrinsics', full_name='com.newrelic.trace.v1.Span.intrinsics', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='user_attributes', full_name='com.newrelic.trace.v1.Span.user_attributes', index=2, + number=3, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='agent_attributes', full_name='com.newrelic.trace.v1.Span.agent_attributes', index=3, + number=4, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[_SPAN_INTRINSICSENTRY, _SPAN_USERATTRIBUTESENTRY, _SPAN_AGENTATTRIBUTESENTRY, ], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=50, + serialized_end=568, + ) + + + _ATTRIBUTEVALUE = _descriptor.Descriptor( + name='AttributeValue', + full_name='com.newrelic.trace.v1.AttributeValue', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='string_value', full_name='com.newrelic.trace.v1.AttributeValue.string_value', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='bool_value', full_name='com.newrelic.trace.v1.AttributeValue.bool_value', index=1, + number=2, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='int_value', full_name='com.newrelic.trace.v1.AttributeValue.int_value', index=2, + number=3, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='double_value', full_name='com.newrelic.trace.v1.AttributeValue.double_value', index=3, + number=4, type=1, cpp_type=5, label=1, + has_default_value=False, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name='value', full_name='com.newrelic.trace.v1.AttributeValue.value', + index=0, containing_type=None, 
fields=[]), + ], + serialized_start=570, + serialized_end=686, + ) + + + _RECORDSTATUS = _descriptor.Descriptor( + name='RecordStatus', + full_name='com.newrelic.trace.v1.RecordStatus', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='messages_seen', full_name='com.newrelic.trace.v1.RecordStatus.messages_seen', index=0, + number=1, type=4, cpp_type=4, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=688, + serialized_end=725, + ) + + _SPAN_INTRINSICSENTRY.fields_by_name['value'].message_type = _ATTRIBUTEVALUE + _SPAN_INTRINSICSENTRY.containing_type = _SPAN + _SPAN_USERATTRIBUTESENTRY.fields_by_name['value'].message_type = _ATTRIBUTEVALUE + _SPAN_USERATTRIBUTESENTRY.containing_type = _SPAN + _SPAN_AGENTATTRIBUTESENTRY.fields_by_name['value'].message_type = _ATTRIBUTEVALUE + _SPAN_AGENTATTRIBUTESENTRY.containing_type = _SPAN + _SPAN.fields_by_name['intrinsics'].message_type = _SPAN_INTRINSICSENTRY + _SPAN.fields_by_name['user_attributes'].message_type = _SPAN_USERATTRIBUTESENTRY + _SPAN.fields_by_name['agent_attributes'].message_type = _SPAN_AGENTATTRIBUTESENTRY + _ATTRIBUTEVALUE.oneofs_by_name['value'].fields.append( + _ATTRIBUTEVALUE.fields_by_name['string_value']) + _ATTRIBUTEVALUE.fields_by_name['string_value'].containing_oneof = _ATTRIBUTEVALUE.oneofs_by_name['value'] + _ATTRIBUTEVALUE.oneofs_by_name['value'].fields.append( + _ATTRIBUTEVALUE.fields_by_name['bool_value']) + _ATTRIBUTEVALUE.fields_by_name['bool_value'].containing_oneof = _ATTRIBUTEVALUE.oneofs_by_name['value'] + _ATTRIBUTEVALUE.oneofs_by_name['value'].fields.append( + _ATTRIBUTEVALUE.fields_by_name['int_value']) + _ATTRIBUTEVALUE.fields_by_name['int_value'].containing_oneof = _ATTRIBUTEVALUE.oneofs_by_name['value'] + _ATTRIBUTEVALUE.oneofs_by_name['value'].fields.append( + _ATTRIBUTEVALUE.fields_by_name['double_value']) + _ATTRIBUTEVALUE.fields_by_name['double_value'].containing_oneof = _ATTRIBUTEVALUE.oneofs_by_name['value'] + DESCRIPTOR.message_types_by_name['Span'] = _SPAN + DESCRIPTOR.message_types_by_name['AttributeValue'] = _ATTRIBUTEVALUE + DESCRIPTOR.message_types_by_name['RecordStatus'] = _RECORDSTATUS + _sym_db.RegisterFileDescriptor(DESCRIPTOR) + + Span = _reflection.GeneratedProtocolMessageType('Span', (_message.Message,), { + + 'IntrinsicsEntry' : _reflection.GeneratedProtocolMessageType('IntrinsicsEntry', (_message.Message,), { + 'DESCRIPTOR' : _SPAN_INTRINSICSENTRY, + '__module__' : 'infinite_tracing_pb2' + # @@protoc_insertion_point(class_scope:com.newrelic.trace.v1.Span.IntrinsicsEntry) + }) + , + + 'UserAttributesEntry' : _reflection.GeneratedProtocolMessageType('UserAttributesEntry', (_message.Message,), { + 'DESCRIPTOR' : _SPAN_USERATTRIBUTESENTRY, + '__module__' : 'infinite_tracing_pb2' + # @@protoc_insertion_point(class_scope:com.newrelic.trace.v1.Span.UserAttributesEntry) + }) + , + + 'AgentAttributesEntry' : _reflection.GeneratedProtocolMessageType('AgentAttributesEntry', (_message.Message,), { + 'DESCRIPTOR' : _SPAN_AGENTATTRIBUTESENTRY, + '__module__' : 'infinite_tracing_pb2' + # @@protoc_insertion_point(class_scope:com.newrelic.trace.v1.Span.AgentAttributesEntry) + }) + , + 'DESCRIPTOR' : _SPAN, + 
'__module__' : 'infinite_tracing_pb2' + # @@protoc_insertion_point(class_scope:com.newrelic.trace.v1.Span) + }) + _sym_db.RegisterMessage(Span) + _sym_db.RegisterMessage(Span.IntrinsicsEntry) + _sym_db.RegisterMessage(Span.UserAttributesEntry) + _sym_db.RegisterMessage(Span.AgentAttributesEntry) + + AttributeValue = _reflection.GeneratedProtocolMessageType('AttributeValue', (_message.Message,), { + 'DESCRIPTOR' : _ATTRIBUTEVALUE, + '__module__' : 'infinite_tracing_pb2' + # @@protoc_insertion_point(class_scope:com.newrelic.trace.v1.AttributeValue) + }) + _sym_db.RegisterMessage(AttributeValue) + + RecordStatus = _reflection.GeneratedProtocolMessageType('RecordStatus', (_message.Message,), { + 'DESCRIPTOR' : _RECORDSTATUS, + '__module__' : 'infinite_tracing_pb2' + # @@protoc_insertion_point(class_scope:com.newrelic.trace.v1.RecordStatus) + }) + _sym_db.RegisterMessage(RecordStatus) + + + _SPAN_INTRINSICSENTRY._options = None + _SPAN_USERATTRIBUTESENTRY._options = None + _SPAN_AGENTATTRIBUTESENTRY._options = None + + _INGESTSERVICE = _descriptor.ServiceDescriptor( + name='IngestService', + full_name='com.newrelic.trace.v1.IngestService', + file=DESCRIPTOR, + index=0, + serialized_options=None, + serialized_start=727, + serialized_end=828, + methods=[ + _descriptor.MethodDescriptor( + name='RecordSpan', + full_name='com.newrelic.trace.v1.IngestService.RecordSpan', + index=0, + containing_service=None, + input_type=_SPAN, + output_type=_RECORDSTATUS, + serialized_options=None, + ), + ]) + _sym_db.RegisterServiceDescriptor(_INGESTSERVICE) + + DESCRIPTOR.services_by_name['IngestService'] = _INGESTSERVICE + + # @@protoc_insertion_point(module_scope) + diff --git a/newrelic/core/infinite_tracing_v4_pb2.py b/newrelic/core/infinite_tracing_v4_pb2.py new file mode 100644 index 000000000..ae1739670 --- /dev/null +++ b/newrelic/core/infinite_tracing_v4_pb2.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- + +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: v1.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x08v1.proto\x12\x15\x63om.newrelic.trace.v1\"7\n\tSpanBatch\x12*\n\x05spans\x18\x01 \x03(\x0b\x32\x1b.com.newrelic.trace.v1.Span\"\x86\x04\n\x04Span\x12\x10\n\x08trace_id\x18\x01 \x01(\t\x12?\n\nintrinsics\x18\x02 \x03(\x0b\x32+.com.newrelic.trace.v1.Span.IntrinsicsEntry\x12H\n\x0fuser_attributes\x18\x03 \x03(\x0b\x32/.com.newrelic.trace.v1.Span.UserAttributesEntry\x12J\n\x10\x61gent_attributes\x18\x04 \x03(\x0b\x32\x30.com.newrelic.trace.v1.Span.AgentAttributesEntry\x1aX\n\x0fIntrinsicsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x34\n\x05value\x18\x02 \x01(\x0b\x32%.com.newrelic.trace.v1.AttributeValue:\x02\x38\x01\x1a\\\n\x13UserAttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x34\n\x05value\x18\x02 \x01(\x0b\x32%.com.newrelic.trace.v1.AttributeValue:\x02\x38\x01\x1a]\n\x14\x41gentAttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x34\n\x05value\x18\x02 \x01(\x0b\x32%.com.newrelic.trace.v1.AttributeValue:\x02\x38\x01\"t\n\x0e\x41ttributeValue\x12\x16\n\x0cstring_value\x18\x01 \x01(\tH\x00\x12\x14\n\nbool_value\x18\x02 \x01(\x08H\x00\x12\x13\n\tint_value\x18\x03 \x01(\x03H\x00\x12\x16\n\x0c\x64ouble_value\x18\x04 \x01(\x01H\x00\x42\x07\n\x05value\"%\n\x0cRecordStatus\x12\x15\n\rmessages_seen\x18\x01 \x01(\x04\x32\xc5\x01\n\rIngestService\x12T\n\nRecordSpan\x12\x1b.com.newrelic.trace.v1.Span\x1a#.com.newrelic.trace.v1.RecordStatus\"\x00(\x01\x30\x01\x12^\n\x0fRecordSpanBatch\x12 .com.newrelic.trace.v1.SpanBatch\x1a#.com.newrelic.trace.v1.RecordStatus\"\x00(\x01\x30\x01\x62\x06proto3') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'v1_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + _SPAN_INTRINSICSENTRY._options = None + _SPAN_INTRINSICSENTRY._serialized_options = b'8\001' + _SPAN_USERATTRIBUTESENTRY._options = None + _SPAN_USERATTRIBUTESENTRY._serialized_options = b'8\001' + _SPAN_AGENTATTRIBUTESENTRY._options = None + _SPAN_AGENTATTRIBUTESENTRY._serialized_options = b'8\001' + _SPANBATCH._serialized_start=35 + _SPANBATCH._serialized_end=90 + _SPAN._serialized_start=93 + _SPAN._serialized_end=611 + _SPAN_INTRINSICSENTRY._serialized_start=334 + _SPAN_INTRINSICSENTRY._serialized_end=422 + _SPAN_USERATTRIBUTESENTRY._serialized_start=424 + _SPAN_USERATTRIBUTESENTRY._serialized_end=516 + _SPAN_AGENTATTRIBUTESENTRY._serialized_start=518 + _SPAN_AGENTATTRIBUTESENTRY._serialized_end=611 + _ATTRIBUTEVALUE._serialized_start=613 + _ATTRIBUTEVALUE._serialized_end=729 + _RECORDSTATUS._serialized_start=731 + _RECORDSTATUS._serialized_end=768 + _INGESTSERVICE._serialized_start=771 + _INGESTSERVICE._serialized_end=968 +# @@protoc_insertion_point(module_scope) diff --git a/setup.py b/setup.py index 5fdf2005f..be891626c 100644 --- a/setup.py +++ b/setup.py @@ -155,7 +155,7 @@ def build_extension(self, ext): "newrelic": ["newrelic.ini", "version.txt", "packages/urllib3/LICENSE.txt", "common/cacert.pem"], }, scripts=["scripts/newrelic-admin"], - extras_require={"infinite-tracing": ["grpcio", "protobuf<4"]}, + extras_require={"infinite-tracing": 
["grpcio", "protobuf"]}, ) if with_setuptools: diff --git a/tox.ini b/tox.ini index 175c12ada..2d0480a45 100644 --- a/tox.ini +++ b/tox.ini @@ -54,7 +54,8 @@ envlist = python-agent_features-{py27,py37,py38,py39,py310}-{with,without}_extensions, python-agent_features-{pypy,pypy37}-without_extensions, python-agent_streaming-py27-grpc0125-{with,without}_extensions, - python-agent_streaming-{py37,py38,py39,py310}-{with,without}_extensions, + python-agent_streaming-{py37,py38,py39,py310}-protobuf04-{with,without}_extensions, + python-agent_streaming-py39-protobuf{03,0319}-{with,without}_extensions, python-agent_unittests-{py27,py37,py38,py39,py310}-{with,without}_extensions, python-agent_unittests-{pypy,pypy37}-without_extensions, python-application_celery-{py27,py37,py38,py39,py310,pypy,pypy37}, @@ -322,9 +323,12 @@ deps = framework_grpc-grpclatest: protobuf<4 framework_grpc-grpclatest: grpcio framework_grpc-grpclatest: grpcio-tools - framework_grpc-grpc0125: grpcio<1.26 - framework_grpc-grpc0125: grpcio-tools<1.26 - framework_grpc-grpc0125: protobuf<3.18.0 + grpc0125: grpcio<1.26 + grpc0125: grpcio-tools<1.26 + grpc0125: protobuf<3.18.0 + protobuf0319: protobuf<3.20 + protobuf03: protobuf<4 + protobuf04: protobuf<5 framework_pyramid: routes framework_pyramid-cornice: cornice!=5.0.0 framework_pyramid-Pyramid0104: Pyramid<1.5 From 7c52253e19b2f109187eadacaa8c414008bc339e Mon Sep 17 00:00:00 2001 From: Timothy Pansino <11214426+TimPansino@users.noreply.github.com> Date: Wed, 5 Oct 2022 16:36:38 -0700 Subject: [PATCH 45/49] Fix megalinter config (#648) Co-authored-by: Lalleh Rafeei <84813886+lrafeei@users.noreply.github.com> --- .mega-linter.yml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.mega-linter.yml b/.mega-linter.yml index f8456d059..0f7205fcd 100644 --- a/.mega-linter.yml +++ b/.mega-linter.yml @@ -31,6 +31,8 @@ PYTHON_PYLINT_CONFIG_FILE: pyproject.toml PYTHON_ISORT_CONFIG_FILE: pyproject.toml PYTHON_BANDIT_CONFIG_FILE: pyproject.toml PYTHON_BANDIT_FILTER_REGEX_EXCLUDE: "tests" -PYTHON_BANDIT_PRE_COMMANDS: ["pip install bandit[toml]"] +PYTHON_BANDIT_PRE_COMMANDS: + - command: "pip install bandit[toml]" + cwd: "workspace" PYTHON_PYLINT_ARGUMENTS: "--fail-under=0 --fail-on=E" From 8f84692933266958f00d3730b302b260c0e2a3b0 Mon Sep 17 00:00:00 2001 From: Timothy Pansino <11214426+TimPansino@users.noreply.github.com> Date: Wed, 5 Oct 2022 16:54:27 -0700 Subject: [PATCH 46/49] Fix missing data on used instrumentation hooks. 
(#649) Co-authored-by: Lalleh Rafeei <84813886+lrafeei@users.noreply.github.com> --- newrelic/core/environment.py | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/newrelic/core/environment.py b/newrelic/core/environment.py index 799b266a8..9fc6e2dd4 100644 --- a/newrelic/core/environment.py +++ b/newrelic/core/environment.py @@ -29,11 +29,6 @@ total_physical_memory, ) -# try: -# import pkg_resources -# except ImportError: -# pass - try: import newrelic.core._thread_utilization except ImportError: @@ -245,7 +240,7 @@ def get_version(name): # pylint: disable=function-redefined version = get_version(name) plugins.append("%s (%s)" % (name, version)) except Exception: - pass + plugins.append(name) env.append(("Plugin List", plugins)) From 94d699a97f982f38c916fe78ff7adb0085aa575e Mon Sep 17 00:00:00 2001 From: Timothy Pansino <11214426+TimPansino@users.noreply.github.com> Date: Wed, 5 Oct 2022 17:11:01 -0700 Subject: [PATCH 47/49] Fix crashes in aioredis transactions (#633) * Fix aioredis wrappers for transactions * Refactor aioredis test setup * Expand aioredis tox testing * Add no harm tests for aioredis transactions Co-authored-by: Lalleh Rafeei <84813886+lrafeei@users.noreply.github.com> --- newrelic/hooks/datastore_aioredis.py | 92 +++++++++++- tests/datastore_aioredis/conftest.py | 31 ++-- .../test_custom_conn_pool.py | 36 ++--- .../test_execute_command.py | 18 +-- tests/datastore_aioredis/test_get_and_set.py | 17 --- .../datastore_aioredis/test_instance_info.py | 40 +++--- tests/datastore_aioredis/test_multiple_dbs.py | 61 ++++---- tests/datastore_aioredis/test_span_event.py | 16 --- tests/datastore_aioredis/test_trace_node.py | 20 --- tests/datastore_aioredis/test_transactions.py | 134 ++++++++++++++++++ .../test_uninstrumented_methods.py | 22 --- tox.ini | 2 +- 12 files changed, 309 insertions(+), 180 deletions(-) create mode 100644 tests/datastore_aioredis/test_transactions.py diff --git a/newrelic/hooks/datastore_aioredis.py b/newrelic/hooks/datastore_aioredis.py index 13f08fd19..a2267960c 100644 --- a/newrelic/hooks/datastore_aioredis.py +++ b/newrelic/hooks/datastore_aioredis.py @@ -12,16 +12,25 @@ # See the License for the specific language governing permissions and # limitations under the License. 
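The crash being fixed here comes from pipeline semantics: inside an aioredis pipeline, command methods queue synchronously and return the pipeline itself for chaining, so a wrapper that unconditionally awaits them fails before execute() ever runs. A minimal sketch of the calling pattern the new wrappers must handle (aioredis 2.x API, assuming a Redis server on localhost:6379):

import asyncio

import aioredis


async def main():
    client = aioredis.Redis(host="localhost", port=6379, db=0)
    async with client.pipeline(transaction=True) as pipe:
        pipe.set("key", "1")  # queued synchronously; nothing is awaited here
        pipe.incr("key")
        print(await pipe.execute())  # queued commands run now: [True, 2]


asyncio.run(main())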
-from newrelic.api.datastore_trace import DatastoreTrace
+from newrelic.api.datastore_trace import DatastoreTrace, DatastoreTraceWrapper
 from newrelic.api.time_trace import current_trace
 from newrelic.api.transaction import current_transaction
-from newrelic.common.object_wrapper import wrap_function_wrapper
+from newrelic.common.object_wrapper import wrap_function_wrapper, function_wrapper, FunctionWrapper
 from newrelic.hooks.datastore_redis import (
     _redis_client_methods,
     _redis_multipart_commands,
     _redis_operation_re,
 )
+from newrelic.common.async_wrapper import async_wrapper
+
+import aioredis
+
+try:
+    AIOREDIS_VERSION = tuple(int(x) for x in getattr(aioredis, "__version__").split("."))
+except Exception:
+    AIOREDIS_VERSION = (0, 0, 0)
+
 
 def _conn_attrs_to_dict(connection):
     host = getattr(connection, "host", None)
@@ -45,13 +54,36 @@ def _instance_info(kwargs):
 
 
 def _wrap_AioRedis_method_wrapper(module, instance_class_name, operation):
-    async def _nr_wrapper_AioRedis_method_(wrapped, instance, args, kwargs):
+
+    @function_wrapper
+    async def _nr_wrapper_AioRedis_async_method_(wrapped, instance, args, kwargs):
         transaction = current_transaction()
         if transaction is None:
             return await wrapped(*args, **kwargs)
 
         with DatastoreTrace(product="Redis", target=None, operation=operation):
             return await wrapped(*args, **kwargs)
+
+    def _nr_wrapper_AioRedis_method_(wrapped, instance, args, kwargs):
+        # Check whether the command is being queued on a pipeline and return
+        # early if so: a queued method returns synchronously without executing,
+        # and is added to the command stack to be run later.
+        if AIOREDIS_VERSION < (2,):
+            # AioRedis v1 uses a RedisBuffer instead of a real connection for queueing up pipeline commands
+            from aioredis.commands.transaction import _RedisBuffer
+            if isinstance(instance._pool_or_conn, _RedisBuffer):
+                # Method will return synchronously without executing,
+                # it will be added to the command stack and run later.
+                return wrapped(*args, **kwargs)
+        else:
+            # AioRedis v2 uses a Pipeline object for a client and internally queues up pipeline commands
+            from aioredis.client import Pipeline
+            if isinstance(instance, Pipeline):
+                return wrapped(*args, **kwargs)
+
+        # Otherwise the method only runs when awaited, so wrap it in the async wrapper above.
+        return _nr_wrapper_AioRedis_async_method_(wrapped)(*args, **kwargs)
+
 
     name = "%s.%s" % (instance_class_name, operation)
     wrap_function_wrapper(module, name, _nr_wrapper_AioRedis_method_)
@@ -108,6 +140,58 @@ async def wrap_Connection_send_command(wrapped, instance, args, kwargs):
         return await wrapped(*args, **kwargs)
 
 
+def wrap_RedisConnection_execute(wrapped, instance, args, kwargs):
+    # RedisConnection in aioredis v1 returns a future instead of using coroutines
+    transaction = current_transaction()
+    if not transaction:
+        return wrapped(*args, **kwargs)
+
+    host, port_path_or_id, db = (None, None, None)
+
+    try:
+        dt = transaction.settings.datastore_tracer
+        if dt.instance_reporting.enabled or dt.database_name_reporting.enabled:
+            conn_kwargs = _conn_attrs_to_dict(instance)
+            host, port_path_or_id, db = _instance_info(conn_kwargs)
+    except Exception:
+        pass
+
+    # Older Redis clients would, when sending multi-part commands, pass
+    # them in as separate arguments to send_command(). We therefore need to
+    # detect those and grab the next argument from the set of arguments.
+
+    operation = args[0].strip().lower()
+
+    # If it's not a multi-part command, there's no need to start a new
+    # trace here, so we can return early.
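+    # _redis_multipart_commands, imported above from the synchronous redis
+    # hooks, holds commands such as "client" whose first argument is a
+    # subcommand (e.g. the "CLIENT LIST" call exercised in these tests), so
+    # both words belong in the traced operation name.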
+
+    if operation.split()[0] not in _redis_multipart_commands:
+        # Set the datastore info on the DatastoreTrace containing this function call.
+        trace = current_trace()
+
+        # Find DatastoreTrace no matter how many other traces are in between
+        while trace is not None and not isinstance(trace, DatastoreTrace):
+            trace = getattr(trace, "parent", None)
+
+        if trace is not None:
+            trace.host = host
+            trace.port_path_or_id = port_path_or_id
+            trace.database_name = db
+
+        return wrapped(*args, **kwargs)
+
+    # Convert multi args to single arg string
+
+    if operation in _redis_multipart_commands and len(args) > 1:
+        operation = "%s %s" % (operation, args[1].strip().lower())
+
+    operation = _redis_operation_re.sub("_", operation)
+
+    with DatastoreTrace(
+        product="Redis", target=None, operation=operation, host=host, port_path_or_id=port_path_or_id, database_name=db
+    ):
+        return wrapped(*args, **kwargs)
+
+
 def instrument_aioredis_client(module):
     # StrictRedis is just an alias of Redis, no need to wrap it as well.
     if hasattr(module, "Redis"):
@@ -124,4 +209,4 @@ def instrument_aioredis_connection(module):
 
     if hasattr(module, "RedisConnection"):
         if hasattr(module.RedisConnection, "execute"):
-            wrap_function_wrapper(module, "RedisConnection.execute", wrap_Connection_send_command)
\ No newline at end of file
+            wrap_function_wrapper(module, "RedisConnection.execute", wrap_RedisConnection_execute)
diff --git a/tests/datastore_aioredis/conftest.py b/tests/datastore_aioredis/conftest.py
index d144af2df..de9c6c04d 100644
--- a/tests/datastore_aioredis/conftest.py
+++ b/tests/datastore_aioredis/conftest.py
@@ -13,9 +13,11 @@
 # limitations under the License.
 
 import aioredis
-import asyncio
 import pytest
 
+from testing_support.db_settings import redis_settings
+
+from testing_support.fixture.event_loop import event_loop as loop
 from testing_support.fixtures import (  # noqa: F401
     code_coverage_fixture,
     collector_agent_registration_fixture,
@@ -23,6 +25,10 @@
 )
 
 AIOREDIS_VERSION = tuple(int(x) for x in aioredis.__version__.split(".")[:2])
+SKIPIF_AIOREDIS_V1 = pytest.mark.skipif(AIOREDIS_VERSION < (2,), reason="Unsupported aioredis version.")
+SKIPIF_AIOREDIS_V2 = pytest.mark.skipif(AIOREDIS_VERSION >= (2,), reason="Unsupported aioredis version.")
+DB_SETTINGS = redis_settings()[0]
+
 
 _coverage_source = [
     "newrelic.hooks.datastore_aioredis",
@@ -45,10 +51,19 @@
 )
 
 
-event_loop = asyncio.get_event_loop()
-asyncio.set_event_loop(event_loop)
-
-
-@pytest.fixture()
-def loop():
-    yield event_loop
+@pytest.fixture(params=("Redis", "StrictRedis"))
+def client(request, loop):
+    if AIOREDIS_VERSION >= (2, 0):
+        if request.param == "Redis":
+            return aioredis.Redis(host=DB_SETTINGS["host"], port=DB_SETTINGS["port"], db=0)
+        elif request.param == "StrictRedis":
+            return aioredis.StrictRedis(host=DB_SETTINGS["host"], port=DB_SETTINGS["port"], db=0)
+        else:
+            raise NotImplementedError()
+    else:
+        if request.param == "Redis":
+            return loop.run_until_complete(aioredis.create_redis("redis://%s:%d" % (DB_SETTINGS["host"], DB_SETTINGS["port"]), db=0))
+        elif request.param == "StrictRedis":
+            pytest.skip("StrictRedis not implemented.")
+        else:
+            raise NotImplementedError()
diff --git a/tests/datastore_aioredis/test_custom_conn_pool.py b/tests/datastore_aioredis/test_custom_conn_pool.py
index 155765e5a..7644e8ffb 100644
--- a/tests/datastore_aioredis/test_custom_conn_pool.py
+++ b/tests/datastore_aioredis/test_custom_conn_pool.py
@@ -17,12 +17,6 @@
 will not result in an error.
""" -import asyncio -import pytest -import aioredis - -from conftest import event_loop, loop, AIOREDIS_VERSION - from newrelic.api.background_task import background_task # from testing_support.fixture.event_loop import event_loop as loop @@ -43,7 +37,7 @@ async def get_connection(self, name=None, *keys, **options): return self.connection async def release(self, connection): - self.connection.disconnect() + await self.connection.disconnect() async def execute(self, *args, **kwargs): return await self.connection.execute(*args, **kwargs) @@ -105,18 +99,6 @@ async def exercise_redis(client): await client.execute("CLIENT", "LIST") -if AIOREDIS_VERSION >= (2, 0): - clients = [ - aioredis.Redis(host=DB_SETTINGS["host"], port=_port, db=0), - aioredis.StrictRedis(host=DB_SETTINGS["host"], port=_port, db=0), - ] -else: - clients = [ - event_loop.run_until_complete(aioredis.create_redis("redis://%s:%d" % (DB_SETTINGS["host"], _port), db=0)), - ] - - -@pytest.mark.parametrize("client", clients) @override_application_settings(_enable_instance_settings) @validate_transaction_metrics( "test_custom_conn_pool:test_fake_conn_pool_enable_instance", @@ -125,7 +107,7 @@ async def exercise_redis(client): background_task=True, ) @background_task() -def test_fake_conn_pool_enable_instance(client, loop): +def test_fake_conn_pool_enable_instance(client, loop, monkeypatch): # Get a real connection conn = getattr(client, "_pool_or_conn", None) if conn is None: @@ -135,14 +117,13 @@ def test_fake_conn_pool_enable_instance(client, loop): # have the `connection_kwargs` attribute. fake_pool = FakeConnectionPool(conn) - client.connection_pool = fake_pool - client._pool_or_conn = fake_pool + monkeypatch.setattr(client, "connection_pool", fake_pool, raising=False) + monkeypatch.setattr(client, "_pool_or_conn", fake_pool, raising=False) assert not hasattr(client.connection_pool, "connection_kwargs") loop.run_until_complete(exercise_redis(client)) -@pytest.mark.parametrize("client", clients) @override_application_settings(_disable_instance_settings) @validate_transaction_metrics( "test_custom_conn_pool:test_fake_conn_pool_disable_instance", @@ -151,15 +132,18 @@ def test_fake_conn_pool_enable_instance(client, loop): background_task=True, ) @background_task() -def test_fake_conn_pool_disable_instance(client, loop): +def test_fake_conn_pool_disable_instance(client, loop, monkeypatch): # Get a real connection - conn = loop.run_until_complete(client.connection_pool.get_connection("GET")) + conn = getattr(client, "_pool_or_conn", None) + if conn is None: + conn = loop.run_until_complete(client.connection_pool.get_connection("GET")) # Replace the original connection pool with one that doesn't # have the `connection_kwargs` attribute. fake_pool = FakeConnectionPool(conn) - client.connection_pool = fake_pool + monkeypatch.setattr(client, "connection_pool", fake_pool, raising=False) + monkeypatch.setattr(client, "_pool_or_conn", fake_pool, raising=False) assert not hasattr(client.connection_pool, "connection_kwargs") loop.run_until_complete(exercise_redis(client)) diff --git a/tests/datastore_aioredis/test_execute_command.py b/tests/datastore_aioredis/test_execute_command.py index 690007d6c..bbc8b2d4f 100644 --- a/tests/datastore_aioredis/test_execute_command.py +++ b/tests/datastore_aioredis/test_execute_command.py @@ -13,11 +13,10 @@ # limitations under the License. 
import pytest -import aioredis from newrelic.api.background_task import background_task from testing_support.fixtures import validate_transaction_metrics, override_application_settings -from conftest import event_loop, loop, AIOREDIS_VERSION +from conftest import AIOREDIS_VERSION from testing_support.db_settings import redis_settings from testing_support.util import instance_hostname @@ -70,19 +69,7 @@ async def exercise_redis_single_arg(client): await client.execute_command("CLIENT LIST") -if AIOREDIS_VERSION >= (2, 0): - clients = [ - aioredis.Redis(host=DB_SETTINGS["host"], port=_port, db=0), - aioredis.StrictRedis(host=DB_SETTINGS["host"], port=_port, db=0), - ] -else: - clients = [ - event_loop.run_until_complete(aioredis.create_redis("redis://%s:%d" % (DB_SETTINGS["host"], _port), db=0)), - ] - - @SKIP_IF_AIOREDIS_V1 -@pytest.mark.parametrize("client", clients) @override_application_settings(_enable_instance_settings) @validate_transaction_metrics( "test_execute_command:test_redis_execute_command_as_one_arg_enable", @@ -96,7 +83,6 @@ def test_redis_execute_command_as_one_arg_enable(client, loop): @SKIP_IF_AIOREDIS_V1 -@pytest.mark.parametrize("client", clients) @override_application_settings(_disable_instance_settings) @validate_transaction_metrics( "test_execute_command:test_redis_execute_command_as_one_arg_disable", @@ -109,7 +95,6 @@ def test_redis_execute_command_as_one_arg_disable(client, loop): loop.run_until_complete(exercise_redis_single_arg(client)) -@pytest.mark.parametrize("client", clients) @override_application_settings(_enable_instance_settings) @validate_transaction_metrics( "test_execute_command:test_redis_execute_command_as_two_args_enable", @@ -122,7 +107,6 @@ def test_redis_execute_command_as_two_args_enable(client, loop): loop.run_until_complete(exercise_redis_multi_args(client)) -@pytest.mark.parametrize("client", clients) @override_application_settings(_disable_instance_settings) @validate_transaction_metrics( "test_execute_command:test_redis_execute_command_as_two_args_disable", diff --git a/tests/datastore_aioredis/test_get_and_set.py b/tests/datastore_aioredis/test_get_and_set.py index b363f14d5..a446d5f6c 100644 --- a/tests/datastore_aioredis/test_get_and_set.py +++ b/tests/datastore_aioredis/test_get_and_set.py @@ -12,12 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
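For context on the one-arg and two-arg execute_command variants exercised above: aioredis v2 follows redis-py in accepting either a full command string or separate tokens, while the v1 execute() API only takes separate tokens, which is why the single-arg tests carry the v1 skip marker. A runnable sketch under aioredis 2.x with a local server:

# Both spellings reach the same Redis command in aioredis v2; the
# instrumentation derives the operation name either way.
import asyncio

import aioredis  # assumes aioredis >= 2.0 and Redis on localhost:6379


async def main():
    client = aioredis.Redis(host="localhost", port=6379, db=0)
    await client.execute_command("CLIENT LIST")     # one argument
    await client.execute_command("CLIENT", "LIST")  # two arguments


asyncio.run(main())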
-import pytest -import aioredis - from newrelic.api.background_task import background_task -from conftest import event_loop, loop, AIOREDIS_VERSION from testing_support.fixtures import validate_transaction_metrics, override_application_settings from testing_support.db_settings import redis_settings from testing_support.util import instance_hostname @@ -64,23 +60,11 @@ _disable_rollup_metrics.append((_instance_metric_name, None)) -if AIOREDIS_VERSION >= (2, 0): - clients = [ - aioredis.Redis(host=DB_SETTINGS["host"], port=_port, db=0), - aioredis.StrictRedis(host=DB_SETTINGS["host"], port=_port, db=0), - ] -else: - clients = [ - event_loop.run_until_complete(aioredis.create_redis("redis://%s:%d" % (DB_SETTINGS["host"], _port), db=0)), - ] - - async def exercise_redis(client): await client.set("key", "value") await client.get("key") -@pytest.mark.parametrize("client", clients) @override_application_settings(_enable_instance_settings) @validate_transaction_metrics( "test_get_and_set:test_redis_client_operation_enable_instance", @@ -93,7 +77,6 @@ def test_redis_client_operation_enable_instance(client, loop): loop.run_until_complete(exercise_redis(client)) -@pytest.mark.parametrize("client", clients) @override_application_settings(_disable_instance_settings) @validate_transaction_metrics( "test_get_and_set:test_redis_client_operation_disable_instance", diff --git a/tests/datastore_aioredis/test_instance_info.py b/tests/datastore_aioredis/test_instance_info.py index ffb5ab31d..4bb744149 100644 --- a/tests/datastore_aioredis/test_instance_info.py +++ b/tests/datastore_aioredis/test_instance_info.py @@ -17,7 +17,7 @@ import aioredis from newrelic.hooks.datastore_aioredis import _conn_attrs_to_dict, _instance_info -from conftest import event_loop, loop, AIOREDIS_VERSION +from conftest import AIOREDIS_VERSION, SKIPIF_AIOREDIS_V1 _instance_info_tests = [ ({}, ("localhost", "6379", "0")), @@ -29,8 +29,6 @@ ] -SKIP_IF_AIOREDIS_V1 = pytest.mark.skipif(AIOREDIS_VERSION < (2, 0), reason="Single arg commands not supported.") - if AIOREDIS_VERSION >= (2, 0): clients = [aioredis.Redis, aioredis.StrictRedis] class DisabledConnection(aioredis.Connection): @@ -48,22 +46,22 @@ class DisabledUnixConnection(aioredis.UnixDomainSocketConnection, DisabledConnec -@SKIP_IF_AIOREDIS_V1 -@pytest.mark.parametrize("client", clients) +@SKIPIF_AIOREDIS_V1 +@pytest.mark.parametrize("client_cls", clients) @pytest.mark.parametrize("kwargs,expected", _instance_info_tests) -def test_strict_redis_client_instance_info(client, kwargs, expected, loop): - r = client(**kwargs) +def test_strict_redis_client_instance_info(client_cls, kwargs, expected, loop): + r = client_cls(**kwargs) if isawaitable(r): r = loop.run_until_complete(r) conn_kwargs = r.connection_pool.connection_kwargs assert _instance_info(conn_kwargs) == expected -@SKIP_IF_AIOREDIS_V1 -@pytest.mark.parametrize("client", clients) +@SKIPIF_AIOREDIS_V1 +@pytest.mark.parametrize("client_cls", clients) @pytest.mark.parametrize("kwargs,expected", _instance_info_tests) -def test_strict_redis_connection_instance_info(client, kwargs, expected, loop): - r = client(**kwargs) +def test_strict_redis_connection_instance_info(client_cls, kwargs, expected, loop): + r = client_cls(**kwargs) if isawaitable(r): r = loop.run_until_complete(r) r.connection_pool.connection_class = DisabledConnection @@ -72,7 +70,7 @@ def test_strict_redis_connection_instance_info(client, kwargs, expected, loop): conn_kwargs = _conn_attrs_to_dict(connection) assert _instance_info(conn_kwargs) == expected 
finally: - r.connection_pool.release(connection) + loop.run_until_complete(r.connection_pool.release(connection)) _instance_info_from_url_tests = [ @@ -98,20 +96,20 @@ def test_strict_redis_connection_instance_info(client, kwargs, expected, loop): ] -@SKIP_IF_AIOREDIS_V1 -@pytest.mark.parametrize("client", clients) +@SKIPIF_AIOREDIS_V1 +@pytest.mark.parametrize("client_cls", clients) @pytest.mark.parametrize("args,kwargs,expected", _instance_info_from_url_tests) -def test_strict_redis_client_from_url(client, args, kwargs, expected): - r = client.from_url(*args, **kwargs) +def test_strict_redis_client_from_url(client_cls, args, kwargs, expected): + r = client_cls.from_url(*args, **kwargs) conn_kwargs = r.connection_pool.connection_kwargs assert _instance_info(conn_kwargs) == expected -@SKIP_IF_AIOREDIS_V1 -@pytest.mark.parametrize("client", clients) +@SKIPIF_AIOREDIS_V1 +@pytest.mark.parametrize("client_cls", clients) @pytest.mark.parametrize("args,kwargs,expected", _instance_info_from_url_tests) -def test_strict_redis_connection_from_url(client, args, kwargs, expected, loop): - r = client.from_url(*args, **kwargs) +def test_strict_redis_connection_from_url(client_cls, args, kwargs, expected, loop): + r = client_cls.from_url(*args, **kwargs) if r.connection_pool.connection_class in (aioredis.Connection, aioredis.connection.SSLConnection): r.connection_pool.connection_class = DisabledConnection elif r.connection_pool.connection_class is aioredis.UnixDomainSocketConnection: @@ -124,4 +122,4 @@ def test_strict_redis_connection_from_url(client, args, kwargs, expected, loop): conn_kwargs = _conn_attrs_to_dict(connection) assert _instance_info(conn_kwargs) == expected finally: - r.connection_pool.release(connection) + loop.run_until_complete(r.connection_pool.release(connection)) diff --git a/tests/datastore_aioredis/test_multiple_dbs.py b/tests/datastore_aioredis/test_multiple_dbs.py index 248fb847d..cb817c9f8 100644 --- a/tests/datastore_aioredis/test_multiple_dbs.py +++ b/tests/datastore_aioredis/test_multiple_dbs.py @@ -17,7 +17,7 @@ from newrelic.api.background_task import background_task from testing_support.fixtures import validate_transaction_metrics, override_application_settings -from conftest import event_loop, loop, AIOREDIS_VERSION +from conftest import AIOREDIS_VERSION from testing_support.db_settings import redis_settings from testing_support.util import instance_hostname @@ -100,30 +100,38 @@ ] ) - if AIOREDIS_VERSION >= (2, 0): - client_set = [ - ( - aioredis.Redis(host=DB_SETTINGS[0]["host"], port=DB_SETTINGS[0]["port"], db=0), - aioredis.Redis(host=DB_SETTINGS[1]["host"], port=DB_SETTINGS[1]["port"], db=0), - ), - ( - aioredis.StrictRedis(host=DB_SETTINGS[0]["host"], port=DB_SETTINGS[0]["port"], db=0), - aioredis.StrictRedis(host=DB_SETTINGS[1]["host"], port=DB_SETTINGS[1]["port"], db=0), - ), - ] - else: - client_set = [ - ( - event_loop.run_until_complete( - aioredis.create_redis("redis://%s:%d" % (DB_SETTINGS[0]["host"], DB_SETTINGS[0]["port"]), db=0) - ), - event_loop.run_until_complete( - aioredis.create_redis("redis://%s:%d" % (DB_SETTINGS[1]["host"], DB_SETTINGS[1]["port"]), db=0) - ), - ) - ] -else: - client_set = [] + +@pytest.fixture(params=("Redis", "StrictRedis")) +def client_set(request, loop): + if len(DB_SETTINGS) > 1: + if AIOREDIS_VERSION >= (2, 0): + if request.param == "Redis": + return ( + aioredis.Redis(host=DB_SETTINGS[0]["host"], port=DB_SETTINGS[0]["port"], db=0), + aioredis.Redis(host=DB_SETTINGS[1]["host"], port=DB_SETTINGS[1]["port"], db=0), + ) + 
elif request.param == "StrictRedis": + return ( + aioredis.StrictRedis(host=DB_SETTINGS[0]["host"], port=DB_SETTINGS[0]["port"], db=0), + aioredis.StrictRedis(host=DB_SETTINGS[1]["host"], port=DB_SETTINGS[1]["port"], db=0), + ) + else: + raise NotImplementedError() + else: + if request.param == "Redis": + return ( + loop.run_until_complete( + aioredis.create_redis("redis://%s:%d" % (DB_SETTINGS[0]["host"], DB_SETTINGS[0]["port"]), db=0) + ), + loop.run_until_complete( + aioredis.create_redis("redis://%s:%d" % (DB_SETTINGS[1]["host"], DB_SETTINGS[1]["port"]), db=0) + ), + ) + elif request.param == "StrictRedis": + pytest.skip("StrictRedis not implemented.") + else: + raise NotImplementedError() + async def exercise_redis(client_1, client_2): @@ -137,7 +145,6 @@ async def exercise_redis(client_1, client_2): @pytest.mark.skipif(len(DB_SETTINGS) < 2, reason="Env not configured with multiple databases") -@pytest.mark.parametrize("client_set", client_set) @override_application_settings(_enable_instance_settings) @validate_transaction_metrics( "test_multiple_dbs:test_multiple_datastores_enabled", @@ -151,7 +158,6 @@ def test_multiple_datastores_enabled(client_set, loop): @pytest.mark.skipif(len(DB_SETTINGS) < 2, reason="Env not configured with multiple databases") -@pytest.mark.parametrize("client_set", client_set) @override_application_settings(_disable_instance_settings) @validate_transaction_metrics( "test_multiple_dbs:test_multiple_datastores_disabled", @@ -165,7 +171,6 @@ def test_multiple_datastores_disabled(client_set, loop): @pytest.mark.skipif(len(DB_SETTINGS) < 2, reason="Env not configured with multiple databases") -@pytest.mark.parametrize("client_set", client_set) @validate_transaction_metrics( "test_multiple_dbs:test_concurrent_calls", scoped_metrics=_concurrent_scoped_metrics, diff --git a/tests/datastore_aioredis/test_span_event.py b/tests/datastore_aioredis/test_span_event.py index 0ab8923ca..1c9227e54 100644 --- a/tests/datastore_aioredis/test_span_event.py +++ b/tests/datastore_aioredis/test_span_event.py @@ -13,13 +13,11 @@ # limitations under the License. 
import pytest -import aioredis from newrelic.api.transaction import current_transaction from newrelic.api.background_task import background_task from testing_support.db_settings import redis_settings -from conftest import event_loop, loop, AIOREDIS_VERSION from testing_support.fixtures import override_application_settings from testing_support.validators.validate_span_events import validate_span_events from testing_support.util import instance_hostname @@ -40,19 +38,6 @@ "span_events.enabled": True, } -if AIOREDIS_VERSION >= (2, 0): - clients = [ - aioredis.Redis(host=DB_SETTINGS["host"], port=DB_SETTINGS["port"], db=0), - aioredis.StrictRedis(host=DB_SETTINGS["host"], port=DB_SETTINGS["port"], db=0), - ] -else: - clients = [ - event_loop.run_until_complete( - aioredis.create_redis("redis://%s:%d" % (DB_SETTINGS["host"], DB_SETTINGS["port"]), db=0) - ), - ] - - async def _exercise_db(client): await client.set("key", "value") await client.get("key") @@ -63,7 +48,6 @@ async def _exercise_db(client): await client.execute("CLIENT", "LIST") -@pytest.mark.parametrize("client", clients) @pytest.mark.parametrize("db_instance_enabled", (True, False)) @pytest.mark.parametrize("instance_enabled", (True, False)) def test_span_events(client, instance_enabled, db_instance_enabled, loop): diff --git a/tests/datastore_aioredis/test_trace_node.py b/tests/datastore_aioredis/test_trace_node.py index fb1ac8545..e4fa1e3ba 100644 --- a/tests/datastore_aioredis/test_trace_node.py +++ b/tests/datastore_aioredis/test_trace_node.py @@ -12,10 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -import aioredis -import pytest - -from conftest import event_loop, loop, AIOREDIS_VERSION from testing_support.fixtures import validate_tt_collector_json, override_application_settings from testing_support.util import instance_hostname from testing_support.db_settings import redis_settings @@ -73,18 +69,6 @@ "port_path_or_id": "VALUE NOT USED", } -if AIOREDIS_VERSION >= (2, 0): - clients = [ - aioredis.Redis(host=DB_SETTINGS["host"], port=DB_SETTINGS["port"], db=0), - aioredis.StrictRedis(host=DB_SETTINGS["host"], port=DB_SETTINGS["port"], db=0), - ] -else: - clients = [ - event_loop.run_until_complete( - aioredis.create_redis("redis://%s:%d" % (DB_SETTINGS["host"], DB_SETTINGS["port"]), db=0) - ), - ] - async def exercise_redis(client): await client.set("key", "value") @@ -96,7 +80,6 @@ async def exercise_redis(client): await client.execute("CLIENT", "LIST") -@pytest.mark.parametrize("client", clients) @override_application_settings(_enable_instance_settings) @validate_tt_collector_json(datastore_params=_enabled_required, datastore_forgone_params=_enabled_forgone) @background_task() @@ -104,7 +87,6 @@ def test_trace_node_datastore_params_enable_instance(client, loop): loop.run_until_complete(exercise_redis(client)) -@pytest.mark.parametrize("client", clients) @override_application_settings(_disable_instance_settings) @validate_tt_collector_json(datastore_params=_disabled_required, datastore_forgone_params=_disabled_forgone) @background_task() @@ -112,7 +94,6 @@ def test_trace_node_datastore_params_disable_instance(client, loop): loop.run_until_complete(exercise_redis(client)) -@pytest.mark.parametrize("client", clients) @override_application_settings(_instance_only_settings) @validate_tt_collector_json(datastore_params=_instance_only_required, datastore_forgone_params=_instance_only_forgone) @background_task() @@ -120,7 +101,6 @@ def 
test_trace_node_datastore_params_instance_only(client, loop): loop.run_until_complete(exercise_redis(client)) -@pytest.mark.parametrize("client", clients) @override_application_settings(_database_only_settings) @validate_tt_collector_json(datastore_params=_database_only_required, datastore_forgone_params=_database_only_forgone) @background_task() diff --git a/tests/datastore_aioredis/test_transactions.py b/tests/datastore_aioredis/test_transactions.py new file mode 100644 index 000000000..168de008b --- /dev/null +++ b/tests/datastore_aioredis/test_transactions.py @@ -0,0 +1,134 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import pytest + +from newrelic.api.background_task import background_task +from testing_support.fixtures import validate_transaction_errors + +from conftest import SKIPIF_AIOREDIS_V1, SKIPIF_AIOREDIS_V2, AIOREDIS_VERSION + + +@background_task() +@pytest.mark.parametrize("in_transaction", (True, False)) +def test_pipelines_no_harm(client, in_transaction, loop): + async def exercise(): + if AIOREDIS_VERSION >= (2,): + pipe = client.pipeline(transaction=in_transaction) + else: + pipe = client.pipeline() # Transaction kwarg unsupported + + pipe.set("TXN", 1) + return await pipe.execute() + + status = loop.run_until_complete(exercise()) + assert status == [True] + + +def exercise_transaction_sync(pipe): + pipe.set("TXN", 1) + + +async def exercise_transaction_async(pipe): + await pipe.set("TXN", 1) + + +@SKIPIF_AIOREDIS_V1 +@pytest.mark.parametrize("exercise", (exercise_transaction_sync, exercise_transaction_async)) +@background_task() +def test_transactions_no_harm(client, loop, exercise): + status = loop.run_until_complete(client.transaction(exercise)) + assert status == [True] + + +@SKIPIF_AIOREDIS_V2 +@background_task() +def test_multi_exec_no_harm(client, loop): + async def exercise(): + pipe = client.multi_exec() + pipe.set("key", "value") + status = await pipe.execute() + assert status == [True] + + loop.run_until_complete(exercise()) + + +@SKIPIF_AIOREDIS_V1 +@background_task() +def test_pipeline_immediate_execution_no_harm(client, loop): + key = "TXN_WATCH" + async def exercise(): + await client.set(key, 1) + + if AIOREDIS_VERSION >= (2,): + pipe = client.pipeline(transaction=True) + else: + pipe = client.pipeline() # Transaction kwarg unsupported + + async with pipe: + await pipe.watch(key) + value = int(await pipe.get(key)) + assert value == 1 + value += 1 + pipe.multi() + pipe.set(key, value) + await pipe.execute() + + assert int(await client.get(key)) == 2 + + loop.run_until_complete(exercise()) + + +@SKIPIF_AIOREDIS_V1 +@background_task() +def test_transaction_immediate_execution_no_harm(client, loop): + key = "TXN_WATCH" + async def exercise(): + async def exercise_transaction(pipe): + value = int(await pipe.get(key)) + assert value == 1 + value += 1 + pipe.multi() + pipe.set(key, value) + await pipe.execute() + + await client.set(key, 1) + status = await client.transaction(exercise_transaction, key) + assert int(await 
client.get(key)) == 2 + + return status + + status = loop.run_until_complete(exercise()) + assert status == [] + + +@SKIPIF_AIOREDIS_V1 +@validate_transaction_errors([]) +@background_task() +def test_transaction_watch_error_no_harm(client, loop): + key = "TXN_WATCH" + async def exercise(): + async def exercise_transaction(pipe): + value = int(await pipe.get(key)) + if value == 1: + # Only run set the first pass, as this runs repeatedly until no watch error is raised. + await pipe.set(key, 2) + + await client.set(key, 1) + status = await client.transaction(exercise_transaction, key) + + return status + + status = loop.run_until_complete(exercise()) + assert status == [] diff --git a/tests/datastore_aioredis/test_uninstrumented_methods.py b/tests/datastore_aioredis/test_uninstrumented_methods.py index 43a2fe179..f1b36b1ca 100644 --- a/tests/datastore_aioredis/test_uninstrumented_methods.py +++ b/tests/datastore_aioredis/test_uninstrumented_methods.py @@ -12,27 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -import pytest -import aioredis - -from conftest import event_loop, loop, AIOREDIS_VERSION - -from testing_support.db_settings import redis_settings - -DB_SETTINGS = redis_settings()[0] - -if AIOREDIS_VERSION >= (2, 0): - clients = [ - aioredis.Redis(host=DB_SETTINGS["host"], port=DB_SETTINGS["port"], db=0), - aioredis.StrictRedis(host=DB_SETTINGS["host"], port=DB_SETTINGS["port"], db=0), - ] -else: - clients = [ - event_loop.run_until_complete( - aioredis.create_redis("redis://%s:%d" % (DB_SETTINGS["host"], DB_SETTINGS["port"]), db=0) - ), - ] - IGNORED_METHODS = { "address", @@ -83,7 +62,6 @@ } -@pytest.mark.parametrize("client", clients) def test_uninstrumented_methods(client): methods = {m for m in dir(client) if not m[0] == "_"} is_wrapped = lambda m: hasattr(getattr(client, m), "__wrapped__") diff --git a/tox.ini b/tox.ini index 2d0480a45..a8d0b92c9 100644 --- a/tox.ini +++ b/tox.ini @@ -90,7 +90,7 @@ envlist = redis-datastore_redis-{py27,py37,py38,pypy,pypy37}-redis03, redis-datastore_redis-{py37,py38,py39,py310,pypy37}-redis{0400,latest}, redis-datastore_aioredis-{py37,py38,py39,py310,pypy37}-aioredislatest, - redis-datastore_aioredis-py39-aioredis01, + redis-datastore_aioredis-{py37,py310}-aioredis01, redis-datastore_aredis-{py37,py38,py39,pypy37}-aredislatest, solr-datastore_solrpy-{py27,pypy}-solrpy{00,01}, python-datastore_sqlite-{py27,py37,py38,py39,py310,pypy,pypy37}, From 7692deef9fb1c86e03797ff94ff668780aa6bb92 Mon Sep 17 00:00:00 2001 From: Lalleh Rafeei <84813886+lrafeei@users.noreply.github.com> Date: Wed, 5 Oct 2022 17:24:51 -0700 Subject: [PATCH 48/49] Use single import point for protobuf in grpc (#647) Co-authored-by: Hannah Stepanek --- tests/framework_grpc/_test_common.py | 10 +-- tests/framework_grpc/conftest.py | 57 +++++++++-------- .../sample_application/__init__.py | 64 +++++++++++-------- tox.ini | 4 +- 4 files changed, 73 insertions(+), 62 deletions(-) diff --git a/tests/framework_grpc/_test_common.py b/tests/framework_grpc/_test_common.py index 117d2d2b0..a71bb2f50 100644 --- a/tests/framework_grpc/_test_common.py +++ b/tests/framework_grpc/_test_common.py @@ -12,23 +12,23 @@ # See the License for the specific language governing permissions and # limitations under the License. 
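The single import point named in this commit message concentrates all protobuf-generated symbols in one package __init__ so the generated *_pb2 module is imported exactly once; under protobuf 4, importing it again can leave its DESCRIPTOR unset (see the issue links in sample_application/__init__.py below). A condensed sketch of the pattern, using placeholder module names rather than the real ones:

# my_service/__init__.py -- placeholder names; the real code below does
# this for sample_application. Re-export everything through the
# *_pb2_grpc module, including message classes reachable through its
# internal alias for the *_pb2 module, so nothing imports *_pb2 twice.
import my_service_pb2_grpc

Message = my_service_pb2_grpc.my__service__pb2.Message
MyServiceStub = my_service_pb2_grpc.MyServiceStub
add_MyServiceServicer_to_server = my_service_pb2_grpc.add_MyServiceServicer_to_server

Callers then import from the package, never from the generated modules directly, which is exactly the change applied to _test_common.py and conftest.py below.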
-import grpc -import threading import functools +import threading + from newrelic.api.application import application_instance def create_request(streaming_request, count=1, timesout=False): - from sample_application.sample_application_pb2 import Message + from sample_application import Message def _message_stream(): for i in range(count): - yield Message(text='Hello World', count=count, timesout=timesout) + yield Message(text="Hello World", count=count, timesout=timesout) if streaming_request: request = _message_stream() else: - request = Message(text='Hello World', count=count, timesout=timesout) + request = Message(text="Hello World", count=count, timesout=timesout) return request diff --git a/tests/framework_grpc/conftest.py b/tests/framework_grpc/conftest.py index 1d54103ce..3e27d134d 100644 --- a/tests/framework_grpc/conftest.py +++ b/tests/framework_grpc/conftest.py @@ -13,52 +13,56 @@ # limitations under the License. import gc + import grpc import pytest -import random - -from testing_support.fixtures import (code_coverage_fixture, - collector_agent_registration_fixture, collector_available_fixture) +from testing_support.fixtures import ( # noqa + code_coverage_fixture, + collector_agent_registration_fixture, + collector_available_fixture, +) from testing_support.mock_external_grpc_server import MockExternalgRPCServer + import newrelic.packages.six as six _coverage_source = [ - 'newrelic.hooks.framework_grpc', + "newrelic.hooks.framework_grpc", ] code_coverage = code_coverage_fixture(source=_coverage_source) _default_settings = { - 'transaction_tracer.explain_threshold': 0.0, - 'transaction_tracer.transaction_threshold': 0.0, - 'transaction_tracer.stack_trace_threshold': 0.0, - 'debug.log_data_collector_payloads': True, - 'debug.record_transaction_failure': True, + "transaction_tracer.explain_threshold": 0.0, + "transaction_tracer.transaction_threshold": 0.0, + "transaction_tracer.stack_trace_threshold": 0.0, + "debug.log_data_collector_payloads": True, + "debug.record_transaction_failure": True, } collector_agent_registration = collector_agent_registration_fixture( - app_name='Python Agent Test (framework_grpc)', - default_settings=_default_settings) + app_name="Python Agent Test (framework_grpc)", default_settings=_default_settings +) -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def grpc_app_server(): with MockExternalgRPCServer() as server: yield server, server.port -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def mock_grpc_server(grpc_app_server): - from sample_application.sample_application_pb2_grpc import ( - add_SampleApplicationServicer_to_server) - from sample_application import SampleApplicationServicer + from sample_application import ( + SampleApplicationServicer, + add_SampleApplicationServicer_to_server, + ) + server, port = grpc_app_server - add_SampleApplicationServicer_to_server( - SampleApplicationServicer(), server) + add_SampleApplicationServicer_to_server(SampleApplicationServicer(), server) return port -@pytest.fixture(scope='function', autouse=True) +@pytest.fixture(scope="function", autouse=True) def gc_garbage_empty(): yield @@ -72,8 +76,8 @@ def gc_garbage_empty(): pass from grpc._channel import _Rendezvous - rendezvous_stored = sum(1 for o in gc.get_objects() - if hasattr(o, '__class__') and isinstance(o, _Rendezvous)) + + rendezvous_stored = sum(1 for o in gc.get_objects() if hasattr(o, "__class__") and isinstance(o, _Rendezvous)) assert rendezvous_stored == 0 @@ -89,17 +93,14 @@ def stub(stub_and_channel): 
@pytest.fixture(scope="session") def stub_and_channel(mock_grpc_server): port = mock_grpc_server - from sample_application.sample_application_pb2_grpc import ( - SampleApplicationStub) - stub, channel = create_stub_and_channel(port) with channel: yield stub, channel + def create_stub_and_channel(port): - from sample_application.sample_application_pb2_grpc import ( - SampleApplicationStub) + from sample_application import SampleApplicationStub - channel = grpc.insecure_channel('localhost:%s' % port) + channel = grpc.insecure_channel("localhost:%s" % port) stub = SampleApplicationStub(channel) return stub, channel diff --git a/tests/framework_grpc/sample_application/__init__.py b/tests/framework_grpc/sample_application/__init__.py index 32f13b3cd..cd5d3de10 100644 --- a/tests/framework_grpc/sample_application/__init__.py +++ b/tests/framework_grpc/sample_application/__init__.py @@ -14,84 +14,94 @@ import json import time -from newrelic.api.transaction import current_transaction + import grpc +import sample_application_pb2_grpc -from sample_application_pb2 import Message -from sample_application_pb2_grpc import ( - SampleApplicationServicer as _SampleApplicationServicer) +from newrelic.api.transaction import current_transaction + +# This import format is to resolve a bug within protobuf 4 +# Issues for reference: +# https://github.com/protocolbuffers/protobuf/issues/10075 +# https://github.com/protocolbuffers/protobuf/issues/10151 +# Within sample_application_pb2.py, the protobuf import can only +# be done once before the DESCRIPTOR value is set to None +# (in subsequent imports) instead of overriding/ignoring the imports. +# This ensures that the imports happen once. +Message = sample_application_pb2_grpc.sample__application__pb2.Message +add_SampleApplicationServicer_to_server = sample_application_pb2_grpc.add_SampleApplicationServicer_to_server +SampleApplicationStub = sample_application_pb2_grpc.SampleApplicationStub class Status(object): code = grpc.StatusCode.ABORTED - details = 'abort_with_status' + details = "abort_with_status" trailing_metadata = {} -class SampleApplicationServicer(_SampleApplicationServicer): - +class SampleApplicationServicer(sample_application_pb2_grpc.SampleApplicationServicer): def DoUnaryUnary(self, request, context): - context.set_trailing_metadata([('content-type', 'text/plain')]) + context.set_trailing_metadata([("content-type", "text/plain")]) if request.timesout: while context.is_active(): time.sleep(0.1) - return Message(text='unary_unary: %s' % request.text) + return Message(text="unary_unary: %s" % request.text) def DoUnaryStream(self, request, context): - context.set_trailing_metadata([('content-type', 'text/plain')]) + context.set_trailing_metadata([("content-type", "text/plain")]) if request.timesout: while context.is_active(): time.sleep(0.1) for i in range(request.count): - yield Message(text='unary_stream: %s' % request.text) + yield Message(text="unary_stream: %s" % request.text) def DoStreamUnary(self, request_iter, context): - context.set_trailing_metadata([('content-type', 'text/plain')]) + context.set_trailing_metadata([("content-type", "text/plain")]) for request in request_iter: if request.timesout: while context.is_active(): time.sleep(0.1) - return Message(text='stream_unary: %s' % request.text) + return Message(text="stream_unary: %s" % request.text) def DoStreamStream(self, request_iter, context): - context.set_trailing_metadata([('content-type', 'text/plain')]) + context.set_trailing_metadata([("content-type", "text/plain")]) for 
request in request_iter: if request.timesout: while context.is_active(): time.sleep(0.1) - yield Message(text='stream_stream: %s' % request.text) + yield Message(text="stream_stream: %s" % request.text) def DoUnaryUnaryRaises(self, request, context): - raise AssertionError('unary_unary: %s' % request.text) + raise AssertionError("unary_unary: %s" % request.text) def DoUnaryStreamRaises(self, request, context): - raise AssertionError('unary_stream: %s' % request.text) + raise AssertionError("unary_stream: %s" % request.text) def DoStreamUnaryRaises(self, request_iter, context): for request in request_iter: - raise AssertionError('stream_unary: %s' % request.text) + raise AssertionError("stream_unary: %s" % request.text) def DoStreamStreamRaises(self, request_iter, context): for request in request_iter: - raise AssertionError('stream_stream: %s' % request.text) + raise AssertionError("stream_stream: %s" % request.text) def NoTxnUnaryUnaryRaises(self, request, context): current_transaction().ignore_transaction = True - raise AssertionError('unary_unary: %s' % request.text) + raise AssertionError("unary_unary: %s" % request.text) def NoTxnUnaryStreamRaises(self, request, context): current_transaction().ignore_transaction = True - raise AssertionError('unary_stream: %s' % request.text) + raise AssertionError("unary_stream: %s" % request.text) def NoTxnStreamUnaryRaises(self, request_iter, context): current_transaction().ignore_transaction = True for request in request_iter: - raise AssertionError('stream_unary: %s' % request.text) + raise AssertionError("stream_unary: %s" % request.text) def NoTxnStreamStreamRaises(self, request_iter, context): current_transaction().ignore_transaction = True for request in request_iter: - raise AssertionError('stream_stream: %s' % request.text) + raise AssertionError("stream_stream: %s" % request.text) def NoTxnUnaryUnary(self, request, context): current_transaction().ignore_transaction = True @@ -110,16 +120,16 @@ def NoTxnStreamStream(self, request_iter, context): return self.DoStreamStream(request_iter, context) def DoUnaryUnaryAbort(self, request, context): - context.abort(grpc.StatusCode.ABORTED, 'aborting') + context.abort(grpc.StatusCode.ABORTED, "aborting") def DoUnaryStreamAbort(self, request, context): - context.abort(grpc.StatusCode.ABORTED, 'aborting') + context.abort(grpc.StatusCode.ABORTED, "aborting") def DoStreamUnaryAbort(self, request_iter, context): - context.abort(grpc.StatusCode.ABORTED, 'aborting') + context.abort(grpc.StatusCode.ABORTED, "aborting") def DoStreamStreamAbort(self, request_iter, context): - context.abort(grpc.StatusCode.ABORTED, 'aborting') + context.abort(grpc.StatusCode.ABORTED, "aborting") def DoUnaryUnaryAbortWithStatus(self, request, context): context.abort_with_status(Status) diff --git a/tox.ini b/tox.ini index a8d0b92c9..c50a1b75b 100644 --- a/tox.ini +++ b/tox.ini @@ -133,7 +133,7 @@ envlist = python-framework_graphql-{py37,py38,py39,py310,pypy37}-graphql03, ; temporarily disabling graphqlmaster tests python-framework_graphql-py37-graphql{0202,0203,0300,0301,0302}, - grpc-framework_grpc-{py27}-grpc0125, + grpc-framework_grpc-py27-grpc0125, grpc-framework_grpc-{py37,py38,py39,py310}-grpclatest, python-framework_pyramid-{pypy,py27,py38}-Pyramid0104, python-framework_pyramid-{pypy,py27,pypy37,py37,py38,py39,py310}-Pyramid0110-cornice, @@ -320,7 +320,7 @@ deps = framework_graphql-graphql0301: graphql-core<3.2 framework_graphql-graphql0302: graphql-core<3.3 framework_graphql-graphqlmaster: 
https://github.com/graphql-python/graphql-core/archive/main.zip - framework_grpc-grpclatest: protobuf<4 + framework_grpc-grpclatest: protobuf framework_grpc-grpclatest: grpcio framework_grpc-grpclatest: grpcio-tools grpc0125: grpcio<1.26 From f0079047dddfe9bf0448a9fd0e8384a0b047f348 Mon Sep 17 00:00:00 2001 From: admp-fh <115181356+admp-fh@users.noreply.github.com> Date: Thu, 6 Oct 2022 15:25:30 -0400 Subject: [PATCH 49/49] Make scripts and entry_points mutually exclusive based on setuptools presence (#650) When installing newrelic-admin with setuptools, the newrelic-admin shim is written to bin multiple times due to the inclusion of both `scripts` and `entry-points` as arguments to `setup`. This poses problems for installers that comply with newer PEP standards, such as the Pypa `installer` package: https://github.com/pypa/installer --- setup.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/setup.py b/setup.py index be891626c..cdb4ac091 100644 --- a/setup.py +++ b/setup.py @@ -154,7 +154,6 @@ def build_extension(self, ext): package_data={ "newrelic": ["newrelic.ini", "version.txt", "packages/urllib3/LICENSE.txt", "common/cacert.pem"], }, - scripts=["scripts/newrelic-admin"], extras_require={"infinite-tracing": ["grpcio", "protobuf"]}, ) @@ -162,6 +161,8 @@ def build_extension(self, ext): kwargs["entry_points"] = { "console_scripts": ["newrelic-admin = newrelic.admin:main"], } +else: + kwargs["scripts"] = ["scripts/newrelic-admin"] def with_librt():
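Condensed, the resulting logic in setup.py selects exactly one delivery mechanism for the admin script, so PEP 517-style installers such as pypa/installer write the shim only once. A simplified sketch; the real setup() call carries many more arguments:

# Simplified sketch: entry_points when setuptools is present, a
# distutils-style scripts entry otherwise -- never both.
try:
    from setuptools import setup
    with_setuptools = True
except ImportError:
    from distutils.core import setup
    with_setuptools = False

kwargs = {"name": "newrelic"}  # remaining setup arguments elided

if with_setuptools:
    kwargs["entry_points"] = {
        "console_scripts": ["newrelic-admin = newrelic.admin:main"],
    }
else:
    kwargs["scripts"] = ["scripts/newrelic-admin"]

setup(**kwargs)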