diff --git a/tests/agent_features/test_attributes_in_action.py b/tests/agent_features/test_attributes_in_action.py index fbedb302e..e51298dbf 100644 --- a/tests/agent_features/test_attributes_in_action.py +++ b/tests/agent_features/test_attributes_in_action.py @@ -24,13 +24,17 @@ validate_error_event_attributes, validate_error_event_attributes_outside_transaction, validate_error_trace_attributes_outside_transaction, - validate_transaction_error_trace_attributes, - validate_transaction_trace_attributes, ) from testing_support.validators.validate_span_events import validate_span_events +from testing_support.validators.validate_transaction_error_trace_attributes import ( + validate_transaction_error_trace_attributes, +) from testing_support.validators.validate_transaction_event_attributes import ( validate_transaction_event_attributes, ) +from testing_support.validators.validate_transaction_trace_attributes import ( + validate_transaction_trace_attributes, +) from newrelic.api.application import application_instance as application from newrelic.api.message_transaction import message_transaction diff --git a/tests/agent_features/test_collector_payloads.py b/tests/agent_features/test_collector_payloads.py index 17b46ce49..0c1b2367c 100644 --- a/tests/agent_features/test_collector_payloads.py +++ b/tests/agent_features/test_collector_payloads.py @@ -14,17 +14,30 @@ import pytest import webtest - -from testing_support.fixtures import (validate_error_trace_collector_json, - validate_tt_collector_json, validate_transaction_event_collector_json, - validate_error_event_collector_json, - validate_custom_event_collector_json, override_application_settings) - -from testing_support.sample_applications import (simple_app, - simple_exceptional_app, simple_custom_event_app) - -from testing_support.validators.validate_log_event_collector_json import validate_log_event_collector_json - +from testing_support.fixtures import ( + override_application_settings, + validate_custom_event_collector_json, +) +from testing_support.sample_applications import ( + simple_app, + simple_custom_event_app, + simple_exceptional_app, +) +from testing_support.validators.validate_error_event_collector_json import ( + validate_error_event_collector_json, +) +from testing_support.validators.validate_error_trace_collector_json import ( + validate_error_trace_collector_json, +) +from testing_support.validators.validate_log_event_collector_json import ( + validate_log_event_collector_json, +) +from testing_support.validators.validate_transaction_event_collector_json import ( + validate_transaction_event_collector_json, +) +from testing_support.validators.validate_tt_collector_json import ( + validate_tt_collector_json, +) exceptional_application = webtest.TestApp(simple_exceptional_app) normal_application = webtest.TestApp(simple_app) @@ -34,7 +47,7 @@ @validate_error_trace_collector_json() def test_error_trace_json(): try: - exceptional_application.get('/') + exceptional_application.get("/") except ValueError: pass @@ -42,34 +55,34 @@ def test_error_trace_json(): @validate_error_event_collector_json() def test_error_event_json(): try: - exceptional_application.get('/') + exceptional_application.get("/") except ValueError: pass @validate_tt_collector_json() def test_transaction_trace_json(): - normal_application.get('/') + normal_application.get("/") @validate_tt_collector_json(exclude_request_uri=True) -@override_application_settings({'attributes.exclude': set(('request.uri',))}) +@override_application_settings({"attributes.exclude": 
set(("request.uri",))}) def test_transaction_trace_json_no_request_uri(): - normal_application.get('/') + normal_application.get("/") @validate_transaction_event_collector_json() def test_transaction_event_json(): - normal_application.get('/') + normal_application.get("/") @validate_custom_event_collector_json() def test_custom_event_json(): - custom_event_application.get('/') + custom_event_application.get("/") @pytest.mark.xfail(reason="Unwritten validator") @validate_log_event_collector_json def test_log_event_json(): - normal_application.get('/') + normal_application.get("/") raise NotImplementedError("Fix my validator") diff --git a/tests/agent_features/test_event_loop_wait_time.py b/tests/agent_features/test_event_loop_wait_time.py index b4906337f..69e6fc102 100644 --- a/tests/agent_features/test_event_loop_wait_time.py +++ b/tests/agent_features/test_event_loop_wait_time.py @@ -16,16 +16,16 @@ import time import pytest -from testing_support.fixtures import ( - override_application_settings, - validate_transaction_trace_attributes, -) +from testing_support.fixtures import override_application_settings from testing_support.validators.validate_transaction_event_attributes import ( validate_transaction_event_attributes, ) from testing_support.validators.validate_transaction_metrics import ( validate_transaction_metrics, ) +from testing_support.validators.validate_transaction_trace_attributes import ( + validate_transaction_trace_attributes, +) from newrelic.api.background_task import background_task from newrelic.api.function_trace import FunctionTrace, function_trace @@ -75,7 +75,7 @@ async def wait_for_loop(ready, done, times=1): ), ) def test_record_event_loop_wait(event_loop, blocking_transaction_active, event_loop_visibility_enabled): - import asyncio + # import asyncio metric_count = 2 if event_loop_visibility_enabled else None execute_attributes = {"intrinsic": ("eventLoopTime",), "agent": (), "user": ()} @@ -184,7 +184,7 @@ def test_blocking_task_on_different_loop(): def test_record_event_loop_wait_on_different_task(event_loop): - import asyncio + # import asyncio async def recorder(ready, wait): ready.set() diff --git a/tests/agent_features/test_ignore_expected_errors.py b/tests/agent_features/test_ignore_expected_errors.py index 1a7fa266e..5cf61eced 100644 --- a/tests/agent_features/test_ignore_expected_errors.py +++ b/tests/agent_features/test_ignore_expected_errors.py @@ -19,11 +19,13 @@ validate_error_event_attributes_outside_transaction, validate_error_event_sample_data, validate_error_trace_attributes_outside_transaction, - validate_transaction_error_trace_attributes, ) from testing_support.validators.validate_time_metrics_outside_transaction import ( validate_time_metrics_outside_transaction, ) +from testing_support.validators.validate_transaction_error_trace_attributes import ( + validate_transaction_error_trace_attributes, +) from testing_support.validators.validate_transaction_errors import ( validate_transaction_errors, ) diff --git a/tests/agent_features/test_lambda_handler.py b/tests/agent_features/test_lambda_handler.py index f388aa51b..40b694407 100644 --- a/tests/agent_features/test_lambda_handler.py +++ b/tests/agent_features/test_lambda_handler.py @@ -16,15 +16,15 @@ from copy import deepcopy import pytest -from testing_support.fixtures import ( - override_application_settings, - validate_transaction_trace_attributes, -) +from testing_support.fixtures import override_application_settings from testing_support.validators.validate_transaction_event_attributes 
import ( validate_transaction_event_attributes, ) +from testing_support.validators.validate_transaction_trace_attributes import ( + validate_transaction_trace_attributes, +) -import newrelic.api.lambda_handler as lambda_handler +from newrelic.api import lambda_handler # NOTE: this fixture will force all tests in this file to assume that a cold diff --git a/tests/agent_features/test_notice_error.py b/tests/agent_features/test_notice_error.py index a4509e215..913ee9289 100644 --- a/tests/agent_features/test_notice_error.py +++ b/tests/agent_features/test_notice_error.py @@ -19,14 +19,22 @@ error_is_saved, override_application_settings, reset_core_stats_engine, - validate_application_error_event_count, - validate_application_error_trace_count, validate_transaction_error_event_count, validate_transaction_error_trace_count, ) +from testing_support.validators.validate_application_error_event_count import ( + validate_application_error_event_count, +) +from testing_support.validators.validate_application_error_trace_count import ( + validate_application_error_trace_count, +) +from testing_support.validators.validate_application_errors import ( + validate_application_errors, +) +from testing_support.validators.validate_transaction_errors import ( + validate_transaction_errors, +) -from testing_support.validators.validate_application_errors import validate_application_errors -from testing_support.validators.validate_transaction_errors import validate_transaction_errors from newrelic.api.application import application_instance as application from newrelic.api.application import application_settings from newrelic.api.background_task import background_task diff --git a/tests/agent_features/test_span_events.py b/tests/agent_features/test_span_events.py index 155642860..655efee8c 100644 --- a/tests/agent_features/test_span_events.py +++ b/tests/agent_features/test_span_events.py @@ -34,7 +34,6 @@ from newrelic.api.datastore_trace import DatastoreTrace from newrelic.api.external_trace import ExternalTrace from newrelic.api.function_trace import FunctionTrace, function_trace -from newrelic.api.graphql_trace import GraphQLOperationTrace, GraphQLResolverTrace from newrelic.api.memcache_trace import MemcacheTrace from newrelic.api.message_trace import MessageTrace from newrelic.api.solr_trace import SolrTrace @@ -125,8 +124,6 @@ def _test(): (DatastoreTrace, ("db_product", "db_target", "db_operation")), (ExternalTrace, ("lib", "url")), (FunctionTrace, ("name",)), - (GraphQLOperationTrace, ()), - (GraphQLResolverTrace, ()), (MemcacheTrace, ("command",)), (MessageTrace, ("lib", "operation", "dst_type", "dst_name")), (SolrTrace, ("lib", "command")), @@ -416,11 +413,11 @@ def _test(): @pytest.mark.parametrize("collect_span_events", (False, True)) @pytest.mark.parametrize("span_events_enabled", (False, True)) def test_collect_span_events_override(collect_span_events, span_events_enabled): - - if collect_span_events and span_events_enabled: - spans_expected = True - else: - spans_expected = False + spans_expected = collect_span_events and span_events_enabled + # if collect_span_events and span_events_enabled: + # spans_expected = True + # else: + # spans_expected = False span_count = 2 if spans_expected else 0 @@ -556,9 +553,9 @@ def _test(): def test_span_user_attribute_overrides_transaction_attribute(): transaction = current_transaction() - transaction.add_custom_attribute("foo", "a") + transaction.add_custom_parameter("foo", "a") add_custom_span_attribute("foo", "b") - transaction.add_custom_attribute("foo", 
"c") + transaction.add_custom_parameter("foo", "c") @override_application_settings({"attributes.include": "*"}) @@ -603,7 +600,7 @@ def _test(): transaction = current_transaction() for i in range(128): - transaction.add_custom_attribute("txn_attr%i" % i, "txnValue") + transaction.add_custom_parameter("txn_attr%i" % i, "txnValue") if i < 64: add_custom_span_attribute("span_attr%i" % i, "spanValue") @@ -620,8 +617,6 @@ def _test(): (DatastoreTrace, ("db_product", "db_target", "db_operation")), (ExternalTrace, ("lib", "url")), (FunctionTrace, ("name",)), - (GraphQLOperationTrace, ()), - (GraphQLResolverTrace, ()), (MemcacheTrace, ("command",)), (MessageTrace, ("lib", "operation", "dst_type", "dst_name")), (SolrTrace, ("lib", "command")), @@ -671,8 +666,6 @@ def _test(): (DatastoreTrace, ("db_product", "db_target", "db_operation")), (ExternalTrace, ("lib", "url")), (FunctionTrace, ("name",)), - (GraphQLOperationTrace, ()), - (GraphQLResolverTrace, ()), (MemcacheTrace, ("command",)), (MessageTrace, ("lib", "operation", "dst_type", "dst_name")), (SolrTrace, ("lib", "command")), @@ -716,8 +709,6 @@ def _test(): (DatastoreTrace, ("db_product", "db_target", "db_operation")), (ExternalTrace, ("lib", "url")), (FunctionTrace, ("name",)), - (GraphQLOperationTrace, ()), - (GraphQLResolverTrace, ()), (MemcacheTrace, ("command",)), (MessageTrace, ("lib", "operation", "dst_type", "dst_name")), (SolrTrace, ("lib", "command")), @@ -732,9 +723,9 @@ def test_span_event_notice_error_overrides_observed(trace_type, args): with trace_type(*args): try: raise ERROR - except: + except Exception: notice_error() - raise ValueError + raise ValueError # pylint: disable except ValueError: pass diff --git a/tests/agent_features/test_synthetics.py b/tests/agent_features/test_synthetics.py index ec7b78e96..2e08144cc 100644 --- a/tests/agent_features/test_synthetics.py +++ b/tests/agent_features/test_synthetics.py @@ -19,10 +19,14 @@ cat_enabled, make_synthetics_header, override_application_settings, +) +from testing_support.validators.validate_synthetics_event import ( + validate_synthetics_event, +) +from testing_support.validators.validate_synthetics_transaction_trace import ( validate_synthetics_transaction_trace, ) -from testing_support.validators.validate_synthetics_event import validate_synthetics_event from newrelic.api.web_transaction import web_transaction from newrelic.api.wsgi_application import wsgi_application from newrelic.common.encoding_utils import deobfuscate, json_decode diff --git a/tests/agent_features/test_web_transaction.py b/tests/agent_features/test_web_transaction.py index f2f08574a..66cf25858 100644 --- a/tests/agent_features/test_web_transaction.py +++ b/tests/agent_features/test_web_transaction.py @@ -24,10 +24,10 @@ validate_transaction_metrics, ) -import newrelic.packages.six as six from newrelic.api.application import application_instance from newrelic.api.web_transaction import WebTransaction from newrelic.api.wsgi_application import wsgi_application +from newrelic.packages import six application = webtest.TestApp(simple_app) diff --git a/tests/agent_features/test_wsgi_attributes.py b/tests/agent_features/test_wsgi_attributes.py index 7543e45d8..e90410b6d 100644 --- a/tests/agent_features/test_wsgi_attributes.py +++ b/tests/agent_features/test_wsgi_attributes.py @@ -17,9 +17,11 @@ dt_enabled, override_application_settings, validate_error_event_attributes, - validate_transaction_error_trace_attributes, ) from testing_support.sample_applications import fully_featured_app +from 
testing_support.validators.validate_transaction_error_trace_attributes import ( + validate_transaction_error_trace_attributes, +) from testing_support.validators.validate_transaction_event_attributes import ( validate_transaction_event_attributes, ) diff --git a/tests/datastore_aioredis/test_custom_conn_pool.py b/tests/datastore_aioredis/test_custom_conn_pool.py index e59760ea3..b09cf0bdd 100644 --- a/tests/datastore_aioredis/test_custom_conn_pool.py +++ b/tests/datastore_aioredis/test_custom_conn_pool.py @@ -18,10 +18,9 @@ """ from testing_support.db_settings import redis_settings +from testing_support.fixture.event_loop import event_loop as loop # noqa from testing_support.fixtures import override_application_settings from testing_support.util import instance_hostname - -# from testing_support.fixture.event_loop import event_loop as loop from testing_support.validators.validate_transaction_metrics import ( validate_transaction_metrics, ) @@ -111,7 +110,7 @@ async def exercise_redis(client): background_task=True, ) @background_task() -def test_fake_conn_pool_enable_instance(client, loop, monkeypatch): +def test_fake_conn_pool_enable_instance(client, loop, monkeypatch): # noqa # Get a real connection conn = getattr(client, "_pool_or_conn", None) if conn is None: @@ -136,7 +135,7 @@ def test_fake_conn_pool_enable_instance(client, loop, monkeypatch): background_task=True, ) @background_task() -def test_fake_conn_pool_disable_instance(client, loop, monkeypatch): +def test_fake_conn_pool_disable_instance(client, loop, monkeypatch): # noqa # Get a real connection conn = getattr(client, "_pool_or_conn", None) if conn is None: diff --git a/tests/datastore_aioredis/test_execute_command.py b/tests/datastore_aioredis/test_execute_command.py index f6ee9eb27..54851a659 100644 --- a/tests/datastore_aioredis/test_execute_command.py +++ b/tests/datastore_aioredis/test_execute_command.py @@ -13,7 +13,9 @@ # limitations under the License. 
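The agent_features hunks above all apply the same mechanical migration: each validator moves out of the catch-all testing_support.fixtures module into its own module under testing_support.validators, named after the validator. A minimal sketch of the resulting import and usage pattern; the test body and the attribute-group values are illustrative, not taken from the diff:

# Old style (being removed above):
# from testing_support.fixtures import validate_transaction_trace_attributes

# New style: one module per validator, module named after the validator.
from testing_support.validators.validate_transaction_trace_attributes import (
    validate_transaction_trace_attributes,
)

from newrelic.api.background_task import background_task

# Attribute groups follow the {"intrinsic": ..., "agent": ..., "user": ...}
# shape used throughout the tests above.
_required = {"intrinsic": (), "agent": (), "user": ()}


@validate_transaction_trace_attributes(_required)
@background_task()
def test_import_migration_sketch():  # hypothetical test
    pass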
import pytest -from conftest import AIOREDIS_VERSION # , event_loop, loop + +# import aioredis +from conftest import AIOREDIS_VERSION, loop # noqa # pylint: disable=E0611,W0611 from testing_support.db_settings import redis_settings from testing_support.fixtures import override_application_settings from testing_support.util import instance_hostname @@ -81,7 +83,7 @@ async def exercise_redis_single_arg(client): background_task=True, ) @background_task() -def test_redis_execute_command_as_one_arg_enable(client, loop): +def test_redis_execute_command_as_one_arg_enable(client, loop): # noqa loop.run_until_complete(exercise_redis_single_arg(client)) @@ -94,7 +96,7 @@ def test_redis_execute_command_as_one_arg_enable(client, loop): background_task=True, ) @background_task() -def test_redis_execute_command_as_one_arg_disable(client, loop): +def test_redis_execute_command_as_one_arg_disable(client, loop): # noqa loop.run_until_complete(exercise_redis_single_arg(client)) @@ -106,7 +108,7 @@ def test_redis_execute_command_as_one_arg_disable(client, loop): background_task=True, ) @background_task() -def test_redis_execute_command_as_two_args_enable(client, loop): +def test_redis_execute_command_as_two_args_enable(client, loop): # noqa loop.run_until_complete(exercise_redis_multi_args(client)) @@ -118,5 +120,5 @@ def test_redis_execute_command_as_two_args_enable(client, loop): background_task=True, ) @background_task() -def test_redis_execute_command_as_two_args_disable(client, loop): +def test_redis_execute_command_as_two_args_disable(client, loop): # noqa loop.run_until_complete(exercise_redis_multi_args(client)) diff --git a/tests/datastore_aioredis/test_multiple_dbs.py b/tests/datastore_aioredis/test_multiple_dbs.py index 3b9ea37dd..61d99d3ae 100644 --- a/tests/datastore_aioredis/test_multiple_dbs.py +++ b/tests/datastore_aioredis/test_multiple_dbs.py @@ -14,9 +14,7 @@ import aioredis import pytest -from conftest import AIOREDIS_VERSION # , event_loop, loop - -# from conftest import AIOREDIS_VERSION +from conftest import AIOREDIS_VERSION, loop # noqa from testing_support.db_settings import redis_settings from testing_support.fixtures import override_application_settings from testing_support.util import instance_hostname @@ -107,7 +105,7 @@ @pytest.fixture(params=("Redis", "StrictRedis")) -def client_set(request, loop): +def client_set(request, loop): # noqa if len(DB_SETTINGS) > 1: if AIOREDIS_VERSION >= (2, 0): if request.param == "Redis": @@ -157,7 +155,7 @@ async def exercise_redis(client_1, client_2): background_task=True, ) @background_task() -def test_multiple_datastores_enabled(client_set, loop): +def test_multiple_datastores_enabled(client_set, loop): # noqa loop.run_until_complete(exercise_redis(client_set[0], client_set[1])) @@ -170,7 +168,7 @@ def test_multiple_datastores_enabled(client_set, loop): background_task=True, ) @background_task() -def test_multiple_datastores_disabled(client_set, loop): +def test_multiple_datastores_disabled(client_set, loop): # noqa loop.run_until_complete(exercise_redis(client_set[0], client_set[1])) @@ -183,7 +181,7 @@ def test_multiple_datastores_disabled(client_set, loop): ) @override_application_settings(_enable_instance_settings) @background_task() -def test_concurrent_calls(client_set, loop): +def test_concurrent_calls(client_set, loop): # noqa # Concurrent calls made with original instrumentation taken from synchronous Redis # instrumentation had a bug where datastore info on concurrent calls to multiple instances # would result in all instances
reporting as the host/port of the final call made. diff --git a/tests/datastore_aioredis/test_trace_node.py b/tests/datastore_aioredis/test_trace_node.py index e4fa1e3ba..92235f793 100644 --- a/tests/datastore_aioredis/test_trace_node.py +++ b/tests/datastore_aioredis/test_trace_node.py @@ -12,9 +12,15 @@ # See the License for the specific language governing permissions and # limitations under the License. -from testing_support.fixtures import validate_tt_collector_json, override_application_settings -from testing_support.util import instance_hostname +# import aioredis +# import pytest +# from conftest import AIOREDIS_VERSION, event_loop from testing_support.db_settings import redis_settings +from testing_support.fixtures import override_application_settings +from testing_support.util import instance_hostname +from testing_support.validators.validate_tt_collector_json import ( + validate_tt_collector_json, +) from newrelic.api.background_task import background_task diff --git a/tests/datastore_aredis/test_trace_node.py b/tests/datastore_aredis/test_trace_node.py index 9741bfbd6..9d5d86162 100644 --- a/tests/datastore_aredis/test_trace_node.py +++ b/tests/datastore_aredis/test_trace_node.py @@ -13,12 +13,13 @@ # limitations under the License. import aredis - -from testing_support.fixture.event_loop import event_loop as loop -from testing_support.fixtures import (validate_tt_collector_json, - override_application_settings) -from testing_support.util import instance_hostname from testing_support.db_settings import redis_settings +from testing_support.fixture.event_loop import event_loop as loop # noqa: F401 +from testing_support.fixtures import override_application_settings +from testing_support.util import instance_hostname +from testing_support.validators.validate_tt_collector_json import ( + validate_tt_collector_json, +) from newrelic.api.background_task import background_task @@ -28,100 +29,93 @@ # Settings _enable_instance_settings = { - 'datastore_tracer.instance_reporting.enabled': True, - 'datastore_tracer.database_name_reporting.enabled': True, + "datastore_tracer.instance_reporting.enabled": True, + "datastore_tracer.database_name_reporting.enabled": True, } _disable_instance_settings = { - 'datastore_tracer.instance_reporting.enabled': False, - 'datastore_tracer.database_name_reporting.enabled': False, + "datastore_tracer.instance_reporting.enabled": False, + "datastore_tracer.database_name_reporting.enabled": False, } _instance_only_settings = { - 'datastore_tracer.instance_reporting.enabled': True, - 'datastore_tracer.database_name_reporting.enabled': False, + "datastore_tracer.instance_reporting.enabled": True, + "datastore_tracer.database_name_reporting.enabled": False, } _database_only_settings = { - 'datastore_tracer.instance_reporting.enabled': False, - 'datastore_tracer.database_name_reporting.enabled': True, + "datastore_tracer.instance_reporting.enabled": False, + "datastore_tracer.database_name_reporting.enabled": True, } # Expected parameters _enabled_required = { - 'host': instance_hostname(DB_SETTINGS['host']), - 'port_path_or_id': str(DB_SETTINGS['port']), - 'db.instance': str(DATABASE_NUMBER), + "host": instance_hostname(DB_SETTINGS["host"]), + "port_path_or_id": str(DB_SETTINGS["port"]), + "db.instance": str(DATABASE_NUMBER), } _enabled_forgone = {} _disabled_required = {} _disabled_forgone = { - 'host': 'VALUE NOT USED', - 'port_path_or_id': 'VALUE NOT USED', - 'db.instance': 'VALUE NOT USED', + "host": "VALUE NOT USED", + "port_path_or_id": "VALUE NOT USED", + 
"db.instance": "VALUE NOT USED", } _instance_only_required = { - 'host': instance_hostname(DB_SETTINGS['host']), - 'port_path_or_id': str(DB_SETTINGS['port']), + "host": instance_hostname(DB_SETTINGS["host"]), + "port_path_or_id": str(DB_SETTINGS["port"]), } _instance_only_forgone = { - 'db.instance': str(DATABASE_NUMBER), + "db.instance": str(DATABASE_NUMBER), } _database_only_required = { - 'db.instance': str(DATABASE_NUMBER), + "db.instance": str(DATABASE_NUMBER), } _database_only_forgone = { - 'host': 'VALUE NOT USED', - 'port_path_or_id': 'VALUE NOT USED', + "host": "VALUE NOT USED", + "port_path_or_id": "VALUE NOT USED", } # Query + async def _exercise_db(): - client = aredis.StrictRedis(host=DB_SETTINGS['host'], - port=DB_SETTINGS['port'], db=DATABASE_NUMBER) + client = aredis.StrictRedis(host=DB_SETTINGS["host"], port=DB_SETTINGS["port"], db=DATABASE_NUMBER) - await client.set('key', 'value') - await client.get('key') + await client.set("key", "value") + await client.get("key") - await client.execute_command('CLIENT', 'LIST', parse='LIST') + await client.execute_command("CLIENT", "LIST", parse="LIST") # Tests + @override_application_settings(_enable_instance_settings) -@validate_tt_collector_json( - datastore_params=_enabled_required, - datastore_forgone_params=_enabled_forgone) +@validate_tt_collector_json(datastore_params=_enabled_required, datastore_forgone_params=_enabled_forgone) @background_task() -def test_trace_node_datastore_params_enable_instance(loop): +def test_trace_node_datastore_params_enable_instance(loop): # noqa: F811 loop.run_until_complete(_exercise_db()) @override_application_settings(_disable_instance_settings) -@validate_tt_collector_json( - datastore_params=_disabled_required, - datastore_forgone_params=_disabled_forgone) +@validate_tt_collector_json(datastore_params=_disabled_required, datastore_forgone_params=_disabled_forgone) @background_task() -def test_trace_node_datastore_params_disable_instance(loop): +def test_trace_node_datastore_params_disable_instance(loop): # noqa: F811 loop.run_until_complete(_exercise_db()) @override_application_settings(_instance_only_settings) -@validate_tt_collector_json( - datastore_params=_instance_only_required, - datastore_forgone_params=_instance_only_forgone) +@validate_tt_collector_json(datastore_params=_instance_only_required, datastore_forgone_params=_instance_only_forgone) @background_task() -def test_trace_node_datastore_params_instance_only(loop): +def test_trace_node_datastore_params_instance_only(loop): # noqa: F811 loop.run_until_complete(_exercise_db()) @override_application_settings(_database_only_settings) -@validate_tt_collector_json( - datastore_params=_database_only_required, - datastore_forgone_params=_database_only_forgone) +@validate_tt_collector_json(datastore_params=_database_only_required, datastore_forgone_params=_database_only_forgone) @background_task() -def test_trace_node_datastore_params_database_only(loop): +def test_trace_node_datastore_params_database_only(loop): # noqa: F811 loop.run_until_complete(_exercise_db()) diff --git a/tests/datastore_asyncpg/test_query.py b/tests/datastore_asyncpg/test_query.py index a952e062a..838ced61d 100644 --- a/tests/datastore_asyncpg/test_query.py +++ b/tests/datastore_asyncpg/test_query.py @@ -12,17 +12,19 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import asyncio import os -import random from io import BytesIO import asyncpg import pytest from testing_support.db_settings import postgresql_settings -from testing_support.fixtures import validate_tt_collector_json -from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics from testing_support.util import instance_hostname +from testing_support.validators.validate_transaction_metrics import ( + validate_transaction_metrics, +) +from testing_support.validators.validate_tt_collector_json import ( + validate_tt_collector_json, +) from newrelic.api.background_task import background_task diff --git a/tests/datastore_elasticsearch/test_trace_node.py b/tests/datastore_elasticsearch/test_trace_node.py index 65e773340..445b4a4eb 100644 --- a/tests/datastore_elasticsearch/test_trace_node.py +++ b/tests/datastore_elasticsearch/test_trace_node.py @@ -13,77 +13,81 @@ # limitations under the License. from elasticsearch import Elasticsearch - -from testing_support.fixtures import (validate_tt_collector_json, - override_application_settings, validate_tt_parenting) from testing_support.db_settings import elasticsearch_settings +from testing_support.fixtures import ( + override_application_settings, + validate_tt_parenting, +) from testing_support.util import instance_hostname +from testing_support.validators.validate_tt_collector_json import ( + validate_tt_collector_json, +) from newrelic.api.background_task import background_task ES_SETTINGS = elasticsearch_settings()[0] -ES_URL = 'http://%s:%s' % (ES_SETTINGS['host'], ES_SETTINGS['port']) +ES_URL = "http://%s:%s" % (ES_SETTINGS["host"], ES_SETTINGS["port"]) # Settings _enable_instance_settings = { - 'datastore_tracer.instance_reporting.enabled': True, - 'datastore_tracer.database_name_reporting.enabled': True, + "datastore_tracer.instance_reporting.enabled": True, + "datastore_tracer.database_name_reporting.enabled": True, } _disable_instance_settings = { - 'datastore_tracer.instance_reporting.enabled': False, - 'datastore_tracer.database_name_reporting.enabled': False, + "datastore_tracer.instance_reporting.enabled": False, + "datastore_tracer.database_name_reporting.enabled": False, } _instance_only_settings = { - 'datastore_tracer.instance_reporting.enabled': True, - 'datastore_tracer.database_name_reporting.enabled': False, + "datastore_tracer.instance_reporting.enabled": True, + "datastore_tracer.database_name_reporting.enabled": False, } # Expected parameters _enabled_required = { - 'host': instance_hostname(ES_SETTINGS['host']), - 'port_path_or_id': str(ES_SETTINGS['port']), + "host": instance_hostname(ES_SETTINGS["host"]), + "port_path_or_id": str(ES_SETTINGS["port"]), } _enabled_forgone = { - 'db.instance': 'VALUE NOT USED', + "db.instance": "VALUE NOT USED", } _disabled_required = {} _disabled_forgone = { - 'host': 'VALUE NOT USED', - 'port_path_or_id': 'VALUE NOT USED', - 'db.instance': 'VALUE NOT USED', + "host": "VALUE NOT USED", + "port_path_or_id": "VALUE NOT USED", + "db.instance": "VALUE NOT USED", } _instance_only_required = { - 'host': instance_hostname(ES_SETTINGS['host']), - 'port_path_or_id': str(ES_SETTINGS['port']), + "host": instance_hostname(ES_SETTINGS["host"]), + "port_path_or_id": str(ES_SETTINGS["port"]), } _instance_only_forgone = { - 'db.instance': 'VALUE NOT USED', + "db.instance": "VALUE NOT USED", } _tt_parenting = ( - 'TransactionNode', [ - ('DatastoreNode', []), + "TransactionNode", + [ + ("DatastoreNode", []), ], ) # Query + def _exercise_es(es): - es.index(index='contacts', 
doc_type='person', - body={'name': 'Joe Tester', 'age': 25, 'title': 'QA Master'}, id=1) + es.index(index="contacts", doc_type="person", body={"name": "Joe Tester", "age": 25, "title": "QA Master"}, id=1) # Tests + @override_application_settings(_enable_instance_settings) -@validate_tt_collector_json( - datastore_params=_enabled_required, - datastore_forgone_params=_enabled_forgone) +@validate_tt_collector_json(datastore_params=_enabled_required, datastore_forgone_params=_enabled_forgone) @validate_tt_parenting(_tt_parenting) @background_task() def test_trace_node_datastore_params_enable_instance(): @@ -92,9 +96,7 @@ def test_trace_node_datastore_params_enable_instance(): @override_application_settings(_disable_instance_settings) -@validate_tt_collector_json( - datastore_params=_disabled_required, - datastore_forgone_params=_disabled_forgone) +@validate_tt_collector_json(datastore_params=_disabled_required, datastore_forgone_params=_disabled_forgone) @validate_tt_parenting(_tt_parenting) @background_task() def test_trace_node_datastore_params_disable_instance(): @@ -103,9 +105,7 @@ def test_trace_node_datastore_params_disable_instance(): @override_application_settings(_instance_only_settings) -@validate_tt_collector_json( - datastore_params=_instance_only_required, - datastore_forgone_params=_instance_only_forgone) +@validate_tt_collector_json(datastore_params=_instance_only_required, datastore_forgone_params=_instance_only_forgone) @validate_tt_parenting(_tt_parenting) @background_task() def test_trace_node_datastore_params_instance_only(): diff --git a/tests/datastore_psycopg2/test_trace_node.py b/tests/datastore_psycopg2/test_trace_node.py index b9cd45788..9bfbcf42b 100644 --- a/tests/datastore_psycopg2/test_trace_node.py +++ b/tests/datastore_psycopg2/test_trace_node.py @@ -13,72 +13,78 @@ # limitations under the License. 
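Every trace-node file in this diff follows the same template: a settings dict toggles the datastore_tracer.*.enabled flags, and validate_tt_collector_json asserts which datastore attributes must appear (datastore_params) and which must be absent (datastore_forgone_params). A condensed sketch of one cell of that matrix; the host and port values and the _exercise_db stub are illustrative:

from testing_support.fixtures import override_application_settings
from testing_support.validators.validate_tt_collector_json import (
    validate_tt_collector_json,
)

from newrelic.api.background_task import background_task

_enable_instance_settings = {
    "datastore_tracer.instance_reporting.enabled": True,
    "datastore_tracer.database_name_reporting.enabled": True,
}
_enabled_required = {"host": "localhost", "port_path_or_id": "6379"}  # illustrative


def _exercise_db():
    pass  # stands in for the one-query helpers defined in each file above


@override_application_settings(_enable_instance_settings)
@validate_tt_collector_json(datastore_params=_enabled_required, datastore_forgone_params={})
@background_task()
def test_instance_reporting_enabled():
    _exercise_db()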
import psycopg2 - -from testing_support.fixtures import (validate_tt_collector_json, - override_application_settings, validate_tt_parenting) +from testing_support.fixtures import ( + override_application_settings, + validate_tt_parenting, +) from testing_support.util import instance_hostname +from testing_support.validators.validate_tt_collector_json import ( + validate_tt_collector_json, +) from utils import DB_SETTINGS from newrelic.api.background_task import background_task - # Settings _enable_instance_settings = { - 'datastore_tracer.instance_reporting.enabled': True, - 'datastore_tracer.database_name_reporting.enabled': True, + "datastore_tracer.instance_reporting.enabled": True, + "datastore_tracer.database_name_reporting.enabled": True, } _disable_instance_settings = { - 'datastore_tracer.instance_reporting.enabled': False, - 'datastore_tracer.database_name_reporting.enabled': False, + "datastore_tracer.instance_reporting.enabled": False, + "datastore_tracer.database_name_reporting.enabled": False, } # Expected parameters _enabled_required = { - 'host': instance_hostname(DB_SETTINGS['host']), - 'port_path_or_id': str(DB_SETTINGS['port']), - 'db.instance': DB_SETTINGS['name'], + "host": instance_hostname(DB_SETTINGS["host"]), + "port_path_or_id": str(DB_SETTINGS["port"]), + "db.instance": DB_SETTINGS["name"], } _enabled_forgone = {} _disabled_required = {} _disabled_forgone = { - 'host': 'VALUE NOT USED', - 'port_path_or_id': 'VALUE NOT USED', - 'db.instance': 'VALUE NOT USED', + "host": "VALUE NOT USED", + "port_path_or_id": "VALUE NOT USED", + "db.instance": "VALUE NOT USED", } _tt_parenting = ( - 'TransactionNode', [ - ('FunctionNode', []), - ('DatabaseNode', []), + "TransactionNode", + [ + ("FunctionNode", []), + ("DatabaseNode", []), ], ) # Query + def _exercise_db(): connection = psycopg2.connect( - database=DB_SETTINGS['name'], user=DB_SETTINGS['user'], - password=DB_SETTINGS['password'], host=DB_SETTINGS['host'], - port=DB_SETTINGS['port']) + database=DB_SETTINGS["name"], + user=DB_SETTINGS["user"], + password=DB_SETTINGS["password"], + host=DB_SETTINGS["host"], + port=DB_SETTINGS["port"], + ) try: cursor = connection.cursor() - cursor.execute("""SELECT setting from pg_settings where name=%s""", - ('server_version',)) + cursor.execute("""SELECT setting from pg_settings where name=%s""", ("server_version",)) finally: connection.close() # Tests + @override_application_settings(_enable_instance_settings) -@validate_tt_collector_json( - datastore_params=_enabled_required, - datastore_forgone_params=_enabled_forgone) +@validate_tt_collector_json(datastore_params=_enabled_required, datastore_forgone_params=_enabled_forgone) @validate_tt_parenting(_tt_parenting) @background_task() def test_trace_node_datastore_params_enable_instance(): @@ -86,9 +92,7 @@ def test_trace_node_datastore_params_enable_instance(): @override_application_settings(_disable_instance_settings) -@validate_tt_collector_json( - datastore_params=_disabled_required, - datastore_forgone_params=_disabled_forgone) +@validate_tt_collector_json(datastore_params=_disabled_required, datastore_forgone_params=_disabled_forgone) @validate_tt_parenting(_tt_parenting) @background_task() def test_trace_node_datastore_params_disable_instance(): diff --git a/tests/datastore_redis/test_trace_node.py b/tests/datastore_redis/test_trace_node.py index 39b7763ba..cc0d59919 100644 --- a/tests/datastore_redis/test_trace_node.py +++ b/tests/datastore_redis/test_trace_node.py @@ -13,11 +13,12 @@ # limitations under the License. 
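Two of the files above (elasticsearch and psycopg2) additionally pin the shape of the transaction trace with validate_tt_parenting, which takes a nested (node type, children) tuple. A small sketch of that expectation, reusing the tuple shape from the hunks above with a single traced function; the test itself is hypothetical:

from testing_support.fixtures import validate_tt_parenting

from newrelic.api.background_task import background_task
from newrelic.api.function_trace import FunctionTrace

# (node name, list of child nodes), as in the _tt_parenting constants above.
_tt_parenting = (
    "TransactionNode",
    [
        ("FunctionNode", []),
    ],
)


@validate_tt_parenting(_tt_parenting)
@background_task()
def test_parenting_sketch():
    with FunctionTrace("child"):
        pass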
import redis - -from testing_support.fixtures import (validate_tt_collector_json, - override_application_settings) -from testing_support.util import instance_hostname from testing_support.db_settings import redis_settings +from testing_support.fixtures import override_application_settings +from testing_support.util import instance_hostname +from testing_support.validators.validate_tt_collector_json import ( + validate_tt_collector_json, +) from newrelic.api.background_task import background_task @@ -27,100 +28,93 @@ # Settings _enable_instance_settings = { - 'datastore_tracer.instance_reporting.enabled': True, - 'datastore_tracer.database_name_reporting.enabled': True, + "datastore_tracer.instance_reporting.enabled": True, + "datastore_tracer.database_name_reporting.enabled": True, } _disable_instance_settings = { - 'datastore_tracer.instance_reporting.enabled': False, - 'datastore_tracer.database_name_reporting.enabled': False, + "datastore_tracer.instance_reporting.enabled": False, + "datastore_tracer.database_name_reporting.enabled": False, } _instance_only_settings = { - 'datastore_tracer.instance_reporting.enabled': True, - 'datastore_tracer.database_name_reporting.enabled': False, + "datastore_tracer.instance_reporting.enabled": True, + "datastore_tracer.database_name_reporting.enabled": False, } _database_only_settings = { - 'datastore_tracer.instance_reporting.enabled': False, - 'datastore_tracer.database_name_reporting.enabled': True, + "datastore_tracer.instance_reporting.enabled": False, + "datastore_tracer.database_name_reporting.enabled": True, } # Expected parameters _enabled_required = { - 'host': instance_hostname(DB_SETTINGS['host']), - 'port_path_or_id': str(DB_SETTINGS['port']), - 'db.instance': str(DATABASE_NUMBER), + "host": instance_hostname(DB_SETTINGS["host"]), + "port_path_or_id": str(DB_SETTINGS["port"]), + "db.instance": str(DATABASE_NUMBER), } _enabled_forgone = {} _disabled_required = {} _disabled_forgone = { - 'host': 'VALUE NOT USED', - 'port_path_or_id': 'VALUE NOT USED', - 'db.instance': 'VALUE NOT USED', + "host": "VALUE NOT USED", + "port_path_or_id": "VALUE NOT USED", + "db.instance": "VALUE NOT USED", } _instance_only_required = { - 'host': instance_hostname(DB_SETTINGS['host']), - 'port_path_or_id': str(DB_SETTINGS['port']), + "host": instance_hostname(DB_SETTINGS["host"]), + "port_path_or_id": str(DB_SETTINGS["port"]), } _instance_only_forgone = { - 'db.instance': str(DATABASE_NUMBER), + "db.instance": str(DATABASE_NUMBER), } _database_only_required = { - 'db.instance': str(DATABASE_NUMBER), + "db.instance": str(DATABASE_NUMBER), } _database_only_forgone = { - 'host': 'VALUE NOT USED', - 'port_path_or_id': 'VALUE NOT USED', + "host": "VALUE NOT USED", + "port_path_or_id": "VALUE NOT USED", } # Query + def _exercise_db(): - client = redis.StrictRedis(host=DB_SETTINGS['host'], - port=DB_SETTINGS['port'], db=DATABASE_NUMBER) + client = redis.StrictRedis(host=DB_SETTINGS["host"], port=DB_SETTINGS["port"], db=DATABASE_NUMBER) - client.set('key', 'value') - client.get('key') + client.set("key", "value") + client.get("key") - client.execute_command('CLIENT', 'LIST', parse='LIST') + client.execute_command("CLIENT", "LIST", parse="LIST") # Tests + @override_application_settings(_enable_instance_settings) -@validate_tt_collector_json( - datastore_params=_enabled_required, - datastore_forgone_params=_enabled_forgone) +@validate_tt_collector_json(datastore_params=_enabled_required, datastore_forgone_params=_enabled_forgone) @background_task() def 
test_trace_node_datastore_params_enable_instance(): _exercise_db() @override_application_settings(_disable_instance_settings) -@validate_tt_collector_json( - datastore_params=_disabled_required, - datastore_forgone_params=_disabled_forgone) +@validate_tt_collector_json(datastore_params=_disabled_required, datastore_forgone_params=_disabled_forgone) @background_task() def test_trace_node_datastore_params_disable_instance(): _exercise_db() @override_application_settings(_instance_only_settings) -@validate_tt_collector_json( - datastore_params=_instance_only_required, - datastore_forgone_params=_instance_only_forgone) +@validate_tt_collector_json(datastore_params=_instance_only_required, datastore_forgone_params=_instance_only_forgone) @background_task() def test_trace_node_datastore_params_instance_only(): _exercise_db() @override_application_settings(_database_only_settings) -@validate_tt_collector_json( - datastore_params=_database_only_required, - datastore_forgone_params=_database_only_forgone) +@validate_tt_collector_json(datastore_params=_database_only_required, datastore_forgone_params=_database_only_forgone) @background_task() def test_trace_node_datastore_params_database_only(): _exercise_db() diff --git a/tests/framework_sanic/test_cross_application.py b/tests/framework_sanic/test_cross_application.py index 7199fae55..31dc3b9b9 100644 --- a/tests/framework_sanic/test_cross_application.py +++ b/tests/framework_sanic/test_cross_application.py @@ -14,6 +14,8 @@ import json import random + +# import re import string import pytest diff --git a/tests/framework_tornado/test_server.py b/tests/framework_tornado/test_server.py index 1d985b938..6f8e6bf2a 100644 --- a/tests/framework_tornado/test_server.py +++ b/tests/framework_tornado/test_server.py @@ -213,6 +213,7 @@ def test_nr_disabled(app): ) def test_web_socket(uri, name, app): # import asyncio + from tornado.websocket import websocket_connect namespace, func_name = name.split(":") diff --git a/tests/messagebroker_pika/test_pika_async_connection_consume.py b/tests/messagebroker_pika/test_pika_async_connection_consume.py index 7edf6b644..4e44c7ed7 100644 --- a/tests/messagebroker_pika/test_pika_async_connection_consume.py +++ b/tests/messagebroker_pika/test_pika_async_connection_consume.py @@ -12,39 +12,57 @@ # See the License for the specific language governing permissions and # limitations under the License. 
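The pika hunks below validate message-broker attributes on the transaction trace through validate_tt_collector_json(message_broker_params=...), where the params dict mirrors the AMQP properties the producer publishes with. A sketch of the dict's shape, with illustrative values standing in for the QUEUE, CORRELATION_ID, REPLY_TO, and HEADERS constants the real tests import from conftest:

from testing_support.validators.validate_tt_collector_json import (
    validate_tt_collector_json,
)

from newrelic.api.background_task import background_task

_message_broker_tt_params = {
    "queue_name": "test-queue",      # QUEUE
    "routing_key": "test-queue",     # QUEUE (routing key matches the queue name)
    "correlation_id": "test-id",     # CORRELATION_ID
    "reply_to": "test-reply-to",     # REPLY_TO
    "headers": {"x-test": "1"},      # HEADERS.copy()
}


def _publish_and_consume():
    pass  # stands in for the pika produce/consume flow exercised below


@validate_tt_collector_json(message_broker_params=_message_broker_tt_params)
@background_task()
def test_message_broker_params_sketch():
    _publish_and_consume()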
-from minversion import pika_version_info -from compat import basic_consume import functools + import pika -from pika.adapters.tornado_connection import TornadoConnection import pytest import six import tornado +from compat import basic_consume +from conftest import ( + BODY, + CORRELATION_ID, + EXCHANGE, + EXCHANGE_2, + HEADERS, + QUEUE, + QUEUE_2, + REPLY_TO, +) +from minversion import pika_version_info +from pika.adapters.tornado_connection import TornadoConnection +from testing_support.db_settings import rabbitmq_settings +from testing_support.fixtures import ( + capture_transaction_metrics, + function_not_called, + override_application_settings, +) +from testing_support.validators.validate_code_level_metrics import ( + validate_code_level_metrics, +) +from testing_support.validators.validate_transaction_metrics import ( + validate_transaction_metrics, +) +from testing_support.validators.validate_tt_collector_json import ( + validate_tt_collector_json, +) from newrelic.api.background_task import background_task -from conftest import (QUEUE, QUEUE_2, EXCHANGE, EXCHANGE_2, CORRELATION_ID, - REPLY_TO, HEADERS, BODY) -from testing_support.fixtures import (capture_transaction_metrics, - validate_tt_collector_json, - function_not_called, override_application_settings) -from testing_support.validators.validate_code_level_metrics import validate_code_level_metrics -from testing_support.db_settings import rabbitmq_settings -from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics - DB_SETTINGS = rabbitmq_settings()[0] _message_broker_tt_params = { - 'queue_name': QUEUE, - 'routing_key': QUEUE, - 'correlation_id': CORRELATION_ID, - 'reply_to': REPLY_TO, - 'headers': HEADERS.copy(), + "queue_name": QUEUE, + "routing_key": QUEUE, + "correlation_id": CORRELATION_ID, + "reply_to": REPLY_TO, + "headers": HEADERS.copy(), } # Tornado's IO loop is not configurable in versions 5.x and up try: + class MyIOLoop(tornado.ioloop.IOLoop.configured_class()): def handle_callback_exception(self, *args, **kwargs): raise @@ -55,38 +73,44 @@ def handle_callback_exception(self, *args, **kwargs): connection_classes = [pika.SelectConnection, TornadoConnection] -parametrized_connection = pytest.mark.parametrize('ConnectionClass', - connection_classes) +parametrized_connection = pytest.mark.parametrize("ConnectionClass", connection_classes) _test_select_conn_basic_get_inside_txn_metrics = [ - ('MessageBroker/RabbitMQ/Exchange/Produce/Named/%s' % EXCHANGE, None), - ('MessageBroker/RabbitMQ/Exchange/Consume/Named/%s' % EXCHANGE, 1), + ("MessageBroker/RabbitMQ/Exchange/Produce/Named/%s" % EXCHANGE, None), + ("MessageBroker/RabbitMQ/Exchange/Consume/Named/%s" % EXCHANGE, 1), ] if six.PY3: _test_select_conn_basic_get_inside_txn_metrics.append( - (('Function/test_pika_async_connection_consume:' - 'test_async_connection_basic_get_inside_txn.' - '.on_message'), 1)) + ( + ( + "Function/test_pika_async_connection_consume:" + "test_async_connection_basic_get_inside_txn." + ".on_message" + ), + 1, + ) + ) else: - _test_select_conn_basic_get_inside_txn_metrics.append( - ('Function/test_pika_async_connection_consume:on_message', 1)) + _test_select_conn_basic_get_inside_txn_metrics.append(("Function/test_pika_async_connection_consume:on_message", 1)) @parametrized_connection -@pytest.mark.parametrize('callback_as_partial', [True, False]) -@validate_code_level_metrics("test_pika_async_connection_consume" + (".test_async_connection_basic_get_inside_txn." 
if six.PY3 else ""), "on_message") +@pytest.mark.parametrize("callback_as_partial", [True, False]) +@validate_code_level_metrics( + "test_pika_async_connection_consume" + (".test_async_connection_basic_get_inside_txn." if six.PY3 else ""), + "on_message", +) @validate_transaction_metrics( - ('test_pika_async_connection_consume:' - 'test_async_connection_basic_get_inside_txn'), - scoped_metrics=_test_select_conn_basic_get_inside_txn_metrics, - rollup_metrics=_test_select_conn_basic_get_inside_txn_metrics, - background_task=True) + ("test_pika_async_connection_consume:" "test_async_connection_basic_get_inside_txn"), + scoped_metrics=_test_select_conn_basic_get_inside_txn_metrics, + rollup_metrics=_test_select_conn_basic_get_inside_txn_metrics, + background_task=True, +) @validate_tt_collector_json(message_broker_params=_message_broker_tt_params) @background_task() -def test_async_connection_basic_get_inside_txn(producer, ConnectionClass, - callback_as_partial): +def test_async_connection_basic_get_inside_txn(producer, ConnectionClass, callback_as_partial): def on_message(channel, method_frame, header_frame, body): assert method_frame assert body == BODY @@ -104,9 +128,7 @@ def on_open_channel(channel): def on_open_connection(connection): connection.channel(on_open_callback=on_open_channel) - connection = ConnectionClass( - pika.ConnectionParameters(DB_SETTINGS['host']), - on_open_callback=on_open_connection) + connection = ConnectionClass(pika.ConnectionParameters(DB_SETTINGS["host"]), on_open_callback=on_open_connection) try: connection.ioloop.start() @@ -117,9 +139,8 @@ def on_open_connection(connection): @parametrized_connection -@pytest.mark.parametrize('callback_as_partial', [True, False]) -def test_select_connection_basic_get_outside_txn(producer, ConnectionClass, - callback_as_partial): +@pytest.mark.parametrize("callback_as_partial", [True, False]) +def test_select_connection_basic_get_outside_txn(producer, ConnectionClass, callback_as_partial): metrics_list = [] @capture_transaction_metrics(metrics_list) @@ -142,8 +163,8 @@ def on_open_connection(connection): connection.channel(on_open_callback=on_open_channel) connection = ConnectionClass( - pika.ConnectionParameters(DB_SETTINGS['host']), - on_open_callback=on_open_connection) + pika.ConnectionParameters(DB_SETTINGS["host"]), on_open_callback=on_open_connection + ) try: connection.ioloop.start() @@ -160,25 +181,24 @@ def on_open_connection(connection): _test_select_conn_basic_get_inside_txn_no_callback_metrics = [ - ('MessageBroker/RabbitMQ/Exchange/Produce/Named/%s' % EXCHANGE, None), - ('MessageBroker/RabbitMQ/Exchange/Consume/Named/%s' % EXCHANGE, None), + ("MessageBroker/RabbitMQ/Exchange/Produce/Named/%s" % EXCHANGE, None), + ("MessageBroker/RabbitMQ/Exchange/Consume/Named/%s" % EXCHANGE, None), ] @pytest.mark.skipif( - condition=pika_version_info[0] > 0, - reason='pika 1.0 removed the ability to use basic_get with callback=None') + condition=pika_version_info[0] > 0, reason="pika 1.0 removed the ability to use basic_get with callback=None" +) @parametrized_connection @validate_transaction_metrics( - ('test_pika_async_connection_consume:' - 'test_async_connection_basic_get_inside_txn_no_callback'), + ("test_pika_async_connection_consume:" "test_async_connection_basic_get_inside_txn_no_callback"), scoped_metrics=_test_select_conn_basic_get_inside_txn_no_callback_metrics, rollup_metrics=_test_select_conn_basic_get_inside_txn_no_callback_metrics, - background_task=True) + background_task=True, +) 
@validate_tt_collector_json(message_broker_params=_message_broker_tt_params) @background_task() -def test_async_connection_basic_get_inside_txn_no_callback(producer, - ConnectionClass): +def test_async_connection_basic_get_inside_txn_no_callback(producer, ConnectionClass): def on_open_channel(channel): channel.basic_get(callback=None, queue=QUEUE) channel.close() @@ -188,9 +208,7 @@ def on_open_channel(channel): def on_open_connection(connection): connection.channel(on_open_callback=on_open_channel) - connection = ConnectionClass( - pika.ConnectionParameters(DB_SETTINGS['host']), - on_open_callback=on_open_connection) + connection = ConnectionClass(pika.ConnectionParameters(DB_SETTINGS["host"]), on_open_callback=on_open_connection) try: connection.ioloop.start() @@ -201,27 +219,26 @@ def on_open_connection(connection): _test_async_connection_basic_get_empty_metrics = [ - ('MessageBroker/RabbitMQ/Exchange/Produce/Named/%s' % EXCHANGE, None), - ('MessageBroker/RabbitMQ/Exchange/Consume/Named/%s' % EXCHANGE, None), + ("MessageBroker/RabbitMQ/Exchange/Produce/Named/%s" % EXCHANGE, None), + ("MessageBroker/RabbitMQ/Exchange/Consume/Named/%s" % EXCHANGE, None), ] @parametrized_connection -@pytest.mark.parametrize('callback_as_partial', [True, False]) +@pytest.mark.parametrize("callback_as_partial", [True, False]) @validate_transaction_metrics( - ('test_pika_async_connection_consume:' - 'test_async_connection_basic_get_empty'), - scoped_metrics=_test_async_connection_basic_get_empty_metrics, - rollup_metrics=_test_async_connection_basic_get_empty_metrics, - background_task=True) + ("test_pika_async_connection_consume:" "test_async_connection_basic_get_empty"), + scoped_metrics=_test_async_connection_basic_get_empty_metrics, + rollup_metrics=_test_async_connection_basic_get_empty_metrics, + background_task=True, +) @validate_tt_collector_json(message_broker_params=_message_broker_tt_params) @background_task() -def test_async_connection_basic_get_empty(ConnectionClass, - callback_as_partial): - QUEUE = 'test_async_empty' +def test_async_connection_basic_get_empty(ConnectionClass, callback_as_partial): + QUEUE = "test_async_empty" def on_message(channel, method_frame, header_frame, body): - assert False, body.decode('UTF-8') + assert False, body.decode("UTF-8") if callback_as_partial: on_message = functools.partial(on_message) @@ -235,9 +252,7 @@ def on_open_channel(channel): def on_open_connection(connection): connection.channel(on_open_callback=on_open_channel) - connection = ConnectionClass( - pika.ConnectionParameters(DB_SETTINGS['host']), - on_open_callback=on_open_connection) + connection = ConnectionClass(pika.ConnectionParameters(DB_SETTINGS["host"]), on_open_callback=on_open_connection) try: connection.ioloop.start() @@ -248,33 +263,42 @@ def on_open_connection(connection): _test_select_conn_basic_consume_in_txn_metrics = [ - ('MessageBroker/RabbitMQ/Exchange/Produce/Named/%s' % EXCHANGE, None), - ('MessageBroker/RabbitMQ/Exchange/Consume/Named/%s' % EXCHANGE, None), + ("MessageBroker/RabbitMQ/Exchange/Produce/Named/%s" % EXCHANGE, None), + ("MessageBroker/RabbitMQ/Exchange/Consume/Named/%s" % EXCHANGE, None), ] if six.PY3: _test_select_conn_basic_consume_in_txn_metrics.append( - (('Function/test_pika_async_connection_consume:' - 'test_async_connection_basic_consume_inside_txn.' - '.on_message'), 1)) + ( + ( + "Function/test_pika_async_connection_consume:" + "test_async_connection_basic_consume_inside_txn." 
+ ".on_message" + ), + 1, + ) + ) else: - _test_select_conn_basic_consume_in_txn_metrics.append( - ('Function/test_pika_async_connection_consume:on_message', 1)) + _test_select_conn_basic_consume_in_txn_metrics.append(("Function/test_pika_async_connection_consume:on_message", 1)) @parametrized_connection @validate_transaction_metrics( - ('test_pika_async_connection_consume:' - 'test_async_connection_basic_consume_inside_txn'), - scoped_metrics=_test_select_conn_basic_consume_in_txn_metrics, - rollup_metrics=_test_select_conn_basic_consume_in_txn_metrics, - background_task=True) -@validate_code_level_metrics("test_pika_async_connection_consume" + (".test_async_connection_basic_consume_inside_txn." if six.PY3 else ""), "on_message") + ("test_pika_async_connection_consume:" "test_async_connection_basic_consume_inside_txn"), + scoped_metrics=_test_select_conn_basic_consume_in_txn_metrics, + rollup_metrics=_test_select_conn_basic_consume_in_txn_metrics, + background_task=True, +) +@validate_code_level_metrics( + "test_pika_async_connection_consume" + + (".test_async_connection_basic_consume_inside_txn." if six.PY3 else ""), + "on_message", +) @validate_tt_collector_json(message_broker_params=_message_broker_tt_params) @background_task() def test_async_connection_basic_consume_inside_txn(producer, ConnectionClass): def on_message(channel, method_frame, header_frame, body): - assert hasattr(method_frame, '_nr_start_time') + assert hasattr(method_frame, "_nr_start_time") assert body == BODY channel.basic_ack(method_frame.delivery_tag) channel.close() @@ -287,9 +311,7 @@ def on_open_channel(channel): def on_open_connection(connection): connection.channel(on_open_callback=on_open_channel) - connection = ConnectionClass( - pika.ConnectionParameters(DB_SETTINGS['host']), - on_open_callback=on_open_connection) + connection = ConnectionClass(pika.ConnectionParameters(DB_SETTINGS["host"]), on_open_callback=on_open_connection) try: connection.ioloop.start() @@ -300,46 +322,67 @@ def on_open_connection(connection): _test_select_conn_basic_consume_two_exchanges = [ - ('MessageBroker/RabbitMQ/Exchange/Produce/Named/%s' % EXCHANGE, None), - ('MessageBroker/RabbitMQ/Exchange/Consume/Named/%s' % EXCHANGE, None), - ('MessageBroker/RabbitMQ/Exchange/Produce/Named/%s' % EXCHANGE_2, None), - ('MessageBroker/RabbitMQ/Exchange/Consume/Named/%s' % EXCHANGE_2, None), + ("MessageBroker/RabbitMQ/Exchange/Produce/Named/%s" % EXCHANGE, None), + ("MessageBroker/RabbitMQ/Exchange/Consume/Named/%s" % EXCHANGE, None), + ("MessageBroker/RabbitMQ/Exchange/Produce/Named/%s" % EXCHANGE_2, None), + ("MessageBroker/RabbitMQ/Exchange/Consume/Named/%s" % EXCHANGE_2, None), ] if six.PY3: _test_select_conn_basic_consume_two_exchanges.append( - (('Function/test_pika_async_connection_consume:' - 'test_async_connection_basic_consume_two_exchanges.' - '.on_message_1'), 1)) + ( + ( + "Function/test_pika_async_connection_consume:" + "test_async_connection_basic_consume_two_exchanges." + ".on_message_1" + ), + 1, + ) + ) _test_select_conn_basic_consume_two_exchanges.append( - (('Function/test_pika_async_connection_consume:' - 'test_async_connection_basic_consume_two_exchanges.' - '.on_message_2'), 1)) + ( + ( + "Function/test_pika_async_connection_consume:" + "test_async_connection_basic_consume_two_exchanges." 
+ ".on_message_2" + ), + 1, + ) + ) else: _test_select_conn_basic_consume_two_exchanges.append( - ('Function/test_pika_async_connection_consume:on_message_1', 1)) + ("Function/test_pika_async_connection_consume:on_message_1", 1) + ) _test_select_conn_basic_consume_two_exchanges.append( - ('Function/test_pika_async_connection_consume:on_message_2', 1)) + ("Function/test_pika_async_connection_consume:on_message_2", 1) + ) @parametrized_connection @validate_transaction_metrics( - ('test_pika_async_connection_consume:' - 'test_async_connection_basic_consume_two_exchanges'), - scoped_metrics=_test_select_conn_basic_consume_two_exchanges, - rollup_metrics=_test_select_conn_basic_consume_two_exchanges, - background_task=True) -@validate_code_level_metrics("test_pika_async_connection_consume" + (".test_async_connection_basic_consume_two_exchanges." if six.PY3 else ""), "on_message_1") -@validate_code_level_metrics("test_pika_async_connection_consume" + (".test_async_connection_basic_consume_two_exchanges." if six.PY3 else ""), "on_message_2") + ("test_pika_async_connection_consume:" "test_async_connection_basic_consume_two_exchanges"), + scoped_metrics=_test_select_conn_basic_consume_two_exchanges, + rollup_metrics=_test_select_conn_basic_consume_two_exchanges, + background_task=True, +) +@validate_code_level_metrics( + "test_pika_async_connection_consume" + + (".test_async_connection_basic_consume_two_exchanges." if six.PY3 else ""), + "on_message_1", +) +@validate_code_level_metrics( + "test_pika_async_connection_consume" + + (".test_async_connection_basic_consume_two_exchanges." if six.PY3 else ""), + "on_message_2", +) @background_task() -def test_async_connection_basic_consume_two_exchanges(producer, producer_2, - ConnectionClass): +def test_async_connection_basic_consume_two_exchanges(producer, producer_2, ConnectionClass): global events_received events_received = 0 def on_message_1(channel, method_frame, header_frame, body): channel.basic_ack(method_frame.delivery_tag) - assert hasattr(method_frame, '_nr_start_time') + assert hasattr(method_frame, "_nr_start_time") assert body == BODY global events_received @@ -352,7 +395,7 @@ def on_message_1(channel, method_frame, header_frame, body): def on_message_2(channel, method_frame, header_frame, body): channel.basic_ack(method_frame.delivery_tag) - assert hasattr(method_frame, '_nr_start_time') + assert hasattr(method_frame, "_nr_start_time") assert body == BODY global events_received @@ -370,9 +413,7 @@ def on_open_channel(channel): def on_open_connection(connection): connection.channel(on_open_callback=on_open_channel) - connection = ConnectionClass( - pika.ConnectionParameters(DB_SETTINGS['host']), - on_open_callback=on_open_connection) + connection = ConnectionClass(pika.ConnectionParameters(DB_SETTINGS["host"]), on_open_callback=on_open_connection) try: connection.ioloop.start() @@ -383,12 +424,11 @@ def on_open_connection(connection): # This should not create a transaction -@function_not_called('newrelic.core.stats_engine', - 'StatsEngine.record_transaction') -@override_application_settings({'debug.record_transaction_failure': True}) +@function_not_called("newrelic.core.stats_engine", "StatsEngine.record_transaction") +@override_application_settings({"debug.record_transaction_failure": True}) def test_tornado_connection_basic_consume_outside_transaction(producer): def on_message(channel, method_frame, header_frame, body): - assert hasattr(method_frame, '_nr_start_time') + assert hasattr(method_frame, "_nr_start_time") assert body == BODY 
channel.basic_ack(method_frame.delivery_tag) channel.close() @@ -401,9 +441,7 @@ def on_open_channel(channel): def on_open_connection(connection): connection.channel(on_open_callback=on_open_channel) - connection = TornadoConnection( - pika.ConnectionParameters(DB_SETTINGS['host']), - on_open_callback=on_open_connection) + connection = TornadoConnection(pika.ConnectionParameters(DB_SETTINGS["host"]), on_open_callback=on_open_connection) try: connection.ioloop.start() @@ -414,31 +452,44 @@ def on_open_connection(connection): if six.PY3: - _txn_name = ('test_pika_async_connection_consume:' - 'test_select_connection_basic_consume_outside_transaction.' - '.on_message') + _txn_name = ( + "test_pika_async_connection_consume:" + "test_select_connection_basic_consume_outside_transaction." + ".on_message" + ) _test_select_connection_consume_outside_txn_metrics = [ - (('Function/test_pika_async_connection_consume:' - 'test_select_connection_basic_consume_outside_transaction.' - '.on_message'), None)] + ( + ( + "Function/test_pika_async_connection_consume:" + "test_select_connection_basic_consume_outside_transaction." + ".on_message" + ), + None, + ) + ] else: - _txn_name = ( - 'test_pika_async_connection_consume:on_message') + _txn_name = "test_pika_async_connection_consume:on_message" _test_select_connection_consume_outside_txn_metrics = [ - ('Function/test_pika_async_connection_consume:on_message', None)] + ("Function/test_pika_async_connection_consume:on_message", None) + ] # This should create a transaction @validate_transaction_metrics( - _txn_name, - scoped_metrics=_test_select_connection_consume_outside_txn_metrics, - rollup_metrics=_test_select_connection_consume_outside_txn_metrics, - background_task=True, - group='Message/RabbitMQ/Exchange/%s' % EXCHANGE) -@validate_code_level_metrics("test_pika_async_connection_consume" + (".test_select_connection_basic_consume_outside_transaction." if six.PY3 else ""), "on_message") + _txn_name, + scoped_metrics=_test_select_connection_consume_outside_txn_metrics, + rollup_metrics=_test_select_connection_consume_outside_txn_metrics, + background_task=True, + group="Message/RabbitMQ/Exchange/%s" % EXCHANGE, +) +@validate_code_level_metrics( + "test_pika_async_connection_consume" + + (".test_select_connection_basic_consume_outside_transaction." if six.PY3 else ""), + "on_message", +) def test_select_connection_basic_consume_outside_transaction(producer): def on_message(channel, method_frame, header_frame, body): - assert hasattr(method_frame, '_nr_start_time') + assert hasattr(method_frame, "_nr_start_time") assert body == BODY channel.basic_ack(method_frame.delivery_tag) channel.close() @@ -452,8 +503,8 @@ def on_open_connection(connection): connection.channel(on_open_callback=on_open_channel) connection = pika.SelectConnection( - pika.ConnectionParameters(DB_SETTINGS['host']), - on_open_callback=on_open_connection) + pika.ConnectionParameters(DB_SETTINGS["host"]), on_open_callback=on_open_connection + ) try: connection.ioloop.start() diff --git a/tests/messagebroker_pika/test_pika_blocking_connection_consume.py b/tests/messagebroker_pika/test_pika_blocking_connection_consume.py index c96d42d98..7b41674a2 100644 --- a/tests/messagebroker_pika/test_pika_blocking_connection_consume.py +++ b/tests/messagebroker_pika/test_pika_blocking_connection_consume.py @@ -12,52 +12,56 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from compat import basic_consume import functools +import os + import pika import pytest import six -import os +from compat import basic_consume +from conftest import BODY, CORRELATION_ID, EXCHANGE, HEADERS, QUEUE, REPLY_TO +from testing_support.db_settings import rabbitmq_settings +from testing_support.fixtures import capture_transaction_metrics +from testing_support.validators.validate_code_level_metrics import ( + validate_code_level_metrics, +) +from testing_support.validators.validate_transaction_metrics import ( + validate_transaction_metrics, +) +from testing_support.validators.validate_tt_collector_json import ( + validate_tt_collector_json, +) from newrelic.api.background_task import background_task from newrelic.api.transaction import end_of_transaction -from conftest import QUEUE, EXCHANGE, CORRELATION_ID, REPLY_TO, HEADERS, BODY -from testing_support.fixtures import (capture_transaction_metrics, - validate_tt_collector_json) -from testing_support.validators.validate_code_level_metrics import validate_code_level_metrics -from testing_support.db_settings import rabbitmq_settings -from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics - DB_SETTINGS = rabbitmq_settings()[0] _message_broker_tt_params = { - 'queue_name': QUEUE, - 'routing_key': QUEUE, - 'correlation_id': CORRELATION_ID, - 'reply_to': REPLY_TO, - 'headers': HEADERS.copy(), + "queue_name": QUEUE, + "routing_key": QUEUE, + "correlation_id": CORRELATION_ID, + "reply_to": REPLY_TO, + "headers": HEADERS.copy(), } _test_blocking_connection_basic_get_metrics = [ - ('MessageBroker/RabbitMQ/Exchange/Produce/Named/%s' % EXCHANGE, None), - ('MessageBroker/RabbitMQ/Exchange/Consume/Named/%s' % EXCHANGE, 1), - (('Function/pika.adapters.blocking_connection:' - '_CallbackResult.set_value_once'), 1) + ("MessageBroker/RabbitMQ/Exchange/Produce/Named/%s" % EXCHANGE, None), + ("MessageBroker/RabbitMQ/Exchange/Consume/Named/%s" % EXCHANGE, 1), + (("Function/pika.adapters.blocking_connection:" "_CallbackResult.set_value_once"), 1), ] @validate_transaction_metrics( - ('test_pika_blocking_connection_consume:' - 'test_blocking_connection_basic_get'), - scoped_metrics=_test_blocking_connection_basic_get_metrics, - rollup_metrics=_test_blocking_connection_basic_get_metrics, - background_task=True) + ("test_pika_blocking_connection_consume:" "test_blocking_connection_basic_get"), + scoped_metrics=_test_blocking_connection_basic_get_metrics, + rollup_metrics=_test_blocking_connection_basic_get_metrics, + background_task=True, +) @validate_tt_collector_json(message_broker_params=_message_broker_tt_params) @background_task() def test_blocking_connection_basic_get(producer): - with pika.BlockingConnection( - pika.ConnectionParameters(DB_SETTINGS['host'])) as connection: + with pika.BlockingConnection(pika.ConnectionParameters(DB_SETTINGS["host"])) as connection: channel = connection.channel() method_frame, _, _ = channel.basic_get(QUEUE) assert method_frame @@ -65,23 +69,22 @@ def test_blocking_connection_basic_get(producer): _test_blocking_connection_basic_get_empty_metrics = [ - ('MessageBroker/RabbitMQ/Exchange/Produce/Named/%s' % EXCHANGE, None), - ('MessageBroker/RabbitMQ/Exchange/Consume/Named/%s' % EXCHANGE, None), + ("MessageBroker/RabbitMQ/Exchange/Produce/Named/%s" % EXCHANGE, None), + ("MessageBroker/RabbitMQ/Exchange/Consume/Named/%s" % EXCHANGE, None), ] @validate_transaction_metrics( - ('test_pika_blocking_connection_consume:' - 'test_blocking_connection_basic_get_empty'), - 
scoped_metrics=_test_blocking_connection_basic_get_empty_metrics, - rollup_metrics=_test_blocking_connection_basic_get_empty_metrics, - background_task=True) + ("test_pika_blocking_connection_consume:" "test_blocking_connection_basic_get_empty"), + scoped_metrics=_test_blocking_connection_basic_get_empty_metrics, + rollup_metrics=_test_blocking_connection_basic_get_empty_metrics, + background_task=True, +) @validate_tt_collector_json(message_broker_params=_message_broker_tt_params) @background_task() def test_blocking_connection_basic_get_empty(): - QUEUE = 'test_blocking_empty-%s' % os.getpid() - with pika.BlockingConnection( - pika.ConnectionParameters(DB_SETTINGS['host'])) as connection: + QUEUE = "test_blocking_empty-%s" % os.getpid() + with pika.BlockingConnection(pika.ConnectionParameters(DB_SETTINGS["host"])) as connection: channel = connection.channel() channel.queue_declare(queue=QUEUE) @@ -97,8 +100,7 @@ def test_blocking_connection_basic_get_outside_transaction(producer): @capture_transaction_metrics(metrics_list) def test_basic_get(): - with pika.BlockingConnection( - pika.ConnectionParameters(DB_SETTINGS['host'])) as connection: + with pika.BlockingConnection(pika.ConnectionParameters(DB_SETTINGS["host"])) as connection: channel = connection.channel() channel.queue_declare(queue=QUEUE) @@ -114,46 +116,57 @@ def test_basic_get(): _test_blocking_conn_basic_consume_no_txn_metrics = [ - ('MessageBroker/RabbitMQ/Exchange/Produce/Named/%s' % EXCHANGE, None), - ('MessageBroker/RabbitMQ/Exchange/Consume/Named/%s' % EXCHANGE, None), + ("MessageBroker/RabbitMQ/Exchange/Produce/Named/%s" % EXCHANGE, None), + ("MessageBroker/RabbitMQ/Exchange/Consume/Named/%s" % EXCHANGE, None), ] if six.PY3: - _txn_name = ('test_pika_blocking_connection_consume:' - 'test_blocking_connection_basic_consume_outside_transaction.' - '.on_message') + _txn_name = ( + "test_pika_blocking_connection_consume:" + "test_blocking_connection_basic_consume_outside_transaction." + ".on_message" + ) _test_blocking_conn_basic_consume_no_txn_metrics.append( - (('Function/test_pika_blocking_connection_consume:' - 'test_blocking_connection_basic_consume_outside_transaction.' - '.on_message'), None)) + ( + ( + "Function/test_pika_blocking_connection_consume:" + "test_blocking_connection_basic_consume_outside_transaction." + ".on_message" + ), + None, + ) + ) else: - _txn_name = ('test_pika_blocking_connection_consume:' - 'on_message') + _txn_name = "test_pika_blocking_connection_consume:" "on_message" _test_blocking_conn_basic_consume_no_txn_metrics.append( - ('Function/test_pika_blocking_connection_consume:on_message', None)) + ("Function/test_pika_blocking_connection_consume:on_message", None) + ) -@pytest.mark.parametrize('as_partial', [True, False]) -@validate_code_level_metrics("test_pika_blocking_connection_consume" + (".test_blocking_connection_basic_consume_outside_transaction." if six.PY3 else ""), "on_message") +@pytest.mark.parametrize("as_partial", [True, False]) +@validate_code_level_metrics( + "test_pika_blocking_connection_consume" + + (".test_blocking_connection_basic_consume_outside_transaction." 
if six.PY3 else ""), + "on_message", +) @validate_transaction_metrics( - _txn_name, - scoped_metrics=_test_blocking_conn_basic_consume_no_txn_metrics, - rollup_metrics=_test_blocking_conn_basic_consume_no_txn_metrics, - background_task=True, - group='Message/RabbitMQ/Exchange/%s' % EXCHANGE) + _txn_name, + scoped_metrics=_test_blocking_conn_basic_consume_no_txn_metrics, + rollup_metrics=_test_blocking_conn_basic_consume_no_txn_metrics, + background_task=True, + group="Message/RabbitMQ/Exchange/%s" % EXCHANGE, +) @validate_tt_collector_json(message_broker_params=_message_broker_tt_params) -def test_blocking_connection_basic_consume_outside_transaction(producer, - as_partial): +def test_blocking_connection_basic_consume_outside_transaction(producer, as_partial): def on_message(channel, method_frame, header_frame, body): - assert hasattr(method_frame, '_nr_start_time') + assert hasattr(method_frame, "_nr_start_time") assert body == BODY channel.stop_consuming() if as_partial: on_message = functools.partial(on_message) - with pika.BlockingConnection( - pika.ConnectionParameters(DB_SETTINGS['host'])) as connection: + with pika.BlockingConnection(pika.ConnectionParameters(DB_SETTINGS["host"])) as connection: channel = connection.channel() basic_consume(channel, QUEUE, on_message) @@ -165,41 +178,51 @@ def on_message(channel, method_frame, header_frame, body): _test_blocking_conn_basic_consume_in_txn_metrics = [ - ('MessageBroker/RabbitMQ/Exchange/Produce/Named/%s' % EXCHANGE, None), - ('MessageBroker/RabbitMQ/Exchange/Consume/Named/%s' % EXCHANGE, None), + ("MessageBroker/RabbitMQ/Exchange/Produce/Named/%s" % EXCHANGE, None), + ("MessageBroker/RabbitMQ/Exchange/Consume/Named/%s" % EXCHANGE, None), ] if six.PY3: _test_blocking_conn_basic_consume_in_txn_metrics.append( - (('Function/test_pika_blocking_connection_consume:' - 'test_blocking_connection_basic_consume_inside_txn.' - '.on_message'), 1)) + ( + ( + "Function/test_pika_blocking_connection_consume:" + "test_blocking_connection_basic_consume_inside_txn." + ".on_message" + ), + 1, + ) + ) else: _test_blocking_conn_basic_consume_in_txn_metrics.append( - ('Function/test_pika_blocking_connection_consume:on_message', 1)) + ("Function/test_pika_blocking_connection_consume:on_message", 1) + ) -@pytest.mark.parametrize('as_partial', [True, False]) -@validate_code_level_metrics("test_pika_blocking_connection_consume" + (".test_blocking_connection_basic_consume_inside_txn." if six.PY3 else ""), "on_message") +@pytest.mark.parametrize("as_partial", [True, False]) +@validate_code_level_metrics( + "test_pika_blocking_connection_consume" + + (".test_blocking_connection_basic_consume_inside_txn." 
if six.PY3 else ""), + "on_message", +) @validate_transaction_metrics( - ('test_pika_blocking_connection_consume:' - 'test_blocking_connection_basic_consume_inside_txn'), - scoped_metrics=_test_blocking_conn_basic_consume_in_txn_metrics, - rollup_metrics=_test_blocking_conn_basic_consume_in_txn_metrics, - background_task=True) + ("test_pika_blocking_connection_consume:" "test_blocking_connection_basic_consume_inside_txn"), + scoped_metrics=_test_blocking_conn_basic_consume_in_txn_metrics, + rollup_metrics=_test_blocking_conn_basic_consume_in_txn_metrics, + background_task=True, +) @validate_tt_collector_json(message_broker_params=_message_broker_tt_params) @background_task() def test_blocking_connection_basic_consume_inside_txn(producer, as_partial): def on_message(channel, method_frame, header_frame, body): - assert hasattr(method_frame, '_nr_start_time') + assert hasattr(method_frame, "_nr_start_time") assert body == BODY channel.stop_consuming() if as_partial: on_message = functools.partial(on_message) - with pika.BlockingConnection( - pika.ConnectionParameters(DB_SETTINGS['host'])) as connection: + with pika.BlockingConnection(pika.ConnectionParameters(DB_SETTINGS["host"])) as connection: channel = connection.channel() basic_consume(channel, QUEUE, on_message) try: @@ -210,33 +233,40 @@ def on_message(channel, method_frame, header_frame, body): _test_blocking_conn_basic_consume_stopped_txn_metrics = [ - ('MessageBroker/RabbitMQ/Exchange/Produce/Named/%s' % EXCHANGE, None), - ('MessageBroker/RabbitMQ/Exchange/Consume/Named/%s' % EXCHANGE, None), - ('OtherTransaction/Message/RabbitMQ/Exchange/Named/%s' % EXCHANGE, None), + ("MessageBroker/RabbitMQ/Exchange/Produce/Named/%s" % EXCHANGE, None), + ("MessageBroker/RabbitMQ/Exchange/Consume/Named/%s" % EXCHANGE, None), + ("OtherTransaction/Message/RabbitMQ/Exchange/Named/%s" % EXCHANGE, None), ] if six.PY3: _test_blocking_conn_basic_consume_stopped_txn_metrics.append( - (('Function/test_pika_blocking_connection_consume:' - 'test_blocking_connection_basic_consume_stopped_txn.' - '.on_message'), None)) + ( + ( + "Function/test_pika_blocking_connection_consume:" + "test_blocking_connection_basic_consume_stopped_txn." 
+ ".on_message" + ), + None, + ) + ) else: _test_blocking_conn_basic_consume_stopped_txn_metrics.append( - ('Function/test_pika_blocking_connection_consume:on_message', None)) + ("Function/test_pika_blocking_connection_consume:on_message", None) + ) -@pytest.mark.parametrize('as_partial', [True, False]) +@pytest.mark.parametrize("as_partial", [True, False]) @validate_transaction_metrics( - ('test_pika_blocking_connection_consume:' - 'test_blocking_connection_basic_consume_stopped_txn'), - scoped_metrics=_test_blocking_conn_basic_consume_stopped_txn_metrics, - rollup_metrics=_test_blocking_conn_basic_consume_stopped_txn_metrics, - background_task=True) + ("test_pika_blocking_connection_consume:" "test_blocking_connection_basic_consume_stopped_txn"), + scoped_metrics=_test_blocking_conn_basic_consume_stopped_txn_metrics, + rollup_metrics=_test_blocking_conn_basic_consume_stopped_txn_metrics, + background_task=True, +) @validate_tt_collector_json(message_broker_params=_message_broker_tt_params) @background_task() def test_blocking_connection_basic_consume_stopped_txn(producer, as_partial): def on_message(channel, method_frame, header_frame, body): - assert hasattr(method_frame, '_nr_start_time') + assert hasattr(method_frame, "_nr_start_time") assert body == BODY channel.stop_consuming() @@ -245,8 +275,7 @@ def on_message(channel, method_frame, header_frame, body): if as_partial: on_message = functools.partial(on_message) - with pika.BlockingConnection( - pika.ConnectionParameters(DB_SETTINGS['host'])) as connection: + with pika.BlockingConnection(pika.ConnectionParameters(DB_SETTINGS["host"])) as connection: channel = connection.channel() basic_consume(channel, QUEUE, on_message) try: diff --git a/tests/messagebroker_pika/test_pika_blocking_connection_consume_generator.py b/tests/messagebroker_pika/test_pika_blocking_connection_consume_generator.py index 4fff11487..816b28323 100644 --- a/tests/messagebroker_pika/test_pika_blocking_connection_consume_generator.py +++ b/tests/messagebroker_pika/test_pika_blocking_connection_consume_generator.py @@ -13,65 +13,66 @@ # limitations under the License. 
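+
+# The tests below drive pika's channel.consume() generator and assert that the
+# agent instrumentation has tagged each delivered method frame with
+# _nr_start_time and recorded the expected MessageBroker metrics for the
+# exchange.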
import pika +from conftest import BODY, CORRELATION_ID, EXCHANGE, HEADERS, QUEUE, REPLY_TO +from testing_support.db_settings import rabbitmq_settings +from testing_support.validators.validate_transaction_metrics import ( + validate_transaction_metrics, +) +from testing_support.validators.validate_tt_collector_json import ( + validate_tt_collector_json, +) from newrelic.api.background_task import background_task -from conftest import QUEUE, EXCHANGE, CORRELATION_ID, REPLY_TO, HEADERS, BODY -from testing_support.fixtures import validate_tt_collector_json -from testing_support.db_settings import rabbitmq_settings -from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics - DB_SETTINGS = rabbitmq_settings()[0] _message_broker_tt_params = { - 'queue_name': QUEUE, - 'routing_key': QUEUE, - 'correlation_id': CORRELATION_ID, - 'reply_to': REPLY_TO, - 'headers': HEADERS.copy(), + "queue_name": QUEUE, + "routing_key": QUEUE, + "correlation_id": CORRELATION_ID, + "reply_to": REPLY_TO, + "headers": HEADERS.copy(), } _test_blocking_connection_consume_metrics = [ - ('MessageBroker/RabbitMQ/Exchange/Produce/Named/%s' % EXCHANGE, None), - ('MessageBroker/RabbitMQ/Exchange/Consume/Named/%s' % EXCHANGE, None), - ('MessageBroker/RabbitMQ/Exchange/Consume/Named/Unknown', None), + ("MessageBroker/RabbitMQ/Exchange/Produce/Named/%s" % EXCHANGE, None), + ("MessageBroker/RabbitMQ/Exchange/Consume/Named/%s" % EXCHANGE, None), + ("MessageBroker/RabbitMQ/Exchange/Consume/Named/Unknown", None), ] @validate_transaction_metrics( - ('test_pika_blocking_connection_consume_generator:' - 'test_blocking_connection_consume_break'), - scoped_metrics=_test_blocking_connection_consume_metrics, - rollup_metrics=_test_blocking_connection_consume_metrics, - background_task=True) + ("test_pika_blocking_connection_consume_generator:" "test_blocking_connection_consume_break"), + scoped_metrics=_test_blocking_connection_consume_metrics, + rollup_metrics=_test_blocking_connection_consume_metrics, + background_task=True, +) @validate_tt_collector_json(message_broker_params=_message_broker_tt_params) @background_task() def test_blocking_connection_consume_break(producer): - with pika.BlockingConnection( - pika.ConnectionParameters(DB_SETTINGS['host'])) as connection: + with pika.BlockingConnection(pika.ConnectionParameters(DB_SETTINGS["host"])) as connection: channel = connection.channel() for method_frame, properties, body in channel.consume(QUEUE): - assert hasattr(method_frame, '_nr_start_time') + assert hasattr(method_frame, "_nr_start_time") assert body == BODY break @validate_transaction_metrics( - ('test_pika_blocking_connection_consume_generator:' - 'test_blocking_connection_consume_connection_close'), - scoped_metrics=_test_blocking_connection_consume_metrics, - rollup_metrics=_test_blocking_connection_consume_metrics, - background_task=True) + ("test_pika_blocking_connection_consume_generator:" "test_blocking_connection_consume_connection_close"), + scoped_metrics=_test_blocking_connection_consume_metrics, + rollup_metrics=_test_blocking_connection_consume_metrics, + background_task=True, +) @validate_tt_collector_json(message_broker_params=_message_broker_tt_params) @background_task() def test_blocking_connection_consume_connection_close(producer): - connection = pika.BlockingConnection( - pika.ConnectionParameters(DB_SETTINGS['host'])) + connection = pika.BlockingConnection(pika.ConnectionParameters(DB_SETTINGS["host"])) channel = connection.channel() try: for method_frame, properties, 
body in channel.consume(QUEUE): - assert hasattr(method_frame, '_nr_start_time') + assert hasattr(method_frame, "_nr_start_time") assert body == BODY channel.close() connection.close() @@ -82,16 +83,15 @@ def test_blocking_connection_consume_connection_close(producer): @validate_transaction_metrics( - ('test_pika_blocking_connection_consume_generator:' - 'test_blocking_connection_consume_timeout'), - scoped_metrics=_test_blocking_connection_consume_metrics, - rollup_metrics=_test_blocking_connection_consume_metrics, - background_task=True) + ("test_pika_blocking_connection_consume_generator:" "test_blocking_connection_consume_timeout"), + scoped_metrics=_test_blocking_connection_consume_metrics, + rollup_metrics=_test_blocking_connection_consume_metrics, + background_task=True, +) @validate_tt_collector_json(message_broker_params=_message_broker_tt_params) @background_task() def test_blocking_connection_consume_timeout(producer): - with pika.BlockingConnection( - pika.ConnectionParameters(DB_SETTINGS['host'])) as connection: + with pika.BlockingConnection(pika.ConnectionParameters(DB_SETTINGS["host"])) as connection: channel = connection.channel() for result in channel.consume(QUEUE, inactivity_timeout=0.01): @@ -99,7 +99,7 @@ def test_blocking_connection_consume_timeout(producer): if result and any(result): method_frame, properties, body = result channel.basic_ack(method_frame.delivery_tag) - assert hasattr(method_frame, '_nr_start_time') + assert hasattr(method_frame, "_nr_start_time") assert body == BODY else: # timeout hit! @@ -107,16 +107,15 @@ def test_blocking_connection_consume_timeout(producer): @validate_transaction_metrics( - ('test_pika_blocking_connection_consume_generator:' - 'test_blocking_connection_consume_exception_in_for_loop'), - scoped_metrics=_test_blocking_connection_consume_metrics, - rollup_metrics=_test_blocking_connection_consume_metrics, - background_task=True) + ("test_pika_blocking_connection_consume_generator:" "test_blocking_connection_consume_exception_in_for_loop"), + scoped_metrics=_test_blocking_connection_consume_metrics, + rollup_metrics=_test_blocking_connection_consume_metrics, + background_task=True, +) @validate_tt_collector_json(message_broker_params=_message_broker_tt_params) @background_task() def test_blocking_connection_consume_exception_in_for_loop(producer): - with pika.BlockingConnection( - pika.ConnectionParameters(DB_SETTINGS['host'])) as connection: + with pika.BlockingConnection(pika.ConnectionParameters(DB_SETTINGS["host"])) as connection: channel = connection.channel() try: @@ -128,29 +127,28 @@ def test_blocking_connection_consume_exception_in_for_loop(producer): # Expected error pass except Exception as e: - assert False, 'Wrong exception was raised: %s' % e + assert False, "Wrong exception was raised: %s" % e else: - assert False, 'No exception was raised!' + assert False, "No exception was raised!" 
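+
+# A minimal sketch of the pattern exercised above (the exception type is
+# illustrative, not the one the test raises): an error raised inside the
+# for-loop body propagates out of the consume() generator, and the
+# surrounding transaction is still recorded.
+#
+#     for method_frame, properties, body in channel.consume(QUEUE):
+#         raise ValueError("oops")  # escapes the loop and ends consumption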
_test_blocking_connection_consume_empty_metrics = [ - ('MessageBroker/RabbitMQ/Exchange/Produce/Named/%s' % EXCHANGE, None), - ('MessageBroker/RabbitMQ/Exchange/Consume/Named/%s' % EXCHANGE, None), - ('MessageBroker/RabbitMQ/Exchange/Consume/Named/Unknown', None), + ("MessageBroker/RabbitMQ/Exchange/Produce/Named/%s" % EXCHANGE, None), + ("MessageBroker/RabbitMQ/Exchange/Consume/Named/%s" % EXCHANGE, None), + ("MessageBroker/RabbitMQ/Exchange/Consume/Named/Unknown", None), ] @validate_transaction_metrics( - ('test_pika_blocking_connection_consume_generator:' - 'test_blocking_connection_consume_exception_in_generator'), - scoped_metrics=_test_blocking_connection_consume_empty_metrics, - rollup_metrics=_test_blocking_connection_consume_empty_metrics, - background_task=True) + ("test_pika_blocking_connection_consume_generator:" "test_blocking_connection_consume_exception_in_generator"), + scoped_metrics=_test_blocking_connection_consume_empty_metrics, + rollup_metrics=_test_blocking_connection_consume_empty_metrics, + background_task=True, +) @validate_tt_collector_json(message_broker_params=_message_broker_tt_params) @background_task() def test_blocking_connection_consume_exception_in_generator(): - with pika.BlockingConnection( - pika.ConnectionParameters(DB_SETTINGS['host'])) as connection: + with pika.BlockingConnection(pika.ConnectionParameters(DB_SETTINGS["host"])) as connection: channel = connection.channel() try: @@ -161,29 +159,28 @@ def test_blocking_connection_consume_exception_in_generator(): # Expected error pass except Exception as e: - assert False, 'Wrong exception was raised: %s' % e + assert False, "Wrong exception was raised: %s" % e else: - assert False, 'No exception was raised!' + assert False, "No exception was raised!" _test_blocking_connection_consume_many_metrics = [ - ('MessageBroker/RabbitMQ/Exchange/Produce/Named/%s' % EXCHANGE, None), - ('MessageBroker/RabbitMQ/Exchange/Consume/Named/%s' % EXCHANGE, None), - ('MessageBroker/RabbitMQ/Exchange/Consume/Named/Unknown', None), + ("MessageBroker/RabbitMQ/Exchange/Produce/Named/%s" % EXCHANGE, None), + ("MessageBroker/RabbitMQ/Exchange/Consume/Named/%s" % EXCHANGE, None), + ("MessageBroker/RabbitMQ/Exchange/Consume/Named/Unknown", None), ] @validate_transaction_metrics( - ('test_pika_blocking_connection_consume_generator:' - 'test_blocking_connection_consume_many'), - scoped_metrics=_test_blocking_connection_consume_many_metrics, - rollup_metrics=_test_blocking_connection_consume_many_metrics, - background_task=True) + ("test_pika_blocking_connection_consume_generator:" "test_blocking_connection_consume_many"), + scoped_metrics=_test_blocking_connection_consume_many_metrics, + rollup_metrics=_test_blocking_connection_consume_many_metrics, + background_task=True, +) @validate_tt_collector_json(message_broker_params=_message_broker_tt_params) @background_task() def test_blocking_connection_consume_many(produce_five): - with pika.BlockingConnection( - pika.ConnectionParameters(DB_SETTINGS['host'])) as connection: + with pika.BlockingConnection(pika.ConnectionParameters(DB_SETTINGS["host"])) as connection: channel = connection.channel() consumed = 0 @@ -196,22 +193,21 @@ def test_blocking_connection_consume_many(produce_five): @validate_transaction_metrics( - ('test_pika_blocking_connection_consume_generator:' - 'test_blocking_connection_consume_using_methods'), - scoped_metrics=_test_blocking_connection_consume_metrics, - rollup_metrics=_test_blocking_connection_consume_metrics, - background_task=True) + 
("test_pika_blocking_connection_consume_generator:" "test_blocking_connection_consume_using_methods"), + scoped_metrics=_test_blocking_connection_consume_metrics, + rollup_metrics=_test_blocking_connection_consume_metrics, + background_task=True, +) @validate_tt_collector_json(message_broker_params=_message_broker_tt_params) @background_task() def test_blocking_connection_consume_using_methods(producer): - with pika.BlockingConnection( - pika.ConnectionParameters(DB_SETTINGS['host'])) as connection: + with pika.BlockingConnection(pika.ConnectionParameters(DB_SETTINGS["host"])) as connection: channel = connection.channel() consumer = channel.consume(QUEUE, inactivity_timeout=0.01) method, properties, body = next(consumer) - assert hasattr(method, '_nr_start_time') + assert hasattr(method, "_nr_start_time") assert body == BODY result = next(consumer) @@ -224,28 +220,28 @@ def test_blocking_connection_consume_using_methods(producer): pass else: # this is not - assert False, 'No exception was raised!' + assert False, "No exception was raised!" result = consumer.close() assert result is None @validate_transaction_metrics( - 'Named/%s' % EXCHANGE, - scoped_metrics=_test_blocking_connection_consume_metrics, - rollup_metrics=_test_blocking_connection_consume_metrics, - background_task=True, - group='Message/RabbitMQ/Exchange') + "Named/%s" % EXCHANGE, + scoped_metrics=_test_blocking_connection_consume_metrics, + rollup_metrics=_test_blocking_connection_consume_metrics, + background_task=True, + group="Message/RabbitMQ/Exchange", +) @validate_tt_collector_json(message_broker_params=_message_broker_tt_params) def test_blocking_connection_consume_outside_txn(producer): - with pika.BlockingConnection( - pika.ConnectionParameters(DB_SETTINGS['host'])) as connection: + with pika.BlockingConnection(pika.ConnectionParameters(DB_SETTINGS["host"])) as connection: channel = connection.channel() consumer = channel.consume(QUEUE) try: for method_frame, properties, body in consumer: - assert hasattr(method_frame, '_nr_start_time') + assert hasattr(method_frame, "_nr_start_time") assert body == BODY break finally: @@ -254,26 +250,24 @@ def test_blocking_connection_consume_outside_txn(producer): def test_blocking_connection_consume_many_outside_txn(produce_five): - @validate_transaction_metrics( - 'Named/%s' % EXCHANGE, - scoped_metrics=_test_blocking_connection_consume_metrics, - rollup_metrics=_test_blocking_connection_consume_metrics, - background_task=True, - group='Message/RabbitMQ/Exchange') - @validate_tt_collector_json( - message_broker_params=_message_broker_tt_params) + "Named/%s" % EXCHANGE, + scoped_metrics=_test_blocking_connection_consume_metrics, + rollup_metrics=_test_blocking_connection_consume_metrics, + background_task=True, + group="Message/RabbitMQ/Exchange", + ) + @validate_tt_collector_json(message_broker_params=_message_broker_tt_params) def consume_it(consumer, up_next=None): if up_next is None: method_frame, properties, body = next(consumer) else: method_frame, properties, body = up_next - assert hasattr(method_frame, '_nr_start_time') + assert hasattr(method_frame, "_nr_start_time") assert body == BODY return next(consumer) - with pika.BlockingConnection( - pika.ConnectionParameters(DB_SETTINGS['host'])) as connection: + with pika.BlockingConnection(pika.ConnectionParameters(DB_SETTINGS["host"])) as connection: channel = connection.channel() consumer = channel.consume(QUEUE) @@ -288,21 +282,21 @@ def consume_it(consumer, up_next=None): @validate_transaction_metrics( - 'Named/%s' % 
EXCHANGE, - scoped_metrics=_test_blocking_connection_consume_metrics, - rollup_metrics=_test_blocking_connection_consume_metrics, - background_task=True, - group='Message/RabbitMQ/Exchange') + "Named/%s" % EXCHANGE, + scoped_metrics=_test_blocking_connection_consume_metrics, + rollup_metrics=_test_blocking_connection_consume_metrics, + background_task=True, + group="Message/RabbitMQ/Exchange", +) @validate_tt_collector_json(message_broker_params=_message_broker_tt_params) def test_blocking_connection_consume_using_methods_outside_txn(producer): - with pika.BlockingConnection( - pika.ConnectionParameters(DB_SETTINGS['host'])) as connection: + with pika.BlockingConnection(pika.ConnectionParameters(DB_SETTINGS["host"])) as connection: channel = connection.channel() consumer = channel.consume(QUEUE, inactivity_timeout=0.01) method, properties, body = next(consumer) - assert hasattr(method, '_nr_start_time') + assert hasattr(method, "_nr_start_time") assert body == BODY result = next(consumer) @@ -315,22 +309,21 @@ def test_blocking_connection_consume_using_methods_outside_txn(producer): pass else: # this is not - assert False, 'No exception was raised!' + assert False, "No exception was raised!" result = consumer.close() assert result is None @validate_transaction_metrics( - ('test_pika_blocking_connection_consume_generator:' - 'test_blocking_connection_consume_exception_on_creation'), - scoped_metrics=_test_blocking_connection_consume_empty_metrics, - rollup_metrics=_test_blocking_connection_consume_empty_metrics, - background_task=True) + ("test_pika_blocking_connection_consume_generator:" "test_blocking_connection_consume_exception_on_creation"), + scoped_metrics=_test_blocking_connection_consume_empty_metrics, + rollup_metrics=_test_blocking_connection_consume_empty_metrics, + background_task=True, +) @background_task() def test_blocking_connection_consume_exception_on_creation(): - with pika.BlockingConnection( - pika.ConnectionParameters(DB_SETTINGS['host'])) as connection: + with pika.BlockingConnection(pika.ConnectionParameters(DB_SETTINGS["host"])) as connection: channel = connection.channel() try: @@ -340,4 +333,4 @@ def test_blocking_connection_consume_exception_on_creation(): pass else: # this is not - assert False, 'TypeError was not raised' + assert False, "TypeError was not raised" diff --git a/tests/messagebroker_pika/test_pika_produce.py b/tests/messagebroker_pika/test_pika_produce.py index 0960159fa..dbc9af030 100644 --- a/tests/messagebroker_pika/test_pika_produce.py +++ b/tests/messagebroker_pika/test_pika_produce.py @@ -15,14 +15,16 @@ import pika import pytest from testing_support.db_settings import rabbitmq_settings -from testing_support.fixtures import ( - override_application_settings, - validate_tt_collector_json, -) +from testing_support.fixtures import override_application_settings from testing_support.validators.validate_messagebroker_headers import ( validate_messagebroker_headers, ) -from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics +from testing_support.validators.validate_transaction_metrics import ( + validate_transaction_metrics, +) +from testing_support.validators.validate_tt_collector_json import ( + validate_tt_collector_json, +) from newrelic.api.background_task import background_task from newrelic.api.transaction import current_transaction @@ -46,7 +48,7 @@ def cache_pika_headers(wrapped, instance, args, kwargs): QUEUE = "test-pika-queue" CORRELATION_ID = "testingpika" REPLY_TO = "testing" -HEADERS = 
{u"MYHEADER": u"pikatest"} +HEADERS = {"MYHEADER": "pikatest"} _message_broker_tt_included_params = { "routing_key": QUEUE, diff --git a/tests/testing_support/fixtures.py b/tests/testing_support/fixtures.py index 9e99e89b2..f642d1f6f 100644 --- a/tests/testing_support/fixtures.py +++ b/tests/testing_support/fixtures.py @@ -54,7 +54,6 @@ transient_function_wrapper, wrap_function_wrapper, ) -from newrelic.common.system_info import LOCALHOST_EQUIVALENTS from newrelic.config import initialize from newrelic.core.agent import shutdown_agent from newrelic.core.attribute import create_attributes @@ -89,14 +88,6 @@ def _environ_as_bool(name, default=False): return flag -def _lookup_string_table(name, string_table, default=None): - try: - index = int(name.lstrip("`")) - return string_table[index] - except ValueError: - return default - - if _environ_as_bool("NEW_RELIC_HIGH_SECURITY"): DeveloperModeClient.RESPONSES["connect"]["high_security"] = True @@ -444,338 +435,6 @@ def check_event_attributes(event_data, required_params=None, forgone_params=None assert intrinsics[param] == value, ((param, value), intrinsics) -def validate_application_error_trace_count(num_errors): - """Validate error event data for a single error occurring outside of a - transaction. - """ - - @function_wrapper - def _validate_application_error_trace_count(wrapped, instace, args, kwargs): - - try: - result = wrapped(*args, **kwargs) - except: - raise - else: - - stats = core_application_stats_engine(None) - assert len(stats.error_data()) == num_errors - - return result - - return _validate_application_error_trace_count - - -def validate_application_error_event_count(num_errors): - """Validate error event data for a single error occurring outside of a - transaction. - """ - - @function_wrapper - def _validate_application_error_event_count(wrapped, instace, args, kwargs): - - try: - result = wrapped(*args, **kwargs) - except: - raise - else: - - stats = core_application_stats_engine(None) - assert len(list(stats.error_events)) == num_errors - - return result - - return _validate_application_error_event_count - - -def validate_synthetics_transaction_trace(required_params=None, forgone_params=None, should_exist=True): - required_params = required_params or {} - forgone_params = forgone_params or {} - - @transient_function_wrapper("newrelic.core.stats_engine", "StatsEngine.record_transaction") - def _validate_synthetics_transaction_trace(wrapped, instance, args, kwargs): - try: - result = wrapped(*args, **kwargs) - except: - raise - else: - - # Now that transaction has been recorded, generate - # a transaction trace - - connections = SQLConnections() - trace_data = instance.transaction_trace_data(connections) - - # Check that synthetics resource id is in TT header - - header = trace_data[0] - header_key = "synthetics_resource_id" - - if should_exist: - assert header_key in required_params - assert header[9] == required_params[header_key], "name=%r, header=%r" % (header_key, header) - else: - assert header[9] is None - - # Check that synthetics ids are in TT custom params - - pack_data = unpack_field(trace_data[0][4]) - tt_intrinsics = pack_data[0][4]["intrinsics"] - - for name in required_params: - assert name in tt_intrinsics, "name=%r, intrinsics=%r" % (name, tt_intrinsics) - assert tt_intrinsics[name] == required_params[name], "name=%r, value=%r, intrinsics=%r" % ( - name, - required_params[name], - tt_intrinsics, - ) - - for name in forgone_params: - assert name not in tt_intrinsics, "name=%r, intrinsics=%r" % (name, 
tt_intrinsics) - - return result - - return _validate_synthetics_transaction_trace - - -def validate_tt_collector_json( - required_params=None, - forgone_params=None, - should_exist=True, - datastore_params=None, - datastore_forgone_params=None, - message_broker_params=None, - message_broker_forgone_params=None, - exclude_request_uri=False, -): - """make assertions based off the cross-agent spec on transaction traces""" - required_params = required_params or {} - forgone_params = forgone_params or {} - datastore_params = datastore_params or {} - datastore_forgone_params = datastore_forgone_params or {} - message_broker_params = message_broker_params or {} - message_broker_forgone_params = message_broker_forgone_params or [] - - @function_wrapper - def _validate_wrapper(wrapped, instance, args, kwargs): - - traces_recorded = [] - - @transient_function_wrapper("newrelic.core.stats_engine", "StatsEngine.record_transaction") - def _validate_tt_collector_json(wrapped, instance, args, kwargs): - - result = wrapped(*args, **kwargs) - - # Now that transaction has been recorded, generate - # a transaction trace - - connections = SQLConnections() - trace_data = instance.transaction_trace_data(connections) - traces_recorded.append(trace_data) - - return result - - def _validate_trace(trace): - assert isinstance(trace[0], float) # absolute start time (ms) - assert isinstance(trace[1], float) # duration (ms) - assert trace[0] > 0 # absolute time (ms) - assert isinstance(trace[2], six.string_types) # transaction name - if trace[2].startswith("WebTransaction"): - if exclude_request_uri: - assert trace[3] is None # request url - else: - assert isinstance(trace[3], six.string_types) - # query parameters should not be captured - assert "?" not in trace[3] - - # trace details -- python agent always uses condensed trace array - - trace_details, string_table = unpack_field(trace[4]) - assert len(trace_details) == 5 - assert isinstance(trace_details[0], float) # start time (ms) - - # the next two items should be empty dicts, old parameters stuff, - # placeholders for now - - assert isinstance(trace_details[1], dict) - assert len(trace_details[1]) == 0 - assert isinstance(trace_details[2], dict) - assert len(trace_details[2]) == 0 - - # root node in slot 3 - - root_node = trace_details[3] - assert isinstance(root_node[0], float) # entry timestamp - assert isinstance(root_node[1], float) # exit timestamp - assert root_node[2] == "ROOT" - assert isinstance(root_node[3], dict) - assert len(root_node[3]) == 0 # spec shows empty (for root) - children = root_node[4] - assert isinstance(children, list) - - # there are two optional items at the end of trace segments, - # class name that segment is in, and method name function is in; - # Python agent does not use these (only Java does) - - # let's just test the first child - trace_segment = children[0] - assert isinstance(trace_segment[0], float) # entry timestamp - assert isinstance(trace_segment[1], float) # exit timestamp - assert isinstance(trace_segment[2], six.string_types) # scope - assert isinstance(trace_segment[3], dict) # request params - assert isinstance(trace_segment[4], list) # children - - assert trace_segment[0] >= root_node[0] # trace starts after root - - def _check_params_and_start_time(node): - children = node[4] - for child in children: - assert child[0] >= node[0] # child started after parent - _check_params_and_start_time(child) - - params = node[3] - assert isinstance(params, dict) - - # We should always report exclusive_duration_millis on a - # 
segment. This allows us to override exclusive time - # calculations on APM. - assert "exclusive_duration_millis" in params - assert isinstance(params["exclusive_duration_millis"], float) - - segment_name = _lookup_string_table(node[2], string_table, default=node[2]) - if segment_name.startswith("Datastore"): - for key in datastore_params: - assert key in params, key - assert params[key] == datastore_params[key] - for key in datastore_forgone_params: - assert key not in params, key - - # if host is reported, it cannot be localhost - if "host" in params: - assert params["host"] not in LOCALHOST_EQUIVALENTS - - elif segment_name.startswith("MessageBroker"): - for key in message_broker_params: - assert key in params, key - assert params[key] == message_broker_params[key] - for key in message_broker_forgone_params: - assert key not in params, key - - _check_params_and_start_time(trace_segment) - - attributes = trace_details[4] - - assert "intrinsics" in attributes - assert "userAttributes" in attributes - assert "agentAttributes" in attributes - - assert isinstance(trace[5], six.string_types) # GUID - assert trace[6] is None # reserved for future use - assert trace[7] is False # deprecated force persist flag - - # x-ray session ID - - assert trace[8] is None - - # Synthetics ID - - assert trace[9] is None or isinstance(trace[9], six.string_types) - - assert isinstance(string_table, list) - for name in string_table: - assert isinstance(name, six.string_types) # metric name - - _new_wrapper = _validate_tt_collector_json(wrapped) - val = _new_wrapper(*args, **kwargs) - trace_data = traces_recorded.pop() - trace = trace_data[0] # 1st trace - _validate_trace(trace) - return val - - return _validate_wrapper - - -def validate_transaction_trace_attributes( - required_params=None, forgone_params=None, should_exist=True, url=None, index=-1 -): - required_params = required_params or {} - forgone_params = forgone_params or {} - - trace_data = [] - - @transient_function_wrapper("newrelic.core.stats_engine", "StatsEngine.record_transaction") - def _validate_transaction_trace_attributes(wrapped, instance, args, kwargs): - - result = wrapped(*args, **kwargs) - - # Now that transaction has been recorded, generate - # a transaction trace - - connections = SQLConnections() - _trace_data = instance.transaction_trace_data(connections) - trace_data.append(_trace_data) - - return result - - @function_wrapper - def wrapper(wrapped, instance, args, kwargs): - _new_wrapper = _validate_transaction_trace_attributes(wrapped) - result = _new_wrapper(*args, **kwargs) - - _trace_data = trace_data[index] - trace_data[:] = [] - - if url is not None: - trace_url = _trace_data[0][3] - assert url == trace_url - - pack_data = unpack_field(_trace_data[0][4]) - assert len(pack_data) == 2 - assert len(pack_data[0]) == 5 - parameters = pack_data[0][4] - - assert "intrinsics" in parameters - assert "userAttributes" in parameters - assert "agentAttributes" in parameters - - check_attributes(parameters, required_params, forgone_params) - - return result - - return wrapper - - -def validate_transaction_error_trace_attributes(required_params=None, forgone_params=None, exact_attrs=None): - """Check the error trace for attributes, expect only one error to be - present in the transaction. 
- """ - required_params = required_params or {} - forgone_params = forgone_params or {} - exact_attrs = exact_attrs or {} - - @transient_function_wrapper("newrelic.core.stats_engine", "StatsEngine.record_transaction") - def _validate_transaction_error_trace(wrapped, instance, args, kwargs): - try: - result = wrapped(*args, **kwargs) - except: - raise - else: - - error_data = instance.error_data() - - # there should be only one error - assert len(error_data) == 1 - traced_error = error_data[0] - - check_error_attributes( - traced_error.parameters, required_params, forgone_params, exact_attrs, is_transaction=True - ) - - return result - - return _validate_transaction_error_trace - - def check_error_attributes( parameters, required_params=None, forgone_params=None, exact_attrs=None, is_transaction=True ): @@ -833,134 +492,6 @@ def check_attributes(parameters, required_params=None, forgone_params=None, exac assert intrinsics[param] == value, ((param, value), intrinsics) -def validate_error_trace_collector_json(): - @transient_function_wrapper("newrelic.core.stats_engine", "StatsEngine.record_transaction") - def _validate_error_trace_collector_json(wrapped, instance, args, kwargs): - try: - result = wrapped(*args, **kwargs) - except: - raise - else: - errors = instance.error_data() - - # recreate what happens right before data is sent to the collector - # in data_collector.py via ApplicationSession.send_errors - agent_run_id = 666 - payload = (agent_run_id, errors) - collector_json = json_encode(payload) - - decoded_json = json.loads(collector_json) - - assert decoded_json[0] == agent_run_id - err = decoded_json[1][0] - assert len(err) == 5 - assert isinstance(err[0], (int, float)) - assert isinstance(err[1], six.string_types) # path - assert isinstance(err[2], six.string_types) # error message - assert isinstance(err[3], six.string_types) # exception name - parameters = err[4] - - parameter_fields = ["userAttributes", "stack_trace", "agentAttributes", "intrinsics"] - - for field in parameter_fields: - assert field in parameters - - assert "request_uri" not in parameters - - return result - - return _validate_error_trace_collector_json - - -def validate_error_event_collector_json(num_errors=1): - """Validate the format, types and number of errors of the data we - send to the collector for harvest. 
- """ - - @transient_function_wrapper("newrelic.core.stats_engine", "StatsEngine.record_transaction") - def _validate_error_event_collector_json(wrapped, instance, args, kwargs): - try: - result = wrapped(*args, **kwargs) - except: - raise - else: - - samples = list(instance.error_events) - s_info = instance.error_events.sampling_info - agent_run_id = 666 - - # emulate the payload used in data_collector.py - - payload = (agent_run_id, s_info, samples) - collector_json = json_encode(payload) - - decoded_json = json.loads(collector_json) - - assert decoded_json[0] == agent_run_id - - sampling_info = decoded_json[1] - - harvest_config = instance.settings.event_harvest_config - reservoir_size = harvest_config.harvest_limits.error_event_data - - assert sampling_info["reservoir_size"] == reservoir_size - assert sampling_info["events_seen"] == num_errors - - error_events = decoded_json[2] - - assert len(error_events) == num_errors - for event in error_events: - - # event is an array containing intrinsics, user-attributes, - # and agent-attributes - - assert len(event) == 3 - for d in event: - assert isinstance(d, dict) - - return result - - return _validate_error_event_collector_json - - -def validate_transaction_event_collector_json(): - @transient_function_wrapper("newrelic.core.stats_engine", "StatsEngine.record_transaction") - def _validate_transaction_event_collector_json(wrapped, instance, args, kwargs): - try: - result = wrapped(*args, **kwargs) - except: - raise - else: - samples = list(instance.transaction_events) - - # recreate what happens right before data is sent to the collector - # in data_collector.py during the harvest via analytic_event_data - agent_run_id = 666 - payload = (agent_run_id, samples) - collector_json = json_encode(payload) - - decoded_json = json.loads(collector_json) - - assert decoded_json[0] == agent_run_id - - # list of events - - events = decoded_json[1] - - for event in events: - - # event is an array containing intrinsics, user-attributes, - # and agent-attributes - - assert len(event) == 3 - for d in event: - assert isinstance(d, dict) - - return result - - return _validate_transaction_event_collector_json - - def validate_custom_event_collector_json(num_events=1): """Validate the format, types and number of custom events.""" diff --git a/tests/testing_support/validators/validate_application_error_event_count.py b/tests/testing_support/validators/validate_application_error_event_count.py new file mode 100644 index 000000000..8812ed7ed --- /dev/null +++ b/tests/testing_support/validators/validate_application_error_event_count.py @@ -0,0 +1,39 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from testing_support.fixtures import core_application_stats_engine + +from newrelic.common.object_wrapper import function_wrapper + + +def validate_application_error_event_count(num_errors): + """Validate error event data for a single error occurring outside of a + transaction. 
+    """
+
+    @function_wrapper
+    def _validate_application_error_event_count(wrapped, instance, args, kwargs):
+
+        try:
+            result = wrapped(*args, **kwargs)
+        except:
+            raise
+        else:
+
+            stats = core_application_stats_engine(None)
+            assert len(list(stats.error_events)) == num_errors
+
+        return result
+
+    return _validate_application_error_event_count
diff --git a/tests/testing_support/validators/validate_application_error_trace_count.py b/tests/testing_support/validators/validate_application_error_trace_count.py
new file mode 100644
index 000000000..d700ea5eb
--- /dev/null
+++ b/tests/testing_support/validators/validate_application_error_trace_count.py
@@ -0,0 +1,39 @@
+# Copyright 2010 New Relic, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from testing_support.fixtures import core_application_stats_engine
+
+from newrelic.common.object_wrapper import function_wrapper
+
+
+def validate_application_error_trace_count(num_errors):
+    """Validate the number of error traces recorded for errors
+    occurring outside of a transaction.
+    """
+
+    @function_wrapper
+    def _validate_application_error_trace_count(wrapped, instance, args, kwargs):
+
+        try:
+            result = wrapped(*args, **kwargs)
+        except:
+            raise
+        else:
+
+            stats = core_application_stats_engine(None)
+            assert len(stats.error_data()) == num_errors
+
+        return result
+
+    return _validate_application_error_trace_count
diff --git a/tests/testing_support/validators/validate_error_event_collector_json.py b/tests/testing_support/validators/validate_error_event_collector_json.py
new file mode 100644
index 000000000..44940818f
--- /dev/null
+++ b/tests/testing_support/validators/validate_error_event_collector_json.py
@@ -0,0 +1,69 @@
+# Copyright 2010 New Relic, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import json
+
+from newrelic.common.encoding_utils import json_encode
+from newrelic.common.object_wrapper import transient_function_wrapper
+
+
+def validate_error_event_collector_json(num_errors=1):
+    """Validate the format, types and number of errors of the data we
+    send to the collector for harvest.
+ """ + + @transient_function_wrapper("newrelic.core.stats_engine", "StatsEngine.record_transaction") + def _validate_error_event_collector_json(wrapped, instance, args, kwargs): + try: + result = wrapped(*args, **kwargs) + except: + raise + else: + + samples = list(instance.error_events) + s_info = instance.error_events.sampling_info + agent_run_id = 666 + + # emulate the payload used in data_collector.py + + payload = (agent_run_id, s_info, samples) + collector_json = json_encode(payload) + + decoded_json = json.loads(collector_json) + + assert decoded_json[0] == agent_run_id + + sampling_info = decoded_json[1] + + harvest_config = instance.settings.event_harvest_config + reservoir_size = harvest_config.harvest_limits.error_event_data + + assert sampling_info["reservoir_size"] == reservoir_size + assert sampling_info["events_seen"] == num_errors + + error_events = decoded_json[2] + + assert len(error_events) == num_errors + for event in error_events: + + # event is an array containing intrinsics, user-attributes, + # and agent-attributes + + assert len(event) == 3 + for d in event: + assert isinstance(d, dict) + + return result + + return _validate_error_event_collector_json diff --git a/tests/testing_support/validators/validate_error_trace_collector_json.py b/tests/testing_support/validators/validate_error_trace_collector_json.py new file mode 100644 index 000000000..e4d14ee21 --- /dev/null +++ b/tests/testing_support/validators/validate_error_trace_collector_json.py @@ -0,0 +1,58 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
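+
+# Example usage (the test body is hypothetical); the decorated test is
+# expected to record exactly one transaction whose traced error is then
+# serialized into the collector's error_data JSON payload:
+#
+#     @validate_error_trace_collector_json()
+#     def test_error_trace_payload():
+#         ...  # run code that records an error inside a transaction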
+ +import json + +from newrelic.common.encoding_utils import json_encode +from newrelic.common.object_wrapper import transient_function_wrapper +from newrelic.packages import six + + +def validate_error_trace_collector_json(): + @transient_function_wrapper("newrelic.core.stats_engine", "StatsEngine.record_transaction") + def _validate_error_trace_collector_json(wrapped, instance, args, kwargs): + try: + result = wrapped(*args, **kwargs) + except: + raise + else: + errors = instance.error_data() + + # recreate what happens right before data is sent to the collector + # in data_collector.py via ApplicationSession.send_errors + agent_run_id = 666 + payload = (agent_run_id, errors) + collector_json = json_encode(payload) + + decoded_json = json.loads(collector_json) + + assert decoded_json[0] == agent_run_id + err = decoded_json[1][0] + assert len(err) == 5 + assert isinstance(err[0], (int, float)) + assert isinstance(err[1], six.string_types) # path + assert isinstance(err[2], six.string_types) # error message + assert isinstance(err[3], six.string_types) # exception name + parameters = err[4] + + parameter_fields = ["userAttributes", "stack_trace", "agentAttributes", "intrinsics"] + + for field in parameter_fields: + assert field in parameters + + assert "request_uri" not in parameters + + return result + + return _validate_error_trace_collector_json diff --git a/tests/testing_support/validators/validate_non_transaction_error_event.py b/tests/testing_support/validators/validate_non_transaction_error_event.py index fa14ae37d..97048d103 100644 --- a/tests/testing_support/validators/validate_non_transaction_error_event.py +++ b/tests/testing_support/validators/validate_non_transaction_error_event.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -import time +from time import time from testing_support.fixtures import core_application_stats_engine @@ -54,7 +54,7 @@ def _validate_non_transaction_error_event(wrapped, instace, args, kwargs): assert intrinsics["error.class"] == required_intrinsics["error.class"] assert intrinsics["error.message"].startswith(required_intrinsics["error.message"]) assert intrinsics["error.expected"] == required_intrinsics["error.expected"] - now = time.time() + now = time() assert isinstance(intrinsics["timestamp"], int) assert intrinsics["timestamp"] <= 1000.0 * now diff --git a/tests/testing_support/validators/validate_synthetics_transaction_trace.py b/tests/testing_support/validators/validate_synthetics_transaction_trace.py new file mode 100644 index 000000000..7227d0327 --- /dev/null +++ b/tests/testing_support/validators/validate_synthetics_transaction_trace.py @@ -0,0 +1,67 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
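+
+# Example usage (attribute values are illustrative), assuming the Synthetics
+# attributes were attached to the transaction under test:
+#
+#     @validate_synthetics_transaction_trace(
+#         required_params={"synthetics_resource_id": "resource-id"}
+#     )
+#     def test_synthetics_trace():
+#         ...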
+
+from newrelic.common.encoding_utils import unpack_field
+from newrelic.common.object_wrapper import transient_function_wrapper
+from newrelic.core.database_utils import SQLConnections
+
+
+def validate_synthetics_transaction_trace(required_params=None, forgone_params=None, should_exist=True):
+    required_params = required_params or {}
+    forgone_params = forgone_params or {}
+
+    @transient_function_wrapper("newrelic.core.stats_engine", "StatsEngine.record_transaction")
+    def _validate_synthetics_transaction_trace(wrapped, instance, args, kwargs):
+        try:
+            result = wrapped(*args, **kwargs)
+        except:
+            raise
+        else:
+
+            # Now that transaction has been recorded, generate
+            # a transaction trace
+
+            connections = SQLConnections()
+            trace_data = instance.transaction_trace_data(connections)
+
+            # Check that synthetics resource id is in TT header
+
+            header = trace_data[0]
+            header_key = "synthetics_resource_id"
+
+            if should_exist:
+                assert header_key in required_params
+                assert header[9] == required_params[header_key], "name=%r, header=%r" % (header_key, header)
+            else:
+                assert header[9] is None
+
+            # Check that synthetics ids are in TT custom params
+
+            pack_data = unpack_field(trace_data[0][4])
+            tt_intrinsics = pack_data[0][4]["intrinsics"]
+
+            for name in required_params:
+                assert name in tt_intrinsics, "name=%r, intrinsics=%r" % (name, tt_intrinsics)
+                assert tt_intrinsics[name] == required_params[name], "name=%r, value=%r, intrinsics=%r" % (
+                    name,
+                    required_params[name],
+                    tt_intrinsics,
+                )
+
+            for name in forgone_params:
+                assert name not in tt_intrinsics, "name=%r, intrinsics=%r" % (name, tt_intrinsics)
+
+        return result
+
+    return _validate_synthetics_transaction_trace
diff --git a/tests/testing_support/validators/validate_transaction_error_trace_attributes.py b/tests/testing_support/validators/validate_transaction_error_trace_attributes.py
new file mode 100644
index 000000000..90734d348
--- /dev/null
+++ b/tests/testing_support/validators/validate_transaction_error_trace_attributes.py
@@ -0,0 +1,49 @@
+# Copyright 2010 New Relic, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+from testing_support.fixtures import check_error_attributes
+
+from newrelic.common.object_wrapper import transient_function_wrapper
+
+
+def validate_transaction_error_trace_attributes(required_params=None, forgone_params=None, exact_attrs=None):
+    """Check the error trace for attributes; expect exactly one error to be
+    present in the transaction.
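+
+    Each argument is optional and defaults to an empty dict. By convention in
+    this test suite, the dicts are keyed by attribute destination
+    ("intrinsic", "agent", "user"): required_params and forgone_params map a
+    destination to a list of attribute names that must or must not be
+    present, and exact_attrs maps a destination to exact name/value pairs.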
+ """ + required_params = required_params or {} + forgone_params = forgone_params or {} + exact_attrs = exact_attrs or {} + + @transient_function_wrapper("newrelic.core.stats_engine", "StatsEngine.record_transaction") + def _validate_transaction_error_trace(wrapped, instance, args, kwargs): + try: + result = wrapped(*args, **kwargs) + except: + raise + else: + + error_data = instance.error_data() + + # there should be only one error + assert len(error_data) == 1 + traced_error = error_data[0] + + check_error_attributes( + traced_error.parameters, required_params, forgone_params, exact_attrs, is_transaction=True + ) + + return result + + return _validate_transaction_error_trace diff --git a/tests/testing_support/validators/validate_transaction_event_collector_json.py b/tests/testing_support/validators/validate_transaction_event_collector_json.py new file mode 100644 index 000000000..765a3b8e0 --- /dev/null +++ b/tests/testing_support/validators/validate_transaction_event_collector_json.py @@ -0,0 +1,56 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import json + +from newrelic.common.encoding_utils import json_encode +from newrelic.common.object_wrapper import transient_function_wrapper + + +def validate_transaction_event_collector_json(): + @transient_function_wrapper("newrelic.core.stats_engine", "StatsEngine.record_transaction") + def _validate_transaction_event_collector_json(wrapped, instance, args, kwargs): + try: + result = wrapped(*args, **kwargs) + except: + raise + else: + samples = list(instance.transaction_events) + + # recreate what happens right before data is sent to the collector + # in data_collector.py during the harvest via analytic_event_data + agent_run_id = 666 + payload = (agent_run_id, samples) + collector_json = json_encode(payload) + + decoded_json = json.loads(collector_json) + + assert decoded_json[0] == agent_run_id + + # list of events + + events = decoded_json[1] + + for event in events: + + # event is an array containing intrinsics, user-attributes, + # and agent-attributes + + assert len(event) == 3 + for d in event: + assert isinstance(d, dict) + + return result + + return _validate_transaction_event_collector_json diff --git a/tests/testing_support/validators/validate_transaction_trace_attributes.py b/tests/testing_support/validators/validate_transaction_trace_attributes.py new file mode 100644 index 000000000..dafe9b920 --- /dev/null +++ b/tests/testing_support/validators/validate_transaction_trace_attributes.py @@ -0,0 +1,69 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +from testing_support.fixtures import check_attributes + +from newrelic.common.encoding_utils import unpack_field +from newrelic.common.object_wrapper import function_wrapper, transient_function_wrapper +from newrelic.core.database_utils import SQLConnections + + +def validate_transaction_trace_attributes( + required_params=None, forgone_params=None, should_exist=True, url=None, index=-1 +): + required_params = required_params or {} + forgone_params = forgone_params or {} + + trace_data = [] + + @transient_function_wrapper("newrelic.core.stats_engine", "StatsEngine.record_transaction") + def _validate_transaction_trace_attributes(wrapped, instance, args, kwargs): + + result = wrapped(*args, **kwargs) + + # Now that transaction has been recorded, generate + # a transaction trace + + connections = SQLConnections() + _trace_data = instance.transaction_trace_data(connections) + trace_data.append(_trace_data) + + return result + + @function_wrapper + def wrapper(wrapped, instance, args, kwargs): + _new_wrapper = _validate_transaction_trace_attributes(wrapped) + result = _new_wrapper(*args, **kwargs) + + _trace_data = trace_data[index] + trace_data[:] = [] + + if url is not None: + trace_url = _trace_data[0][3] + assert url == trace_url + + pack_data = unpack_field(_trace_data[0][4]) + assert len(pack_data) == 2 + assert len(pack_data[0]) == 5 + parameters = pack_data[0][4] + + assert "intrinsics" in parameters + assert "userAttributes" in parameters + assert "agentAttributes" in parameters + + check_attributes(parameters, required_params, forgone_params) + + return result + + return wrapper diff --git a/tests/testing_support/validators/validate_tt_collector_json.py b/tests/testing_support/validators/validate_tt_collector_json.py new file mode 100644 index 000000000..85e393280 --- /dev/null +++ b/tests/testing_support/validators/validate_tt_collector_json.py @@ -0,0 +1,184 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
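+
+# As encoded for the collector, each transaction trace is a flat array:
+#
+#     [start_time_ms, duration_ms, transaction_name, request_uri, pack_data,
+#      guid, None, False, xray_session_id, synthetics_resource_id]
+#
+# pack_data is a compressed field that unpacks to [trace_details,
+# string_table]; the assertions below walk that structure node by node.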
+
+from newrelic.common.encoding_utils import unpack_field
+from newrelic.common.object_wrapper import function_wrapper, transient_function_wrapper
+from newrelic.common.system_info import LOCALHOST_EQUIVALENTS
+from newrelic.core.database_utils import SQLConnections
+from newrelic.packages import six
+
+
+def _lookup_string_table(name, string_table, default=None):
+    try:
+        index = int(name.lstrip("`"))
+        return string_table[index]
+    except ValueError:
+        return default
+
+
+def validate_tt_collector_json(
+    required_params=None,
+    forgone_params=None,
+    should_exist=True,
+    datastore_params=None,
+    datastore_forgone_params=None,
+    message_broker_params=None,
+    message_broker_forgone_params=None,
+    exclude_request_uri=False,
+):
+    """Make assertions based on the cross-agent spec for transaction traces."""
+    required_params = required_params or {}
+    forgone_params = forgone_params or {}
+    datastore_params = datastore_params or {}
+    datastore_forgone_params = datastore_forgone_params or {}
+    message_broker_params = message_broker_params or {}
+    message_broker_forgone_params = message_broker_forgone_params or []
+
+    @function_wrapper
+    def _validate_wrapper(wrapped, instance, args, kwargs):
+
+        traces_recorded = []
+
+        @transient_function_wrapper("newrelic.core.stats_engine", "StatsEngine.record_transaction")
+        def _validate_tt_collector_json(wrapped, instance, args, kwargs):
+
+            result = wrapped(*args, **kwargs)
+
+            # Now that transaction has been recorded, generate
+            # a transaction trace
+
+            connections = SQLConnections()
+            trace_data = instance.transaction_trace_data(connections)
+            traces_recorded.append(trace_data)
+
+            return result
+
+        def _validate_trace(trace):
+            assert isinstance(trace[0], float)  # absolute start time (ms)
+            assert isinstance(trace[1], float)  # duration (ms)
+            assert trace[0] > 0  # absolute time (ms)
+            assert isinstance(trace[2], six.string_types)  # transaction name
+            if trace[2].startswith("WebTransaction"):
+                if exclude_request_uri:
+                    assert trace[3] is None  # request url
+                else:
+                    assert isinstance(trace[3], six.string_types)
+                    # query parameters should not be captured
+                    assert "?" not in trace[3]
+
+            # trace details -- the Python agent always uses the condensed trace array
+
+            trace_details, string_table = unpack_field(trace[4])
+            assert len(trace_details) == 5
+            assert isinstance(trace_details[0], float)  # start time (ms)
+
+            # the next two items are legacy parameter fields; they should be
+            # empty dicts and remain as placeholders for now
+
+            assert isinstance(trace_details[1], dict)
+            assert len(trace_details[1]) == 0
+            assert isinstance(trace_details[2], dict)
+            assert len(trace_details[2]) == 0
+
+            # root node in slot 3
+
+            root_node = trace_details[3]
+            assert isinstance(root_node[0], float)  # entry timestamp
+            assert isinstance(root_node[1], float)  # exit timestamp
+            assert root_node[2] == "ROOT"
+            assert isinstance(root_node[3], dict)
+            assert len(root_node[3]) == 0  # spec shows empty (for root)
+            children = root_node[4]
+            assert isinstance(children, list)
+
+            # trace segments may end with two optional items: the name of the
+            # class the segment is in and the name of the enclosing method;
+            # the Python agent does not emit these (only the Java agent does)
+
+            # validate the first child segment as a representative sample
+            trace_segment = children[0]
+            assert isinstance(trace_segment[0], float)  # entry timestamp
+            assert isinstance(trace_segment[1], float)  # exit timestamp
+            assert isinstance(trace_segment[2], six.string_types)  # scope
+            assert isinstance(trace_segment[3], dict)  # request params
+            assert isinstance(trace_segment[4], list)  # children
+
+            assert trace_segment[0] >= root_node[0]  # segment starts at or after root
+
+            def _check_params_and_start_time(node):
+                children = node[4]
+                for child in children:
+                    assert child[0] >= node[0]  # child starts at or after parent
+                    _check_params_and_start_time(child)
+
+                params = node[3]
+                assert isinstance(params, dict)
+
+                # We should always report exclusive_duration_millis on a
+                # segment. This allows us to override exclusive time
+                # calculations on APM.
+                assert "exclusive_duration_millis" in params
+                assert isinstance(params["exclusive_duration_millis"], float)
+
+                segment_name = _lookup_string_table(node[2], string_table, default=node[2])
+                if segment_name.startswith("Datastore"):
+                    for key in datastore_params:
+                        assert key in params, key
+                        assert params[key] == datastore_params[key]
+                    for key in datastore_forgone_params:
+                        assert key not in params, key
+
+                    # if host is reported, it cannot be localhost
+                    if "host" in params:
+                        assert params["host"] not in LOCALHOST_EQUIVALENTS
+
+                elif segment_name.startswith("MessageBroker"):
+                    for key in message_broker_params:
+                        assert key in params, key
+                        assert params[key] == message_broker_params[key]
+                    for key in message_broker_forgone_params:
+                        assert key not in params, key
+
+            _check_params_and_start_time(trace_segment)
+
+            attributes = trace_details[4]
+
+            assert "intrinsics" in attributes
+            assert "userAttributes" in attributes
+            assert "agentAttributes" in attributes
+
+            assert isinstance(trace[5], six.string_types)  # GUID
+            assert trace[6] is None  # reserved for future use
+            assert trace[7] is False  # deprecated force persist flag
+
+            # x-ray session ID
+
+            assert trace[8] is None
+
+            # Synthetics ID
+
+            assert trace[9] is None or isinstance(trace[9], six.string_types)
+
+            assert isinstance(string_table, list)
+            for name in string_table:
+                assert isinstance(name, six.string_types)  # metric name
+
+        _new_wrapper = _validate_tt_collector_json(wrapped)
+        val = _new_wrapper(*args, **kwargs)
+        trace_data = traces_recorded.pop()
+        trace = trace_data[0]  # first trace
+        _validate_trace(trace)
+        return val
+
+    return _validate_wrapper
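+
+# Usage sketch (the test application and parameter values below are
+# illustrative, not part of this module):
+#
+#     @validate_tt_collector_json(datastore_params={"host": "db-host-1"})
+#     def test_transaction_trace_shape():
+#         test_application.get("/")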