diff --git a/newrelic/config.py b/newrelic/config.py index 730b8ed4c..8e10218f9 100644 --- a/newrelic/config.py +++ b/newrelic/config.py @@ -4362,26 +4362,11 @@ def _process_module_builtin_defaults(): "instrument_celery_worker", ) - _process_module_definition( - "celery.execute.trace", - "newrelic.hooks.application_celery", - "instrument_celery_execute_trace", - ) - _process_module_definition( - "celery.task.trace", - "newrelic.hooks.application_celery", - "instrument_celery_execute_trace", - ) _process_module_definition( "celery.app.base", "newrelic.hooks.application_celery", "instrument_celery_app_base", ) - _process_module_definition( - "celery.app.trace", - "newrelic.hooks.application_celery", - "instrument_celery_execute_trace", - ) _process_module_definition("billiard.pool", "newrelic.hooks.application_celery", "instrument_billiard_pool") _process_module_definition("flup.server.cgi", "newrelic.hooks.adapter_flup", "instrument_flup_server_cgi") diff --git a/newrelic/hooks/application_celery.py b/newrelic/hooks/application_celery.py index 4798c83d6..25a86a4a6 100644 --- a/newrelic/hooks/application_celery.py +++ b/newrelic/hooks/application_celery.py @@ -28,43 +28,45 @@ from newrelic.api.message_trace import MessageTrace from newrelic.api.pre_function import wrap_pre_function from newrelic.api.transaction import current_transaction -from newrelic.common.object_names import callable_name from newrelic.common.object_wrapper import FunctionWrapper, wrap_function_wrapper from newrelic.core.agent import shutdown_agent +UNKNOWN_TASK_NAME = "" +MAPPING_TASK_NAMES = {"celery.starmap", "celery.map"} -def CeleryTaskWrapper(wrapped, application=None, name=None): - def wrapper(wrapped, instance, args, kwargs): - transaction = current_transaction(active_only=False) - if callable(name): - # Start Hotfix v2.2.1. 
- # if instance and inspect.ismethod(wrapped): - # _name = name(instance, *args, **kwargs) - # else: - # _name = name(*args, **kwargs) +def task_name(*args, **kwargs): + # Grab the current task, which can be located in either place + if args: + task = args[0] + elif "task" in kwargs: + task = kwargs["task"] + else: + return UNKNOWN_TASK_NAME # Failsafe - if instance is not None: - _name = name(instance, *args, **kwargs) - else: - _name = name(*args, **kwargs) - # End Hotfix v2.2.1. + # Task can be either a task instance or a signature, which subclasses dict, or an actual dict in some cases. + task_name = getattr(task, "name", None) or task.get("task", UNKNOWN_TASK_NAME) - elif name is None: - _name = callable_name(wrapped) + # Under mapping tasks, the root task name isn't descriptive enough so we append the + # subtask name to differentiate between different mapping tasks + if task_name in MAPPING_TASK_NAMES: + try: + subtask = kwargs["task"]["task"] + task_name = "/".join((task_name, subtask)) + except Exception: + pass - else: - _name = name + return task_name - # Helper for obtaining the appropriate application object. If - # has an activate() method assume it is a valid application - # object. Don't check by type so se can easily mock it for - # testing if need be. - def _application(): - if hasattr(application, "activate"): - return application - return application_instance(application) +def CeleryTaskWrapper(wrapped): + def wrapper(wrapped, instance, args, kwargs): + transaction = current_transaction(active_only=False) + + if instance is not None: + _name = task_name(instance, *args, **kwargs) + else: + _name = task_name(*args, **kwargs) # A Celery Task can be called either outside of a transaction, or # within the context of an existing transaction. 
There are 3 @@ -95,13 +97,14 @@ def _application(): return wrapped(*args, **kwargs) else: - with BackgroundTask(_application(), _name, "Celery", source=instance) as transaction: + with BackgroundTask(application_instance(), _name, "Celery", source=instance) as transaction: # Attempt to grab distributed tracing headers try: # Headers on earlier versions of Celery may end up as attributes # on the request context instead of as custom headers. Handler this # by defaulting to using vars() if headers is not available - headers = getattr(wrapped.request, "headers", None) or vars(wrapped.request) + request = instance.request + headers = getattr(request, "headers", None) or vars(request) settings = transaction.settings if headers is not None and settings is not None: @@ -128,20 +131,30 @@ def _application(): # instrumentation via FunctionWrapper() relies on __call__ being called which # in turn executes the wrapper() function defined above. Since the micro # optimization bypasses __call__ method it breaks our instrumentation of - # celery. To circumvent this problem, we added a run() attribute to our + # celery. + # + # For versions of celery later than 2.5.5 + # Celery has included a monkey-patching provision which did not perform this + # optimization on functions that were monkey-patched. Unfortunately, our + # wrappers are too transparent for celery to detect that they've even been + # monkey-patched. To circumvent this, we set the __module__ of our wrapped task + # to this file which causes celery to properly detect that it has been patched. + # + # For versions of celery 2.5.3 to 2.5.5 + # To circumvent this problem, we added a run() attribute to our # FunctionWrapper which points to our __call__ method. This causes Celery # to execute our __call__ method which in turn applies the wrapper # correctly before executing the task. - # - # This is only a problem in Celery versions 2.5.3 to 2.5.5. 
The later - # versions included a monkey-patching provision which did not perform this - # optimization on functions that were monkey-patched. class TaskWrapper(FunctionWrapper): def run(self, *args, **kwargs): return self.__call__(*args, **kwargs) - return TaskWrapper(wrapped, wrapper) + wrapped_task = TaskWrapper(wrapped, wrapper) + # Reset __module__ to be less transparent so celery detects our monkey-patching + wrapped_task.__module__ = CeleryTaskWrapper.__module__ + + return wrapped_task def instrument_celery_app_task(module): @@ -162,11 +175,8 @@ def instrument_celery_app_task(module): # the task doesn't pass through it. For Celery 2.5+ need to wrap # the tracer instead. - def task_name(task, *args, **kwargs): - return task.name - if module.BaseTask.__module__ == module.__name__: - module.BaseTask.__call__ = CeleryTaskWrapper(module.BaseTask.__call__, name=task_name) + module.BaseTask.__call__ = CeleryTaskWrapper(module.BaseTask.__call__) def wrap_Celery_send_task(wrapped, instance, args, kwargs): @@ -195,28 +205,6 @@ def instrument_celery_app_base(module): wrap_function_wrapper(module, "Celery.send_task", wrap_Celery_send_task) -def instrument_celery_execute_trace(module): - # Triggered for 'celery.execute_trace'. - - if hasattr(module, "build_tracer"): - # Need to add a wrapper for background task entry point. - - # In Celery 2.5+ we need to wrap the task when tracer is being - # created. Note that in Celery 2.5 the 'build_tracer' function - # actually resided in the module 'celery.execute.task'. In - # Celery 3.0 the 'build_tracer' function moved to - # 'celery.task.trace'. - - _build_tracer = module.build_tracer - - def build_tracer(name, task, *args, **kwargs): - task = task or module.tasks[name] - task = CeleryTaskWrapper(task, name=name) - return _build_tracer(name, task, *args, **kwargs) - - module.build_tracer = build_tracer - - def instrument_celery_worker(module): # Triggered for 'celery.worker' and 'celery.concurrency.processes'. 
diff --git a/tests/application_celery/_target_application.py b/tests/application_celery/_target_application.py index 0b1ca1c31..374d677bc 100644 --- a/tests/application_celery/_target_application.py +++ b/tests/application_celery/_target_application.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -from celery import Celery +from celery import Celery, shared_task from testing_support.validators.validate_distributed_trace_accepted import ( validate_distributed_trace_accepted, ) @@ -44,6 +44,11 @@ def nested_add(x, y): return add(x, y) +@shared_task +def shared_task_add(x, y): + return x + y + + @app.task @validate_distributed_trace_accepted(transport_type="AMQP") def assert_dt(): diff --git a/tests/application_celery/conftest.py b/tests/application_celery/conftest.py index 45245ed60..d0a38fa35 100644 --- a/tests/application_celery/conftest.py +++ b/tests/application_celery/conftest.py @@ -11,6 +11,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
+import pytest from testing_support.fixtures import ( # noqa: F401; pylint: disable=W0611 collector_agent_registration_fixture, collector_available_fixture, @@ -27,3 +28,23 @@ collector_agent_registration = collector_agent_registration_fixture( app_name="Python Agent Test (application_celery)", default_settings=_default_settings ) + + +@pytest.fixture(scope="session") +def celery_config(): + # Used by celery pytest plugin to configure Celery instance + return { + "broker_url": "memory://", + "result_backend": "cache+memory://", + } + + +@pytest.fixture(scope="session") +def celery_worker_parameters(): + # Used by celery pytest plugin to configure worker instance + return {"shutdown_timeout": 120} + + +@pytest.fixture(scope="session", autouse=True) +def celery_worker_available(celery_session_worker): + yield celery_session_worker diff --git a/tests/application_celery/test_celery.py b/tests/application_celery/test_application.py similarity index 82% rename from tests/application_celery/test_celery.py rename to tests/application_celery/test_application.py index 508217c0e..c9e5bb3b2 100644 --- a/tests/application_celery/test_celery.py +++ b/tests/application_celery/test_application.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from _target_application import add, nested_add, tsum +from _target_application import add, nested_add, shared_task_add, tsum from testing_support.validators.validate_code_level_metrics import ( validate_code_level_metrics, ) @@ -28,7 +28,7 @@ @validate_transaction_metrics( - name="test_celery:test_celery_task_as_function_trace", + name="test_application:test_celery_task_as_function_trace", scoped_metrics=[("Function/_target_application.add", 1)], background_task=True, ) @@ -58,7 +58,7 @@ def test_celery_task_as_background_task(): @validate_transaction_metrics( - name="test_celery:test_celery_tasks_multiple_function_traces", + name="test_application:test_celery_tasks_multiple_function_traces", scoped_metrics=[("Function/_target_application.add", 1), ("Function/_target_application.tsum", 1)], background_task=True, ) @@ -90,7 +90,7 @@ def test_celery_tasks_ignore_transaction(): @validate_transaction_metrics( - name="test_celery:test_celery_tasks_end_transaction", + name="test_application:test_celery_tasks_end_transaction", scoped_metrics=[("Function/_target_application.add", 1)], background_task=True, ) @@ -126,3 +126,18 @@ def test_celery_nested_tasks(): add_result = nested_add(1, 2) assert add_result == 3 + + +@validate_transaction_metrics( + name="_target_application.shared_task_add", group="Celery", scoped_metrics=[], background_task=True +) +@validate_code_level_metrics("_target_application", "shared_task_add") +def test_celery_shared_task_as_background_task(): + """ + Calling shared_task_add() outside of a transaction means the agent will create + a background transaction (with a group of 'Celery') and record shared_task_add() + as a background task. 
+ + """ + result = shared_task_add(3, 4) + assert result == 7 diff --git a/tests/application_celery/test_celery_distributed_tracing.py b/tests/application_celery/test_distributed_tracing.py similarity index 65% rename from tests/application_celery/test_celery_distributed_tracing.py rename to tests/application_celery/test_distributed_tracing.py index 6226d646a..2b6eccf54 100644 --- a/tests/application_celery/test_celery_distributed_tracing.py +++ b/tests/application_celery/test_distributed_tracing.py @@ -12,7 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -import pytest from _target_application import add, assert_dt from testing_support.fixtures import override_application_settings from testing_support.validators.validate_transaction_count import ( @@ -23,29 +22,8 @@ ) from newrelic.api.background_task import background_task -from newrelic.packages import six - -skip_if_py2 = pytest.mark.skipif( - six.PY2, reason="Celery has no pytest plugin for Python 2, making testing very difficult." 
-) - - -@pytest.fixture(scope="module") -def celery_config(): - # Used by celery pytest plugin to configure Celery instance - return { - "broker_url": "memory://", - "result_backend": "cache+memory://", - } - - -@pytest.fixture(scope="module") -def celery_worker_parameters(): - # Used by celery pytest plugin to configure worker instance - return {"shutdown_timeout": 120} -@skip_if_py2 @validate_transaction_metrics( name="_target_application.assert_dt", group="Celery", @@ -57,20 +35,17 @@ def celery_worker_parameters(): index=-2, ) @validate_transaction_metrics( - name="test_celery_distributed_tracing:test_celery_task_distributed_tracing_enabled", + name="test_distributed_tracing:test_celery_task_distributed_tracing_enabled", background_task=True, ) @validate_transaction_count(2) @background_task() -def test_celery_task_distributed_tracing_enabled(celery_worker): +def test_celery_task_distributed_tracing_enabled(): result = assert_dt.apply_async() - while not result.ready(): - pass - result = result.result + result = result.get() assert result == 1 -@skip_if_py2 @override_application_settings({"distributed_tracing.enabled": False}) @validate_transaction_metrics( name="_target_application.add", @@ -83,14 +58,12 @@ def test_celery_task_distributed_tracing_enabled(celery_worker): index=-2, ) @validate_transaction_metrics( - name="test_celery_distributed_tracing:test_celery_task_distributed_tracing_disabled", + name="test_distributed_tracing:test_celery_task_distributed_tracing_disabled", background_task=True, ) @validate_transaction_count(2) @background_task() -def test_celery_task_distributed_tracing_disabled(celery_worker): +def test_celery_task_distributed_tracing_disabled(): result = add.apply_async((1, 2)) - while not result.ready(): - pass - result = result.result + result = result.get() assert result == 3 diff --git a/tests/application_celery/test_celery_max_tasks_per_child.py b/tests/application_celery/test_max_tasks_per_child.py similarity index 78% rename 
from tests/application_celery/test_celery_max_tasks_per_child.py rename to tests/application_celery/test_max_tasks_per_child.py index 5c9fc5080..b7ec1a778 100644 --- a/tests/application_celery/test_celery_max_tasks_per_child.py +++ b/tests/application_celery/test_max_tasks_per_child.py @@ -17,11 +17,20 @@ from billiard.pool import Worker from testing_support.validators.validate_function_called import validate_function_called +from newrelic.common.object_wrapper import transient_function_wrapper + class OnExit(Exception): pass +@transient_function_wrapper("newrelic.core.agent", "Agent.shutdown_agent") +def mock_agent_shutdown(wrapped, instance, args, kwargs): + # Prevent agent from actually shutting down and blocking further tests + pass + + +@mock_agent_shutdown @validate_function_called("newrelic.core.agent", "Agent.shutdown_agent") def test_max_tasks_per_child(): def on_exit(*args, **kwargs): diff --git a/tests/application_celery/test_task_methods.py b/tests/application_celery/test_task_methods.py new file mode 100644 index 000000000..f1d78f32f --- /dev/null +++ b/tests/application_celery/test_task_methods.py @@ -0,0 +1,307 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +from _target_application import add, tsum +from celery import chain, chord, group +from testing_support.validators.validate_transaction_count import ( + validate_transaction_count, +) +from testing_support.validators.validate_transaction_metrics import ( + validate_transaction_metrics, +) + +FORGONE_TASK_METRICS = [("Function/_target_application.add", None), ("Function/_target_application.tsum", None)] + + +def test_task_wrapping_detection(): + """ + Ensure celery detects our monkeypatching properly and will run our instrumentation + on __call__ and runs that instead of micro-optimizing it away to a run() call. + + If this is not working, most other tests in this file will fail as the different ways + of running celery tasks will not all run our instrumentation. + """ + from celery.app.trace import task_has_custom + + assert task_has_custom(add, "__call__") + + +@validate_transaction_metrics( + name="_target_application.add", + group="Celery", + scoped_metrics=FORGONE_TASK_METRICS, + rollup_metrics=FORGONE_TASK_METRICS, + background_task=True, +) +@validate_transaction_count(1) +def test_celery_task_call(): + """ + Executes task in local process and returns the result directly. + """ + result = add(3, 4) + assert result == 7 + + +@validate_transaction_metrics( + name="_target_application.add", + group="Celery", + scoped_metrics=FORGONE_TASK_METRICS, + rollup_metrics=FORGONE_TASK_METRICS, + background_task=True, +) +@validate_transaction_count(1) +def test_celery_task_apply(): + """ + Executes task in local process and returns an EagerResult. + """ + result = add.apply((3, 4)) + result = result.get() + assert result == 7 + + +@validate_transaction_metrics( + name="_target_application.add", + group="Celery", + scoped_metrics=FORGONE_TASK_METRICS, + rollup_metrics=FORGONE_TASK_METRICS, + background_task=True, +) +@validate_transaction_count(1) +def test_celery_task_delay(): + """ + Executes task on worker process and returns an AsyncResult. 
+ """ + result = add.delay(3, 4) + result = result.get() + assert result == 7 + + +@validate_transaction_metrics( + name="_target_application.add", + group="Celery", + scoped_metrics=FORGONE_TASK_METRICS, + rollup_metrics=FORGONE_TASK_METRICS, + background_task=True, +) +@validate_transaction_count(1) +def test_celery_task_apply_async(): + """ + Executes task on worker process and returns an AsyncResult. + """ + result = add.apply_async((3, 4)) + result = result.get() + assert result == 7 + + +@validate_transaction_metrics( + name="_target_application.add", + group="Celery", + scoped_metrics=FORGONE_TASK_METRICS, + rollup_metrics=FORGONE_TASK_METRICS, + background_task=True, +) +@validate_transaction_count(1) +def test_celery_app_send_task(celery_session_app): + """ + Executes task on worker process and returns an AsyncResult. + """ + result = celery_session_app.send_task("_target_application.add", (3, 4)) + result = result.get() + assert result == 7 + + +@validate_transaction_metrics( + name="_target_application.add", + group="Celery", + scoped_metrics=FORGONE_TASK_METRICS, + rollup_metrics=FORGONE_TASK_METRICS, + background_task=True, +) +@validate_transaction_count(1) +def test_celery_task_signature(): + """ + Executes task on worker process and returns an AsyncResult. + """ + result = add.s(3, 4).delay() + result = result.get() + assert result == 7 + + +@validate_transaction_metrics( + name="_target_application.add", + group="Celery", + scoped_metrics=FORGONE_TASK_METRICS, + rollup_metrics=FORGONE_TASK_METRICS, + background_task=True, +) +@validate_transaction_metrics( + name="_target_application.add", + group="Celery", + scoped_metrics=FORGONE_TASK_METRICS, + rollup_metrics=FORGONE_TASK_METRICS, + background_task=True, + index=-2, +) +@validate_transaction_count(2) +def test_celery_task_link(): + """ + Executes multiple tasks on worker process and returns an AsyncResult. 
+ """ + result = add.apply_async((3, 4), link=[add.s(5)]) + result = result.get() + assert result == 7 # Linked task result won't be returned + + +@validate_transaction_metrics( + name="_target_application.add", + group="Celery", + scoped_metrics=FORGONE_TASK_METRICS, + rollup_metrics=FORGONE_TASK_METRICS, + background_task=True, +) +@validate_transaction_metrics( + name="_target_application.add", + group="Celery", + scoped_metrics=FORGONE_TASK_METRICS, + rollup_metrics=FORGONE_TASK_METRICS, + background_task=True, + index=-2, +) +@validate_transaction_count(2) +def test_celery_chain(): + """ + Executes multiple tasks on worker process and returns an AsyncResult. + """ + result = chain(add.s(3, 4), add.s(5))() + + result = result.get() + assert result == 12 + + +@validate_transaction_metrics( + name="_target_application.add", + group="Celery", + scoped_metrics=FORGONE_TASK_METRICS, + rollup_metrics=FORGONE_TASK_METRICS, + background_task=True, +) +@validate_transaction_metrics( + name="_target_application.add", + group="Celery", + scoped_metrics=FORGONE_TASK_METRICS, + rollup_metrics=FORGONE_TASK_METRICS, + background_task=True, + index=-2, +) +@validate_transaction_count(2) +def test_celery_group(): + """ + Executes multiple tasks on worker process and returns an AsyncResult. 
+ """ + result = group(add.s(3, 4), add.s(1, 2))() + result = result.get() + assert result == [7, 3] + + +@validate_transaction_metrics( + name="_target_application.tsum", + group="Celery", + scoped_metrics=FORGONE_TASK_METRICS, + rollup_metrics=FORGONE_TASK_METRICS, + background_task=True, +) +@validate_transaction_metrics( + name="_target_application.add", + group="Celery", + scoped_metrics=FORGONE_TASK_METRICS, + rollup_metrics=FORGONE_TASK_METRICS, + background_task=True, + index=-2, +) +@validate_transaction_metrics( + name="_target_application.add", + group="Celery", + scoped_metrics=FORGONE_TASK_METRICS, + rollup_metrics=FORGONE_TASK_METRICS, + background_task=True, + index=-3, +) +@validate_transaction_count(3) +def test_celery_chord(): + """ + Executes 2 add tasks, followed by a tsum task on the worker process and returns an AsyncResult. + """ + result = chord([add.s(3, 4), add.s(1, 2)])(tsum.s()) + result = result.get() + assert result == 10 + + +@validate_transaction_metrics( + name="celery.map/_target_application.tsum", + group="Celery", + scoped_metrics=[("Function/_target_application.tsum", 2)], + rollup_metrics=[("Function/_target_application.tsum", 2)], + background_task=True, +) +@validate_transaction_count(1) +def test_celery_task_map(): + """ + Executes map task on worker process with original task as a subtask and returns an AsyncResult. + """ + result = tsum.map([(3, 4), (1, 2)]).apply() + result = result.get() + assert result == [7, 3] + + +@validate_transaction_metrics( + name="celery.starmap/_target_application.add", + group="Celery", + scoped_metrics=[("Function/_target_application.add", 2)], + rollup_metrics=[("Function/_target_application.add", 2)], + background_task=True, +) +@validate_transaction_count(1) +def test_celery_task_starmap(): + """ + Executes starmap task on worker process with original task as a subtask and returns an AsyncResult. 
+ """ + result = add.starmap([(3, 4), (1, 2)]).apply_async() + result = result.get() + assert result == [7, 3] + + +@validate_transaction_metrics( + name="celery.starmap/_target_application.add", + group="Celery", + scoped_metrics=[("Function/_target_application.add", 1)], + rollup_metrics=[("Function/_target_application.add", 1)], + background_task=True, +) +@validate_transaction_metrics( + name="celery.starmap/_target_application.add", + group="Celery", + scoped_metrics=[("Function/_target_application.add", 1)], + rollup_metrics=[("Function/_target_application.add", 1)], + background_task=True, + index=-2, +) +@validate_transaction_count(2) +def test_celery_task_chunks(): + """ + Executes multiple tasks on worker process and returns an AsyncResult. + """ + result = add.chunks([(3, 4), (1, 2)], n=1).apply_async() + result = result.get() + assert result == [[7], [3]] diff --git a/tox.ini b/tox.ini index c92f4806a..cd29bb541 100644 --- a/tox.ini +++ b/tox.ini @@ -91,7 +91,8 @@ envlist = python-agent_streaming-py39-protobuf{03,0319}-{with,without}_extensions, python-agent_unittests-{py27,py37,py38,py39,py310,py311,py312}-{with,without}_extensions, python-agent_unittests-{pypy27,pypy310}-without_extensions, - python-application_celery-{py27,py37,py38,py39,py310,py311,py312,pypy27,pypy310}, + python-application_celery-{py37,py38,py39,py310,py311,py312,pypy310}-celerylatest, + python-application_celery-py311-celery{0503,0502,0501}, python-component_djangorestframework-{py37,py38,py39,py310,py311,py312}-djangorestframeworklatest, python-component_flask_rest-py37-flaskrestx110, python-component_flask_rest-{py38,py39,py310,py311,py312,pypy310}-flaskrestxlatest, @@ -201,7 +202,10 @@ deps = agent_features: beautifulsoup4 agent_features-{py37,py38,py39,py310,py311,py312,pypy310}: protobuf agent_features-{py27,pypy27}: protobuf<3.18.0 - application_celery: celery[pytest]<6.0 + application_celery-celerylatest: celery[pytest] + application_celery-celery0503: celery[pytest]<5.4 + 
application_celery-celery0502: celery[pytest]<5.3 + application_celery-celery0501: celery[pytest]<5.2 application_celery-{py37,pypy310}: importlib-metadata<5.0 mlmodel_sklearn: pandas mlmodel_sklearn: protobuf