From 4b836c3b1cbcb95ad3bc011ea76b7b2096b376c0 Mon Sep 17 00:00:00 2001 From: Hood Chatham Date: Mon, 10 Jan 2022 10:30:55 -0800 Subject: [PATCH 01/29] Add test for missing _multiprocessing module --- .../_multiprocessing.py | 5 ++++ joblib/test/test_missing_multiprocessing.py | 25 +++++++++++++++++++ 2 files changed, 30 insertions(+) create mode 100644 joblib/test/missing_multiprocessing/_multiprocessing.py create mode 100644 joblib/test/test_missing_multiprocessing.py diff --git a/joblib/test/missing_multiprocessing/_multiprocessing.py b/joblib/test/missing_multiprocessing/_multiprocessing.py new file mode 100644 index 000000000..9d37231c5 --- /dev/null +++ b/joblib/test/missing_multiprocessing/_multiprocessing.py @@ -0,0 +1,5 @@ +""" +Simulate a missing _multiprocessing module by raising an ImportError. +test_missing_multiprocessing adds this folder to the path. +""" +raise ImportError("No _multiprocessing module!") diff --git a/joblib/test/test_missing_multiprocessing.py b/joblib/test/test_missing_multiprocessing.py new file mode 100644 index 000000000..05ae8ad97 --- /dev/null +++ b/joblib/test/test_missing_multiprocessing.py @@ -0,0 +1,25 @@ +""" +Pyodide and other single-threaded Python builds will be missing the +_multiprocessing module. Test that joblib still works in this environment. +""" + +import os +import subprocess +import sys + +def test_missing_multiprocessing(): + """ + Test that import joblib works even if _multiprocessing is missing. + + pytest has already imported everything from joblib, so the easiest way to + test importing it, we need to invoke a separate Python process. This also + makes it easy to ensure that we don't break other tests by importing a bad + `_multiprocessing` module. + """ + env = dict(os.environ) + # For subprocess, use current sys.path with our custom version of + # multiprocessing inserted. 
+ env["PYTHONPATH"] = ":".join(["./test/missing_multiprocessing"] + sys.path) + subprocess.check_call([sys.executable, "-c", "import joblib"], env=env) + + From d515fd66ab984ee72a8333630e728c0c84bd1ba8 Mon Sep 17 00:00:00 2001 From: Hood Chatham Date: Mon, 10 Jan 2022 10:38:50 -0800 Subject: [PATCH 02/29] Run formatter, update comments --- joblib/test/test_missing_multiprocessing.py | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/joblib/test/test_missing_multiprocessing.py b/joblib/test/test_missing_multiprocessing.py index 05ae8ad97..eb4247ca1 100644 --- a/joblib/test/test_missing_multiprocessing.py +++ b/joblib/test/test_missing_multiprocessing.py @@ -7,19 +7,18 @@ import subprocess import sys + def test_missing_multiprocessing(): """ Test that import joblib works even if _multiprocessing is missing. - pytest has already imported everything from joblib, so the easiest way to - test importing it, we need to invoke a separate Python process. This also - makes it easy to ensure that we don't break other tests by importing a bad - `_multiprocessing` module. + pytest has already imported everything from joblib. The most reasonable way + to test importing joblib with modified environment is to invoke a separate + Python process. This also ensures that we don't break other tests by + importing a bad `_multiprocessing` module. """ env = dict(os.environ) # For subprocess, use current sys.path with our custom version of # multiprocessing inserted. 
env["PYTHONPATH"] = ":".join(["./test/missing_multiprocessing"] + sys.path) subprocess.check_call([sys.executable, "-c", "import joblib"], env=env) - - From 9552ef255e4ad508f6246f78aa7b44bf0e4e45af Mon Sep 17 00:00:00 2001 From: Hood Chatham Date: Mon, 10 Jan 2022 10:44:29 -0800 Subject: [PATCH 03/29] Ignore missing_multiprocessing folder in pytest --- setup.cfg | 1 + 1 file changed, 1 insertion(+) diff --git a/setup.cfg b/setup.cfg index e88d7b0c4..5a21c70bc 100644 --- a/setup.cfg +++ b/setup.cfg @@ -16,6 +16,7 @@ addopts = --doctest-modules -p no:warnings --ignore joblib/externals + --ignore joblib/test/missing_multiprocessing testpaths = joblib [flake8] From 0b78306da8dca858894af27bbb201545dda9ff8c Mon Sep 17 00:00:00 2001 From: Hood Chatham Date: Mon, 10 Jan 2022 10:57:54 -0800 Subject: [PATCH 04/29] Fix test to be independent of which directory pytest is run in --- joblib/test/test_missing_multiprocessing.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/joblib/test/test_missing_multiprocessing.py b/joblib/test/test_missing_multiprocessing.py index eb4247ca1..1fae7a653 100644 --- a/joblib/test/test_missing_multiprocessing.py +++ b/joblib/test/test_missing_multiprocessing.py @@ -20,5 +20,6 @@ def test_missing_multiprocessing(): env = dict(os.environ) # For subprocess, use current sys.path with our custom version of # multiprocessing inserted. 
- env["PYTHONPATH"] = ":".join(["./test/missing_multiprocessing"] + sys.path) - subprocess.check_call([sys.executable, "-c", "import joblib"], env=env) + import joblib + env["PYTHONPATH"] = ":".join([joblib.__path__[0] + "/test/missing_multiprocessing"] + sys.path) + subprocess.check_call([sys.executable, "-c", "import joblib; import sys; print(sys.modules['_multiprocessing'])"], env=env) From a228f3f31998d3d6700a8d5f70a2e7c3c76b84ad Mon Sep 17 00:00:00 2001 From: Hood Chatham Date: Mon, 10 Jan 2022 11:03:26 -0800 Subject: [PATCH 05/29] Tidy up --- joblib/test/test_missing_multiprocessing.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/joblib/test/test_missing_multiprocessing.py b/joblib/test/test_missing_multiprocessing.py index 1fae7a653..1c2423abb 100644 --- a/joblib/test/test_missing_multiprocessing.py +++ b/joblib/test/test_missing_multiprocessing.py @@ -22,4 +22,4 @@ def test_missing_multiprocessing(): # multiprocessing inserted. import joblib env["PYTHONPATH"] = ":".join([joblib.__path__[0] + "/test/missing_multiprocessing"] + sys.path) - subprocess.check_call([sys.executable, "-c", "import joblib; import sys; print(sys.modules['_multiprocessing'])"], env=env) + subprocess.check_call([sys.executable, "-c", "import joblib"], env=env) From afe4bba66e80608f2598ba7b2c0192cfecde8122 Mon Sep 17 00:00:00 2001 From: Hood Chatham Date: Mon, 10 Jan 2022 11:06:23 -0800 Subject: [PATCH 06/29] Apply formatter --- joblib/test/test_missing_multiprocessing.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/joblib/test/test_missing_multiprocessing.py b/joblib/test/test_missing_multiprocessing.py index 1c2423abb..17f3e80ae 100644 --- a/joblib/test/test_missing_multiprocessing.py +++ b/joblib/test/test_missing_multiprocessing.py @@ -21,5 +21,8 @@ def test_missing_multiprocessing(): # For subprocess, use current sys.path with our custom version of # multiprocessing inserted. 
import joblib - env["PYTHONPATH"] = ":".join([joblib.__path__[0] + "/test/missing_multiprocessing"] + sys.path) + + env["PYTHONPATH"] = ":".join( + [joblib.__path__[0] + "/test/missing_multiprocessing"] + sys.path + ) subprocess.check_call([sys.executable, "-c", "import joblib"], env=env) From 0bb2f0d15f6839384a147e2bfd8f255f34a26746 Mon Sep 17 00:00:00 2001 From: Hood Chatham Date: Mon, 10 Jan 2022 13:43:43 -0800 Subject: [PATCH 07/29] Update test according to rth's suggestion --- joblib/test/missing_multiprocessing/_multiprocessing.py | 5 ----- joblib/test/test_missing_multiprocessing.py | 7 +++---- 2 files changed, 3 insertions(+), 9 deletions(-) delete mode 100644 joblib/test/missing_multiprocessing/_multiprocessing.py diff --git a/joblib/test/missing_multiprocessing/_multiprocessing.py b/joblib/test/missing_multiprocessing/_multiprocessing.py deleted file mode 100644 index 9d37231c5..000000000 --- a/joblib/test/missing_multiprocessing/_multiprocessing.py +++ /dev/null @@ -1,5 +0,0 @@ -""" -Simulate a missing _multiprocessing module by raising an ImportError. -test_missing_multiprocessing adds this folder to the path. -""" -raise ImportError("No _multiprocessing module!") diff --git a/joblib/test/test_missing_multiprocessing.py b/joblib/test/test_missing_multiprocessing.py index 17f3e80ae..7a00cae72 100644 --- a/joblib/test/test_missing_multiprocessing.py +++ b/joblib/test/test_missing_multiprocessing.py @@ -8,7 +8,7 @@ import sys -def test_missing_multiprocessing(): +def test_missing_multiprocessing(tmp_path): """ Test that import joblib works even if _multiprocessing is missing. @@ -17,12 +17,11 @@ def test_missing_multiprocessing(): Python process. This also ensures that we don't break other tests by importing a bad `_multiprocessing` module. """ + (tmp_path / "_multiprocessing").write_text('raise ImportError("No _multiprocessing module!")') env = dict(os.environ) # For subprocess, use current sys.path with our custom version of # multiprocessing inserted. 
- import joblib - env["PYTHONPATH"] = ":".join( - [joblib.__path__[0] + "/test/missing_multiprocessing"] + sys.path + [str(tmp_path)] + sys.path ) subprocess.check_call([sys.executable, "-c", "import joblib"], env=env) From 473ccacab210c87f8eccc2f49c053832273b44da Mon Sep 17 00:00:00 2001 From: Hood Chatham Date: Mon, 10 Jan 2022 13:45:57 -0800 Subject: [PATCH 08/29] Fix file name --- joblib/test/test_missing_multiprocessing.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/joblib/test/test_missing_multiprocessing.py b/joblib/test/test_missing_multiprocessing.py index 7a00cae72..6598677fe 100644 --- a/joblib/test/test_missing_multiprocessing.py +++ b/joblib/test/test_missing_multiprocessing.py @@ -17,7 +17,7 @@ def test_missing_multiprocessing(tmp_path): Python process. This also ensures that we don't break other tests by importing a bad `_multiprocessing` module. """ - (tmp_path / "_multiprocessing").write_text('raise ImportError("No _multiprocessing module!")') + (tmp_path / "_multiprocessing.py").write_text('raise ImportError("No _multiprocessing module!")') env = dict(os.environ) # For subprocess, use current sys.path with our custom version of # multiprocessing inserted. From cfd27a01ba75a04ccac98ea1d736b859d8401113 Mon Sep 17 00:00:00 2001 From: Hood Chatham Date: Mon, 10 Jan 2022 13:47:04 -0800 Subject: [PATCH 09/29] Apply formatter --- joblib/test/test_missing_multiprocessing.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/joblib/test/test_missing_multiprocessing.py b/joblib/test/test_missing_multiprocessing.py index 6598677fe..041db55f5 100644 --- a/joblib/test/test_missing_multiprocessing.py +++ b/joblib/test/test_missing_multiprocessing.py @@ -17,11 +17,11 @@ def test_missing_multiprocessing(tmp_path): Python process. This also ensures that we don't break other tests by importing a bad `_multiprocessing` module. 
""" - (tmp_path / "_multiprocessing.py").write_text('raise ImportError("No _multiprocessing module!")') + (tmp_path / "_multiprocessing.py").write_text( + 'raise ImportError("No _multiprocessing module!")' + ) env = dict(os.environ) # For subprocess, use current sys.path with our custom version of # multiprocessing inserted. - env["PYTHONPATH"] = ":".join( - [str(tmp_path)] + sys.path - ) + env["PYTHONPATH"] = ":".join([str(tmp_path)] + sys.path) subprocess.check_call([sys.executable, "-c", "import joblib"], env=env) From 045cfd2ae68f872848013a7d1fc2a3da05a43ece Mon Sep 17 00:00:00 2001 From: Hood Chatham Date: Mon, 10 Jan 2022 14:11:34 -0800 Subject: [PATCH 10/29] Revert change to setup.cfg --- setup.cfg | 1 - 1 file changed, 1 deletion(-) diff --git a/setup.cfg b/setup.cfg index 5a21c70bc..e88d7b0c4 100644 --- a/setup.cfg +++ b/setup.cfg @@ -16,7 +16,6 @@ addopts = --doctest-modules -p no:warnings --ignore joblib/externals - --ignore joblib/test/missing_multiprocessing testpaths = joblib [flake8] From f50c2d38480d243d30b655971fee614e518a22fa Mon Sep 17 00:00:00 2001 From: Gael Varoquaux Date: Thu, 3 Feb 2022 14:10:31 +0100 Subject: [PATCH 11/29] ENH: make joblib robust to non-working multiprocessing --- joblib/__init__.py | 3 +-- joblib/_cloudpickle_wrapper.py | 16 ++++++++++++++++ joblib/_multiprocessing_helpers.py | 1 + joblib/parallel.py | 16 +++++++++++++--- 4 files changed, 31 insertions(+), 5 deletions(-) create mode 100644 joblib/_cloudpickle_wrapper.py diff --git a/joblib/__init__.py b/joblib/__init__.py index d72ee044e..dc0dc10dd 100644 --- a/joblib/__init__.py +++ b/joblib/__init__.py @@ -123,8 +123,7 @@ from .parallel import register_parallel_backend from .parallel import parallel_backend from .parallel import effective_n_jobs - -from .externals.loky import wrap_non_picklable_objects +from ._cloudpickle_wrapper import wrap_non_picklable_objects __all__ = ['Memory', 'MemorizedResult', 'PrintTime', 'Logger', 'hash', 'dump', diff --git 
a/joblib/_cloudpickle_wrapper.py b/joblib/_cloudpickle_wrapper.py new file mode 100644 index 000000000..9cca8490f --- /dev/null +++ b/joblib/_cloudpickle_wrapper.py @@ -0,0 +1,16 @@ +""" +Small shim of loky's cloudpickle_wrapper to avoid failure when +multiprocessing is not available. +""" + + +from ._multiprocessing_helpers import mp + +def my_wrap_non_picklable_objects(obj, keep_wrapper=True): + return obj + +if mp is None: + wrap_non_picklable_objects = my_wrap_non_picklable_objects +else: + from .external.loky import wrap_non_picklable_objects + diff --git a/joblib/_multiprocessing_helpers.py b/joblib/_multiprocessing_helpers.py index 1c5de2f8b..b80bc630e 100644 --- a/joblib/_multiprocessing_helpers.py +++ b/joblib/_multiprocessing_helpers.py @@ -14,6 +14,7 @@ if mp: try: import multiprocessing as mp + import _multiprocessing except ImportError: mp = None diff --git a/joblib/parallel.py b/joblib/parallel.py index 687557eb6..90efd7fa4 100644 --- a/joblib/parallel.py +++ b/joblib/parallel.py @@ -27,7 +27,6 @@ ThreadingBackend, SequentialBackend, LokyBackend) from .externals.cloudpickle import dumps, loads -from .externals import loky # Make sure that those two classes are part of the public joblib.parallel API # so that 3rd party backend implementers can import them from here. @@ -39,12 +38,23 @@ 'multiprocessing': MultiprocessingBackend, 'threading': ThreadingBackend, 'sequential': SequentialBackend, - 'loky': LokyBackend, } # name of the backend used by default by Parallel outside of any context # managed by ``parallel_backend``. 
-DEFAULT_BACKEND = 'loky' + +# threading is the only backend that is always everywhere +DEFAULT_BACKEND = 'threading' + DEFAULT_N_JOBS = 1 + +# if multiprocessing is available, so is loky, we set it as the default +# backend +if mp is not None: + from .externals import loky + BACKENDS['loky'] = LokyBackend + DEFAULT_BACKEND = 'loky' + + DEFAULT_THREAD_BACKEND = 'threading' # Thread local value that can be overridden by the ``parallel_backend`` context From 8e838a11bf69279d19d33f656ef199b8477cb842 Mon Sep 17 00:00:00 2001 From: Gael Varoquaux Date: Thu, 3 Feb 2022 14:17:43 +0100 Subject: [PATCH 12/29] Fix stupid error --- joblib/_cloudpickle_wrapper.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/joblib/_cloudpickle_wrapper.py b/joblib/_cloudpickle_wrapper.py index 9cca8490f..a60434057 100644 --- a/joblib/_cloudpickle_wrapper.py +++ b/joblib/_cloudpickle_wrapper.py @@ -12,5 +12,5 @@ def my_wrap_non_picklable_objects(obj, keep_wrapper=True): if mp is None: wrap_non_picklable_objects = my_wrap_non_picklable_objects else: - from .external.loky import wrap_non_picklable_objects + from .externals.loky import wrap_non_picklable_objects From b56e6d58dae9c0173f2e8153793ea2354b1ab63c Mon Sep 17 00:00:00 2001 From: Gael Varoquaux Date: Thu, 3 Feb 2022 15:39:10 +0100 Subject: [PATCH 13/29] Fix all test not to depend on multiprocessing --- joblib/_cloudpickle_wrapper.py | 4 +-- joblib/_multiprocessing_helpers.py | 2 +- joblib/test/test_cloudpickle_wrapper.py | 26 ++++++++++++++ joblib/test/test_memmapping.py | 5 ++- joblib/test/test_missing_multiprocessing.py | 7 +++- joblib/test/test_module.py | 5 ++- joblib/test/test_parallel.py | 40 ++++++++++++++------- 7 files changed, 71 insertions(+), 18 deletions(-) create mode 100644 joblib/test/test_cloudpickle_wrapper.py diff --git a/joblib/_cloudpickle_wrapper.py b/joblib/_cloudpickle_wrapper.py index a60434057..860c0c204 100644 --- a/joblib/_cloudpickle_wrapper.py +++ b/joblib/_cloudpickle_wrapper.py @@ -9,8 
+9,8 @@ def my_wrap_non_picklable_objects(obj, keep_wrapper=True): return obj + if mp is None: wrap_non_picklable_objects = my_wrap_non_picklable_objects else: - from .externals.loky import wrap_non_picklable_objects - + from .externals.loky import wrap_non_picklable_objects # noqa diff --git a/joblib/_multiprocessing_helpers.py b/joblib/_multiprocessing_helpers.py index b80bc630e..bde4bc190 100644 --- a/joblib/_multiprocessing_helpers.py +++ b/joblib/_multiprocessing_helpers.py @@ -14,7 +14,7 @@ if mp: try: import multiprocessing as mp - import _multiprocessing + import _multiprocessing # noqa except ImportError: mp = None diff --git a/joblib/test/test_cloudpickle_wrapper.py b/joblib/test/test_cloudpickle_wrapper.py new file mode 100644 index 000000000..b9dd7ff3c --- /dev/null +++ b/joblib/test/test_cloudpickle_wrapper.py @@ -0,0 +1,26 @@ +""" +Test that our implementation of wrap_non_picklable_objects mimics +properly the loky implementation. +""" + +from .._cloudpickle_wrapper import wrap_non_picklable_objects +from .._cloudpickle_wrapper import my_wrap_non_picklable_objects + +def a_function(x): + return x + + +class AClass(object): + + def __call__(self, x): + return x + + +def test_wrap_non_picklable_objects(): + # Mostly a smoke test: test that we can use callable in the same way + # with both our implementation of wrap_non_picklable_objects and the + # upstream one + for obj in (a_function, AClass()): + wrapped_obj = wrap_non_picklable_objects(obj) + my_wrapped_obj = my_wrap_non_picklable_objects(obj) + assert wrapped_obj(1) == my_wrapped_obj(1) diff --git a/joblib/test/test_memmapping.py b/joblib/test/test_memmapping.py index f110751fb..635f328f0 100644 --- a/joblib/test/test_memmapping.py +++ b/joblib/test/test_memmapping.py @@ -146,7 +146,8 @@ def reconstruct_array_or_memmap(x): assert_array_equal(b3_reconstructed, b3) -@skipif(sys.platform != "win32", +@with_multiprocessing +@skipif((sys.platform != "win32") or (), reason="PermissionError only easily 
triggerable on Windows") def test_resource_tracker_retries_when_permissionerror(tmpdir): # Test resource_tracker retry mechanism when unlinking memmaps. See more @@ -355,6 +356,7 @@ def test_pool_with_memmap_array_view(factory, tmpdir): @with_numpy +@with_multiprocessing @parametrize("backend", ["multiprocessing", "loky"]) def test_permission_error_windows_reference_cycle(backend): # Non regression test for: @@ -389,6 +391,7 @@ def test_permission_error_windows_reference_cycle(backend): @with_numpy +@with_multiprocessing @parametrize("backend", ["multiprocessing", "loky"]) def test_permission_error_windows_memmap_sent_to_parent(backend): # Second non-regression test for: diff --git a/joblib/test/test_missing_multiprocessing.py b/joblib/test/test_missing_multiprocessing.py index 041db55f5..251925ced 100644 --- a/joblib/test/test_missing_multiprocessing.py +++ b/joblib/test/test_missing_multiprocessing.py @@ -24,4 +24,9 @@ def test_missing_multiprocessing(tmp_path): # For subprocess, use current sys.path with our custom version of # multiprocessing inserted. env["PYTHONPATH"] = ":".join([str(tmp_path)] + sys.path) - subprocess.check_call([sys.executable, "-c", "import joblib"], env=env) + subprocess.check_call( + [sys.executable, "-c", + "import joblib, math; " + "joblib.Parallel(n_jobs=1)(" + "joblib.delayed(math.sqrt)(i**2) for i in range(10))" + ], env=env) diff --git a/joblib/test/test_module.py b/joblib/test/test_module.py index 9c3b12b90..a2257a414 100644 --- a/joblib/test/test_module.py +++ b/joblib/test/test_module.py @@ -1,7 +1,7 @@ import sys import joblib -import pytest from joblib.testing import check_subprocess_call +from joblib.test.common import with_multiprocessing def test_version(): @@ -9,6 +9,7 @@ def test_version(): "There are no __version__ argument on the joblib module") +@with_multiprocessing def test_no_start_method_side_effect_on_import(): # check that importing joblib does not implicitly set the global # start_method for multiprocessing. 
@@ -22,6 +23,7 @@ def test_no_start_method_side_effect_on_import(): check_subprocess_call([sys.executable, '-c', code]) +@with_multiprocessing def test_no_semaphore_tracker_on_import(): # check that importing joblib does not implicitly spawn a resource tracker # or a semaphore tracker @@ -38,6 +40,7 @@ def test_no_semaphore_tracker_on_import(): check_subprocess_call([sys.executable, '-c', code]) +@with_multiprocessing def test_no_resource_tracker_on_import(): code = """if True: import joblib diff --git a/joblib/test/test_parallel.py b/joblib/test/test_parallel.py index 0d27fa97f..fcc26e50f 100644 --- a/joblib/test/test_parallel.py +++ b/joblib/test/test_parallel.py @@ -24,14 +24,17 @@ import joblib from joblib import parallel from joblib import dump, load -from joblib.externals.loky import get_reusable_executor + +from joblib._multiprocessing_helpers import mp from joblib.test.common import np, with_numpy from joblib.test.common import with_multiprocessing from joblib.testing import (parametrize, raises, check_subprocess_call, skipif, SkipTest, warns) -from joblib.externals.loky.process_executor import TerminatedWorkerError +if mp is not None: + # Loky is not available if multiprocessing is not + from joblib.externals.loky import get_reusable_executor from queue import Queue @@ -69,7 +72,10 @@ ALL_VALID_BACKENDS = [None] + sorted(BACKENDS.keys()) # Add instances of backend classes deriving from ParallelBackendBase ALL_VALID_BACKENDS += [BACKENDS[backend_str]() for backend_str in BACKENDS] -PROCESS_BACKENDS = ['multiprocessing', 'loky'] +if mp is None: + PROCESS_BACKENDS = [] +else: + PROCESS_BACKENDS = ['multiprocessing', 'loky'] PARALLEL_BACKENDS = PROCESS_BACKENDS + ['threading'] if hasattr(mp, 'get_context'): @@ -1208,7 +1214,10 @@ def test_memmapping_leaks(backend, tmpdir): raise AssertionError('temporary directory of Parallel was not removed') -@parametrize('backend', [None, 'loky', 'threading']) +@parametrize('backend', + ([None, 'threading'] if mp is None + 
else [None, 'loky', 'threading']) + ) def test_lambda_expression(backend): # cloudpickle is used to pickle delayed callables results = Parallel(n_jobs=2, backend=backend)( @@ -1238,6 +1247,7 @@ def test_backend_batch_statistics_reset(backend): p._backend._DEFAULT_SMOOTHED_BATCH_DURATION) +@with_multiprocessing def test_backend_hinting_and_constraints(): for n_jobs in [1, 2, -1]: assert type(Parallel(n_jobs=n_jobs)._backend) == LokyBackend @@ -1438,7 +1448,8 @@ def _recursive_parallel(nesting_limit=None): return Parallel()(delayed(_recursive_parallel)() for i in range(2)) -@parametrize('backend', ['loky', 'threading']) +@parametrize('backend', + (['threading'] if mp is None else ['loky', 'threading'])) def test_thread_bomb_mitigation(backend): # Test that recursive parallelism raises a recursion rather than # saturating the operating system resources by creating a unbounded number @@ -1447,13 +1458,18 @@ def test_thread_bomb_mitigation(backend): with raises(BaseException) as excinfo: _recursive_parallel() exc = excinfo.value - if backend == "loky" and isinstance(exc, TerminatedWorkerError): - # The recursion exception can itself cause an error when pickling it to - # be send back to the parent process. In this case the worker crashes - # but the original traceback is still printed on stderr. This could be - # improved but does not seem simple to do and this is is not critical - # for users (as long as there is no process or thread bomb happening). - pytest.xfail("Loky worker crash when serializing RecursionError") + if backend == "loky": + # Local import because loky may not be importable for lack of + # multiprocessing + from joblib.externals.loky.process_executor import TerminatedWorkerError # noqa + if isinstance(exc, TerminatedWorkerError): + # The recursion exception can itself cause an error when + # pickling it to be send back to the parent process. In this + # case the worker crashes but the original traceback is still + # printed on stderr. 
This could be improved but does not seem + # simple to do and this is is not critical for users (as long + # as there is no process or thread bomb happening). + pytest.xfail("Loky worker crash when serializing RecursionError") else: assert isinstance(exc, RecursionError) From 7bd1771b707174a1b300638c75d10ea5fac323c4 Mon Sep 17 00:00:00 2001 From: Gael Varoquaux Date: Thu, 3 Feb 2022 15:42:32 +0100 Subject: [PATCH 14/29] Fix linting --- joblib/_cloudpickle_wrapper.py | 1 + joblib/test/test_cloudpickle_wrapper.py | 1 + 2 files changed, 2 insertions(+) diff --git a/joblib/_cloudpickle_wrapper.py b/joblib/_cloudpickle_wrapper.py index 860c0c204..3dbe3ae71 100644 --- a/joblib/_cloudpickle_wrapper.py +++ b/joblib/_cloudpickle_wrapper.py @@ -6,6 +6,7 @@ from ._multiprocessing_helpers import mp + def my_wrap_non_picklable_objects(obj, keep_wrapper=True): return obj diff --git a/joblib/test/test_cloudpickle_wrapper.py b/joblib/test/test_cloudpickle_wrapper.py index b9dd7ff3c..733f51c72 100644 --- a/joblib/test/test_cloudpickle_wrapper.py +++ b/joblib/test/test_cloudpickle_wrapper.py @@ -6,6 +6,7 @@ from .._cloudpickle_wrapper import wrap_non_picklable_objects from .._cloudpickle_wrapper import my_wrap_non_picklable_objects + def a_function(x): return x From 477949c0269adaafdc27817099541f5baeb65682 Mon Sep 17 00:00:00 2001 From: Gael Varoquaux Date: Fri, 4 Feb 2022 19:13:32 +0100 Subject: [PATCH 15/29] Changelog and documentation --- CHANGES.rst | 4 ++++ doc/parallel.rst | 7 +++++++ joblib/parallel.py | 7 +++++-- 3 files changed, 16 insertions(+), 2 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index bff605c94..e43b1ee16 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -4,6 +4,10 @@ Latest changes Development version ------------------- +- Make sure that joblib works even when multiprocessing is not available, + for instance with Pyodide + https://github.com/joblib/joblib/pull/1256 + Release 1.1.0 -------------- diff --git a/doc/parallel.rst b/doc/parallel.rst index 
466d613af..f64a2c622 100644 --- a/doc/parallel.rst +++ b/doc/parallel.rst @@ -71,6 +71,13 @@ call to :class:`joblib.Parallel` but this is now considered a bad pattern (when done in a library) as it does not make it possible to override that choice with the ``parallel_backend`` context manager. + +.. topic:: The loky backend may not always be available + + Some rare systems do not support multiprocessing (for instance + pyiodine). In this case the loky backend is not availble and the + default backend falls back to threading. + Besides builtin joblib backends, we can use `Joblib Apache Spark Backend `_ to distribute joblib tasks on a Spark cluster. diff --git a/joblib/parallel.py b/joblib/parallel.py index 90efd7fa4..df431792e 100644 --- a/joblib/parallel.py +++ b/joblib/parallel.py @@ -145,7 +145,8 @@ class parallel_backend(object): 'threading' is a low-overhead alternative that is most efficient for functions that release the Global Interpreter Lock: e.g. I/O-bound code or CPU-bound code in a few calls to native code that explicitly releases the - GIL. + GIL. Note that on some rare systems, the loky backend may not be + available (systems without multiprocessing, such as pyiodine). In addition, if the `dask` and `distributed` Python packages are installed, it is possible to use the 'dask' backend for better scheduling of nested @@ -446,7 +447,9 @@ class Parallel(Logger): - "loky" used by default, can induce some communication and memory overhead when exchanging input and - output data with the worker Python processes. + output data with the worker Python processes. On some rare + systems (such as pyiodine), the loky backend may not be + available. - "multiprocessing" previous process-based backend based on `multiprocessing.Pool`. Less robust than `loky`. 
- "threading" is a very low-overhead backend but it suffers From 439c60cf4178fbf8c9a1394e15f9f0dd366ddfbf Mon Sep 17 00:00:00 2001 From: Gael Varoquaux Date: Sat, 5 Feb 2022 21:44:52 +0100 Subject: [PATCH 16/29] Backend fallback if multiprocessing is not available --- joblib/parallel.py | 31 +++++++++++++++++++++++++------ joblib/test/test_parallel.py | 14 +++++++++++++- 2 files changed, 38 insertions(+), 7 deletions(-) diff --git a/joblib/parallel.py b/joblib/parallel.py index df431792e..e452bf7b3 100644 --- a/joblib/parallel.py +++ b/joblib/parallel.py @@ -35,7 +35,6 @@ BACKENDS = { - 'multiprocessing': MultiprocessingBackend, 'threading': ThreadingBackend, 'sequential': SequentialBackend, } @@ -47,9 +46,12 @@ DEFAULT_N_JOBS = 1 +MAYBE_AVAILABLE_BACKENDS = {'multiprocessing', 'loky'} + # if multiprocessing is available, so is loky, we set it as the default # backend if mp is not None: + BACKENDS['multiprocessing'] = MultiprocessingBackend from .externals import loky BACKENDS['loky'] = LokyBackend DEFAULT_BACKEND = 'loky' @@ -145,8 +147,9 @@ class parallel_backend(object): 'threading' is a low-overhead alternative that is most efficient for functions that release the Global Interpreter Lock: e.g. I/O-bound code or CPU-bound code in a few calls to native code that explicitly releases the - GIL. Note that on some rare systems, the loky backend may not be - available (systems without multiprocessing, such as pyiodine). + GIL. Note that on some rare systems (such as pyiodine), + multiprocessing and loky may not be available, in which case joblib + defaults to threading. 
In addition, if the `dask` and `distributed` Python packages are installed, it is possible to use the 'dask' backend for better scheduling of nested @@ -195,9 +198,19 @@ class parallel_backend(object): def __init__(self, backend, n_jobs=-1, inner_max_num_threads=None, **backend_params): if isinstance(backend, str): - if backend not in BACKENDS and backend in EXTERNAL_BACKENDS: - register = EXTERNAL_BACKENDS[backend] - register() + if backend not in BACKENDS: + if backend in EXTERNAL_BACKENDS: + register = EXTERNAL_BACKENDS[backend] + register() + elif backend in MAYBE_AVAILABLE_BACKENDS: + warnings.warn( + f"joblib backend '{backend}' is not available on ", + f"your system, falling back to {DEFAULT_BACKEND}.", + stacklevel=2) + BACKENDS[backend] = BACKENDS[DEFAULT_BACKEND] + else: + raise ValueError("Invalid backend: %s, expected one of %r" + % (backend, sorted(BACKENDS.keys()))) backend = BACKENDS[backend](**backend_params) @@ -703,6 +716,12 @@ def __init__(self, n_jobs=None, backend=None, verbose=0, timeout=None, # preload modules on the forkserver helper process. 
self._backend_args['context'] = backend backend = MultiprocessingBackend(nesting_level=nesting_level) + elif backend not in BACKENDS and backend in MAYBE_AVAILABLE_BACKENDS: + warnings.warn( + f"joblib backend '{backend}' is not available on ", + f"your system, falling back to {DEFAULT_BACKEND}.", + stacklevel=2) + BACKENDS[backend] = BACKENDS[DEFAULT_BACKEND] else: try: backend_factory = BACKENDS[backend] diff --git a/joblib/test/test_parallel.py b/joblib/test/test_parallel.py index fcc26e50f..2850044b3 100644 --- a/joblib/test/test_parallel.py +++ b/joblib/test/test_parallel.py @@ -575,8 +575,14 @@ def effective_n_jobs(self, n_jobs=1): def test_invalid_backend(): - with raises(ValueError): + with raises(ValueError) as excinfo: Parallel(backend='unit-testing') + assert "Invalid backend:" in str(excinfo.value) + + with raises(ValueError) as excinfo: + with parallel_backend( 'unit-testing'): + pass + assert "Invalid backend:" in str(excinfo.value) @parametrize('backend', ALL_VALID_BACKENDS) @@ -606,6 +612,12 @@ def test_overwrite_default_backend(): parallel.DEFAULT_BACKEND = DEFAULT_BACKEND assert _active_backend_type() == DefaultBackend +@skipif(mp is not None) +def test_backend_no_multiprocessing(): + with warns(UserWarning, + match="joblib backend '.*' is not available on.*"): + Parallel(backend='loky')(delayed(square)(i) for i in range(3)) + def check_backend_context_manager(backend_name): with parallel_backend(backend_name, n_jobs=3): From a730ce89e79429c5181f8c27f8cd1c4209290898 Mon Sep 17 00:00:00 2001 From: Gael Varoquaux Date: Sat, 5 Feb 2022 21:53:44 +0100 Subject: [PATCH 17/29] Fix prior commit --- joblib/test/test_parallel.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/joblib/test/test_parallel.py b/joblib/test/test_parallel.py index 2850044b3..4ce6d06ac 100644 --- a/joblib/test/test_parallel.py +++ b/joblib/test/test_parallel.py @@ -580,7 +580,7 @@ def test_invalid_backend(): assert "Invalid backend:" in str(excinfo.value) 
with raises(ValueError) as excinfo: - with parallel_backend( 'unit-testing'): + with parallel_backend('unit-testing'): pass assert "Invalid backend:" in str(excinfo.value) @@ -612,7 +612,8 @@ def test_overwrite_default_backend(): parallel.DEFAULT_BACKEND = DEFAULT_BACKEND assert _active_backend_type() == DefaultBackend -@skipif(mp is not None) + +@skipif(mp is not None, reason="Only without multiprocessing") def test_backend_no_multiprocessing(): with warns(UserWarning, match="joblib backend '.*' is not available on.*"): From 3cbf561d9f82bd76f2a7bf62f1378a109904eafb Mon Sep 17 00:00:00 2001 From: Gael Varoquaux Date: Sat, 5 Feb 2022 22:06:51 +0100 Subject: [PATCH 18/29] Fix tests --- joblib/parallel.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/joblib/parallel.py b/joblib/parallel.py index e452bf7b3..0362b280f 100644 --- a/joblib/parallel.py +++ b/joblib/parallel.py @@ -204,6 +204,7 @@ def __init__(self, backend, n_jobs=-1, inner_max_num_threads=None, register() elif backend in MAYBE_AVAILABLE_BACKENDS: warnings.warn( + UserWarning, f"joblib backend '{backend}' is not available on ", f"your system, falling back to {DEFAULT_BACKEND}.", stacklevel=2) @@ -718,6 +719,7 @@ def __init__(self, n_jobs=None, backend=None, verbose=0, timeout=None, backend = MultiprocessingBackend(nesting_level=nesting_level) elif backend not in BACKENDS and backend in MAYBE_AVAILABLE_BACKENDS: warnings.warn( + UserWarning, f"joblib backend '{backend}' is not available on ", f"your system, falling back to {DEFAULT_BACKEND}.", stacklevel=2) From fce2c7aa8691eaa53459796c3d68129635048979 Mon Sep 17 00:00:00 2001 From: Gael Varoquaux Date: Mon, 7 Feb 2022 21:10:39 +0100 Subject: [PATCH 19/29] Update joblib/parallel.py Co-authored-by: Olivier Grisel --- joblib/parallel.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/joblib/parallel.py b/joblib/parallel.py index 0362b280f..1ec8299a9 100644 --- a/joblib/parallel.py +++ b/joblib/parallel.py @@ -462,7 +462,7 @@ class 
Parallel(Logger): - "loky" used by default, can induce some communication and memory overhead when exchanging input and output data with the worker Python processes. On some rare - systems (such as pyiodine), the loky backend may not be + systems (such as Pyodide), the loky backend may not be available. - "multiprocessing" previous process-based backend based on `multiprocessing.Pool`. Less robust than `loky`. From 6949cd84e9f5a4dec21b20fa4435e7b2ef03fbd5 Mon Sep 17 00:00:00 2001 From: Gael Varoquaux Date: Mon, 7 Feb 2022 21:10:46 +0100 Subject: [PATCH 20/29] Update doc/parallel.rst Co-authored-by: Olivier Grisel --- doc/parallel.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/parallel.rst b/doc/parallel.rst index f64a2c622..8514e2d7a 100644 --- a/doc/parallel.rst +++ b/doc/parallel.rst @@ -75,7 +75,7 @@ choice with the ``parallel_backend`` context manager. .. topic:: The loky backend may not always be available Some rare systems do not support multiprocessing (for instance - pyiodine). In this case the loky backend is not availble and the + Pyodide). In this case the loky backend is not available and the default backend falls back to threading.
Besides builtin joblib backends, we can use From 3496274a6357d3f7a36ed1cd6d346152b82a0cb8 Mon Sep 17 00:00:00 2001 From: Gael Varoquaux Date: Mon, 7 Feb 2022 21:23:40 +0100 Subject: [PATCH 21/29] fix tests --- joblib/parallel.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/joblib/parallel.py b/joblib/parallel.py index 1ec8299a9..39ee6c8f5 100644 --- a/joblib/parallel.py +++ b/joblib/parallel.py @@ -205,7 +205,7 @@ def __init__(self, backend, n_jobs=-1, inner_max_num_threads=None, elif backend in MAYBE_AVAILABLE_BACKENDS: warnings.warn( UserWarning, - f"joblib backend '{backend}' is not available on ", + f"joblib backend '{backend}' is not available on " f"your system, falling back to {DEFAULT_BACKEND}.", stacklevel=2) BACKENDS[backend] = BACKENDS[DEFAULT_BACKEND] @@ -720,7 +720,7 @@ def __init__(self, n_jobs=None, backend=None, verbose=0, timeout=None, elif backend not in BACKENDS and backend in MAYBE_AVAILABLE_BACKENDS: warnings.warn( UserWarning, - f"joblib backend '{backend}' is not available on ", + f"joblib backend '{backend}' is not available on " f"your system, falling back to {DEFAULT_BACKEND}.", stacklevel=2) BACKENDS[backend] = BACKENDS[DEFAULT_BACKEND] From 928909c11f4466e3efadd17d5d6fb131d6a0b3b0 Mon Sep 17 00:00:00 2001 From: Gael Varoquaux Date: Mon, 7 Feb 2022 21:40:21 +0100 Subject: [PATCH 22/29] fix tests --- joblib/parallel.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/joblib/parallel.py b/joblib/parallel.py index 39ee6c8f5..622457862 100644 --- a/joblib/parallel.py +++ b/joblib/parallel.py @@ -204,9 +204,9 @@ def __init__(self, backend, n_jobs=-1, inner_max_num_threads=None, register() elif backend in MAYBE_AVAILABLE_BACKENDS: warnings.warn( - UserWarning, f"joblib backend '{backend}' is not available on " f"your system, falling back to {DEFAULT_BACKEND}.", + UserWarning, stacklevel=2) BACKENDS[backend] = BACKENDS[DEFAULT_BACKEND] else: @@ -719,9 +719,9 @@ def __init__(self, n_jobs=None, 
backend=None, verbose=0, timeout=None, backend = MultiprocessingBackend(nesting_level=nesting_level) elif backend not in BACKENDS and backend in MAYBE_AVAILABLE_BACKENDS: warnings.warn( - UserWarning, f"joblib backend '{backend}' is not available on " f"your system, falling back to {DEFAULT_BACKEND}.", + UserWarning, stacklevel=2) BACKENDS[backend] = BACKENDS[DEFAULT_BACKEND] else: From b94141be37797a1410efa2008d336114e15a54fb Mon Sep 17 00:00:00 2001 From: Gael Varoquaux Date: Mon, 7 Feb 2022 21:55:13 +0100 Subject: [PATCH 23/29] Better tests --- joblib/parallel.py | 4 +++- joblib/test/test_parallel.py | 4 ++++ 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/joblib/parallel.py b/joblib/parallel.py index 622457862..dd372a829 100644 --- a/joblib/parallel.py +++ b/joblib/parallel.py @@ -717,13 +717,15 @@ def __init__(self, n_jobs=None, backend=None, verbose=0, timeout=None, # preload modules on the forkserver helper process. self._backend_args['context'] = backend backend = MultiprocessingBackend(nesting_level=nesting_level) + elif backend not in BACKENDS and backend in MAYBE_AVAILABLE_BACKENDS: warnings.warn( f"joblib backend '{backend}' is not available on " f"your system, falling back to {DEFAULT_BACKEND}.", UserWarning, stacklevel=2) - BACKENDS[backend] = BACKENDS[DEFAULT_BACKEND] + backend = BACKENDS[backend] = BACKENDS[DEFAULT_BACKEND] + else: try: backend_factory = BACKENDS[backend] diff --git a/joblib/test/test_parallel.py b/joblib/test/test_parallel.py index 4ce6d06ac..903486c1b 100644 --- a/joblib/test/test_parallel.py +++ b/joblib/test/test_parallel.py @@ -619,6 +619,10 @@ def test_backend_no_multiprocessing(): match="joblib backend '.*' is not available on.*"): Parallel(backend='loky')(delayed(square)(i) for i in range(3)) + # The below should now work without problems + with parallel_backend('loky'): + Parallel()(delayed(square)(i) for i in range(3)) + def check_backend_context_manager(backend_name): with parallel_backend(backend_name, 
n_jobs=3): From 9c2d03e0158bf789eda77d83c6849253f28140ef Mon Sep 17 00:00:00 2001 From: Gael Varoquaux Date: Mon, 7 Feb 2022 22:00:55 +0100 Subject: [PATCH 24/29] Fix tests --- joblib/parallel.py | 3 ++- joblib/test/test_parallel.py | 13 +++++++------ 2 files changed, 9 insertions(+), 7 deletions(-) diff --git a/joblib/parallel.py b/joblib/parallel.py index dd372a829..8a0f351c4 100644 --- a/joblib/parallel.py +++ b/joblib/parallel.py @@ -724,7 +724,8 @@ def __init__(self, n_jobs=None, backend=None, verbose=0, timeout=None, f"your system, falling back to {DEFAULT_BACKEND}.", UserWarning, stacklevel=2) - backend = BACKENDS[backend] = BACKENDS[DEFAULT_BACKEND] + BACKENDS[backend] = BACKENDS[DEFAULT_BACKEND] + backend = BACKENDS[DEFAULT_BACKEND] else: try: diff --git a/joblib/test/test_parallel.py b/joblib/test/test_parallel.py index 903486c1b..48e03c5eb 100644 --- a/joblib/test/test_parallel.py +++ b/joblib/test/test_parallel.py @@ -1375,12 +1375,13 @@ def test_invalid_backend_hinting_and_constraints(): # requiring shared memory semantics. Parallel(prefer='processes', require='sharedmem') - # It is inconsistent to ask explicitly for a process-based parallelism - # while requiring shared memory semantics. - with raises(ValueError): - Parallel(backend='loky', require='sharedmem') - with raises(ValueError): - Parallel(backend='multiprocessing', require='sharedmem') + if mp is not None: + # It is inconsistent to ask explicitly for a process-based + # parallelism while requiring shared memory semantics. 
+ with raises(ValueError): + Parallel(backend='loky', require='sharedmem') + with raises(ValueError): + Parallel(backend='multiprocessing', require='sharedmem') def test_global_parallel_backend(): From 0623569d5ae00bd77e9a3e1c59de1babcff1a276 Mon Sep 17 00:00:00 2001 From: Gael Varoquaux Date: Mon, 7 Feb 2022 22:28:45 +0100 Subject: [PATCH 25/29] more test fixing --- joblib/parallel.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/joblib/parallel.py b/joblib/parallel.py index 8a0f351c4..9eb630811 100644 --- a/joblib/parallel.py +++ b/joblib/parallel.py @@ -725,7 +725,7 @@ def __init__(self, n_jobs=None, backend=None, verbose=0, timeout=None, UserWarning, stacklevel=2) BACKENDS[backend] = BACKENDS[DEFAULT_BACKEND] - backend = BACKENDS[DEFAULT_BACKEND] + backend = BACKENDS[DEFAULT_BACKEND](nesting_level=nesting_level) else: try: From 60629a5f49d6405ce0d1445ae0b98ab140266c7b Mon Sep 17 00:00:00 2001 From: Gael Varoquaux Date: Mon, 7 Feb 2022 22:39:52 +0100 Subject: [PATCH 26/29] More robust tests --- joblib/test/test_parallel.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/joblib/test/test_parallel.py b/joblib/test/test_parallel.py index 48e03c5eb..a913dabad 100644 --- a/joblib/test/test_parallel.py +++ b/joblib/test/test_parallel.py @@ -1501,7 +1501,7 @@ def _run_parallel_sum(): return env_vars, parallel_sum(100) -@parametrize("backend", [None, 'loky']) +@parametrize("backend", ([None, 'loky'] if mp is not None else [None])) @skipif(parallel_sum is None, reason="Need OpenMP helper compiled") def test_parallel_thread_limit(backend): results = Parallel(n_jobs=2, backend=backend)( From eb370e44b3698e2cb59bdae64bf14ca1e2064398 Mon Sep 17 00:00:00 2001 From: Gael Varoquaux Date: Mon, 7 Feb 2022 22:48:17 +0100 Subject: [PATCH 27/29] Tmp to find test called --- joblib/parallel.py | 1 + 1 file changed, 1 insertion(+) diff --git a/joblib/parallel.py b/joblib/parallel.py index 9eb630811..d28324136 100644 --- a/joblib/parallel.py +++ 
b/joblib/parallel.py @@ -726,6 +726,7 @@ def __init__(self, n_jobs=None, backend=None, verbose=0, timeout=None, stacklevel=2) BACKENDS[backend] = BACKENDS[DEFAULT_BACKEND] backend = BACKENDS[DEFAULT_BACKEND](nesting_level=nesting_level) + raise ValueError else: try: From 93d2bd12f0d249e84228e659c33e30775dc51de9 Mon Sep 17 00:00:00 2001 From: Gael Varoquaux Date: Mon, 7 Feb 2022 22:53:08 +0100 Subject: [PATCH 28/29] more debugging --- joblib/parallel.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/joblib/parallel.py b/joblib/parallel.py index d28324136..2368ef41c 100644 --- a/joblib/parallel.py +++ b/joblib/parallel.py @@ -724,9 +724,8 @@ def __init__(self, n_jobs=None, backend=None, verbose=0, timeout=None, f"your system, falling back to {DEFAULT_BACKEND}.", UserWarning, stacklevel=2) - BACKENDS[backend] = BACKENDS[DEFAULT_BACKEND] - backend = BACKENDS[DEFAULT_BACKEND](nesting_level=nesting_level) - raise ValueError + #BACKENDS[backend] = BACKENDS[DEFAULT_BACKEND] + #backend = BACKENDS[DEFAULT_BACKEND](nesting_level=nesting_level) else: try: From b0b3663746edaf20e8bc6713f9bb6b34cac05870 Mon Sep 17 00:00:00 2001 From: Gael Varoquaux Date: Mon, 7 Feb 2022 22:56:56 +0100 Subject: [PATCH 29/29] This should fix things --- joblib/parallel.py | 4 ++-- joblib/test/test_parallel.py | 1 + 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/joblib/parallel.py b/joblib/parallel.py index 2368ef41c..9eb630811 100644 --- a/joblib/parallel.py +++ b/joblib/parallel.py @@ -724,8 +724,8 @@ def __init__(self, n_jobs=None, backend=None, verbose=0, timeout=None, f"your system, falling back to {DEFAULT_BACKEND}.", UserWarning, stacklevel=2) - #BACKENDS[backend] = BACKENDS[DEFAULT_BACKEND] - #backend = BACKENDS[DEFAULT_BACKEND](nesting_level=nesting_level) + BACKENDS[backend] = BACKENDS[DEFAULT_BACKEND] + backend = BACKENDS[DEFAULT_BACKEND](nesting_level=nesting_level) else: try: diff --git a/joblib/test/test_parallel.py b/joblib/test/test_parallel.py 
index a913dabad..f3b2a8413 100644 --- a/joblib/test/test_parallel.py +++ b/joblib/test/test_parallel.py @@ -276,6 +276,7 @@ def raise_exception(backend): raise ValueError +@with_multiprocessing def test_nested_loop_with_exception_with_loky(): with raises(ValueError): with Parallel(n_jobs=2, backend="loky") as parallel: