From 91d3229167c796269149c7bf936d0842b6511420 Mon Sep 17 00:00:00 2001
From: Steven Silvester
Date: Fri, 29 Apr 2022 09:43:34 -0500
Subject: [PATCH] Backport PR #792: Centralize app cleanup

---
 .github/workflows/python-tests.yml     |  2 +
 jupyter_server/pytest_plugin.py        | 46 ++---
 jupyter_server/serverapp.py            |  9 +-
 .../services/kernels/kernelmanager.py  | 38 ++++++---
 tests/auth/test_authorizer.py          |  3 -
 tests/services/kernels/test_api.py     | 22 ++---
 tests/services/kernels/test_cull.py    |  8 +-
 tests/services/sessions/test_api.py    | 82 ++++---------------
 tests/test_terminal.py                 | 22 ++---
 9 files changed, 71 insertions(+), 161 deletions(-)

diff --git a/.github/workflows/python-tests.yml b/.github/workflows/python-tests.yml
index 7b8775dd99..8b454a4d22 100644
--- a/.github/workflows/python-tests.yml
+++ b/.github/workflows/python-tests.yml
@@ -137,6 +137,8 @@ jobs:
     steps:
       - uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1
       - uses: jupyterlab/maintainer-tools/.github/actions/test-sdist@v1
+        with:
+          test_command: pytest --vv || pytest -vv --lf
 
   python_tests_check: # This job does nothing and is only used for the branch protection
     if: always()
diff --git a/jupyter_server/pytest_plugin.py b/jupyter_server/pytest_plugin.py
index 7b35795c63..8b8317c049 100644
--- a/jupyter_server/pytest_plugin.py
+++ b/jupyter_server/pytest_plugin.py
@@ -299,10 +299,7 @@ def jp_ensure_app_fixture(request):
 @pytest.fixture(scope="function")
 def jp_serverapp(jp_ensure_app_fixture, jp_server_config, jp_argv, jp_configurable_serverapp):
     """Starts a Jupyter Server instance based on the established configuration values."""
-    app = jp_configurable_serverapp(config=jp_server_config, argv=jp_argv)
-    yield app
-    app.remove_server_info_file()
-    app.remove_browser_open_files()
+    return jp_configurable_serverapp(config=jp_server_config, argv=jp_argv)
 
 
 @pytest.fixture
@@ -463,48 +460,19 @@ def inner(nbpath):
 
 
 @pytest.fixture(autouse=True)
-def jp_server_cleanup():
+def jp_server_cleanup(io_loop):
     yield
+    app: ServerApp = ServerApp.instance()
+    loop = io_loop.asyncio_loop
+    loop.run_until_complete(app._cleanup())
     ServerApp.clear_instance()
 
 
 @pytest.fixture
 def jp_cleanup_subprocesses(jp_serverapp):
-    """Clean up subprocesses started by a Jupyter Server, i.e. kernels and terminal."""
+    """DEPRECATED: The jp_server_cleanup fixture automatically cleans up the singleton ServerApp class"""
 
     async def _():
-        terminal_cleanup = jp_serverapp.web_app.settings["terminal_manager"].terminate_all
-        kernel_cleanup = jp_serverapp.kernel_manager.shutdown_all
-
-        async def kernel_cleanup_steps():
-            # Try a graceful shutdown with a timeout
-            try:
-                await asyncio.wait_for(kernel_cleanup(), timeout=15.0)
-            except asyncio.TimeoutError:
-                # Now force a shutdown
-                try:
-                    await asyncio.wait_for(kernel_cleanup(now=True), timeout=15.0)
-                except asyncio.TimeoutError:
-                    print(Exception("Kernel never shutdown!"))
-            except Exception as e:
-                print(e)
-
-        if asyncio.iscoroutinefunction(terminal_cleanup):
-            try:
-                await terminal_cleanup()
-            except Exception as e:
-                print(e)
-        else:
-            try:
-                await terminal_cleanup()
-            except Exception as e:
-                print(e)
-        if asyncio.iscoroutinefunction(kernel_cleanup):
-            await kernel_cleanup_steps()
-        else:
-            try:
-                kernel_cleanup()
-            except Exception as e:
-                print(e)
+        pass
 
     return _
diff --git a/jupyter_server/serverapp.py b/jupyter_server/serverapp.py
index ca0c38a1ca..dba0a58f42 100644
--- a/jupyter_server/serverapp.py
+++ b/jupyter_server/serverapp.py
@@ -2427,6 +2427,8 @@ async def cleanup_kernels(self):
         The kernels will shutdown themselves when this process no longer exists,
         but explicit shutdown allows the KernelManagers to cleanup the connection files.
         """
+        if not getattr(self, "kernel_manager", None):
+            return
         n_kernels = len(self.kernel_manager.list_kernel_ids())
         kernel_msg = trans.ngettext(
             "Shutting down %d kernel", "Shutting down %d kernels", n_kernels
         )
@@ -2453,6 +2455,8 @@ async def cleanup_terminals(self):
 
     async def cleanup_extensions(self):
         """Call shutdown hooks in all extensions."""
+        if not getattr(self, "extension_manager", None):
+            return
         n_extensions = len(self.extension_manager.extension_apps)
         extension_msg = trans.ngettext(
             "Shutting down %d extension", "Shutting down %d extensions", n_extensions
         )
@@ -2732,6 +2736,8 @@ async def _cleanup(self):
         await self.cleanup_extensions()
         await self.cleanup_kernels()
         await self.cleanup_terminals()
+        if getattr(self, "session_manager", None):
+            self.session_manager.close()
 
     def start_ioloop(self):
         """Start the IO Loop."""
@@ -2760,7 +2766,8 @@ def start(self):
     async def _stop(self):
         """Cleanup resources and stop the IO Loop."""
         await self._cleanup()
-        self.io_loop.stop()
+        if getattr(self, "io_loop", None):
+            self.io_loop.stop()
 
     def stop(self, from_signal=False):
         """Cleanup resources and stop the server."""
diff --git a/jupyter_server/services/kernels/kernelmanager.py b/jupyter_server/services/kernels/kernelmanager.py
index f9b9af23bd..190d60eeb4 100644
--- a/jupyter_server/services/kernels/kernelmanager.py
+++ b/jupyter_server/services/kernels/kernelmanager.py
@@ -149,6 +149,7 @@ def _default_kernel_buffers(self):
 
     def __init__(self, **kwargs):
         self.pinned_superclass = MultiKernelManager
+        self._pending_kernel_tasks = {}
         self.pinned_superclass.__init__(self, **kwargs)
         self.last_kernel_activity = utcnow()
 
@@ -216,9 +217,11 @@ async def start_kernel(self, kernel_id=None, path=None, **kwargs):
                 kwargs["kernel_id"] = kernel_id
             kernel_id = await ensure_async(self.pinned_superclass.start_kernel(self, **kwargs))
             self._kernel_connections[kernel_id] = 0
-            fut = asyncio.ensure_future(self._finish_kernel_start(kernel_id))
+            task = asyncio.create_task(self._finish_kernel_start(kernel_id))
             if not getattr(self, "use_pending_kernels", None):
-                await fut
+                await task
+            else:
+                self._pending_kernel_tasks[kernel_id] = task
             # add busy/activity markers:
             kernel = self.get_kernel(kernel_id)
             kernel.execution_state = "starting"
@@ -245,8 +248,8 @@ async def _finish_kernel_start(self, kernel_id):
         if hasattr(km, "ready"):
             try:
                 await km.ready
-            except Exception:
-                self.log.exception(km.ready.exception())
+            except Exception as e:
+                self.log.exception(e)
                 return
 
         self._kernel_ports[kernel_id] = km.ports
@@ -372,7 +375,7 @@ def stop_buffering(self, kernel_id):
         buffer_info = self._kernel_buffers.pop(kernel_id)
         # close buffering streams
         for stream in buffer_info["channels"].values():
-            if not stream.closed():
+            if not stream.socket.closed:
                 stream.on_recv(None)
                 stream.close()
 
@@ -387,13 +390,18 @@ def stop_buffering(self, kernel_id):
     def shutdown_kernel(self, kernel_id, now=False, restart=False):
         """Shutdown a kernel by kernel_id"""
         self._check_kernel_id(kernel_id)
-        self.stop_watching_activity(kernel_id)
-        self.stop_buffering(kernel_id)
 
         # Decrease the metric of number of kernels
         # running for the relevant kernel type by 1
         KERNEL_CURRENTLY_RUNNING_TOTAL.labels(type=self._kernels[kernel_id].kernel_name).dec()
 
+        if kernel_id in self._pending_kernel_tasks:
+            task = self._pending_kernel_tasks.pop(kernel_id)
+            task.cancel()
+        else:
+            self.stop_watching_activity(kernel_id)
+            self.stop_buffering(kernel_id)
+
         self.pinned_superclass.shutdown_kernel(self, kernel_id, now=now, restart=restart)
 
     async def restart_kernel(self, kernel_id, now=False):
@@ -533,7 +541,8 @@ def stop_watching_activity(self, kernel_id):
         """Stop watching IOPub messages on a kernel for activity."""
         kernel = self._kernels[kernel_id]
         if getattr(kernel, "_activity_stream", None):
-            kernel._activity_stream.close()
+            if not kernel._activity_stream.socket.closed:
+                kernel._activity_stream.close()
             kernel._activity_stream = None
 
     def initialize_culler(self):
@@ -638,19 +647,24 @@ def __init__(self, **kwargs):
         self.pinned_superclass = AsyncMultiKernelManager
         self.pinned_superclass.__init__(self, **kwargs)
         self.last_kernel_activity = utcnow()
+        self._pending_kernel_tasks = {}
 
     async def shutdown_kernel(self, kernel_id, now=False, restart=False):
         """Shutdown a kernel by kernel_id"""
         self._check_kernel_id(kernel_id)
-        self.stop_watching_activity(kernel_id)
-        self.stop_buffering(kernel_id)
 
         # Decrease the metric of number of kernels
         # running for the relevant kernel type by 1
         KERNEL_CURRENTLY_RUNNING_TOTAL.labels(type=self._kernels[kernel_id].kernel_name).dec()
 
+        if kernel_id in self._pending_kernel_tasks:
+            task = self._pending_kernel_tasks.pop(kernel_id)
+            task.cancel()
+        else:
+            self.stop_watching_activity(kernel_id)
+            self.stop_buffering(kernel_id)
+
         # Finish shutting down the kernel before clearing state to avoid a race condition.
-        ret = await self.pinned_superclass.shutdown_kernel(
+        return await self.pinned_superclass.shutdown_kernel(
             self, kernel_id, now=now, restart=restart
         )
-        return ret
diff --git a/tests/auth/test_authorizer.py b/tests/auth/test_authorizer.py
index 096437b47a..1fc694d3c4 100644
--- a/tests/auth/test_authorizer.py
+++ b/tests/auth/test_authorizer.py
@@ -205,7 +205,6 @@ async def test_authorized_requests(
     send_request,
     tmp_path,
     jp_serverapp,
-    jp_cleanup_subprocesses,
     method,
     url,
     body,
@@ -275,5 +274,3 @@ async def test_authorized_requests(
 
     code = await send_request(url, body=body, method=method)
     assert code in expected_codes
-
-    await jp_cleanup_subprocesses()
diff --git a/tests/services/kernels/test_api.py b/tests/services/kernels/test_api.py
index bb91a588e4..b7c43453f5 100644
--- a/tests/services/kernels/test_api.py
+++ b/tests/services/kernels/test_api.py
@@ -67,7 +67,7 @@ async def test_no_kernels(jp_fetch):
 
 
 @pytest.mark.timeout(TEST_TIMEOUT)
-async def test_default_kernels(jp_fetch, jp_base_url, jp_cleanup_subprocesses):
+async def test_default_kernels(jp_fetch, jp_base_url):
     r = await jp_fetch("api", "kernels", method="POST", allow_nonstandard_methods=True)
     kernel = json.loads(r.body.decode())
     assert r.headers["location"] == url_path_join(jp_base_url, "/api/kernels/", kernel["id"])
@@ -79,13 +79,10 @@ async def test_default_kernels(jp_fetch, jp_base_url, jp_cleanup_subprocesses):
         ["frame-ancestors 'self'", "report-uri " + report_uri, "default-src 'none'"]
     )
     assert r.headers["Content-Security-Policy"] == expected_csp
-    await jp_cleanup_subprocesses()
 
 
 @pytest.mark.timeout(TEST_TIMEOUT)
-async def test_main_kernel_handler(
-    jp_fetch, jp_base_url, jp_cleanup_subprocesses, jp_serverapp, pending_kernel_is_ready
-):
+async def test_main_kernel_handler(jp_fetch, jp_base_url, jp_serverapp, pending_kernel_is_ready):
     # Start the first kernel
     r = await jp_fetch(
         "api", "kernels", method="POST", body=json.dumps({"name": NATIVE_KERNEL_NAME})
     )
@@ -158,11 +155,10 @@ async def test_main_kernel_handler(
     )
     kernel3 = json.loads(r.body.decode())
     assert isinstance(kernel3, dict)
-    await jp_cleanup_subprocesses()
 
 
 @pytest.mark.timeout(TEST_TIMEOUT)
-async def test_kernel_handler(jp_fetch, jp_cleanup_subprocesses, pending_kernel_is_ready):
+async def test_kernel_handler(jp_fetch, jp_serverapp, pending_kernel_is_ready):
     # Create a kernel
     r = await jp_fetch(
         "api", "kernels", method="POST", body=json.dumps({"name": NATIVE_KERNEL_NAME})
     )
@@ -206,13 +202,10 @@ async def test_kernel_handler(jp_fetch, jp_cleanup_subprocesses, pending_kernel_
     with pytest.raises(tornado.httpclient.HTTPClientError) as e:
         await jp_fetch("api", "kernels", bad_id, method="DELETE")
     assert expected_http_error(e, 404, "Kernel does not exist: " + bad_id)
-    await jp_cleanup_subprocesses()
 
 
 @pytest.mark.timeout(TEST_TIMEOUT)
-async def test_kernel_handler_startup_error(
-    jp_fetch, jp_cleanup_subprocesses, jp_serverapp, jp_kernelspecs
-):
+async def test_kernel_handler_startup_error(jp_fetch, jp_serverapp, jp_kernelspecs):
     if getattr(jp_serverapp.kernel_manager, "use_pending_kernels", False):
         return
 
@@ -223,7 +216,7 @@ async def test_kernel_handler_startup_error(
 
 @pytest.mark.timeout(TEST_TIMEOUT)
 async def test_kernel_handler_startup_error_pending(
-    jp_fetch, jp_ws_fetch, jp_cleanup_subprocesses, jp_serverapp, jp_kernelspecs
+    jp_fetch, jp_ws_fetch, jp_serverapp, jp_kernelspecs
 ):
     if not getattr(jp_serverapp.kernel_manager, "use_pending_kernels", False):
         return
@@ -238,9 +231,7 @@
 
 
 @pytest.mark.timeout(TEST_TIMEOUT)
-async def test_connection(
-    jp_fetch, jp_ws_fetch, jp_http_port, jp_auth_header, jp_cleanup_subprocesses
-):
+async def test_connection(jp_fetch, jp_ws_fetch, jp_http_port, jp_auth_header):
     # Create kernel
     r = await jp_fetch(
         "api", "kernels", method="POST", body=json.dumps({"name": NATIVE_KERNEL_NAME})
     )
@@ -274,4 +265,3 @@ async def test_connection(
     r = await jp_fetch("api", "kernels", kid, method="GET")
     model = json.loads(r.body.decode())
     assert model["connections"] == 0
-    await jp_cleanup_subprocesses()
diff --git a/tests/services/kernels/test_cull.py b/tests/services/kernels/test_cull.py
index 97f1a57bbe..2aa47bbac3 100644
--- a/tests/services/kernels/test_cull.py
+++ b/tests/services/kernels/test_cull.py
@@ -43,7 +43,7 @@
         ),
     ],
 )
-async def test_cull_idle(jp_fetch, jp_ws_fetch, jp_cleanup_subprocesses):
+async def test_cull_idle(jp_fetch, jp_ws_fetch):
     r = await jp_fetch("api", "kernels", method="POST", allow_nonstandard_methods=True)
     kernel = json.loads(r.body.decode())
     kid = kernel["id"]
@@ -59,7 +59,6 @@ async def test_cull_idle(jp_fetch, jp_ws_fetch, jp_cleanup_subprocesses):
     ws.close()
     culled = await get_cull_status(kid, jp_fetch)  # not connected, should be culled
     assert culled
-    await jp_cleanup_subprocesses()
 
 
 # Pending kernels was released in Jupyter Client 7.1
@@ -89,9 +88,7 @@ async def test_cull_idle(jp_fetch, jp_ws_fetch, jp_cleanup_subprocesses):
     ],
 )
 @pytest.mark.timeout(30)
-async def test_cull_dead(
-    jp_fetch, jp_ws_fetch, jp_serverapp, jp_cleanup_subprocesses, jp_kernelspecs
-):
+async def test_cull_dead(jp_fetch, jp_ws_fetch, jp_serverapp, jp_kernelspecs):
     r = await jp_fetch("api", "kernels", method="POST", allow_nonstandard_methods=True)
     kernel = json.loads(r.body.decode())
     kid = kernel["id"]
@@ -105,7 +102,6 @@ async def test_cull_dead(
     assert model["connections"] == 0
     culled = await get_cull_status(kid, jp_fetch)  # connected, should not be culled
     assert culled
-    await jp_cleanup_subprocesses()
 
 
 async def get_cull_status(kid, jp_fetch):
diff --git a/tests/services/sessions/test_api.py b/tests/services/sessions/test_api.py
index 19c165cbb1..d2a234e4b2 100644
--- a/tests/services/sessions/test_api.py
+++ b/tests/services/sessions/test_api.py
@@ -211,7 +211,7 @@ def assert_session_equality(actual, expected):
 
 
 @pytest.mark.timeout(TEST_TIMEOUT)
-async def test_create(session_client, jp_base_url, jp_cleanup_subprocesses, jp_serverapp):
+async def test_create(session_client, jp_base_url, jp_serverapp):
     # Make sure no sessions exist.
     resp = await session_client.list()
     sessions = j(resp)
@@ -251,14 +251,9 @@ async def test_create(session_client, jp_base_url, jp_cleanup_subprocesses, jp_s
     got = j(resp)
     assert_session_equality(got, new_session)
 
-    # Need to find a better solution to this.
-    await jp_cleanup_subprocesses()
-
 
 @pytest.mark.timeout(TEST_TIMEOUT)
-async def test_create_bad(
-    session_client, jp_base_url, jp_cleanup_subprocesses, jp_serverapp, jp_kernelspecs
-):
+async def test_create_bad(session_client, jp_base_url, jp_serverapp, jp_kernelspecs):
     if getattr(jp_serverapp.kernel_manager, "use_pending_kernels", False):
         return
 
@@ -272,16 +267,12 @@ async def test_create_bad(
     with pytest.raises(HTTPClientError):
         await session_client.create("foo/nb1.ipynb")
 
-    # Need to find a better solution to this.
-    await jp_cleanup_subprocesses()
-
 
 @pytest.mark.timeout(TEST_TIMEOUT)
 async def test_create_bad_pending(
     session_client,
     jp_base_url,
     jp_ws_fetch,
-    jp_cleanup_subprocesses,
     jp_serverapp,
     jp_kernelspecs,
 ):
@@ -310,14 +301,9 @@ async def test_create_bad_pending(
     if os.name != "nt":
         assert "non_existent_path" in session["kernel"]["reason"]
 
-    # Need to find a better solution to this.
-    await jp_cleanup_subprocesses()
-
 
 @pytest.mark.timeout(TEST_TIMEOUT)
-async def test_create_file_session(
-    session_client, jp_cleanup_subprocesses, jp_serverapp, session_is_ready
-):
+async def test_create_file_session(session_client, jp_serverapp, session_is_ready):
     resp = await session_client.create("foo/nb1.py", type="file")
     assert resp.code == 201
     newsession = j(resp)
@@ -325,41 +311,31 @@ async def test_create_file_session(
     assert newsession["type"] == "file"
     sid = newsession["id"]
     await session_is_ready(sid)
-    await jp_cleanup_subprocesses()
 
 
 @pytest.mark.timeout(TEST_TIMEOUT)
-async def test_create_console_session(
-    session_client, jp_cleanup_subprocesses, jp_serverapp, session_is_ready
-):
+async def test_create_console_session(session_client, jp_serverapp, session_is_ready):
     resp = await session_client.create("foo/abc123", type="console")
     assert resp.code == 201
     newsession = j(resp)
     assert newsession["path"] == "foo/abc123"
     assert newsession["type"] == "console"
-    # Need to find a better solution to this.
     sid = newsession["id"]
     await session_is_ready(sid)
-    await jp_cleanup_subprocesses()
 
 
 @pytest.mark.timeout(TEST_TIMEOUT)
-async def test_create_deprecated(session_client, jp_cleanup_subprocesses, jp_serverapp):
+async def test_create_deprecated(session_client, jp_serverapp):
     resp = await session_client.create_deprecated("foo/nb1.ipynb")
     assert resp.code == 201
     newsession = j(resp)
     assert newsession["path"] == "foo/nb1.ipynb"
     assert newsession["type"] == "notebook"
     assert newsession["notebook"]["path"] == "foo/nb1.ipynb"
-    # Need to find a better solution to this.
-    sid = newsession["id"]
-    await jp_cleanup_subprocesses()
 
 
 @pytest.mark.timeout(TEST_TIMEOUT)
-async def test_create_with_kernel_id(
-    session_client, jp_fetch, jp_base_url, jp_cleanup_subprocesses, jp_serverapp
-):
+async def test_create_with_kernel_id(session_client, jp_fetch, jp_base_url, jp_serverapp):
     # create a new kernel
     resp = await jp_fetch("api/kernels", method="POST", allow_nonstandard_methods=True)
     kernel = j(resp)
@@ -384,14 +360,10 @@ async def test_create_with_kernel_id(
     resp = await session_client.get(sid)
     got = j(resp)
     assert_session_equality(got, new_session)
-    # Need to find a better solution to this.
-    await jp_cleanup_subprocesses()
 
 
 @pytest.mark.timeout(TEST_TIMEOUT)
-async def test_create_with_bad_kernel_id(
-    session_client, jp_cleanup_subprocesses, jp_serverapp, session_is_ready
-):
+async def test_create_with_bad_kernel_id(session_client, jp_serverapp, session_is_ready):
     resp = await session_client.create("foo/nb1.py", type="file")
     assert resp.code == 201
     newsession = j(resp)
@@ -401,11 +373,10 @@ async def test_create_with_bad_kernel_id(
     # TODO
     assert newsession["path"] == "foo/nb1.py"
     assert newsession["type"] == "file"
-    await jp_cleanup_subprocesses()
 
 
 @pytest.mark.timeout(TEST_TIMEOUT)
-async def test_delete(session_client, jp_cleanup_subprocesses, jp_serverapp, session_is_ready):
+async def test_delete(session_client, jp_serverapp, session_is_ready):
     resp = await session_client.create("foo/nb1.ipynb")
     newsession = j(resp)
 
@@ -422,12 +393,10 @@ async def test_delete(session_client, jp_cleanup_subprocesses, jp_serverapp, ses
     with pytest.raises(tornado.httpclient.HTTPClientError) as e:
         await session_client.get(sid)
     assert expected_http_error(e, 404)
-    # Need to find a better solution to this.
-    await jp_cleanup_subprocesses()
 
 
 @pytest.mark.timeout(TEST_TIMEOUT)
-async def test_modify_path(session_client, jp_cleanup_subprocesses, jp_serverapp, session_is_ready):
+async def test_modify_path(session_client, jp_serverapp, session_is_ready):
     resp = await session_client.create("foo/nb1.ipynb")
     newsession = j(resp)
     sid = newsession["id"]
@@ -437,14 +406,10 @@ async def test_modify_path(session_client, jp_cleanup_subprocesses, jp_serverapp
     changed = j(resp)
     assert changed["id"] == sid
     assert changed["path"] == "nb2.ipynb"
-    # Need to find a better solution to this.
-    await jp_cleanup_subprocesses()
 
 
 @pytest.mark.timeout(TEST_TIMEOUT)
-async def test_modify_path_deprecated(
-    session_client, jp_cleanup_subprocesses, jp_serverapp, session_is_ready
-):
+async def test_modify_path_deprecated(session_client, jp_serverapp, session_is_ready):
     resp = await session_client.create("foo/nb1.ipynb")
     newsession = j(resp)
     sid = newsession["id"]
@@ -454,12 +419,10 @@ async def test_modify_path_deprecated(
     changed = j(resp)
     assert changed["id"] == sid
     assert changed["notebook"]["path"] == "nb2.ipynb"
-    # Need to find a better solution to this.
-    await jp_cleanup_subprocesses()
 
 
 @pytest.mark.timeout(TEST_TIMEOUT)
-async def test_modify_type(session_client, jp_cleanup_subprocesses, jp_serverapp, session_is_ready):
+async def test_modify_type(session_client, jp_serverapp, session_is_ready):
     resp = await session_client.create("foo/nb1.ipynb")
     newsession = j(resp)
     sid = newsession["id"]
@@ -469,14 +432,10 @@ async def test_modify_type(session_client, jp_cleanup_subprocesses, jp_serverapp
     changed = j(resp)
     assert changed["id"] == sid
     assert changed["type"] == "console"
-    # Need to find a better solution to this.
-    await jp_cleanup_subprocesses()
 
 
 @pytest.mark.timeout(TEST_TIMEOUT)
-async def test_modify_kernel_name(
-    session_client, jp_fetch, jp_cleanup_subprocesses, jp_serverapp, session_is_ready
-):
+async def test_modify_kernel_name(session_client, jp_fetch, jp_serverapp, session_is_ready):
     resp = await session_client.create("foo/nb1.ipynb")
     before = j(resp)
     sid = before["id"]
@@ -497,14 +456,9 @@ async def test_modify_kernel_name(
     if not getattr(jp_serverapp.kernel_manager, "use_pending_kernels", False):
         assert kernel_list == [after["kernel"]]
 
-    # Need to find a better solution to this.
-    await jp_cleanup_subprocesses()
-
 
 @pytest.mark.timeout(TEST_TIMEOUT)
-async def test_modify_kernel_id(
-    session_client, jp_fetch, jp_cleanup_subprocesses, jp_serverapp, session_is_ready
-):
+async def test_modify_kernel_id(session_client, jp_fetch, jp_serverapp, session_is_ready):
    resp = await session_client.create("foo/nb1.ipynb")
    before = j(resp)
    sid = before["id"]
@@ -532,14 +486,9 @@ async def test_modify_kernel_id(
     if not getattr(jp_serverapp.kernel_manager, "use_pending_kernels", False):
         assert kernel_list == [kernel]
 
-    # Need to find a better solution to this.
-    await jp_cleanup_subprocesses()
-
 
 @pytest.mark.timeout(TEST_TIMEOUT)
-async def test_restart_kernel(
-    session_client, jp_base_url, jp_fetch, jp_ws_fetch, jp_cleanup_subprocesses, session_is_ready
-):
+async def test_restart_kernel(session_client, jp_base_url, jp_fetch, jp_ws_fetch, session_is_ready):
     # Create a session.
     resp = await session_client.create("foo/nb1.ipynb")
     assert resp.code == 201
@@ -596,6 +545,3 @@ async def test_restart_kernel(
     r = await jp_fetch("api", "kernels", kid, method="GET")
     model = json.loads(r.body.decode())
     assert model["connections"] == 1
-
-    # Need to find a better solution to this.
-    await jp_cleanup_subprocesses()
diff --git a/tests/test_terminal.py b/tests/test_terminal.py
index d1eef0b1d4..be0cc31d92 100644
--- a/tests/test_terminal.py
+++ b/tests/test_terminal.py
@@ -60,7 +60,7 @@ async def test_no_terminals(jp_fetch):
     assert len(data) == 0
 
 
-async def test_terminal_create(jp_fetch, jp_cleanup_subprocesses):
+async def test_terminal_create(jp_fetch):
     resp = await jp_fetch(
         "api",
         "terminals",
         method="POST",
@@ -80,13 +80,10 @@ async def test_terminal_create(jp_fetch, jp_cleanup_subprocesses):
 
     data = json.loads(resp_list.body.decode())
     assert len(data) == 1
-    assert data[0] == term
-    await jp_cleanup_subprocesses()
+    assert data[0]["name"] == term["name"]
 
 
-async def test_terminal_create_with_kwargs(
-    jp_fetch, jp_ws_fetch, terminal_path, jp_cleanup_subprocesses
-):
+async def test_terminal_create_with_kwargs(jp_fetch, jp_ws_fetch, terminal_path):
     resp_create = await jp_fetch(
         "api",
         "terminals",
         method="POST",
@@ -109,12 +106,9 @@ async def test_terminal_create_with_kwargs(
 
     data = json.loads(resp_get.body.decode())
     assert data["name"] == term_name
-    await jp_cleanup_subprocesses()
 
 
-async def test_terminal_create_with_cwd(
-    jp_fetch, jp_ws_fetch, terminal_path, jp_cleanup_subprocesses
-):
+async def test_terminal_create_with_cwd(jp_fetch, jp_ws_fetch, terminal_path):
     resp = await jp_fetch(
         "api",
         "terminals",
         method="POST",
@@ -145,11 +139,10 @@ async def test_terminal_create_with_cwd(
 
     ws.close()
     assert os.path.basename(terminal_path) in message_stdout
-    await jp_cleanup_subprocesses()
 
 
 async def test_terminal_create_with_relative_cwd(
-    jp_fetch, jp_ws_fetch, jp_root_dir, terminal_root_dir, jp_cleanup_subprocesses
+    jp_fetch, jp_ws_fetch, jp_root_dir, terminal_root_dir
 ):
     resp = await jp_fetch(
         "api",
         "terminals",
         method="POST",
@@ -182,7 +175,6 @@ async def test_terminal_create_with_relative_cwd(
 
     expected = terminal_root_dir.name if sys.platform == "win32" else str(terminal_root_dir)
     assert expected in message_stdout
-    await jp_cleanup_subprocesses()
 
 
 async def test_terminal_create_with_bad_cwd(jp_fetch, jp_ws_fetch, jp_cleanup_subprocesses):
@@ -217,7 +209,6 @@ async def test_terminal_create_with_bad_cwd(jp_fetch, jp_ws_fetch, jp_cleanup_su
 
     ws.close()
     assert non_existing_path not in message_stdout
-    await jp_cleanup_subprocesses()
 
 
 async def test_culling_config(jp_server_config, jp_configurable_serverapp):
@@ -229,7 +220,7 @@ async def test_culling_config(jp_server_config, jp_configurable_serverapp):
     assert terminal_mgr_settings.cull_interval == CULL_INTERVAL
 
 
-async def test_culling(jp_server_config, jp_fetch, jp_cleanup_subprocesses):
+async def test_culling(jp_server_config, jp_fetch):
     # POST request
     resp = await jp_fetch(
         "api",
         "terminals",
         method="POST",
         allow_nonstandard_methods=True,
     )
@@ -259,4 +250,3 @@ async def test_culling(jp_server_config, jp_fetch, jp_cleanup_subprocesses):
         await asyncio.sleep(1)
 
     assert culled
-    await jp_cleanup_subprocesses()