diff --git a/continuous_integration/environment-3.10.yaml b/continuous_integration/environment-3.10.yaml
index aa46cc7aab..196a1cc88e 100644
--- a/continuous_integration/environment-3.10.yaml
+++ b/continuous_integration/environment-3.10.yaml
@@ -18,6 +18,8 @@ dependencies:
   - ipykernel
   - ipywidgets
   - jinja2
+  - jupyter-server-proxy
+  - jupyterlab
   - locket
   - msgpack-python
   - netcdf4
diff --git a/continuous_integration/environment-3.11.yaml b/continuous_integration/environment-3.11.yaml
index 44814f0070..b64f59b99f 100644
--- a/continuous_integration/environment-3.11.yaml
+++ b/continuous_integration/environment-3.11.yaml
@@ -18,6 +18,8 @@ dependencies:
   - ipykernel
   - ipywidgets
   - jinja2
+  - jupyter-server-proxy
+  - jupyterlab
   - locket
   - msgpack-python
   - netcdf4
diff --git a/continuous_integration/environment-3.12.yaml b/continuous_integration/environment-3.12.yaml
index c66d04e4a7..e77ea9533a 100644
--- a/continuous_integration/environment-3.12.yaml
+++ b/continuous_integration/environment-3.12.yaml
@@ -18,6 +18,8 @@ dependencies:
   - ipykernel
   - ipywidgets
   - jinja2
+  - jupyter-server-proxy
+  - jupyterlab
   - locket
   - msgpack-python
   - netcdf4
diff --git a/continuous_integration/environment-3.9.yaml b/continuous_integration/environment-3.9.yaml
index 8f8e531177..97a2cdd375 100644
--- a/continuous_integration/environment-3.9.yaml
+++ b/continuous_integration/environment-3.9.yaml
@@ -20,6 +20,8 @@ dependencies:
   - ipykernel
   - ipywidgets
   - jinja2
+  - jupyter-server-proxy
+  - jupyterlab
   - locket
   - lz4 # Only tested here
   - msgpack-python
diff --git a/distributed/cli/tests/test_dask_worker.py b/distributed/cli/tests/test_dask_worker.py
index 34023e4c4d..771dd6d2af 100644
--- a/distributed/cli/tests/test_dask_worker.py
+++ b/distributed/cli/tests/test_dask_worker.py
@@ -18,7 +18,7 @@
 from distributed.compatibility import LINUX, WINDOWS
 from distributed.deploy.utils import nprocesses_nthreads
 from distributed.metrics import time
-from distributed.utils import open_port
+from distributed.utils import get_ip, open_port
 from distributed.utils_test import (
     gen_cluster,
     inc,
@@ -485,39 +485,32 @@ def func(dask_worker):


 @pytest.mark.slow
-@gen_cluster(
-    client=True, nthreads=[], scheduler_kwargs={"dashboard_address": "localhost:8787"}
-)
-async def test_dashboard_non_standard_ports(c, s, requires_default_ports):
+def test_dashboard_non_standard_ports():
     pytest.importorskip("bokeh")
     requests = pytest.importorskip("requests")

-    try:
-        import jupyter_server_proxy  # noqa: F401
-
-        proxy_exists = True
-    except ImportError:
-        proxy_exists = False
-
-    with popen(
-        [
-            "dask",
-            "worker",
-            s.address,
-            "--dashboard-address",
-            ":4833",
-            "--host",
-            "127.0.0.1",
-        ]
-    ):
-        await c.wait_for_workers(1)
-
-        response = requests.get("http://127.0.0.1:4833/status")
+    s_host = "127.0.0.1"
+    # use internal ip instead of localhost ip to verify GlobalProxyHandler will update
+    # to allow internal host ip of a worker.
+    w_host = get_ip()
+    s_port = "3233"
+    s_dashboard_port = "3232"
+    w_dashboard_port = "4833"
+    s_cmd = f"dask scheduler --host {s_host} --port {s_port} --dashboard-address :{s_dashboard_port}"
+    w_cmd = f"dask worker {s_host}:{s_port} --dashboard-address :{w_dashboard_port} --host {w_host}"
+
+    with popen(s_cmd.split()), popen(w_cmd.split()):
+        with Client(f"{s_host}:{s_port}") as c:
+            c.wait_for_workers(1)
+
+        response = requests.get(f"http://{s_host}:{w_dashboard_port}/status")
         response.raise_for_status()
+
         # TEST PROXYING WORKS
-        if proxy_exists:
-            response = requests.get("http://127.0.0.1:8787/proxy/4833/127.0.0.1/status")
-            response.raise_for_status()
+        response = requests.get(
+            f"http://{s_host}:{s_dashboard_port}/proxy/{w_dashboard_port}/{w_host}/status"
+        )
+        response.raise_for_status()

     with pytest.raises(requests.ConnectionError):
         requests.get("http://localhost:4833/status/")
diff --git a/distributed/http/proxy.py b/distributed/http/proxy.py
index 02da2f34ae..29ca16e5fb 100644
--- a/distributed/http/proxy.py
+++ b/distributed/http/proxy.py
@@ -19,6 +19,15 @@ def initialize(self, dask_server=None, extra=None):
             self.scheduler = dask_server
             self.extra = extra or {}

+        # `get_current_user` and `prepare` method impls reference
+        # issue in tornado & jupyter server compat here
+        # https://github.com/jupyter-server/jupyter_server/issues/1012
+        def get_current_user(self):
+            return "dask"
+
+        async def prepare(self):
+            web.authenticated(lambda rq: None)(self)
+
         async def http_get(self, port, host, proxied_path):
             # route here first
             # incoming URI /proxy/{port}/{host}/{proxied_path}
@@ -29,6 +38,9 @@ async def http_get(self, port, host, proxied_path):
             uri = f"/proxy/{port}/{proxied_path}"
             self.request.uri = uri

+            if self.host not in self.host_allowlist:
+                self.host_allowlist.append(self.host)
+
             # slash is removed during regex in handler
             proxied_path = "/%s" % proxied_path

@@ -41,6 +53,8 @@
             return await self.proxy(port, proxied_path)

         async def open(self, port, host, proxied_path):
+            if host not in self.host_allowlist:
+                self.host_allowlist.append(host)
             # finally, proxy to other address/port
             return await self.proxy_open(host, port, proxied_path)

diff --git a/distributed/tests/test_jupyter.py b/distributed/tests/test_jupyter.py
index 7c2774062a..52ca25c169 100644
--- a/distributed/tests/test_jupyter.py
+++ b/distributed/tests/test_jupyter.py
@@ -10,6 +10,7 @@
 from tornado.httpclient import AsyncHTTPClient

 from distributed import Client, Scheduler
+from distributed.compatibility import MACOS, WINDOWS
 from distributed.core import Status
 from distributed.utils import open_port
 from distributed.utils_test import gen_test, popen
@@ -21,6 +22,17 @@

 pytestmark = pytest.mark.filterwarnings("ignore:Jupyter is migrating its paths")

+if WINDOWS:
+    try:
+        import jupyter_server  # noqa: F401
+    except ImportError:
+        pass
+    else:
+        pytest.skip(
+            allow_module_level=True,
+            reason="Windows struggles running these tests w/ jupyter server",
+        )
+

 @gen_test()
 async def test_jupyter_server():
@@ -73,7 +85,8 @@ async def test_jupyter_idle_timeout():

             assert s.status not in (Status.closed, Status.closing)

-        await asyncio.sleep(s.idle_timeout)
+        # small bit of extra time to catch up
+        await asyncio.sleep(s.idle_timeout + 0.5)
         assert s.status in (Status.closed, Status.closing)


@@ -94,12 +107,12 @@ async def test_jupyter_idle_timeout_returned():

         assert next_idle is not None
         assert next_idle > last_idle
-        assert s.check_idle() is None
-        # ^ NOTE: this probably should be `== next_idle`;
-        # see discussion in https://github.com/dask/distributed/pull/7687#discussion_r1145095196
+        assert s.check_idle() is next_idle


 @pytest.mark.slow
+@pytest.mark.xfail(WINDOWS, reason="Subprocess launching scheduler TimeoutError")
+@pytest.mark.xfail(MACOS, reason="Client fails to connect on OSX")
 def test_shutsdown_cleanly(loop):
     port = open_port()
     with concurrent.futures.ThreadPoolExecutor() as tpe:
diff --git a/pyproject.toml b/pyproject.toml
index e41227120b..3e62a4f061 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -110,6 +110,9 @@ addopts = '''
     -p no:legacypath'''
 filterwarnings = [
     "error",
+    # xref: https://github.com/jupyter/jupyter_core/pull/292
+    # xref: https://github.com/jupyter/jupyter_core/issues/309
+    '''ignore:Jupyter is migrating its paths to use standard platformdirs''',
     # https://github.com/dask-contrib/dask-expr/issues/945
     '''ignore:dask_expr does not support the DataFrameIOFunction''',
     '''ignore:Please use `dok_matrix` from the `scipy\.sparse` namespace, the `scipy\.sparse\.dok` namespace is deprecated.:DeprecationWarning''',
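
Reviewer note, not part of the patch: a minimal sketch of how the proxy route covered by the updated test_dashboard_non_standard_ports can be poked by hand, assuming a scheduler and worker are already running with jupyter-server-proxy installed. The hosts and ports below are placeholders mirroring the test, not required values.

# Sketch only: exercise the scheduler-dashboard proxy route added above.
# All hosts/ports are placeholders; substitute your own deployment values.
import requests

s_host = "127.0.0.1"        # scheduler host (dask scheduler --host ...)
s_dashboard_port = 3232     # scheduler --dashboard-address
w_host = "10.0.0.5"         # worker host, e.g. the address returned by get_ip()
w_dashboard_port = 4833     # worker --dashboard-address

# Direct hit on the worker dashboard.
requests.get(f"http://{w_host}:{w_dashboard_port}/status").raise_for_status()

# Same page served through GlobalProxyHandler on the scheduler dashboard,
# i.e. the /proxy/{port}/{host}/{proxied_path} route.
requests.get(
    f"http://{s_host}:{s_dashboard_port}/proxy/{w_dashboard_port}/{w_host}/status"
).raise_for_status()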