diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index d009d1f275..e7f793e68e 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -1,4 +1,4 @@ -name: CI +name: Jupyter Server Tests on: push: branches: @@ -24,6 +24,10 @@ jobs: - name: Install the Python dependencies run: | pip install -e .[test] + - name: ipykernel fix on windows 3.8 + if: matrix.os == 'windows-latest' && matrix.python-version == '3.8' + run: | + pip install --upgrade git+https://github.com/ipython/ipykernel.git - name: Run the tests run: | - nosetests -v jupyter_server + pytest diff --git a/.travis.yml b/.travis.yml index 85cec18987..f840d2de9b 100644 --- a/.travis.yml +++ b/.travis.yml @@ -37,7 +37,7 @@ script: else true fi - - 'if [[ $GROUP == python ]]; then nosetests -v jupyter_server; fi' + - 'if [[ $GROUP == python ]]; then pytest; fi' - | if [[ $GROUP == docs ]]; then EXIT_STATUS=0 diff --git a/appveyor.yml b/appveyor.yml index d3b50146c6..79c6e6dddc 100644 --- a/appveyor.yml +++ b/appveyor.yml @@ -5,13 +5,18 @@ matrix: environment: matrix: - CONDA_PY: 35 - CONDA_INSTALL_LOCN: "C:\\Miniconda36-x64" + CONDA_PY_SPEC: 3.5 + CONDA_INSTALL_LOCN: "C:\\Miniconda35-x64" - CONDA_PY: 36 + CONDA_PY_SPEC: 3.6 CONDA_INSTALL_LOCN: "C:\\Miniconda36-x64" - CONDA_PY: 37 - CONDA_INSTALL_LOCN: "C:\\Miniconda36-x64" + CONDA_PY_SPEC: 3.7 + CONDA_INSTALL_LOCN: "C:\\Miniconda37-x64" - CONDA_PY: 38 - CONDA_INSTALL_LOCN: "C:\\Miniconda36-x64" + CONDA_PY_SPEC: 3.8 + # appveyor doesn't come with this directory, so use 37's since we're creating an env anyway + CONDA_INSTALL_LOCN: "C:\\Miniconda37-x64" platform: - x64 @@ -20,11 +25,17 @@ build: off install: - cmd: call %CONDA_INSTALL_LOCN%\Scripts\activate.bat + - cmd: set CONDA_PY=%CONDA_PY% + - cmd: set CONDA_PY_SPEC=%CONDA_PY_SPEC% - cmd: conda config --set show_channel_urls true - cmd: conda config --add channels conda-forge - #- cmd: conda update --yes --quiet conda - - cmd: conda install -y pyzmq tornado jupyter_client 
nbformat nbconvert ipykernel pip nose + - cmd: conda update --yes --quiet conda + - cmd: conda info -a + - cmd: conda create -y -q -n test-env-%CONDA_PY% python=%CONDA_PY_SPEC% pyzmq tornado jupyter_client nbformat nbconvert ipykernel pip nose + - cmd: conda activate test-env-%CONDA_PY% - cmd: pip install .[test] +# FIXME: Use patch for python 3.8, windows issues (https://github.com/ipython/ipykernel/pull/456) - remove once released + - IF %CONDA_PY% == 38 pip install --upgrade git+https://github.com/ipython/ipykernel.git test_script: - - nosetests -v jupyter_server --exclude-dir jupyter_server\tests\selenium + - pytest diff --git a/jupyter_server/auth/tests/test_security.py b/jupyter_server/auth/tests/test_security.py deleted file mode 100644 index a17e80087e..0000000000 --- a/jupyter_server/auth/tests/test_security.py +++ /dev/null @@ -1,25 +0,0 @@ -# coding: utf-8 -from ..security import passwd, passwd_check, salt_len -import nose.tools as nt - -def test_passwd_structure(): - p = passwd('passphrase') - algorithm, salt, hashed = p.split(':') - nt.assert_equal(algorithm, 'sha1') - nt.assert_equal(len(salt), salt_len) - nt.assert_equal(len(hashed), 40) - -def test_roundtrip(): - p = passwd('passphrase') - nt.assert_equal(passwd_check(p, 'passphrase'), True) - -def test_bad(): - p = passwd('passphrase') - nt.assert_equal(passwd_check(p, p), False) - nt.assert_equal(passwd_check(p, 'a:b:c:d'), False) - nt.assert_equal(passwd_check(p, 'a:b'), False) - -def test_passwd_check_unicode(): - # GH issue #4524 - phash = u'sha1:23862bc21dd3:7a415a95ae4580582e314072143d9c382c491e4f' - assert passwd_check(phash, u"łe¶ŧ←↓→") \ No newline at end of file diff --git a/jupyter_server/extension/tests/test_extension.py b/jupyter_server/extension/tests/test_extension.py deleted file mode 100644 index 1515317fe2..0000000000 --- a/jupyter_server/extension/tests/test_extension.py +++ /dev/null @@ -1,92 +0,0 @@ -import os -import tempfile -import nose.tools as nt - -from 
...extension.application import ExtensionApp -from ...serverapp import ServerApp - - -class MockExtension(ExtensionApp): - extension_name = 'mock' - - -class ExtensionTestingMixin: - - port = 12341 - - def server_kwargs(self): - return dict( - port=self.port, - port_retries=0, - open_browser=False, - allow_root=True - ) - - def tearDown(self): - #self.serverapp.stop() - self.serverapp.http_server.stop() - self.serverapp.clear_instance() - - -class TestExtensionAppInitialize(ExtensionTestingMixin): - - def setUp(self): - self.serverapp = ServerApp(**self.server_kwargs()) - self.serverapp.init_signal = lambda : None - self.serverapp.init_terminals = lambda : None - # clear log handlers and propagate to root for nose to capture it - # needs to be redone after initialize, which reconfigures logging - self.serverapp.log.propagate = True - self.serverapp.log.handlers = [] - self.serverapp.initialize(argv=[]) - - def test_instance_creation(self): - self.extension = MockExtension() - extension = self.extension - nt.assert_equal(extension.static_paths, []) - nt.assert_equal(extension.template_paths, []) - nt.assert_equal(extension.settings, {}) - nt.assert_equal(extension.handlers, []) - - def test_instance_creation_with_arg(self): - self.extension = MockExtension(static_paths=['test']) - nt.assert_in('test', self.extension.static_paths) - - def test_initialize(self): - self.extension = MockExtension() - self.extension.initialize(self.serverapp) - nt.assert_true(isinstance(self.extension.serverapp, ServerApp)) - - -class TestExtensionServerInitialize(ExtensionTestingMixin): - - def test_initialize_server(self): - self.extension = MockExtension() - self.serverapp = self.extension.initialize_server(**self.server_kwargs()) - nt.assert_true(isinstance(self.serverapp, ServerApp)) - - def test_initialize_server_argv(self): - argv = [ - '--ServerApp.tornado_settings={"test":"hello world"}' - ] - self.extension = MockExtension() - self.serverapp = self.extension.initialize_server( 
- argv=argv, **self.server_kwargs()) - - nt.assert_in("test", self.serverapp.tornado_settings) - - def test_initialize_mixed_argv(self): - argv = [ - '--ServerApp.tornado_settings={"test":"hello world"}', - '--MockExtension.static_paths=["test"]' - ] - # Initialize server - self.extension = MockExtension() - self.serverapp = self.extension.initialize_server( - argv=argv, - **self.server_kwargs() - ) - self.extension.initialize(self.serverapp, argv=argv) - nt.assert_in("test", self.serverapp.tornado_settings) - nt.assert_in("test", self.extension.static_paths) - diff --git a/jupyter_server/nbconvert/tests/test_nbconvert_handlers.py b/jupyter_server/nbconvert/tests/test_nbconvert_handlers.py deleted file mode 100644 index d16421e036..0000000000 --- a/jupyter_server/nbconvert/tests/test_nbconvert_handlers.py +++ /dev/null @@ -1,134 +0,0 @@ -# coding: utf-8 -import io -import json -import os -from os.path import join as pjoin -import shutil - -import requests - -from jupyter_server.utils import url_path_join -from jupyter_server.tests.launchserver import ServerTestBase, assert_http_error -from nbformat import write -from nbformat.v4 import ( - new_notebook, new_markdown_cell, new_code_cell, new_output, -) - -from ipython_genutils.testing.decorators import onlyif_cmds_exist - -from base64 import encodebytes - - -class NbconvertAPI(object): - """Wrapper for nbconvert API calls.""" - def __init__(self, request): - self.request = request - - def _req(self, verb, path, body=None, params=None): - response = self.request(verb, - url_path_join('nbconvert', path), - data=body, params=params, - ) - response.raise_for_status() - return response - - def from_file(self, format, path, name, download=False): - return self._req('GET', url_path_join(format, path, name), - params={'download':download}) - - def from_post(self, format, nbmodel): - body = json.dumps(nbmodel) - return self._req('POST', format, body) - - def list_formats(self): - return self._req('GET', '') - 
-png_green_pixel = encodebytes(b'\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00' -b'\x00\x00\x01\x00\x00x00\x01\x08\x02\x00\x00\x00\x90wS\xde\x00\x00\x00\x0cIDAT' -b'\x08\xd7c\x90\xfb\xcf\x00\x00\x02\\\x01\x1e.~d\x87\x00\x00\x00\x00IEND\xaeB`\x82' -).decode('ascii') - -class APITest(ServerTestBase): - def setUp(self): - rootdir = self.root_dir - - if not os.path.isdir(pjoin(rootdir, 'foo')): - subdir = pjoin(rootdir, 'foo') - - os.mkdir(subdir) - - # Make sure that we clean this up when we're done. - # By using addCleanup this will happen correctly even if we fail - # later in setUp. - @self.addCleanup - def cleanup_dir(): - shutil.rmtree(subdir, ignore_errors=True) - - nb = new_notebook() - - nb.cells.append(new_markdown_cell(u'Created by test ³')) - cc1 = new_code_cell(source=u'print(2*6)') - cc1.outputs.append(new_output(output_type="stream", text=u'12')) - cc1.outputs.append(new_output(output_type="execute_result", - data={'image/png' : png_green_pixel}, - execution_count=1, - )) - nb.cells.append(cc1) - - with io.open(pjoin(rootdir, 'foo', 'testnb.ipynb'), 'w', - encoding='utf-8') as f: - write(nb, f, version=4) - - self.nbconvert_api = NbconvertAPI(self.request) - - @onlyif_cmds_exist('pandoc') - def test_from_file(self): - r = self.nbconvert_api.from_file('html', 'foo', 'testnb.ipynb') - self.assertEqual(r.status_code, 200) - self.assertIn(u'text/html', r.headers['Content-Type']) - self.assertIn(u'Created by test', r.text) - self.assertIn(u'print', r.text) - - r = self.nbconvert_api.from_file('python', 'foo', 'testnb.ipynb') - self.assertIn(u'text/x-python', r.headers['Content-Type']) - self.assertIn(u'print(2*6)', r.text) - - @onlyif_cmds_exist('pandoc') - def test_from_file_404(self): - with assert_http_error(404): - self.nbconvert_api.from_file('html', 'foo', 'thisdoesntexist.ipynb') - - @onlyif_cmds_exist('pandoc') - def test_from_file_download(self): - r = self.nbconvert_api.from_file('python', 'foo', 'testnb.ipynb', download=True) - content_disposition = 
r.headers['Content-Disposition'] - self.assertIn('attachment', content_disposition) - self.assertIn('testnb.py', content_disposition) - - @onlyif_cmds_exist('pandoc') - def test_from_file_zip(self): - r = self.nbconvert_api.from_file('latex', 'foo', 'testnb.ipynb', download=True) - self.assertIn(u'application/zip', r.headers['Content-Type']) - self.assertIn(u'.zip', r.headers['Content-Disposition']) - - @onlyif_cmds_exist('pandoc') - def test_from_post(self): - nbmodel = self.request('GET', 'api/contents/foo/testnb.ipynb').json() - - r = self.nbconvert_api.from_post(format='html', nbmodel=nbmodel) - self.assertEqual(r.status_code, 200) - self.assertIn(u'text/html', r.headers['Content-Type']) - self.assertIn(u'Created by test', r.text) - self.assertIn(u'print', r.text) - - r = self.nbconvert_api.from_post(format='python', nbmodel=nbmodel) - self.assertIn(u'text/x-python', r.headers['Content-Type']) - self.assertIn(u'print(2*6)', r.text) - - @onlyif_cmds_exist('pandoc') - def test_from_post_zip(self): - nbmodel = self.request('GET', 'api/contents/foo/testnb.ipynb').json() - - r = self.nbconvert_api.from_post(format='latex', nbmodel=nbmodel) - self.assertIn(u'application/zip', r.headers['Content-Type']) - self.assertIn(u'.zip', r.headers['Content-Disposition']) diff --git a/jupyter_server/serverapp.py b/jupyter_server/serverapp.py index 767d0cb727..65d3562071 100755 --- a/jupyter_server/serverapp.py +++ b/jupyter_server/serverapp.py @@ -951,7 +951,9 @@ def _default_allow_remote(self): help=_("Extra keyword arguments to pass to `get_secure_cookie`." " See tornado's get_secure_cookie docs for details.") ) - ssl_options = Dict(config=True, + ssl_options = Dict( + allow_none=True, + config=True, help=_("""Supply SSL options for the tornado HTTPServer. 
See the tornado docs for details.""")) @@ -1277,7 +1279,7 @@ def init_logging(self): logger.setLevel(self.log.level) def init_webapp(self): - """initialize tornado webapp and httpserver""" + """initialize tornado webapp""" self.tornado_settings['allow_origin'] = self.allow_origin self.tornado_settings['websocket_compression_options'] = self.websocket_compression_options if self.allow_origin_pat: @@ -1304,56 +1306,29 @@ def init_webapp(self): self.log, self.base_url, self.default_url, self.tornado_settings, self.jinja_environment_options, ) - ssl_options = self.ssl_options if self.certfile: - ssl_options['certfile'] = self.certfile + self.ssl_options['certfile'] = self.certfile if self.keyfile: - ssl_options['keyfile'] = self.keyfile + self.ssl_options['keyfile'] = self.keyfile if self.client_ca: - ssl_options['ca_certs'] = self.client_ca - if not ssl_options: + self.ssl_options['ca_certs'] = self.client_ca + if len(self.ssl_options) == 0: # None indicates no SSL config - ssl_options = None + self.ssl_options = None else: # SSL may be missing, so only import it if it's to be used import ssl # PROTOCOL_TLS selects the highest ssl/tls protocol version that both the client and # server support. When PROTOCOL_TLS is not available use PROTOCOL_SSLv23. 
# PROTOCOL_TLS is new in version 2.7.13, 3.5.3 and 3.6 - ssl_options.setdefault( + self.ssl_options.setdefault( 'ssl_version', getattr(ssl, 'PROTOCOL_TLS', ssl.PROTOCOL_SSLv23) ) - if ssl_options.get('ca_certs', False): - ssl_options.setdefault('cert_reqs', ssl.CERT_REQUIRED) + if self.ssl_options.get('ca_certs', False): + self.ssl_options.setdefault('cert_reqs', ssl.CERT_REQUIRED) - self.login_handler_class.validate_security(self, ssl_options=ssl_options) - self.http_server = httpserver.HTTPServer(self.web_app, ssl_options=ssl_options, - xheaders=self.trust_xheaders, - max_body_size=self.max_body_size, - max_buffer_size=self.max_buffer_size) - - success = None - for port in random_ports(self.port, self.port_retries+1): - try: - self.http_server.listen(port, self.ip) - except socket.error as e: - if e.errno == errno.EADDRINUSE: - self.log.info(_('The port %i is already in use, trying another port.') % port) - continue - elif e.errno in (errno.EACCES, getattr(errno, 'WSAEACCES', errno.EACCES)): - self.log.warning(_("Permission to listen on port %i denied") % port) - continue - else: - raise - else: - self.port = port - success = True - break - if not success: - self.log.critical(_('ERROR: the Jupyter server could not be started because ' - 'no available port could be found.')) - self.exit(1) + self.login_handler_class.validate_security(self, ssl_options=self.ssl_options) @property def display_url(self): @@ -1489,7 +1464,7 @@ def init_components(self): def init_server_extension_config(self): """Consolidate server extensions specified by all configs. - The resulting list is stored on self.nbserver_extensions and updates config object. + The resulting list is stored on self.jpserver_extensions and updates config object. The extension API is experimental, and may change in future releases. """ @@ -1518,6 +1493,7 @@ def init_server_extensions(self): The extension API is experimental, and may change in future releases. 
""" + # Initialize extensions for modulename, enabled in sorted(self.jpserver_extensions.items()): if enabled: try: @@ -1579,8 +1555,103 @@ def init_shutdown_no_activity(self): pc = ioloop.PeriodicCallback(self.shutdown_no_activity, 60000) pc.start() + @property + def http_server(self): + """An instance of Tornado's HTTPServer class for the Server Web Application.""" + try: + return self._http_server + except AttributeError: + raise AttributeError( + 'An HTTPServer instance has not been created for the ' + 'Server Web Application. To create an HTTPServer for this ' + 'application, call `.init_httpserver()`.' + ) + + def init_httpserver(self): + """Creates an instance of a Tornado HTTPServer for the Server Web Application + and sets the http_server attribute. + """ + # Check that a web_app has been initialized before starting a server. + if not hasattr(self, 'web_app'): + raise AttributeError('A tornado web application has not be initialized. ' + 'Try calling `.init_webapp()` first.') + + # Create an instance of the server. 
+ self._http_server = httpserver.HTTPServer( + self.web_app, + ssl_options=self.ssl_options, + xheaders=self.trust_xheaders, + max_body_size=self.max_body_size, + max_buffer_size=self.max_buffer_size + ) + success = None + for port in random_ports(self.port, self.port_retries+1): + try: + self.http_server.listen(port, self.ip) + except socket.error as e: + if e.errno == errno.EADDRINUSE: + self.log.info(_('The port %i is already in use, trying another port.') % port) + continue + elif e.errno in (errno.EACCES, getattr(errno, 'WSAEACCES', errno.EACCES)): + self.log.warning(_("Permission to listen on port %i denied") % port) + continue + else: + raise + else: + self.port = port + success = True + break + if not success: + self.log.critical(_('ERROR: the Jupyter server could not be started because ' + 'no available port could be found.')) + self.exit(1) + + @staticmethod + def _init_asyncio_patch(): + """set default asyncio policy to be compatible with tornado + Tornado 6 (at least) is not compatible with the default + asyncio implementation on Windows + Pick the older SelectorEventLoopPolicy on Windows + if the known-incompatible default policy is in use. 
+ do this as early as possible to make it a low priority and overrideable + ref: https://github.com/tornadoweb/tornado/issues/2608 + FIXME: if/when tornado supports the defaults in asyncio, + remove and bump tornado requirement for py38 + """ + if sys.platform.startswith("win") and sys.version_info >= (3, 8): + import asyncio + try: + from asyncio import ( + WindowsProactorEventLoopPolicy, + WindowsSelectorEventLoopPolicy, + ) + except ImportError: + pass + # not affected + else: + if type(asyncio.get_event_loop_policy()) is WindowsProactorEventLoopPolicy: + # WindowsProactorEventLoopPolicy is not compatible with tornado 6 + # fallback to the pre-3.8 default of Selector + asyncio.set_event_loop_policy(WindowsSelectorEventLoopPolicy()) + @catch_config_error - def initialize(self, argv=None, load_extensions=True): + def initialize(self, argv=None, load_extensions=True, new_httpserver=True): + """Initialize the Server application class, configurables, web application, and http server. + + Parameters + ---------- + argv: list or None + CLI arguments to parse. + + load_extensions: bool + If True, the server will load server extensions listed in the jpserver_extension trait. + Otherwise, no server extensions will be loaded. + + new_httpserver: bool + If True, a tornado HTTPServer instance will be created and configured for the Server Web + Application. This will set the http_server attribute of this class. 
+ """ + self._init_asyncio_patch() super(ServerApp, self).initialize(argv) self.init_logging() if self._dispatching: @@ -1590,6 +1661,8 @@ def initialize(self, argv=None, load_extensions=True): self.init_server_extension_config() self.init_components() self.init_webapp() + if new_httpserver: + self.init_httpserver() self.init_terminals() self.init_signal() if load_extensions: @@ -1718,12 +1791,7 @@ def launch_browser(self): new=self.webbrowser_open_new) threading.Thread(target=b).start() - def start(self): - """ Start the Jupyter server app, after initialization - - This method takes no arguments so all configuration and initialization - must be done prior to calling this method.""" - + def start_app(self): super(ServerApp, self).start() if not self.allow_root: @@ -1763,6 +1831,8 @@ def start(self): ' %s' % self.display_url, ])) + def start_ioloop(self): + """Start the IO Loop.""" self.io_loop = ioloop.IOLoop.current() if sys.platform.startswith('win'): # add no-op to wake every 5s @@ -1772,15 +1842,25 @@ def start(self): try: self.io_loop.start() except KeyboardInterrupt: - info(_("Interrupted...")) + self.log.info(_("Interrupted...")) finally: self.remove_server_info_file() self.remove_browser_open_file() self.cleanup_kernels() + def start(self): + """ Start the Jupyter server app, after initialization + + This method takes no arguments so all configuration and initialization + must be done prior to calling this method.""" + self.start_app() + self.start_ioloop() + def stop(self): def _stop(): - self.http_server.stop() + # Stop a server if its set. 
+ if hasattr(self, '_http_server'): + self.http_server.stop() self.io_loop.stop() self.io_loop.add_callback(_stop) diff --git a/jupyter_server/services/api/tests/test_api.py b/jupyter_server/services/api/tests/test_api.py deleted file mode 100644 index be8310ba3c..0000000000 --- a/jupyter_server/services/api/tests/test_api.py +++ /dev/null @@ -1,47 +0,0 @@ -"""Test the basic /api endpoints""" - -from datetime import timedelta - -from jupyter_server._tz import isoformat, utcnow -from jupyter_server.utils import url_path_join -from jupyter_server.tests.launchserver import ServerTestBase - - -class KernelAPITest(ServerTestBase): - """Test the kernels web service API""" - - def _req(self, verb, path, **kwargs): - r = self.request(verb, url_path_join('api', path)) - r.raise_for_status() - return r - - def get(self, path, **kwargs): - return self._req('GET', path) - - def test_get_spec(self): - r = self.get('spec.yaml') - assert r.text - - def test_get_status(self): - r = self.get('status') - data = r.json() - assert data['connections'] == 0 - assert data['kernels'] == 0 - assert data['last_activity'].endswith('Z') - assert data['started'].endswith('Z') - assert data['started'] == isoformat(self.server.web_app.settings['started']) - - def test_no_track_activity(self): - # initialize with old last api activity - old = utcnow() - timedelta(days=1) - settings = self.server.web_app.settings - settings['api_last_activity'] = old - # accessing status doesn't update activity - self.get('status') - assert settings['api_last_activity'] == old - # accessing with ?no_track_activity doesn't update activity - self.get('contents?no_track_activity=1') - assert settings['api_last_activity'] == old - # accessing without ?no_track_activity does update activity - self.get('contents') - assert settings['api_last_activity'] > old diff --git a/jupyter_server/services/config/tests/test_config_api.py b/jupyter_server/services/config/tests/test_config_api.py deleted file mode 100644 index 
b7b9d8f293..0000000000 --- a/jupyter_server/services/config/tests/test_config_api.py +++ /dev/null @@ -1,69 +0,0 @@ -# coding: utf-8 -"""Test the config webservice API.""" - -import json - -import requests - -from jupyter_server.utils import url_path_join -from jupyter_server.tests.launchserver import ServerTestBase - - -class ConfigAPI(object): - """Wrapper for notebook API calls.""" - def __init__(self, request): - self.request = request - - def _req(self, verb, section, body=None): - response = self.request(verb, - url_path_join('api/config', section), - data=body, - ) - response.raise_for_status() - return response - - def get(self, section): - return self._req('GET', section) - - def set(self, section, values): - return self._req('PUT', section, json.dumps(values)) - - def modify(self, section, values): - return self._req('PATCH', section, json.dumps(values)) - - -class APITest(ServerTestBase): - """Test the config web service API""" - def setUp(self): - self.config_api = ConfigAPI(self.request) - - def test_create_retrieve_config(self): - sample = {'foo': 'bar', 'baz': 73} - r = self.config_api.set('example', sample) - self.assertEqual(r.status_code, 204) - - r = self.config_api.get('example') - self.assertEqual(r.status_code, 200) - self.assertEqual(r.json(), sample) - - def test_modify(self): - sample = {'foo': 'bar', 'baz': 73, - 'sub': {'a': 6, 'b': 7}, 'sub2': {'c': 8}} - self.config_api.set('example', sample) - - r = self.config_api.modify('example', {'foo': None, # should delete foo - 'baz': 75, - 'wib': [1,2,3], - 'sub': {'a': 8, 'b': None, 'd': 9}, - 'sub2': {'c': None} # should delete sub2 - }) - self.assertEqual(r.status_code, 200) - self.assertEqual(r.json(), {'baz': 75, 'wib': [1,2,3], - 'sub': {'a': 8, 'd': 9}}) - - def test_get_unknown(self): - # We should get an empty config dictionary instead of a 404 - r = self.config_api.get('nonexistant') - self.assertEqual(r.status_code, 200) - self.assertEqual(r.json(), {}) - diff --git 
a/jupyter_server/services/contents/tests/test_contents_api.py b/jupyter_server/services/contents/tests/test_contents_api.py deleted file mode 100644 index f50536a7d3..0000000000 --- a/jupyter_server/services/contents/tests/test_contents_api.py +++ /dev/null @@ -1,721 +0,0 @@ -# coding: utf-8 -"""Test the contents webservice API.""" - -from contextlib import contextmanager -from functools import partial -import io -import json -import os -import shutil -import sys -from unicodedata import normalize - -pjoin = os.path.join - -import requests - -from ..filecheckpoints import GenericFileCheckpoints - -from traitlets.config import Config -from jupyter_server.utils import url_path_join, url_escape, to_os_path -from jupyter_server.tests.launchserver import ServerTestBase, assert_http_error -from nbformat import write, from_dict -from nbformat.v4 import ( - new_notebook, new_markdown_cell, -) -from nbformat import v2 -from ipython_genutils import py3compat -from ipython_genutils.tempdir import TemporaryDirectory - -from base64 import encodebytes, decodebytes - - -def uniq_stable(elems): - """uniq_stable(elems) -> list - - Return from an iterable, a list of all the unique elements in the input, - maintaining the order in which they first appear. 
- """ - seen = set() - return [x for x in elems if x not in seen and not seen.add(x)] - -def notebooks_only(dir_model): - return [nb for nb in dir_model['content'] if nb['type']=='notebook'] - -def dirs_only(dir_model): - return [x for x in dir_model['content'] if x['type']=='directory'] - - -class API(object): - """Wrapper for contents API calls.""" - def __init__(self, request): - self.request = request - - def _req(self, verb, path, body=None, params=None): - response = self.request(verb, - url_path_join('api/contents', path), - data=body, params=params, - ) - response.raise_for_status() - return response - - def list(self, path='/'): - return self._req('GET', path) - - def read(self, path, type=None, format=None, content=None): - params = {} - if type is not None: - params['type'] = type - if format is not None: - params['format'] = format - if content == False: - params['content'] = '0' - return self._req('GET', path, params=params) - - def create_untitled(self, path='/', ext='.ipynb'): - body = None - if ext: - body = json.dumps({'ext': ext}) - return self._req('POST', path, body) - - def mkdir_untitled(self, path='/'): - return self._req('POST', path, json.dumps({'type': 'directory'})) - - def copy(self, copy_from, path='/'): - body = json.dumps({'copy_from':copy_from}) - return self._req('POST', path, body) - - def create(self, path='/'): - return self._req('PUT', path) - - def upload(self, path, body): - return self._req('PUT', path, body) - - def mkdir(self, path='/'): - return self._req('PUT', path, json.dumps({'type': 'directory'})) - - def copy_put(self, copy_from, path='/'): - body = json.dumps({'copy_from':copy_from}) - return self._req('PUT', path, body) - - def save(self, path, body): - return self._req('PUT', path, body) - - def delete(self, path='/'): - return self._req('DELETE', path) - - def rename(self, path, new_path): - body = json.dumps({'path': new_path}) - return self._req('PATCH', path, body) - - def get_checkpoints(self, path): - return 
self._req('GET', url_path_join(path, 'checkpoints')) - - def new_checkpoint(self, path): - return self._req('POST', url_path_join(path, 'checkpoints')) - - def restore_checkpoint(self, path, checkpoint_id): - return self._req('POST', url_path_join(path, 'checkpoints', checkpoint_id)) - - def delete_checkpoint(self, path, checkpoint_id): - return self._req('DELETE', url_path_join(path, 'checkpoints', checkpoint_id)) - - -class APITest(ServerTestBase): - """Test the kernels web service API""" - dirs_nbs = [('', 'inroot'), - ('Directory with spaces in', 'inspace'), - (u'unicodé', 'innonascii'), - ('foo', 'a'), - ('foo', 'b'), - ('foo', 'name with spaces'), - ('foo', u'unicodé'), - ('foo/bar', 'baz'), - ('ordering', 'A'), - ('ordering', 'b'), - ('ordering', 'C'), - (u'å b', u'ç d'), - ] - hidden_dirs = ['.hidden', '__pycache__'] - - # Don't include root dir. - dirs = uniq_stable([py3compat.cast_unicode(d) for (d,n) in dirs_nbs[1:]]) - top_level_dirs = {normalize('NFC', d.split('/')[0]) for d in dirs} - - @staticmethod - def _blob_for_name(name): - return name.encode('utf-8') + b'\xFF' - - @staticmethod - def _txt_for_name(name): - return u'%s text file' % name - - def to_os_path(self, api_path): - return to_os_path(api_path, root=self.root_dir) - - def make_dir(self, api_path): - """Create a directory at api_path""" - os_path = self.to_os_path(api_path) - try: - os.makedirs(os_path) - except OSError: - print("Directory already exists: %r" % os_path) - - def make_txt(self, api_path, txt): - """Make a text file at a given api_path""" - os_path = self.to_os_path(api_path) - with io.open(os_path, 'w', encoding='utf-8') as f: - f.write(txt) - - def make_blob(self, api_path, blob): - """Make a binary file at a given api_path""" - os_path = self.to_os_path(api_path) - with io.open(os_path, 'wb') as f: - f.write(blob) - - def make_nb(self, api_path, nb): - """Make a notebook file at a given api_path""" - os_path = self.to_os_path(api_path) - - with io.open(os_path, 'w', 
encoding='utf-8') as f: - write(nb, f, version=4) - - def delete_dir(self, api_path): - """Delete a directory at api_path, removing any contents.""" - os_path = self.to_os_path(api_path) - shutil.rmtree(os_path, ignore_errors=True) - - def delete_file(self, api_path): - """Delete a file at the given path if it exists.""" - if self.isfile(api_path): - os.unlink(self.to_os_path(api_path)) - - def isfile(self, api_path): - return os.path.isfile(self.to_os_path(api_path)) - - def isdir(self, api_path): - return os.path.isdir(self.to_os_path(api_path)) - - def setUp(self): - for d in (self.dirs + self.hidden_dirs): - self.make_dir(d) - self.addCleanup(partial(self.delete_dir, d)) - - for d, name in self.dirs_nbs: - # create a notebook - nb = new_notebook() - nbname = u'{}/{}.ipynb'.format(d, name) - self.make_nb(nbname, nb) - self.addCleanup(partial(self.delete_file, nbname)) - - # create a text file - txt = self._txt_for_name(name) - txtname = u'{}/{}.txt'.format(d, name) - self.make_txt(txtname, txt) - self.addCleanup(partial(self.delete_file, txtname)) - - blob = self._blob_for_name(name) - blobname = u'{}/{}.blob'.format(d, name) - self.make_blob(blobname, blob) - self.addCleanup(partial(self.delete_file, blobname)) - - self.api = API(self.request) - - def test_list_notebooks(self): - nbs = notebooks_only(self.api.list().json()) - self.assertEqual(len(nbs), 1) - self.assertEqual(nbs[0]['name'], 'inroot.ipynb') - - nbs = notebooks_only(self.api.list('/Directory with spaces in/').json()) - self.assertEqual(len(nbs), 1) - self.assertEqual(nbs[0]['name'], 'inspace.ipynb') - - nbs = notebooks_only(self.api.list(u'/unicodé/').json()) - self.assertEqual(len(nbs), 1) - self.assertEqual(nbs[0]['name'], 'innonascii.ipynb') - self.assertEqual(nbs[0]['path'], u'unicodé/innonascii.ipynb') - - nbs = notebooks_only(self.api.list('/foo/bar/').json()) - self.assertEqual(len(nbs), 1) - self.assertEqual(nbs[0]['name'], 'baz.ipynb') - self.assertEqual(nbs[0]['path'], 
'foo/bar/baz.ipynb') - - nbs = notebooks_only(self.api.list('foo').json()) - self.assertEqual(len(nbs), 4) - nbnames = { normalize('NFC', n['name']) for n in nbs } - expected = [ u'a.ipynb', u'b.ipynb', u'name with spaces.ipynb', u'unicodé.ipynb'] - expected = { normalize('NFC', name) for name in expected } - self.assertEqual(nbnames, expected) - - nbs = notebooks_only(self.api.list('ordering').json()) - nbnames = {n['name'] for n in nbs} - expected = {'A.ipynb', 'b.ipynb', 'C.ipynb'} - self.assertEqual(nbnames, expected) - - def test_list_dirs(self): - dirs = dirs_only(self.api.list().json()) - dir_names = {normalize('NFC', d['name']) for d in dirs} - self.assertEqual(dir_names, self.top_level_dirs) # Excluding hidden dirs - - def test_get_dir_no_content(self): - for d in self.dirs: - model = self.api.read(d, content=False).json() - self.assertEqual(model['path'], d) - self.assertEqual(model['type'], 'directory') - self.assertIn('content', model) - self.assertEqual(model['content'], None) - - def test_list_nonexistant_dir(self): - with assert_http_error(404): - self.api.list('nonexistant') - - def test_get_nb_contents(self): - for d, name in self.dirs_nbs: - path = url_path_join(d, name + '.ipynb') - nb = self.api.read(path).json() - self.assertEqual(nb['name'], u'%s.ipynb' % name) - self.assertEqual(nb['path'], path) - self.assertEqual(nb['type'], 'notebook') - self.assertIn('content', nb) - self.assertEqual(nb['format'], 'json') - self.assertIn('metadata', nb['content']) - self.assertIsInstance(nb['content']['metadata'], dict) - - def test_get_nb_no_content(self): - for d, name in self.dirs_nbs: - path = url_path_join(d, name + '.ipynb') - nb = self.api.read(path, content=False).json() - self.assertEqual(nb['name'], u'%s.ipynb' % name) - self.assertEqual(nb['path'], path) - self.assertEqual(nb['type'], 'notebook') - self.assertIn('content', nb) - self.assertEqual(nb['content'], None) - - def test_get_nb_invalid(self): - nb = { - 'nbformat': 4, - 'metadata': {}, 
- 'cells': [{ - 'cell_type': 'wrong', - 'metadata': {}, - }], - } - path = u'å b/Validate tést.ipynb' - self.make_txt(path, py3compat.cast_unicode(json.dumps(nb))) - model = self.api.read(path).json() - self.assertEqual(model['path'], path) - self.assertEqual(model['type'], 'notebook') - self.assertIn('content', model) - self.assertIn('message', model) - self.assertIn("validation failed", model['message'].lower()) - - def test_get_contents_no_such_file(self): - # Name that doesn't exist - should be a 404 - with assert_http_error(404): - self.api.read('foo/q.ipynb') - - def test_get_text_file_contents(self): - for d, name in self.dirs_nbs: - path = url_path_join(d, name + '.txt') - model = self.api.read(path).json() - self.assertEqual(model['name'], u'%s.txt' % name) - self.assertEqual(model['path'], path) - self.assertIn('content', model) - self.assertEqual(model['format'], 'text') - self.assertEqual(model['type'], 'file') - self.assertEqual(model['content'], self._txt_for_name(name)) - - # Name that doesn't exist - should be a 404 - with assert_http_error(404): - self.api.read('foo/q.txt') - - # Specifying format=text should fail on a non-UTF-8 file - with assert_http_error(400): - self.api.read('foo/bar/baz.blob', type='file', format='text') - - def test_get_binary_file_contents(self): - for d, name in self.dirs_nbs: - path = url_path_join(d, name + '.blob') - model = self.api.read(path).json() - self.assertEqual(model['name'], u'%s.blob' % name) - self.assertEqual(model['path'], path) - self.assertIn('content', model) - self.assertEqual(model['format'], 'base64') - self.assertEqual(model['type'], 'file') - self.assertEqual( - decodebytes(model['content'].encode('ascii')), - self._blob_for_name(name), - ) - - # Name that doesn't exist - should be a 404 - with assert_http_error(404): - self.api.read('foo/q.txt') - - def test_get_bad_type(self): - with assert_http_error(400): - self.api.read(u'unicodé', type='file') # this is a directory - - with 
assert_http_error(400): - self.api.read(u'unicodé/innonascii.ipynb', type='directory') - - def _check_created(self, resp, path, type='notebook'): - self.assertEqual(resp.status_code, 201) - location_header = py3compat.str_to_unicode(resp.headers['Location']) - self.assertEqual(location_header, url_path_join(self.url_prefix, u'api/contents', url_escape(path))) - rjson = resp.json() - self.assertEqual(rjson['name'], path.rsplit('/', 1)[-1]) - self.assertEqual(rjson['path'], path) - self.assertEqual(rjson['type'], type) - isright = self.isdir if type == 'directory' else self.isfile - assert isright(path) - - def test_create_untitled(self): - resp = self.api.create_untitled(path=u'å b') - self._check_created(resp, u'å b/Untitled.ipynb') - - # Second time - resp = self.api.create_untitled(path=u'å b') - self._check_created(resp, u'å b/Untitled1.ipynb') - - # And two directories down - resp = self.api.create_untitled(path='foo/bar') - self._check_created(resp, 'foo/bar/Untitled.ipynb') - - def test_create_untitled_txt(self): - resp = self.api.create_untitled(path='foo/bar', ext='.txt') - self._check_created(resp, 'foo/bar/untitled.txt', type='file') - - resp = self.api.read(path='foo/bar/untitled.txt') - model = resp.json() - self.assertEqual(model['type'], 'file') - self.assertEqual(model['format'], 'text') - self.assertEqual(model['content'], '') - - def test_upload(self): - nb = new_notebook() - nbmodel = {'content': nb, 'type': 'notebook'} - path = u'å b/Upload tést.ipynb' - resp = self.api.upload(path, body=json.dumps(nbmodel)) - self._check_created(resp, path) - - def test_mkdir_untitled(self): - resp = self.api.mkdir_untitled(path=u'å b') - self._check_created(resp, u'å b/Untitled Folder', type='directory') - - # Second time - resp = self.api.mkdir_untitled(path=u'å b') - self._check_created(resp, u'å b/Untitled Folder 1', type='directory') - - # And two directories down - resp = self.api.mkdir_untitled(path='foo/bar') - self._check_created(resp, 'foo/bar/Untitled 
Folder', type='directory') - - def test_mkdir(self): - path = u'å b/New ∂ir' - resp = self.api.mkdir(path) - self._check_created(resp, path, type='directory') - - def test_mkdir_hidden_400(self): - with assert_http_error(400): - resp = self.api.mkdir(u'å b/.hidden') - - def test_upload_txt(self): - body = u'ünicode téxt' - model = { - 'content' : body, - 'format' : 'text', - 'type' : 'file', - } - path = u'å b/Upload tést.txt' - resp = self.api.upload(path, body=json.dumps(model)) - - # check roundtrip - resp = self.api.read(path) - model = resp.json() - self.assertEqual(model['type'], 'file') - self.assertEqual(model['format'], 'text') - self.assertEqual(model['content'], body) - - def test_upload_b64(self): - body = b'\xFFblob' - b64body = encodebytes(body).decode('ascii') - model = { - 'content' : b64body, - 'format' : 'base64', - 'type' : 'file', - } - path = u'å b/Upload tést.blob' - resp = self.api.upload(path, body=json.dumps(model)) - - # check roundtrip - resp = self.api.read(path) - model = resp.json() - self.assertEqual(model['type'], 'file') - self.assertEqual(model['path'], path) - self.assertEqual(model['format'], 'base64') - decoded = decodebytes(model['content'].encode('ascii')) - self.assertEqual(decoded, body) - - def test_upload_v2(self): - nb = v2.new_notebook() - ws = v2.new_worksheet() - nb.worksheets.append(ws) - ws.cells.append(v2.new_code_cell(input='print("hi")')) - nbmodel = {'content': nb, 'type': 'notebook'} - path = u'å b/Upload tést.ipynb' - resp = self.api.upload(path, body=json.dumps(nbmodel)) - self._check_created(resp, path) - resp = self.api.read(path) - data = resp.json() - self.assertEqual(data['content']['nbformat'], 4) - - def test_copy(self): - resp = self.api.copy(u'å b/ç d.ipynb', u'å b') - self._check_created(resp, u'å b/ç d-Copy1.ipynb') - - resp = self.api.copy(u'å b/ç d.ipynb', u'å b') - self._check_created(resp, u'å b/ç d-Copy2.ipynb') - - def test_copy_copy(self): - resp = self.api.copy(u'å b/ç d.ipynb', u'å b') - 
self._check_created(resp, u'å b/ç d-Copy1.ipynb') - - resp = self.api.copy(u'å b/ç d-Copy1.ipynb', u'å b') - self._check_created(resp, u'å b/ç d-Copy2.ipynb') - - def test_copy_path(self): - resp = self.api.copy(u'foo/a.ipynb', u'å b') - self._check_created(resp, u'å b/a.ipynb') - - resp = self.api.copy(u'foo/a.ipynb', u'å b') - self._check_created(resp, u'å b/a-Copy1.ipynb') - - def test_copy_put_400(self): - with assert_http_error(400): - resp = self.api.copy_put(u'å b/ç d.ipynb', u'å b/cøpy.ipynb') - - def test_copy_dir_400(self): - # can't copy directories - with assert_http_error(400): - resp = self.api.copy(u'å b', u'foo') - - def test_delete(self): - for d, name in self.dirs_nbs: - print('%r, %r' % (d, name)) - resp = self.api.delete(url_path_join(d, name + '.ipynb')) - self.assertEqual(resp.status_code, 204) - - for d in self.dirs + ['/']: - nbs = notebooks_only(self.api.list(d).json()) - print('------') - print(d) - print(nbs) - self.assertEqual(nbs, []) - - def test_delete_dirs(self): - # depth-first delete everything, so we don't try to delete empty directories - for name in sorted(self.dirs + ['/'], key=len, reverse=True): - listing = self.api.list(name).json()['content'] - for model in listing: - self.api.delete(model['path']) - listing = self.api.list('/').json()['content'] - self.assertEqual(listing, []) - - def test_delete_non_empty_dir(self): - if sys.platform == 'win32': - self.skipTest("Disabled deleting non-empty dirs on Windows") - # Test that non empty directory can be deleted - self.api.delete(u'å b') - # Check if directory has actually been deleted - with assert_http_error(404): - self.api.list(u'å b') - - def test_rename(self): - resp = self.api.rename('foo/a.ipynb', 'foo/z.ipynb') - self.assertEqual(resp.headers['Location'].split('/')[-1], 'z.ipynb') - self.assertEqual(resp.json()['name'], 'z.ipynb') - self.assertEqual(resp.json()['path'], 'foo/z.ipynb') - assert self.isfile('foo/z.ipynb') - - nbs = 
notebooks_only(self.api.list('foo').json()) - nbnames = set(n['name'] for n in nbs) - self.assertIn('z.ipynb', nbnames) - self.assertNotIn('a.ipynb', nbnames) - - def test_checkpoints_follow_file(self): - - # Read initial file state - orig = self.api.read('foo/a.ipynb') - - # Create a checkpoint of initial state - r = self.api.new_checkpoint('foo/a.ipynb') - cp1 = r.json() - - # Modify file and save - nbcontent = json.loads(orig.text)['content'] - nb = from_dict(nbcontent) - hcell = new_markdown_cell('Created by test') - nb.cells.append(hcell) - nbmodel = {'content': nb, 'type': 'notebook'} - self.api.save('foo/a.ipynb', body=json.dumps(nbmodel)) - - # Rename the file. - self.api.rename('foo/a.ipynb', 'foo/z.ipynb') - - # Looking for checkpoints in the old location should yield no results. - self.assertEqual(self.api.get_checkpoints('foo/a.ipynb').json(), []) - - # Looking for checkpoints in the new location should work. - cps = self.api.get_checkpoints('foo/z.ipynb').json() - self.assertEqual(cps, [cp1]) - - # Delete the file. The checkpoint should be deleted as well. 
- self.api.delete('foo/z.ipynb') - cps = self.api.get_checkpoints('foo/z.ipynb').json() - self.assertEqual(cps, []) - - def test_rename_existing(self): - with assert_http_error(409): - self.api.rename('foo/a.ipynb', 'foo/b.ipynb') - - def test_save(self): - resp = self.api.read('foo/a.ipynb') - nbcontent = json.loads(resp.text)['content'] - nb = from_dict(nbcontent) - nb.cells.append(new_markdown_cell(u'Created by test ³')) - - nbmodel = {'content': nb, 'type': 'notebook'} - resp = self.api.save('foo/a.ipynb', body=json.dumps(nbmodel)) - - nbcontent = self.api.read('foo/a.ipynb').json()['content'] - newnb = from_dict(nbcontent) - self.assertEqual(newnb.cells[0].source, - u'Created by test ³') - - def test_checkpoints(self): - resp = self.api.read('foo/a.ipynb') - r = self.api.new_checkpoint('foo/a.ipynb') - self.assertEqual(r.status_code, 201) - cp1 = r.json() - self.assertEqual(set(cp1), {'id', 'last_modified'}) - self.assertEqual(r.headers['Location'].split('/')[-1], cp1['id']) - - # Modify it - nbcontent = json.loads(resp.text)['content'] - nb = from_dict(nbcontent) - hcell = new_markdown_cell('Created by test') - nb.cells.append(hcell) - # Save - nbmodel= {'content': nb, 'type': 'notebook'} - resp = self.api.save('foo/a.ipynb', body=json.dumps(nbmodel)) - - # List checkpoints - cps = self.api.get_checkpoints('foo/a.ipynb').json() - self.assertEqual(cps, [cp1]) - - nbcontent = self.api.read('foo/a.ipynb').json()['content'] - nb = from_dict(nbcontent) - self.assertEqual(nb.cells[0].source, 'Created by test') - - # Restore cp1 - r = self.api.restore_checkpoint('foo/a.ipynb', cp1['id']) - self.assertEqual(r.status_code, 204) - nbcontent = self.api.read('foo/a.ipynb').json()['content'] - nb = from_dict(nbcontent) - self.assertEqual(nb.cells, []) - - # Delete cp1 - r = self.api.delete_checkpoint('foo/a.ipynb', cp1['id']) - self.assertEqual(r.status_code, 204) - cps = self.api.get_checkpoints('foo/a.ipynb').json() - self.assertEqual(cps, []) - - def 
test_file_checkpoints(self): - """ - Test checkpointing of non-notebook files. - """ - filename = 'foo/a.txt' - resp = self.api.read(filename) - orig_content = json.loads(resp.text)['content'] - - # Create a checkpoint. - r = self.api.new_checkpoint(filename) - self.assertEqual(r.status_code, 201) - cp1 = r.json() - self.assertEqual(set(cp1), {'id', 'last_modified'}) - self.assertEqual(r.headers['Location'].split('/')[-1], cp1['id']) - - # Modify the file and save. - new_content = orig_content + '\nsecond line' - model = { - 'content': new_content, - 'type': 'file', - 'format': 'text', - } - resp = self.api.save(filename, body=json.dumps(model)) - - # List checkpoints - cps = self.api.get_checkpoints(filename).json() - self.assertEqual(cps, [cp1]) - - content = self.api.read(filename).json()['content'] - self.assertEqual(content, new_content) - - # Restore cp1 - r = self.api.restore_checkpoint(filename, cp1['id']) - self.assertEqual(r.status_code, 204) - restored_content = self.api.read(filename).json()['content'] - self.assertEqual(restored_content, orig_content) - - # Delete cp1 - r = self.api.delete_checkpoint(filename, cp1['id']) - self.assertEqual(r.status_code, 204) - cps = self.api.get_checkpoints(filename).json() - self.assertEqual(cps, []) - - @contextmanager - def patch_cp_root(self, dirname): - """ - Temporarily patch the root dir of our checkpoint manager. - """ - cpm = self.server.contents_manager.checkpoints - old_dirname = cpm.root_dir - cpm.root_dir = dirname - try: - yield - finally: - cpm.root_dir = old_dirname - - def test_checkpoints_separate_root(self): - """ - Test that FileCheckpoints functions correctly even when it's - using a different root dir from FileContentsManager. This also keeps - the implementation honest for use with ContentsManagers that don't map - models to the filesystem - - Override this method to a no-op when testing other managers. 
- """ - with TemporaryDirectory() as td: - with self.patch_cp_root(td): - self.test_checkpoints() - - with TemporaryDirectory() as td: - with self.patch_cp_root(td): - self.test_file_checkpoints() - - -class GenericFileCheckpointsAPITest(APITest): - """ - Run the tests from APITest with GenericFileCheckpoints. - """ - config = Config() - config.FileContentsManager.checkpoints_class = GenericFileCheckpoints - - def test_config_did_something(self): - - self.assertIsInstance( - self.server.contents_manager.checkpoints, - GenericFileCheckpoints, - ) - - diff --git a/jupyter_server/services/contents/tests/test_fileio.py b/jupyter_server/services/contents/tests/test_fileio.py deleted file mode 100644 index 256c664ae8..0000000000 --- a/jupyter_server/services/contents/tests/test_fileio.py +++ /dev/null @@ -1,131 +0,0 @@ -# encoding: utf-8 -"""Tests for file IO""" - -# Copyright (c) Jupyter Development Team. -# Distributed under the terms of the Modified BSD License. - -import io as stdlib_io -import os.path -import stat - -import nose.tools as nt - -from ipython_genutils.testing.decorators import skip_win32 -from ..fileio import atomic_writing - -from ipython_genutils.tempdir import TemporaryDirectory - -umask = 0 - -def test_atomic_writing(): - class CustomExc(Exception): pass - - with TemporaryDirectory() as td: - f1 = os.path.join(td, 'penguin') - with stdlib_io.open(f1, 'w') as f: - f.write(u'Before') - - if os.name != 'nt': - os.chmod(f1, 0o701) - orig_mode = stat.S_IMODE(os.stat(f1).st_mode) - - f2 = os.path.join(td, 'flamingo') - try: - os.symlink(f1, f2) - have_symlink = True - except (AttributeError, NotImplementedError, OSError): - # AttributeError: Python doesn't support it - # NotImplementedError: The system doesn't support it - # OSError: The user lacks the privilege (Windows) - have_symlink = False - - with nt.assert_raises(CustomExc): - with atomic_writing(f1) as f: - f.write(u'Failing write') - raise CustomExc - - # Because of the exception, the file 
should not have been modified - with stdlib_io.open(f1, 'r') as f: - nt.assert_equal(f.read(), u'Before') - - with atomic_writing(f1) as f: - f.write(u'Overwritten') - - with stdlib_io.open(f1, 'r') as f: - nt.assert_equal(f.read(), u'Overwritten') - - if os.name != 'nt': - mode = stat.S_IMODE(os.stat(f1).st_mode) - nt.assert_equal(mode, orig_mode) - - if have_symlink: - # Check that writing over a file preserves a symlink - with atomic_writing(f2) as f: - f.write(u'written from symlink') - - with stdlib_io.open(f1, 'r') as f: - nt.assert_equal(f.read(), u'written from symlink') - -def _save_umask(): - global umask - umask = os.umask(0) - os.umask(umask) - -def _restore_umask(): - os.umask(umask) - -@skip_win32 -@nt.with_setup(_save_umask, _restore_umask) -def test_atomic_writing_umask(): - with TemporaryDirectory() as td: - os.umask(0o022) - f1 = os.path.join(td, '1') - with atomic_writing(f1) as f: - f.write(u'1') - mode = stat.S_IMODE(os.stat(f1).st_mode) - nt.assert_equal(mode, 0o644, '{:o} != 644'.format(mode)) - - os.umask(0o057) - f2 = os.path.join(td, '2') - with atomic_writing(f2) as f: - f.write(u'2') - mode = stat.S_IMODE(os.stat(f2).st_mode) - nt.assert_equal(mode, 0o620, '{:o} != 620'.format(mode)) - - -def test_atomic_writing_newlines(): - with TemporaryDirectory() as td: - path = os.path.join(td, 'testfile') - - lf = u'a\nb\nc\n' - plat = lf.replace(u'\n', os.linesep) - crlf = lf.replace(u'\n', u'\r\n') - - # test default - with stdlib_io.open(path, 'w') as f: - f.write(lf) - with stdlib_io.open(path, 'r', newline='') as f: - read = f.read() - nt.assert_equal(read, plat) - - # test newline=LF - with stdlib_io.open(path, 'w', newline='\n') as f: - f.write(lf) - with stdlib_io.open(path, 'r', newline='') as f: - read = f.read() - nt.assert_equal(read, lf) - - # test newline=CRLF - with atomic_writing(path, newline='\r\n') as f: - f.write(lf) - with stdlib_io.open(path, 'r', newline='') as f: - read = f.read() - nt.assert_equal(read, crlf) - - # test 
newline=no convert - text = u'crlf\r\ncr\rlf\n' - with atomic_writing(path, newline='') as f: - f.write(text) - with stdlib_io.open(path, 'r', newline='') as f: - read = f.read() - nt.assert_equal(read, text) diff --git a/jupyter_server/services/contents/tests/test_largefilemanager.py b/jupyter_server/services/contents/tests/test_largefilemanager.py deleted file mode 100644 index 13d294b9b0..0000000000 --- a/jupyter_server/services/contents/tests/test_largefilemanager.py +++ /dev/null @@ -1,113 +0,0 @@ -from unittest import TestCase -from ipython_genutils.tempdir import TemporaryDirectory -from ..largefilemanager import LargeFileManager -import os -from tornado import web - - -def _make_dir(contents_manager, api_path): - """ - Make a directory. - """ - os_path = contents_manager._get_os_path(api_path) - try: - os.makedirs(os_path) - except OSError: - print("Directory already exists: %r" % os_path) - - -class TestLargeFileManager(TestCase): - - def setUp(self): - self._temp_dir = TemporaryDirectory() - self.td = self._temp_dir.name - self.contents_manager = LargeFileManager(root_dir=self.td) - - def make_dir(self, api_path): - """make a subdirectory at api_path - - override in subclasses if contents are not on the filesystem. 
- """ - _make_dir(self.contents_manager, api_path) - - def test_save(self): - - cm = self.contents_manager - # Create a notebook - model = cm.new_untitled(type='notebook') - name = model['name'] - path = model['path'] - - # Get the model with 'content' - full_model = cm.get(path) - # Save the notebook - model = cm.save(full_model, path) - assert isinstance(model, dict) - self.assertIn('name', model) - self.assertIn('path', model) - self.assertEqual(model['name'], name) - self.assertEqual(model['path'], path) - - try: - model = {'name': 'test', 'path': 'test', 'chunk': 1} - cm.save(model, model['path']) - except web.HTTPError as e: - self.assertEqual('HTTP 400: Bad Request (No file type provided)', str(e)) - - try: - model = {'name': 'test', 'path': 'test', 'chunk': 1, 'type': 'notebook'} - cm.save(model, model['path']) - except web.HTTPError as e: - self.assertEqual('HTTP 400: Bad Request (File type "notebook" is not supported for large file transfer)', str(e)) - - try: - model = {'name': 'test', 'path': 'test', 'chunk': 1, 'type': 'file'} - cm.save(model, model['path']) - except web.HTTPError as e: - self.assertEqual('HTTP 400: Bad Request (No file content provided)', str(e)) - - try: - model = {'name': 'test', 'path': 'test', 'chunk': 2, 'type': 'file', - 'content': u'test', 'format': 'json'} - cm.save(model, model['path']) - except web.HTTPError as e: - self.assertEqual("HTTP 400: Bad Request (Must specify format of file contents as 'text' or 'base64')", - str(e)) - - # Save model for different chunks - model = {'name': 'test', 'path': 'test', 'type': 'file', - 'content': u'test==', 'format': 'text'} - name = model['name'] - path = model['path'] - cm.save(model, path) - - for chunk in (1, 2, -1): - for fm in ('text', 'base64'): - full_model = cm.get(path) - full_model['chunk'] = chunk - full_model['format'] = fm - model_res = cm.save(full_model, path) - assert isinstance(model_res, dict) - - self.assertIn('name', model_res) - self.assertIn('path', model_res) - 
self.assertNotIn('chunk', model_res) - self.assertEqual(model_res['name'], name) - self.assertEqual(model_res['path'], path) - - # Test in sub-directory - # Create a directory and notebook in that directory - sub_dir = '/foo/' - self.make_dir('foo') - model = cm.new_untitled(path=sub_dir, type='notebook') - name = model['name'] - path = model['path'] - model = cm.get(path) - - # Change the name in the model for rename - model = cm.save(model, path) - assert isinstance(model, dict) - self.assertIn('name', model) - self.assertIn('path', model) - self.assertEqual(model['name'], 'Untitled.ipynb') - self.assertEqual(model['path'], 'foo/Untitled.ipynb') diff --git a/jupyter_server/services/contents/tests/test_manager.py b/jupyter_server/services/contents/tests/test_manager.py deleted file mode 100644 index 0e6b0fb2b2..0000000000 --- a/jupyter_server/services/contents/tests/test_manager.py +++ /dev/null @@ -1,642 +0,0 @@ -# coding: utf-8 -"""Tests for the notebook manager.""" -from __future__ import print_function - -import os -import sys -import time -from contextlib import contextmanager -from itertools import combinations - -from nose import SkipTest -from tornado.web import HTTPError -from unittest import TestCase -from tempfile import NamedTemporaryFile - -from nbformat import v4 as nbformat - -from ipython_genutils.tempdir import TemporaryDirectory -from traitlets import TraitError -from ipython_genutils.testing import decorators as dec - -from ..filemanager import FileContentsManager - - -def _make_dir(contents_manager, api_path): - """ - Make a directory. 
- """ - os_path = contents_manager._get_os_path(api_path) - try: - os.makedirs(os_path) - except OSError: - print("Directory already exists: %r" % os_path) - - -class TestFileContentsManager(TestCase): - - @contextmanager - def assertRaisesHTTPError(self, status, msg=None): - msg = msg or "Should have raised HTTPError(%i)" % status - try: - yield - except HTTPError as e: - self.assertEqual(e.status_code, status) - else: - self.fail(msg) - - def symlink(self, contents_manager, src, dst): - """Make a symlink to src from dst - - src and dst are api_paths - """ - src_os_path = contents_manager._get_os_path(src) - dst_os_path = contents_manager._get_os_path(dst) - print(src_os_path, dst_os_path, os.path.isfile(src_os_path)) - os.symlink(src_os_path, dst_os_path) - - def test_root_dir(self): - with TemporaryDirectory() as td: - fm = FileContentsManager(root_dir=td) - self.assertEqual(fm.root_dir, td) - - def test_missing_root_dir(self): - with TemporaryDirectory() as td: - root = os.path.join(td, 'notebook', 'dir', 'is', 'missing') - self.assertRaises(TraitError, FileContentsManager, root_dir=root) - - def test_invalid_root_dir(self): - with NamedTemporaryFile() as tf: - self.assertRaises(TraitError, FileContentsManager, root_dir=tf.name) - - def test_get_os_path(self): - # full filesystem path should be returned with correct operating system - # separators. 
- with TemporaryDirectory() as td: - root = td - fm = FileContentsManager(root_dir=root) - path = fm._get_os_path('/path/to/notebook/test.ipynb') - rel_path_list = '/path/to/notebook/test.ipynb'.split('/') - fs_path = os.path.join(fm.root_dir, *rel_path_list) - self.assertEqual(path, fs_path) - - fm = FileContentsManager(root_dir=root) - path = fm._get_os_path('test.ipynb') - fs_path = os.path.join(fm.root_dir, 'test.ipynb') - self.assertEqual(path, fs_path) - - fm = FileContentsManager(root_dir=root) - path = fm._get_os_path('////test.ipynb') - fs_path = os.path.join(fm.root_dir, 'test.ipynb') - self.assertEqual(path, fs_path) - - def test_checkpoint_subdir(self): - subd = u'sub ∂ir' - cp_name = 'test-cp.ipynb' - with TemporaryDirectory() as td: - root = td - os.mkdir(os.path.join(td, subd)) - fm = FileContentsManager(root_dir=root) - cpm = fm.checkpoints - cp_dir = cpm.checkpoint_path( - 'cp', 'test.ipynb' - ) - cp_subdir = cpm.checkpoint_path( - 'cp', '/%s/test.ipynb' % subd - ) - self.assertNotEqual(cp_dir, cp_subdir) - self.assertEqual(cp_dir, os.path.join(root, cpm.checkpoint_dir, cp_name)) - self.assertEqual(cp_subdir, os.path.join(root, subd, cpm.checkpoint_dir, cp_name)) - - @dec.skipif(sys.platform == 'win32' and sys.version_info[0] < 3) - def test_bad_symlink(self): - with TemporaryDirectory() as td: - cm = FileContentsManager(root_dir=td) - path = 'test bad symlink' - _make_dir(cm, path) - - file_model = cm.new_untitled(path=path, ext='.txt') - - # create a broken symlink - self.symlink(cm, "target", '%s/%s' % (path, 'bad symlink')) - model = cm.get(path) - - contents = { - content['name']: content for content in model['content'] - } - self.assertTrue('untitled.txt' in contents) - self.assertEqual(contents['untitled.txt'], file_model) - # broken symlinks should still be shown in the contents manager - self.assertTrue('bad symlink' in contents) - - @dec.skipif(sys.platform == 'win32' and sys.version_info[0] < 3) - def test_good_symlink(self): - with 
TemporaryDirectory() as td: - cm = FileContentsManager(root_dir=td) - parent = 'test good symlink' - name = 'good symlink' - path = '{0}/{1}'.format(parent, name) - _make_dir(cm, parent) - - file_model = cm.new(path=parent + '/zfoo.txt') - - # create a good symlink - self.symlink(cm, file_model['path'], path) - symlink_model = cm.get(path, content=False) - dir_model = cm.get(parent) - self.assertEqual( - sorted(dir_model['content'], key=lambda x: x['name']), - [symlink_model, file_model], - ) - - def test_403(self): - if hasattr(os, 'getuid'): - if os.getuid() == 0: - raise SkipTest("Can't test permissions as root") - if sys.platform.startswith('win'): - raise SkipTest("Can't test permissions on Windows") - - with TemporaryDirectory() as td: - cm = FileContentsManager(root_dir=td) - model = cm.new_untitled(type='file') - os_path = cm._get_os_path(model['path']) - - os.chmod(os_path, 0o400) - try: - with cm.open(os_path, 'w') as f: - f.write(u"don't care") - except HTTPError as e: - self.assertEqual(e.status_code, 403) - else: - self.fail("Should have raised HTTPError(403)") - - def test_escape_root(self): - with TemporaryDirectory() as td: - cm = FileContentsManager(root_dir=td) - # make foo, bar next to root - with open(os.path.join(cm.root_dir, '..', 'foo'), 'w') as f: - f.write('foo') - with open(os.path.join(cm.root_dir, '..', 'bar'), 'w') as f: - f.write('bar') - - with self.assertRaisesHTTPError(404): - cm.get('..') - with self.assertRaisesHTTPError(404): - cm.get('foo/../../../bar') - with self.assertRaisesHTTPError(404): - cm.delete('../foo') - with self.assertRaisesHTTPError(404): - cm.rename('../foo', '../bar') - with self.assertRaisesHTTPError(404): - cm.save(model={ - 'type': 'file', - 'content': u'', - 'format': 'text', - }, path='../foo') - - -class TestContentsManager(TestCase): - @contextmanager - def assertRaisesHTTPError(self, status, msg=None): - msg = msg or "Should have raised HTTPError(%i)" % status - try: - yield - except HTTPError as e: - 
self.assertEqual(e.status_code, status) - else: - self.fail(msg) - - def make_populated_dir(self, api_path): - cm = self.contents_manager - - self.make_dir(api_path) - - cm.new(path="/".join([api_path, "nb.ipynb"])) - cm.new(path="/".join([api_path, "file.txt"])) - - def check_populated_dir_files(self, api_path): - dir_model = self.contents_manager.get(api_path) - - self.assertEqual(dir_model['path'], api_path) - self.assertEqual(dir_model['type'], "directory") - - for entry in dir_model['content']: - if entry['type'] == "directory": - continue - elif entry['type'] == "file": - self.assertEqual(entry['name'], "file.txt") - complete_path = "/".join([api_path, "file.txt"]) - self.assertEqual(entry["path"], complete_path) - elif entry['type'] == "notebook": - self.assertEqual(entry['name'], "nb.ipynb") - complete_path = "/".join([api_path, "nb.ipynb"]) - self.assertEqual(entry["path"], complete_path) - - def setUp(self): - self._temp_dir = TemporaryDirectory() - self.td = self._temp_dir.name - self.contents_manager = FileContentsManager( - root_dir=self.td, - ) - - def tearDown(self): - self._temp_dir.cleanup() - - def make_dir(self, api_path): - """make a subdirectory at api_path - - override in subclasses if contents are not on the filesystem. 
- """ - _make_dir(self.contents_manager, api_path) - - def add_code_cell(self, nb): - output = nbformat.new_output("display_data", {'application/javascript': "alert('hi');"}) - cell = nbformat.new_code_cell("print('hi')", outputs=[output]) - nb.cells.append(cell) - - def new_notebook(self): - cm = self.contents_manager - model = cm.new_untitled(type='notebook') - name = model['name'] - path = model['path'] - - full_model = cm.get(path) - nb = full_model['content'] - nb['metadata']['counter'] = int(1e6 * time.time()) - self.add_code_cell(nb) - - cm.save(full_model, path) - return nb, name, path - - def test_new_untitled(self): - cm = self.contents_manager - # Test in root directory - model = cm.new_untitled(type='notebook') - assert isinstance(model, dict) - self.assertIn('name', model) - self.assertIn('path', model) - self.assertIn('type', model) - self.assertEqual(model['type'], 'notebook') - self.assertEqual(model['name'], 'Untitled.ipynb') - self.assertEqual(model['path'], 'Untitled.ipynb') - - # Test in sub-directory - model = cm.new_untitled(type='directory') - assert isinstance(model, dict) - self.assertIn('name', model) - self.assertIn('path', model) - self.assertIn('type', model) - self.assertEqual(model['type'], 'directory') - self.assertEqual(model['name'], 'Untitled Folder') - self.assertEqual(model['path'], 'Untitled Folder') - sub_dir = model['path'] - - model = cm.new_untitled(path=sub_dir) - assert isinstance(model, dict) - self.assertIn('name', model) - self.assertIn('path', model) - self.assertIn('type', model) - self.assertEqual(model['type'], 'file') - self.assertEqual(model['name'], 'untitled') - self.assertEqual(model['path'], '%s/untitled' % sub_dir) - - # Test with a compound extension - model = cm.new_untitled(path=sub_dir, ext='.foo.bar') - self.assertEqual(model['name'], 'untitled.foo.bar') - model = cm.new_untitled(path=sub_dir, ext='.foo.bar') - self.assertEqual(model['name'], 'untitled1.foo.bar') - - def test_modified_date(self): - - cm 
= self.contents_manager - - # Create a new notebook. - nb, name, path = self.new_notebook() - model = cm.get(path) - - # Add a cell and save. - self.add_code_cell(model['content']) - cm.save(model, path) - - # Reload notebook and verify that last_modified incremented. - saved = cm.get(path) - self.assertGreaterEqual(saved['last_modified'], model['last_modified']) - - # Move the notebook and verify that last_modified stayed the same. - # (The frontend fires a warning if last_modified increases on the - # renamed file.) - new_path = 'renamed.ipynb' - cm.rename(path, new_path) - renamed = cm.get(new_path) - self.assertGreaterEqual( - renamed['last_modified'], - saved['last_modified'], - ) - - def test_get(self): - cm = self.contents_manager - # Create a notebook - model = cm.new_untitled(type='notebook') - name = model['name'] - path = model['path'] - - # Check that we 'get' on the notebook we just created - model2 = cm.get(path) - assert isinstance(model2, dict) - self.assertIn('name', model2) - self.assertIn('path', model2) - self.assertEqual(model['name'], name) - self.assertEqual(model['path'], path) - - nb_as_file = cm.get(path, content=True, type='file') - self.assertEqual(nb_as_file['path'], path) - self.assertEqual(nb_as_file['type'], 'file') - self.assertEqual(nb_as_file['format'], 'text') - self.assertNotIsInstance(nb_as_file['content'], dict) - - nb_as_bin_file = cm.get(path, content=True, type='file', format='base64') - self.assertEqual(nb_as_bin_file['format'], 'base64') - - # Test in sub-directory - sub_dir = '/foo/' - self.make_dir('foo') - model = cm.new_untitled(path=sub_dir, ext='.ipynb') - model2 = cm.get(sub_dir + name) - assert isinstance(model2, dict) - self.assertIn('name', model2) - self.assertIn('path', model2) - self.assertIn('content', model2) - self.assertEqual(model2['name'], 'Untitled.ipynb') - self.assertEqual(model2['path'], '{0}/{1}'.format(sub_dir.strip('/'), name)) - - # Test with a regular file. 
- file_model_path = cm.new_untitled(path=sub_dir, ext='.txt')['path'] - file_model = cm.get(file_model_path) - self.assertDictContainsSubset( - { - 'content': u'', - 'format': u'text', - 'mimetype': u'text/plain', - 'name': u'untitled.txt', - 'path': u'foo/untitled.txt', - 'type': u'file', - 'writable': True, - }, - file_model, - ) - self.assertIn('created', file_model) - self.assertIn('last_modified', file_model) - - # Test getting directory model - - # Create a sub-sub directory to test getting directory contents with a - # subdir. - self.make_dir('foo/bar') - dirmodel = cm.get('foo') - self.assertEqual(dirmodel['type'], 'directory') - self.assertIsInstance(dirmodel['content'], list) - self.assertEqual(len(dirmodel['content']), 3) - self.assertEqual(dirmodel['path'], 'foo') - self.assertEqual(dirmodel['name'], 'foo') - - # Directory contents should match the contents of each individual entry - # when requested with content=False. - model2_no_content = cm.get(sub_dir + name, content=False) - file_model_no_content = cm.get(u'foo/untitled.txt', content=False) - sub_sub_dir_no_content = cm.get('foo/bar', content=False) - self.assertEqual(sub_sub_dir_no_content['path'], 'foo/bar') - self.assertEqual(sub_sub_dir_no_content['name'], 'bar') - - for entry in dirmodel['content']: - # Order isn't guaranteed by the spec, so this is a hacky way of - # verifying that all entries are matched. 
- if entry['path'] == sub_sub_dir_no_content['path']: - self.assertEqual(entry, sub_sub_dir_no_content) - elif entry['path'] == model2_no_content['path']: - self.assertEqual(entry, model2_no_content) - elif entry['path'] == file_model_no_content['path']: - self.assertEqual(entry, file_model_no_content) - else: - self.fail("Unexpected directory entry: %s" % entry()) - - with self.assertRaises(HTTPError): - cm.get('foo', type='file') - - def test_update(self): - cm = self.contents_manager - # Create a notebook - model = cm.new_untitled(type='notebook') - name = model['name'] - path = model['path'] - - # Change the name in the model for rename - model['path'] = 'test.ipynb' - model = cm.update(model, path) - assert isinstance(model, dict) - self.assertIn('name', model) - self.assertIn('path', model) - self.assertEqual(model['name'], 'test.ipynb') - - # Make sure the old name is gone - self.assertRaises(HTTPError, cm.get, path) - - # Test in sub-directory - # Create a directory and notebook in that directory - sub_dir = '/foo/' - self.make_dir('foo') - model = cm.new_untitled(path=sub_dir, type='notebook') - path = model['path'] - - # Change the name in the model for rename - d = path.rsplit('/', 1)[0] - new_path = model['path'] = d + '/test_in_sub.ipynb' - model = cm.update(model, path) - assert isinstance(model, dict) - self.assertIn('name', model) - self.assertIn('path', model) - self.assertEqual(model['name'], 'test_in_sub.ipynb') - self.assertEqual(model['path'], new_path) - - # Make sure the old name is gone - self.assertRaises(HTTPError, cm.get, path) - - def test_save(self): - cm = self.contents_manager - # Create a notebook - model = cm.new_untitled(type='notebook') - name = model['name'] - path = model['path'] - - # Get the model with 'content' - full_model = cm.get(path) - - # Save the notebook - model = cm.save(full_model, path) - assert isinstance(model, dict) - self.assertIn('name', model) - self.assertIn('path', model) - self.assertEqual(model['name'], 
name) - self.assertEqual(model['path'], path) - - # Test in sub-directory - # Create a directory and notebook in that directory - sub_dir = '/foo/' - self.make_dir('foo') - model = cm.new_untitled(path=sub_dir, type='notebook') - name = model['name'] - path = model['path'] - model = cm.get(path) - - # Change the name in the model for rename - model = cm.save(model, path) - assert isinstance(model, dict) - self.assertIn('name', model) - self.assertIn('path', model) - self.assertEqual(model['name'], 'Untitled.ipynb') - self.assertEqual(model['path'], 'foo/Untitled.ipynb') - - def test_delete(self): - cm = self.contents_manager - # Create a notebook - nb, name, path = self.new_notebook() - - # Delete the notebook - cm.delete(path) - - # Check that deleting a non-existent path raises an error. - self.assertRaises(HTTPError, cm.delete, path) - - # Check that a 'get' on the deleted notebook raises and error - self.assertRaises(HTTPError, cm.get, path) - - def test_rename(self): - cm = self.contents_manager - # Create a new notebook - nb, name, path = self.new_notebook() - - # Rename the notebook - cm.rename(path, "changed_path") - - # Attempting to get the notebook under the old name raises an error - self.assertRaises(HTTPError, cm.get, path) - # Fetching the notebook under the new name is successful - assert isinstance(cm.get("changed_path"), dict) - - # Ported tests on nested directory renaming from pgcontents - all_dirs = ['foo', 'bar', 'foo/bar', 'foo/bar/foo', 'foo/bar/foo/bar'] - unchanged_dirs = all_dirs[:2] - changed_dirs = all_dirs[2:] - - for _dir in all_dirs: - self.make_populated_dir(_dir) - self.check_populated_dir_files(_dir) - - # Renaming to an existing directory should fail - for src, dest in combinations(all_dirs, 2): - with self.assertRaisesHTTPError(409): - cm.rename(src, dest) - - # Creating a notebook in a non_existant directory should fail - with self.assertRaisesHTTPError(404): - cm.new_untitled("foo/bar_diff", ext=".ipynb") - - 
cm.rename("foo/bar", "foo/bar_diff") - - # Assert that unchanged directories remain so - for unchanged in unchanged_dirs: - self.check_populated_dir_files(unchanged) - - # Assert changed directories can no longer be accessed under old names - for changed_dirname in changed_dirs: - with self.assertRaisesHTTPError(404): - cm.get(changed_dirname) - - new_dirname = changed_dirname.replace("foo/bar", "foo/bar_diff", 1) - - self.check_populated_dir_files(new_dirname) - - # Created a notebook in the renamed directory should work - cm.new_untitled("foo/bar_diff", ext=".ipynb") - - def test_delete_root(self): - cm = self.contents_manager - with self.assertRaises(HTTPError) as err: - cm.delete('') - self.assertEqual(err.exception.status_code, 400) - - def test_copy(self): - cm = self.contents_manager - parent = u'å b' - name = u'nb √.ipynb' - path = u'{0}/{1}'.format(parent, name) - self.make_dir(parent) - - orig = cm.new(path=path) - # copy with unspecified name - copy = cm.copy(path) - self.assertEqual(copy['name'], orig['name'].replace('.ipynb', '-Copy1.ipynb')) - - # copy with specified name - copy2 = cm.copy(path, u'å b/copy 2.ipynb') - self.assertEqual(copy2['name'], u'copy 2.ipynb') - self.assertEqual(copy2['path'], u'å b/copy 2.ipynb') - # copy with specified path - copy2 = cm.copy(path, u'/') - self.assertEqual(copy2['name'], name) - self.assertEqual(copy2['path'], name) - - def test_trust_notebook(self): - cm = self.contents_manager - nb, name, path = self.new_notebook() - - untrusted = cm.get(path)['content'] - assert not cm.notary.check_cells(untrusted) - - # print(untrusted) - cm.trust_notebook(path) - trusted = cm.get(path)['content'] - # print(trusted) - assert cm.notary.check_cells(trusted) - - def test_mark_trusted_cells(self): - cm = self.contents_manager - nb, name, path = self.new_notebook() - - cm.mark_trusted_cells(nb, path) - for cell in nb.cells: - if cell.cell_type == 'code': - assert not cell.metadata.trusted - - cm.trust_notebook(path) - nb = 
cm.get(path)['content'] - for cell in nb.cells: - if cell.cell_type == 'code': - assert cell.metadata.trusted - - def test_check_and_sign(self): - cm = self.contents_manager - nb, name, path = self.new_notebook() - - cm.mark_trusted_cells(nb, path) - cm.check_and_sign(nb, path) - assert not cm.notary.check_signature(nb) - - cm.trust_notebook(path) - nb = cm.get(path)['content'] - cm.mark_trusted_cells(nb, path) - cm.check_and_sign(nb, path) - assert cm.notary.check_signature(nb) - - -class TestContentsManagerNoAtomic(TestContentsManager): - """ - Make same test in no atomic case than in atomic case, using inheritance - """ - - def setUp(self): - self._temp_dir = TemporaryDirectory() - self.td = self._temp_dir.name - self.contents_manager = FileContentsManager( - root_dir = self.td, - ) - self.contents_manager.use_atomic_writing = False diff --git a/jupyter_server/services/kernels/tests/test_kernels_api.py b/jupyter_server/services/kernels/tests/test_kernels_api.py deleted file mode 100644 index 7a593bc8b2..0000000000 --- a/jupyter_server/services/kernels/tests/test_kernels_api.py +++ /dev/null @@ -1,206 +0,0 @@ -"""Test the kernels service API.""" - -import json -import time - -from traitlets.config import Config - -from tornado.httpclient import HTTPRequest -from tornado.ioloop import IOLoop -from tornado.websocket import websocket_connect - -from jupyter_client.kernelspec import NATIVE_KERNEL_NAME - -from jupyter_server.utils import url_path_join -from jupyter_server.tests.launchserver import ServerTestBase, assert_http_error - - - -class KernelAPI(object): - """Wrapper for kernel REST API requests""" - def __init__(self, request, base_url, headers): - self.request = request - self.base_url = base_url - self.headers = headers - - def _req(self, verb, path, body=None): - response = self.request(verb, - url_path_join('api/kernels', path), data=body) - - if 400 <= response.status_code < 600: - try: - response.reason = response.json()['message'] - except: - pass - 
response.raise_for_status() - - return response - - def list(self): - return self._req('GET', '') - - def get(self, id): - return self._req('GET', id) - - def start(self, name=NATIVE_KERNEL_NAME): - body = json.dumps({'name': name}) - return self._req('POST', '', body) - - def shutdown(self, id): - return self._req('DELETE', id) - - def interrupt(self, id): - return self._req('POST', url_path_join(id, 'interrupt')) - - def restart(self, id): - return self._req('POST', url_path_join(id, 'restart')) - - def websocket(self, id): - loop = IOLoop() - loop.make_current() - req = HTTPRequest( - url_path_join(self.base_url.replace('http', 'ws', 1), 'api/kernels', id, 'channels'), - headers=self.headers, - ) - f = websocket_connect(req) - return loop.run_sync(lambda : f) - - -class KernelAPITest(ServerTestBase): - """Test the kernels web service API""" - def setUp(self): - self.kern_api = KernelAPI(self.request, - base_url=self.base_url(), - headers=self.auth_headers(), - ) - - def tearDown(self): - for k in self.kern_api.list().json(): - self.kern_api.shutdown(k['id']) - - def test_no_kernels(self): - """Make sure there are no kernels running at the start""" - kernels = self.kern_api.list().json() - self.assertEqual(kernels, []) - - def test_default_kernel(self): - # POST request - r = self.kern_api._req('POST', '') - kern1 = r.json() - self.assertEqual(r.headers['location'], url_path_join(self.url_prefix, 'api/kernels', kern1['id'])) - self.assertEqual(r.status_code, 201) - self.assertIsInstance(kern1, dict) - - report_uri = url_path_join(self.url_prefix, 'api/security/csp-report') - expected_csp = '; '.join([ - "frame-ancestors 'self'", - 'report-uri ' + report_uri, - "default-src 'none'" - ]) - self.assertEqual(r.headers['Content-Security-Policy'], expected_csp) - - def test_main_kernel_handler(self): - # POST request - r = self.kern_api.start() - kern1 = r.json() - self.assertEqual(r.headers['location'], url_path_join(self.url_prefix, 'api/kernels', kern1['id'])) - 
self.assertEqual(r.status_code, 201) - self.assertIsInstance(kern1, dict) - - report_uri = url_path_join(self.url_prefix, 'api/security/csp-report') - expected_csp = '; '.join([ - "frame-ancestors 'self'", - 'report-uri ' + report_uri, - "default-src 'none'" - ]) - self.assertEqual(r.headers['Content-Security-Policy'], expected_csp) - - # GET request - r = self.kern_api.list() - self.assertEqual(r.status_code, 200) - assert isinstance(r.json(), list) - self.assertEqual(r.json()[0]['id'], kern1['id']) - self.assertEqual(r.json()[0]['name'], kern1['name']) - - # create another kernel and check that they both are added to the - # list of kernels from a GET request - kern2 = self.kern_api.start().json() - assert isinstance(kern2, dict) - r = self.kern_api.list() - kernels = r.json() - self.assertEqual(r.status_code, 200) - assert isinstance(kernels, list) - self.assertEqual(len(kernels), 2) - - # Interrupt a kernel - r = self.kern_api.interrupt(kern2['id']) - self.assertEqual(r.status_code, 204) - - # Restart a kernel - r = self.kern_api.restart(kern2['id']) - rekern = r.json() - self.assertEqual(rekern['id'], kern2['id']) - self.assertEqual(rekern['name'], kern2['name']) - - def test_kernel_handler(self): - # GET kernel with given id - kid = self.kern_api.start().json()['id'] - r = self.kern_api.get(kid) - kern1 = r.json() - self.assertEqual(r.status_code, 200) - assert isinstance(kern1, dict) - self.assertIn('id', kern1) - self.assertEqual(kern1['id'], kid) - - # Request a bad kernel id and check that a JSON - # message is returned! 
- bad_id = '111-111-111-111-111' - with assert_http_error(404, 'Kernel does not exist: ' + bad_id): - self.kern_api.get(bad_id) - - # DELETE kernel with id - r = self.kern_api.shutdown(kid) - self.assertEqual(r.status_code, 204) - kernels = self.kern_api.list().json() - self.assertEqual(kernels, []) - - # Request to delete a non-existent kernel id - bad_id = '111-111-111-111-111' - with assert_http_error(404, 'Kernel does not exist: ' + bad_id): - self.kern_api.shutdown(bad_id) - - def test_connections(self): - kid = self.kern_api.start().json()['id'] - model = self.kern_api.get(kid).json() - self.assertEqual(model['connections'], 0) - - ws = self.kern_api.websocket(kid) - model = self.kern_api.get(kid).json() - self.assertEqual(model['connections'], 1) - ws.close() - # give it some time to close on the other side: - for i in range(10): - model = self.kern_api.get(kid).json() - if model['connections'] > 0: - time.sleep(0.1) - else: - break - model = self.kern_api.get(kid).json() - self.assertEqual(model['connections'], 0) - - -class KernelFilterTest(ServerTestBase): - # A special install of ServerTestBase where only `kernel_info_request` - # messages are allowed. - - config = Config({ - 'ServerApp': { - 'MappingKernelManager': { - 'allowed_message_types': ['kernel_info_request'] - } - } - }) - - # Sanity check verifying that the configurable was properly set. 
- def test_config(self): - self.assertEqual(self.server.kernel_manager.allowed_message_types, ['kernel_info_request']) diff --git a/jupyter_server/services/kernelspecs/tests/test_kernelspecs_api.py b/jupyter_server/services/kernelspecs/tests/test_kernelspecs_api.py deleted file mode 100644 index b9f696f87c..0000000000 --- a/jupyter_server/services/kernelspecs/tests/test_kernelspecs_api.py +++ /dev/null @@ -1,141 +0,0 @@ -# coding: utf-8 -"""Test the kernel specs webservice API.""" - -import errno -import io -import json -import os -import shutil - -pjoin = os.path.join - -import requests - -from jupyter_client.kernelspec import NATIVE_KERNEL_NAME -from jupyter_server.utils import url_path_join, url_escape -from jupyter_server.tests.launchserver import ServerTestBase, assert_http_error - -# Copied from jupyter_client.tests.test_kernelspec so updating that doesn't -# break these tests -sample_kernel_json = { - 'argv':['cat', '{connection_file}'], - 'display_name':'Test kernel', -} - -some_resource = u"The very model of a modern major general" - - -class KernelSpecAPI(object): - """Wrapper for API calls.""" - - def __init__(self, request): - self.request = request - - def _req(self, verb, path, body=None): - response = self.request(verb, - path, - data=body, - ) - response.raise_for_status() - return response - - def list(self): - return self._req('GET', 'api/kernelspecs') - - def kernel_spec_info(self, name): - return self._req('GET', url_path_join('api/kernelspecs', name)) - - def kernel_resource(self, name, path): - return self._req('GET', url_path_join('kernelspecs', name, path)) - - -class APITest(ServerTestBase): - """Test the kernelspec web service API""" - - def setUp(self): - self.create_spec('sample') - self.create_spec('sample 2') - self.ks_api = KernelSpecAPI(self.request) - - def create_spec(self, name): - sample_kernel_dir = pjoin(self.data_dir, 'kernels', name) - try: - os.makedirs(sample_kernel_dir) - except OSError as e: - if e.errno != errno.EEXIST: 
- raise - - with open(pjoin(sample_kernel_dir, 'kernel.json'), 'w') as f: - json.dump(sample_kernel_json, f) - - with io.open(pjoin(sample_kernel_dir, 'resource.txt'), 'w', - encoding='utf-8') as f: - f.write(some_resource) - - def test_list_kernelspecs_bad(self): - """Can list kernelspecs when one is invalid""" - bad_kernel_dir = pjoin(self.data_dir, 'kernels', 'bad') - try: - os.makedirs(bad_kernel_dir) - except OSError as e: - if e.errno != errno.EEXIST: - raise - - with open(pjoin(bad_kernel_dir, 'kernel.json'), 'w') as f: - f.write("garbage") - - model = self.ks_api.list().json() - assert isinstance(model, dict) - self.assertEqual(model['default'], NATIVE_KERNEL_NAME) - specs = model['kernelspecs'] - assert isinstance(specs, dict) - # 2: the sample kernelspec created in setUp, and the native Python kernel - self.assertGreaterEqual(len(specs), 2) - - shutil.rmtree(bad_kernel_dir) - - def test_list_kernelspecs(self): - model = self.ks_api.list().json() - assert isinstance(model, dict) - self.assertEqual(model['default'], NATIVE_KERNEL_NAME) - specs = model['kernelspecs'] - assert isinstance(specs, dict) - - # 2: the sample kernelspec created in setUp, and the native Python kernel - self.assertGreaterEqual(len(specs), 2) - - def is_sample_kernelspec(s): - return s['name'] == 'sample' and s['spec']['display_name'] == 'Test kernel' - - def is_default_kernelspec(s): - return s['name'] == NATIVE_KERNEL_NAME and s['spec']['display_name'].startswith("Python") - - assert any(is_sample_kernelspec(s) for s in specs.values()), specs - assert any(is_default_kernelspec(s) for s in specs.values()), specs - - def test_get_kernelspec(self): - model = self.ks_api.kernel_spec_info('Sample').json() # Case insensitive - self.assertEqual(model['name'].lower(), 'sample') - self.assertIsInstance(model['spec'], dict) - self.assertEqual(model['spec']['display_name'], 'Test kernel') - self.assertIsInstance(model['resources'], dict) - - def test_get_kernelspec_spaces(self): - model = 
self.ks_api.kernel_spec_info('sample%202').json() - self.assertEqual(model['name'].lower(), 'sample 2') - - def test_get_nonexistant_kernelspec(self): - with assert_http_error(404): - self.ks_api.kernel_spec_info('nonexistant') - - def test_get_kernel_resource_file(self): - res = self.ks_api.kernel_resource('sAmple', 'resource.txt') - self.assertEqual(res.text, some_resource) - - def test_get_nonexistant_resource(self): - with assert_http_error(404): - self.ks_api.kernel_resource('nonexistant', 'resource.txt') - - with assert_http_error(404): - self.ks_api.kernel_resource('sample', 'nonexistant.txt') - diff --git a/jupyter_server/services/nbconvert/tests/test_nbconvert_api.py b/jupyter_server/services/nbconvert/tests/test_nbconvert_api.py deleted file mode 100644 index 87a56edf7d..0000000000 --- a/jupyter_server/services/nbconvert/tests/test_nbconvert_api.py +++ /dev/null @@ -1,35 +0,0 @@ -import requests - -from jupyter_server.utils import url_path_join -from jupyter_server.tests.launchserver import ServerTestBase - - -class NbconvertAPI(object): - """Wrapper for nbconvert API calls.""" - - def __init__(self, request): - self.request = request - - def _req(self, verb, path, body=None, params=None): - response = self.request(verb, - url_path_join('api/nbconvert', path), - data=body, params=params, - ) - response.raise_for_status() - return response - - def list_formats(self): - return self._req('GET', '') - - -class APITest(ServerTestBase): - - def setUp(self): - self.nbconvert_api = NbconvertAPI(self.request) - - def test_list_formats(self): - formats = self.nbconvert_api.list_formats().json() - self.assertIsInstance(formats, dict) - self.assertIn('python', formats) - self.assertIn('html', formats) - self.assertEqual(formats['python']['output_mimetype'], 'text/x-python') diff --git a/jupyter_server/services/sessions/tests/test_sessionmanager.py b/jupyter_server/services/sessions/tests/test_sessionmanager.py deleted file mode 100644 index 7b5d6433c8..0000000000 --- 
a/jupyter_server/services/sessions/tests/test_sessionmanager.py +++ /dev/null @@ -1,256 +0,0 @@ -"""Tests for the session manager.""" - -from functools import partial -from unittest import TestCase - -from tornado import gen, web -from tornado.ioloop import IOLoop - -from ..sessionmanager import SessionManager -from jupyter_server.services.kernels.kernelmanager import MappingKernelManager -from jupyter_server.services.contents.manager import ContentsManager -from jupyter_server._tz import utcnow, isoformat - -class DummyKernel(object): - def __init__(self, kernel_name='python'): - self.kernel_name = kernel_name - -dummy_date = utcnow() -dummy_date_s = isoformat(dummy_date) - -class DummyMKM(MappingKernelManager): - """MappingKernelManager interface that doesn't start kernels, for testing""" - def __init__(self, *args, **kwargs): - super(DummyMKM, self).__init__(*args, **kwargs) - self.id_letters = iter(u'ABCDEFGHIJK') - - def _new_id(self): - return next(self.id_letters) - - def start_kernel(self, kernel_id=None, path=None, kernel_name='python', **kwargs): - kernel_id = kernel_id or self._new_id() - k = self._kernels[kernel_id] = DummyKernel(kernel_name=kernel_name) - self._kernel_connections[kernel_id] = 0 - k.last_activity = dummy_date - k.execution_state = 'idle' - return kernel_id - - def shutdown_kernel(self, kernel_id, now=False): - del self._kernels[kernel_id] - - -class TestSessionManager(TestCase): - - def setUp(self): - self.sm = SessionManager( - kernel_manager=DummyMKM(), - contents_manager=ContentsManager(), - ) - self.loop = IOLoop() - self.addCleanup(partial(self.loop.close, all_fds=True)) - - def create_sessions(self, *kwarg_list): - @gen.coroutine - def co_add(): - sessions = [] - for kwargs in kwarg_list: - kwargs.setdefault('type', 'notebook') - session = yield self.sm.create_session(**kwargs) - sessions.append(session) - raise gen.Return(sessions) - return self.loop.run_sync(co_add) - - def create_session(self, **kwargs): - return 
self.create_sessions(kwargs)[0] - - def test_get_session(self): - sm = self.sm - session_id = self.create_session(path='/path/to/test.ipynb', kernel_name='bar')['id'] - model = self.loop.run_sync(lambda: sm.get_session(session_id=session_id)) - expected = {'id':session_id, - 'path': u'/path/to/test.ipynb', - 'notebook': {'path': u'/path/to/test.ipynb', 'name': None}, - 'type': 'notebook', - 'name': None, - 'kernel': { - 'id': 'A', - 'name': 'bar', - 'connections': 0, - 'last_activity': dummy_date_s, - 'execution_state': 'idle', - }} - self.assertEqual(model, expected) - - def test_bad_get_session(self): - # Should raise error if a bad key is passed to the database. - sm = self.sm - session_id = self.create_session(path='/path/to/test.ipynb', - kernel_name='foo')['id'] - with self.assertRaises(TypeError): - self.loop.run_sync(lambda: sm.get_session(bad_id=session_id)) # Bad keyword - - def test_get_session_dead_kernel(self): - sm = self.sm - session = self.create_session(path='/path/to/1/test1.ipynb', kernel_name='python') - # kill the kernel - sm.kernel_manager.shutdown_kernel(session['kernel']['id']) - with self.assertRaises(KeyError): - self.loop.run_sync(lambda: sm.get_session(session_id=session['id'])) - # no sessions left - listed = self.loop.run_sync(lambda: sm.list_sessions()) - self.assertEqual(listed, []) - - def test_list_sessions(self): - sm = self.sm - sessions = self.create_sessions( - dict(path='/path/to/1/test1.ipynb', kernel_name='python'), - dict(path='/path/to/2/test2.py', type='file', kernel_name='python'), - dict(path='/path/to/3', name='foo', type='console', kernel_name='python'), - ) - - sessions = self.loop.run_sync(lambda: sm.list_sessions()) - expected = [ - { - 'id':sessions[0]['id'], - 'path': u'/path/to/1/test1.ipynb', - 'type': 'notebook', - 'notebook': {'path': u'/path/to/1/test1.ipynb', 'name': None}, - 'name': None, - 'kernel': { - 'id': 'A', - 'name':'python', - 'connections': 0, - 'last_activity': dummy_date_s, - 'execution_state': 
'idle', - } - }, { - 'id':sessions[1]['id'], - 'path': u'/path/to/2/test2.py', - 'type': 'file', - 'name': None, - 'kernel': { - 'id': 'B', - 'name':'python', - 'connections': 0, - 'last_activity': dummy_date_s, - 'execution_state': 'idle', - } - }, { - 'id':sessions[2]['id'], - 'path': u'/path/to/3', - 'type': 'console', - 'name': 'foo', - 'kernel': { - 'id': 'C', - 'name':'python', - 'connections': 0, - 'last_activity': dummy_date_s, - 'execution_state': 'idle', - } - } - ] - self.assertEqual(sessions, expected) - - def test_list_sessions_dead_kernel(self): - sm = self.sm - sessions = self.create_sessions( - dict(path='/path/to/1/test1.ipynb', kernel_name='python'), - dict(path='/path/to/2/test2.ipynb', kernel_name='python'), - ) - # kill one of the kernels - sm.kernel_manager.shutdown_kernel(sessions[0]['kernel']['id']) - listed = self.loop.run_sync(lambda: sm.list_sessions()) - expected = [ - { - 'id': sessions[1]['id'], - 'path': u'/path/to/2/test2.ipynb', - 'type': 'notebook', - 'name': None, - 'notebook': {'path': u'/path/to/2/test2.ipynb', 'name': None}, - 'kernel': { - 'id': 'B', - 'name':'python', - 'connections': 0, - 'last_activity': dummy_date_s, - 'execution_state': 'idle', - } - } - ] - self.assertEqual(listed, expected) - - def test_update_session(self): - sm = self.sm - session_id = self.create_session(path='/path/to/test.ipynb', - kernel_name='julia')['id'] - self.loop.run_sync(lambda: sm.update_session(session_id, path='/path/to/new_name.ipynb')) - model = self.loop.run_sync(lambda: sm.get_session(session_id=session_id)) - expected = {'id':session_id, - 'path': u'/path/to/new_name.ipynb', - 'type': 'notebook', - 'name': None, - 'notebook': {'path': u'/path/to/new_name.ipynb', 'name': None}, - 'kernel': { - 'id': 'A', - 'name':'julia', - 'connections': 0, - 'last_activity': dummy_date_s, - 'execution_state': 'idle', - } - } - self.assertEqual(model, expected) - - def test_bad_update_session(self): - # try to update a session with a bad keyword ~ 
raise error - sm = self.sm - session_id = self.create_session(path='/path/to/test.ipynb', - kernel_name='ir')['id'] - with self.assertRaises(TypeError): - self.loop.run_sync(lambda: sm.update_session(session_id=session_id, bad_kw='test.ipynb')) # Bad keyword - - def test_delete_session(self): - sm = self.sm - sessions = self.create_sessions( - dict(path='/path/to/1/test1.ipynb', kernel_name='python'), - dict(path='/path/to/2/test2.ipynb', kernel_name='python'), - dict(path='/path/to/3', name='foo', type='console', kernel_name='python'), - ) - self.loop.run_sync(lambda: sm.delete_session(sessions[1]['id'])) - new_sessions = self.loop.run_sync(lambda: sm.list_sessions()) - expected = [{ - 'id': sessions[0]['id'], - 'path': u'/path/to/1/test1.ipynb', - 'type': 'notebook', - 'name': None, - 'notebook': {'path': u'/path/to/1/test1.ipynb', 'name': None}, - 'kernel': { - 'id': 'A', - 'name':'python', - 'connections': 0, - 'last_activity': dummy_date_s, - 'execution_state': 'idle', - } - }, { - 'id': sessions[2]['id'], - 'type': 'console', - 'path': u'/path/to/3', - 'name': 'foo', - 'kernel': { - 'id': 'C', - 'name':'python', - 'connections': 0, - 'last_activity': dummy_date_s, - 'execution_state': 'idle', - } - } - ] - self.assertEqual(new_sessions, expected) - - def test_bad_delete_session(self): - # try to delete a session that doesn't exist ~ raise error - sm = self.sm - self.create_session(path='/path/to/test.ipynb', kernel_name='python') - with self.assertRaises(TypeError): - self.loop.run_sync(lambda : sm.delete_session(bad_kwarg='23424')) # Bad keyword - with self.assertRaises(web.HTTPError): - self.loop.run_sync(lambda : sm.delete_session(session_id='23424')) # nonexistent - diff --git a/jupyter_server/services/sessions/tests/test_sessions_api.py b/jupyter_server/services/sessions/tests/test_sessions_api.py deleted file mode 100644 index ee2e4e9e0e..0000000000 --- a/jupyter_server/services/sessions/tests/test_sessions_api.py +++ /dev/null @@ -1,259 +0,0 @@ 
-"""Test the sessions web service API.""" - -import errno -from functools import partial -import io -import os -import json -import requests -import shutil -import time - -pjoin = os.path.join - -from jupyter_server.utils import url_path_join -from jupyter_server.tests.launchserver import ServerTestBase, assert_http_error -from nbformat.v4 import new_notebook -from nbformat import write - - -class SessionAPI(object): - """Wrapper for notebook API calls.""" - - def __init__(self, request): - self.request = request - - def _req(self, verb, path, body=None): - response = self.request(verb, - url_path_join('api/sessions', path), data=body) - - if 400 <= response.status_code < 600: - try: - response.reason = response.json()['message'] - except: - pass - response.raise_for_status() - - return response - - def list(self): - return self._req('GET', '') - - def get(self, id): - return self._req('GET', id) - - def create(self, path, type='notebook', kernel_name='python', kernel_id=None): - body = json.dumps({'path': path, - 'type': type, - 'kernel': {'name': kernel_name, - 'id': kernel_id}}) - return self._req('POST', '', body) - - def create_deprecated(self, path): - body = json.dumps({'notebook': {'path': path}, - 'kernel': {'name': 'python', - 'id': 'foo'}}) - return self._req('POST', '', body) - - def modify_path(self, id, path): - body = json.dumps({'path': path}) - return self._req('PATCH', id, body) - - def modify_path_deprecated(self, id, path): - body = json.dumps({'notebook': {'path': path}}) - return self._req('PATCH', id, body) - - def modify_type(self, id, type): - body = json.dumps({'type': type}) - return self._req('PATCH', id, body) - - def modify_kernel_name(self, id, kernel_name): - body = json.dumps({'kernel': {'name': kernel_name}}) - return self._req('PATCH', id, body) - - def modify_kernel_id(self, id, kernel_id): - # Also send a dummy name to show that id takes precedence. 
- body = json.dumps({'kernel': {'id': kernel_id, 'name': 'foo'}}) - return self._req('PATCH', id, body) - - def delete(self, id): - return self._req('DELETE', id) - - -class SessionAPITest(ServerTestBase): - """Test the sessions web service API""" - def setUp(self): - rootdir = self.root_dir - subdir = pjoin(rootdir, 'foo') - - try: - os.mkdir(subdir) - except OSError as e: - # Deleting the folder in an earlier test may have failed - if e.errno != errno.EEXIST: - raise - self.addCleanup(partial(shutil.rmtree, subdir, ignore_errors=True)) - - with io.open(pjoin(subdir, 'nb1.ipynb'), 'w', encoding='utf-8') as f: - nb = new_notebook() - write(nb, f, version=4) - - self.sess_api = SessionAPI(self.request) - - @self.addCleanup - def cleanup_sessions(): - for session in self.sess_api.list().json(): - self.sess_api.delete(session['id']) - - # This is necessary in some situations on Windows: without it, it - # fails to delete the directory because something is still using - # it. I think there is a brief period after the kernel terminates - # where Windows still treats its working directory as in use. On my - # Windows VM, 0.01s is not long enough, but 0.1s appears to work - # reliably. 
-- TK, 15 December 2014 - time.sleep(0.1) - - def test_create(self): - sessions = self.sess_api.list().json() - self.assertEqual(len(sessions), 0) - - resp = self.sess_api.create('foo/nb1.ipynb') - self.assertEqual(resp.status_code, 201) - newsession = resp.json() - self.assertIn('id', newsession) - self.assertEqual(newsession['path'], 'foo/nb1.ipynb') - self.assertEqual(newsession['type'], 'notebook') - self.assertEqual(resp.headers['Location'], self.url_prefix + 'api/sessions/{0}'.format(newsession['id'])) - - sessions = self.sess_api.list().json() - self.assertEqual(sessions, [newsession]) - - # Retrieve it - sid = newsession['id'] - got = self.sess_api.get(sid).json() - self.assertEqual(got, newsession) - - def test_create_file_session(self): - resp = self.sess_api.create('foo/nb1.py', type='file') - self.assertEqual(resp.status_code, 201) - newsession = resp.json() - self.assertEqual(newsession['path'], 'foo/nb1.py') - self.assertEqual(newsession['type'], 'file') - - def test_create_console_session(self): - resp = self.sess_api.create('foo/abc123', type='console') - self.assertEqual(resp.status_code, 201) - newsession = resp.json() - self.assertEqual(newsession['path'], 'foo/abc123') - self.assertEqual(newsession['type'], 'console') - - def test_create_deprecated(self): - resp = self.sess_api.create_deprecated('foo/nb1.ipynb') - self.assertEqual(resp.status_code, 201) - newsession = resp.json() - self.assertEqual(newsession['path'], 'foo/nb1.ipynb') - self.assertEqual(newsession['type'], 'notebook') - self.assertEqual(newsession['notebook']['path'], 'foo/nb1.ipynb') - - def test_create_with_kernel_id(self): - # create a new kernel - r = self.request('POST', 'api/kernels') - r.raise_for_status() - kernel = r.json() - - resp = self.sess_api.create('foo/nb1.ipynb', kernel_id=kernel['id']) - self.assertEqual(resp.status_code, 201) - newsession = resp.json() - self.assertIn('id', newsession) - self.assertEqual(newsession['path'], 'foo/nb1.ipynb') - 
self.assertEqual(newsession['kernel']['id'], kernel['id']) - self.assertEqual(resp.headers['Location'], self.url_prefix + 'api/sessions/{0}'.format(newsession['id'])) - - sessions = self.sess_api.list().json() - self.assertEqual(sessions, [newsession]) - - # Retrieve it - sid = newsession['id'] - got = self.sess_api.get(sid).json() - self.assertEqual(got, newsession) - - def test_delete(self): - newsession = self.sess_api.create('foo/nb1.ipynb').json() - sid = newsession['id'] - - resp = self.sess_api.delete(sid) - self.assertEqual(resp.status_code, 204) - - sessions = self.sess_api.list().json() - self.assertEqual(sessions, []) - - with assert_http_error(404): - self.sess_api.get(sid) - - def test_modify_path(self): - newsession = self.sess_api.create('foo/nb1.ipynb').json() - sid = newsession['id'] - - changed = self.sess_api.modify_path(sid, 'nb2.ipynb').json() - self.assertEqual(changed['id'], sid) - self.assertEqual(changed['path'], 'nb2.ipynb') - - def test_modify_path_deprecated(self): - newsession = self.sess_api.create('foo/nb1.ipynb').json() - sid = newsession['id'] - - changed = self.sess_api.modify_path_deprecated(sid, 'nb2.ipynb').json() - self.assertEqual(changed['id'], sid) - self.assertEqual(changed['notebook']['path'], 'nb2.ipynb') - - def test_modify_type(self): - newsession = self.sess_api.create('foo/nb1.ipynb').json() - sid = newsession['id'] - - changed = self.sess_api.modify_type(sid, 'console').json() - self.assertEqual(changed['id'], sid) - self.assertEqual(changed['type'], 'console') - - def test_modify_kernel_name(self): - before = self.sess_api.create('foo/nb1.ipynb').json() - sid = before['id'] - - after = self.sess_api.modify_kernel_name(sid, before['kernel']['name']).json() - self.assertEqual(after['id'], sid) - self.assertEqual(after['path'], before['path']) - self.assertEqual(after['type'], before['type']) - self.assertNotEqual(after['kernel']['id'], before['kernel']['id']) - - # check kernel list, to be sure previous kernel was 
cleaned up - r = self.request('GET', 'api/kernels') - r.raise_for_status() - kernel_list = r.json() - after['kernel'].pop('last_activity') - [ k.pop('last_activity') for k in kernel_list ] - self.assertEqual(kernel_list, [after['kernel']]) - - def test_modify_kernel_id(self): - before = self.sess_api.create('foo/nb1.ipynb').json() - sid = before['id'] - - # create a new kernel - r = self.request('POST', 'api/kernels') - r.raise_for_status() - kernel = r.json() - - # Attach our session to the existing kernel - after = self.sess_api.modify_kernel_id(sid, kernel['id']).json() - self.assertEqual(after['id'], sid) - self.assertEqual(after['path'], before['path']) - self.assertEqual(after['type'], before['type']) - self.assertNotEqual(after['kernel']['id'], before['kernel']['id']) - self.assertEqual(after['kernel']['id'], kernel['id']) - - # check kernel list, to be sure previous kernel was cleaned up - r = self.request('GET', 'api/kernels') - r.raise_for_status() - kernel_list = r.json() - - kernel.pop('last_activity') - [ k.pop('last_activity') for k in kernel_list ] - self.assertEqual(kernel_list, [kernel]) diff --git a/jupyter_server/tests/README.md b/jupyter_server/tests/README.md deleted file mode 100644 index f910471abc..0000000000 --- a/jupyter_server/tests/README.md +++ /dev/null @@ -1,25 +0,0 @@ -# IPython Notebook JavaScript Tests - -This directory includes regression tests for the web notebook. These tests -depend on [CasperJS](http://casperjs.org/), which in turn requires a recent -version of [PhantomJS](http://phantomjs.org/). - -The JavaScript tests are organized into subdirectories that match those in -`static` (`base`, `notebook`, `services`, `tree`, etc.). 
- -To run all of the JavaScript tests do: - -``` -python -m notebook.jstest -``` - -To run the JavaScript tests for a specific file (`base/utils.js` in this case) -do: - -``` -python -m notebook.jstest base/utils.js -``` - -The file `jstest.py` will automatically launch a notebook server to run the -tests against. You can however specify the url of a running notebook server -by using `--url=http://localhost:8888`. diff --git a/jupyter_server/tests/_testdata/black_square_22.png b/jupyter_server/tests/_testdata/black_square_22.png deleted file mode 100644 index 371f52044f..0000000000 Binary files a/jupyter_server/tests/_testdata/black_square_22.png and /dev/null differ diff --git a/jupyter_server/tests/launchserver.py b/jupyter_server/tests/launchserver.py deleted file mode 100644 index 0d572705fa..0000000000 --- a/jupyter_server/tests/launchserver.py +++ /dev/null @@ -1,216 +0,0 @@ -"""Base class for server tests.""" - -from __future__ import print_function - -from binascii import hexlify -from contextlib import contextmanager -import errno -import os -import sys -from threading import Thread, Event -import time -from unittest import TestCase - -pjoin = os.path.join - -from unittest.mock import patch -import requests -from tornado.ioloop import IOLoop -import zmq - -import jupyter_core.paths -from traitlets.config import Config -from ..serverapp import ServerApp -from ..utils import url_path_join -from ipython_genutils.tempdir import TemporaryDirectory - -MAX_WAITTIME = 30 # seconds to wait for Jupyter server to start -POLL_INTERVAL = 0.1 # time between attempts - - -# TimeoutError is a builtin on Python 3. This can be removed when we stop -# supporting Python 2. -class TimeoutError(Exception): - pass - - -class ServerTestBase(TestCase): - """A base class for tests that need a running Jupyter server. - - This create some empty config and runtime directories - and then starts the Jupyter server with them. 
- """ - - port = 12341 - config = None - # run with a base URL that would be escaped, - # to test that we don't double-escape URLs - url_prefix = '/a%40b/' - - @classmethod - def wait_until_alive(cls): - """Wait for the server to be alive""" - url = cls.base_url() + 'api/contents' - for _ in range(int(MAX_WAITTIME/POLL_INTERVAL)): - try: - requests.get(url) - except Exception as e: - if not cls.server_thread.is_alive(): - raise RuntimeError("The Jupyter server failed to start") - time.sleep(POLL_INTERVAL) - else: - return - - raise TimeoutError("The Jupyter server didn't start up correctly.") - - @classmethod - def wait_until_dead(cls): - """Wait for the server process to terminate after shutdown""" - cls.server_thread.join(timeout=MAX_WAITTIME) - if cls.server_thread.is_alive(): - raise TimeoutError("Undead Jupyter server") - - @classmethod - def auth_headers(cls): - headers = {} - if cls.token: - headers['Authorization'] = 'token %s' % cls.token - return headers - - @classmethod - def request(cls, verb, path, **kwargs): - """Send a request to my server - - with authentication and everything. 
- """ - headers = kwargs.setdefault('headers', {}) - headers.update(cls.auth_headers()) - response = requests.request(verb, - url_path_join(cls.base_url(), path), - **kwargs) - return response - - @classmethod - def get_patch_env(cls): - return { - 'HOME': cls.home_dir, - 'PYTHONPATH': os.pathsep.join(sys.path), - 'JUPYTER_NO_CONFIG': '1', # needed in the future - 'JUPYTER_CONFIG_DIR' : cls.config_dir, - 'JUPYTER_DATA_DIR' : cls.data_dir, - 'JUPYTER_RUNTIME_DIR': cls.runtime_dir, - } - - @classmethod - def get_argv(cls): - return [] - - @classmethod - def setup_class(cls): - cls.tmp_dir = TemporaryDirectory() - def tmp(*parts): - path = os.path.join(cls.tmp_dir.name, *parts) - try: - os.makedirs(path) - except OSError as e: - if e.errno != errno.EEXIST: - raise - return path - - cls.home_dir = tmp('home') - data_dir = cls.data_dir = tmp('data') - config_dir = cls.config_dir = tmp('config') - runtime_dir = cls.runtime_dir = tmp('runtime') - cls.root_dir = tmp('root_dir') - cls.env_patch = patch.dict('os.environ', cls.get_patch_env()) - cls.env_patch.start() - cls.path_patch = patch.multiple( - jupyter_core.paths, - SYSTEM_JUPYTER_PATH=[tmp('share', 'jupyter')], - ENV_JUPYTER_PATH=[tmp('env', 'share', 'jupyter')], - SYSTEM_CONFIG_PATH=[tmp('etc', 'jupyter')], - ENV_CONFIG_PATH=[tmp('env', 'etc', 'jupyter')], - ) - cls.path_patch.start() - - config = cls.config or Config() - config.NotebookNotary.db_file = ':memory:' - - cls.token = hexlify(os.urandom(4)).decode('ascii') - - started = Event() - def start_thread(): - if 'asyncio' in sys.modules: - import asyncio - asyncio.set_event_loop(asyncio.new_event_loop()) - app = cls.server = ServerApp( - port=cls.port, - port_retries=0, - open_browser=False, - config_dir=cls.config_dir, - data_dir=cls.data_dir, - runtime_dir=cls.runtime_dir, - root_dir=cls.root_dir, - base_url=cls.url_prefix, - config=config, - allow_root=True, - token=cls.token, - ) - # don't register signal handler during tests - app.init_signal = lambda : 
None - # clear log handlers and propagate to root for nose to capture it - # needs to be redone after initialize, which reconfigures logging - app.log.propagate = True - app.log.handlers = [] - app.initialize(argv=cls.get_argv()) - app.log.propagate = True - app.log.handlers = [] - loop = IOLoop.current() - loop.add_callback(started.set) - try: - app.start() - finally: - # set the event, so failure to start doesn't cause a hang - started.set() - app.session_manager.close() - cls.server_thread = Thread(target=start_thread) - cls.server_thread.daemon = True - cls.server_thread.start() - started.wait() - cls.wait_until_alive() - - @classmethod - def teardown_class(cls): - cls.server.stop() - cls.wait_until_dead() - cls.env_patch.stop() - cls.path_patch.stop() - cls.tmp_dir.cleanup() - # cleanup global zmq Context, to ensure we aren't leaving dangling sockets - def cleanup_zmq(): - zmq.Context.instance().term() - t = Thread(target=cleanup_zmq) - t.daemon = True - t.start() - t.join(5) # give it a few seconds to clean up (this should be immediate) - # if term never returned, there's zmq stuff still open somewhere, so shout about it. 
- if t.is_alive(): - raise RuntimeError("Failed to teardown zmq Context, open sockets likely left lying around.") - - @classmethod - def base_url(cls): - return 'http://localhost:%i%s' % (cls.port, cls.url_prefix) - - -@contextmanager -def assert_http_error(status, msg=None): - try: - yield - except requests.HTTPError as e: - real_status = e.response.status_code - assert real_status == status, \ - "Expected status %d, got %d" % (status, real_status) - if msg: - assert msg in str(e), e - else: - assert False, "Expected HTTP error status" diff --git a/jupyter_server/tests/test_config_manager.py b/jupyter_server/tests/test_config_manager.py deleted file mode 100644 index 6d08d206ac..0000000000 --- a/jupyter_server/tests/test_config_manager.py +++ /dev/null @@ -1,57 +0,0 @@ -import json -import os -import shutil -import tempfile - -from jupyter_server.config_manager import BaseJSONConfigManager - - -def test_json(): - tmpdir = tempfile.mkdtemp() - try: - root_data = dict(a=1, x=2, nest={'a':1, 'x':2}) - with open(os.path.join(tmpdir, 'foo.json'), 'w') as f: - json.dump(root_data, f) - # also make a foo.d/ directory with multiple json files - os.makedirs(os.path.join(tmpdir, 'foo.d')) - with open(os.path.join(tmpdir, 'foo.d', 'a.json'), 'w') as f: - json.dump(dict(a=2, b=1, nest={'a':2, 'b':1}), f) - with open(os.path.join(tmpdir, 'foo.d', 'b.json'), 'w') as f: - json.dump(dict(a=3, b=2, c=3, nest={'a':3, 'b':2, 'c':3}, only_in_b={'x':1}), f) - manager = BaseJSONConfigManager(config_dir=tmpdir, read_directory=False) - data = manager.get('foo') - assert 'a' in data - assert 'x' in data - assert 'b' not in data - assert 'c' not in data - assert data['a'] == 1 - assert 'x' in data['nest'] - # if we write it out, it also shouldn't pick up the subdirectoy - manager.set('foo', data) - data = manager.get('foo') - assert data == root_data - - manager = BaseJSONConfigManager(config_dir=tmpdir, read_directory=True) - data = manager.get('foo') - assert 'a' in data - assert 'b' in 
data - assert 'c' in data - # files should be read in order foo.d/a.json foo.d/b.json foo.json - assert data['a'] == 1 - assert data['b'] == 2 - assert data['c'] == 3 - assert data['nest']['a'] == 1 - assert data['nest']['b'] == 2 - assert data['nest']['c'] == 3 - assert data['nest']['x'] == 2 - - # when writing out, we don't want foo.d/*.json data to be included in the root foo.json - manager.set('foo', data) - manager = BaseJSONConfigManager(config_dir=tmpdir, read_directory=False) - data = manager.get('foo') - assert data == root_data - - finally: - shutil.rmtree(tmpdir) - - diff --git a/jupyter_server/tests/test_extensions.py b/jupyter_server/tests/test_extensions.py deleted file mode 100644 index 9d76dd7c8a..0000000000 --- a/jupyter_server/tests/test_extensions.py +++ /dev/null @@ -1,189 +0,0 @@ -import imp -import os -import sys -from unittest import TestCase -from unittest.mock import patch - -from ipython_genutils.tempdir import TemporaryDirectory -from ipython_genutils import py3compat - -from jupyter_server.config_manager import BaseJSONConfigManager -from traitlets.tests.utils import check_help_all_output -from jupyter_core import paths - -from jupyter_server.extensions import toggle_serverextension_python, _get_config_dir -from jupyter_server import extensions, extensions_base -from jupyter_server.serverapp import ServerApp -from types import SimpleNamespace -from collections import OrderedDict - -def test_help_output(): - check_help_all_output('jupyter_server.extensions') - check_help_all_output('jupyter_server.extensions', ['enable']) - check_help_all_output('jupyter_server.extensions', ['disable']) - check_help_all_output('jupyter_server.extensions', ['install']) - check_help_all_output('jupyter_server.extensions', ['uninstall']) - -outer_file = __file__ - - -class MockExtensionModule(object): - __file__ = outer_file - - @staticmethod - def _jupyter_server_extension_paths(): - return [{ - 'module': '_mockdestination/index' - }] - - loaded = False - - 
def load_jupyter_server_extension(self, app): - self.loaded = True - - -class MockEnvTestCase(TestCase): - - def tempdir(self): - td = TemporaryDirectory() - self.tempdirs.append(td) - return py3compat.cast_unicode(td.name) - - def setUp(self): - self.tempdirs = [] - self._mock_extensions = [] - - self.test_dir = self.tempdir() - self.data_dir = os.path.join(self.test_dir, 'data') - self.config_dir = os.path.join(self.test_dir, 'config') - self.system_data_dir = os.path.join(self.test_dir, 'system_data') - self.system_config_dir = os.path.join(self.test_dir, 'system_config') - self.system_path = [self.system_data_dir] - self.system_config_path = [self.system_config_dir] - - self.patches = [] - p = patch.dict('os.environ', { - 'JUPYTER_CONFIG_DIR': self.config_dir, - 'JUPYTER_DATA_DIR': self.data_dir, - }) - self.patches.append(p) - for mod in (paths,): - p = patch.object(mod, - 'SYSTEM_JUPYTER_PATH', self.system_path) - self.patches.append(p) - p = patch.object(mod, - 'ENV_JUPYTER_PATH', []) - self.patches.append(p) - for mod in (paths, extensions_base): - p = patch.object(mod, - 'SYSTEM_CONFIG_PATH', self.system_config_path) - self.patches.append(p) - p = patch.object(mod, - 'ENV_CONFIG_PATH', []) - self.patches.append(p) - for p in self.patches: - p.start() - self.addCleanup(p.stop) - # verify our patches - self.assertEqual(paths.jupyter_config_path(), [self.config_dir] + self.system_config_path) - self.assertEqual(extensions_base._get_config_dir(user=False), self.system_config_dir) - self.assertEqual(paths.jupyter_path(), [self.data_dir] + self.system_path) - - def tearDown(self): - for modulename in self._mock_extensions: - sys.modules.pop(modulename) - - def _inject_mock_extension(self, modulename='mockextension'): - - sys.modules[modulename] = ext = MockExtensionModule() - self._mock_extensions.append(modulename) - return ext - - -class TestInstallServerExtension(MockEnvTestCase): - - def _get_config(self, user=True): - cm = 
BaseJSONConfigManager(config_dir=_get_config_dir(user)) - data = cm.get("jupyter_server_config") - return data.get("ServerApp", {}).get("jpserver_extensions", {}) - - def test_enable(self): - self._inject_mock_extension() - toggle_serverextension_python('mockextension', True) - - config = self._get_config() - assert config['mockextension'] - - def test_disable(self): - self._inject_mock_extension() - toggle_serverextension_python('mockextension', True) - toggle_serverextension_python('mockextension', False) - - config = self._get_config() - assert not config['mockextension'] - - def test_merge_config(self): - # enabled at sys level - mock_sys = self._inject_mock_extension('mockext_sys') - # enabled at sys, disabled at user - mock_both = self._inject_mock_extension('mockext_both') - # enabled at user - mock_user = self._inject_mock_extension('mockext_user') - # enabled at Python - mock_py = self._inject_mock_extension('mockext_py') - - toggle_serverextension_python('mockext_sys', enabled=True, user=False) - toggle_serverextension_python('mockext_user', enabled=True, user=True) - toggle_serverextension_python('mockext_both', enabled=True, user=False) - toggle_serverextension_python('mockext_both', enabled=False, user=True) - - app = ServerApp(jpserver_extensions={'mockext_py': True}) - app.init_server_extension_config() - app.init_server_extensions() - - assert mock_user.loaded - assert mock_sys.loaded - assert mock_py.loaded - assert not mock_both.loaded - - -class TestOrderedServerExtension(MockEnvTestCase): - """ - Test that Server Extensions are loaded _in order_ - """ - - def setUp(self): - super(TestOrderedServerExtension, self).setUp() - mockextension1 = SimpleNamespace() - mockextension2 = SimpleNamespace() - - def load_jupyter_server_extension(obj): - obj.mockI = True - obj.mock_shared = 'I' - - mockextension1.load_jupyter_server_extension = load_jupyter_server_extension - - def load_jupyter_server_extension(obj): - obj.mockII = True - obj.mock_shared = 'II' 
- - mockextension2.load_jupyter_server_extension = load_jupyter_server_extension - - sys.modules['mockextension2'] = mockextension2 - sys.modules['mockextension1'] = mockextension1 - - def tearDown(self): - super(TestOrderedServerExtension, self).tearDown() - del sys.modules['mockextension2'] - del sys.modules['mockextension1'] - - - def test_load_ordered(self): - app = ServerApp() - app.jpserver_extensions = OrderedDict([('mockextension2',True),('mockextension1',True)]) - - app.init_server_extensions() - - assert app.mockII is True, "Mock II should have been loaded" - assert app.mockI is True, "Mock I should have been loaded" - assert app.mock_shared == 'II', "Mock II should be loaded after Mock I" diff --git a/jupyter_server/tests/test_files.py b/jupyter_server/tests/test_files.py deleted file mode 100644 index 849726f8bd..0000000000 --- a/jupyter_server/tests/test_files.py +++ /dev/null @@ -1,172 +0,0 @@ -# coding: utf-8 -"""Test the /files/ handler.""" - -import io -import os -from unicodedata import normalize - -pjoin = os.path.join - -import requests -import json - -from nbformat import write -from nbformat.v4 import (new_notebook, - new_markdown_cell, new_code_cell, - new_output) - -from jupyter_server.utils import url_path_join -from .launchserver import ServerTestBase -from ipython_genutils import py3compat - - -class FilesTest(ServerTestBase): - - def test_hidden_files(self): - not_hidden = [ - u'å b', - u'å b/ç. 
d', - ] - hidden = [ - u'.å b', - u'å b/.ç d', - ] - dirs = not_hidden + hidden - - rootdir = self.root_dir - for d in dirs: - path = pjoin(rootdir, d.replace('/', os.sep)) - if not os.path.exists(path): - os.mkdir(path) - with open(pjoin(path, 'foo'), 'w') as f: - f.write('foo') - with open(pjoin(path, '.foo'), 'w') as f: - f.write('.foo') - - for d in not_hidden: - path = pjoin(rootdir, d.replace('/', os.sep)) - r = self.request('GET', url_path_join('files', d, 'foo')) - r.raise_for_status() - self.assertEqual(r.text, 'foo') - r = self.request('GET', url_path_join('files', d, '.foo')) - self.assertEqual(r.status_code, 404) - - for d in hidden: - path = pjoin(rootdir, d.replace('/', os.sep)) - for foo in ('foo', '.foo'): - r = self.request('GET', url_path_join('files', d, foo)) - self.assertEqual(r.status_code, 404) - - self.server.contents_manager.allow_hidden = True - try: - for d in not_hidden: - path = pjoin(rootdir, d.replace('/', os.sep)) - r = self.request('GET', url_path_join('files', d, 'foo')) - r.raise_for_status() - self.assertEqual(r.text, 'foo') - r = self.request('GET', url_path_join('files', d, '.foo')) - r.raise_for_status() - self.assertEqual(r.text, '.foo') - - for d in hidden: - path = pjoin(rootdir, d.replace('/', os.sep)) - for foo in ('foo', '.foo'): - r = self.request('GET', url_path_join('files', d, foo)) - r.raise_for_status() - self.assertEqual(r.text, foo) - finally: - self.server.contents_manager.allow_hidden = False - - def test_contents_manager(self): - "make sure ContentsManager returns right files (ipynb, bin, txt)." 
- - rootdir = self.root_dir - - nb = new_notebook( - cells=[ - new_markdown_cell(u'Created by test ³'), - new_code_cell("print(2*6)", outputs=[ - new_output("stream", text="12"), - ]) - ] - ) - - with io.open(pjoin(rootdir, 'testnb.ipynb'), 'w', - encoding='utf-8') as f: - write(nb, f, version=4) - - with io.open(pjoin(rootdir, 'test.bin'), 'wb') as f: - f.write(b'\xff' + os.urandom(5)) - f.close() - - with io.open(pjoin(rootdir, 'test.txt'), 'w') as f: - f.write(u'foobar') - f.close() - - r = self.request('GET', 'files/testnb.ipynb') - self.assertEqual(r.status_code, 200) - self.assertIn('print(2*6)', r.text) - json.loads(r.text) - - r = self.request('GET', 'files/test.bin') - self.assertEqual(r.status_code, 200) - self.assertEqual(r.headers['content-type'], 'application/octet-stream') - self.assertEqual(r.content[:1], b'\xff') - self.assertEqual(len(r.content), 6) - - r = self.request('GET', 'files/test.txt') - self.assertEqual(r.status_code, 200) - self.assertEqual(r.headers['content-type'], 'text/plain; charset=UTF-8') - self.assertEqual(r.text, 'foobar') - - def test_download(self): - rootdir = self.root_dir - - text = 'hello' - with open(pjoin(rootdir, 'test.txt'), 'w') as f: - f.write(text) - - r = self.request('GET', 'files/test.txt') - disposition = r.headers.get('Content-Disposition', '') - self.assertNotIn('attachment', disposition) - - r = self.request('GET', 'files/test.txt?download=1') - disposition = r.headers.get('Content-Disposition', '') - self.assertIn('attachment', disposition) - self.assertIn("filename*=utf-8''test.txt", disposition) - - - def test_old_files_redirect(self): - """pre-2.0 'files/' prefixed links are properly redirected""" - rootdir = self.root_dir - - os.mkdir(pjoin(rootdir, 'files')) - os.makedirs(pjoin(rootdir, 'sub', 'files')) - - for prefix in ('', 'sub'): - with open(pjoin(rootdir, prefix, 'files', 'f1.txt'), 'w') as f: - f.write(prefix + '/files/f1') - with open(pjoin(rootdir, prefix, 'files', 'f2.txt'), 'w') as f: - 
f.write(prefix + '/files/f2') - with open(pjoin(rootdir, prefix, 'f2.txt'), 'w') as f: - f.write(prefix + '/f2') - with open(pjoin(rootdir, prefix, 'f3.txt'), 'w') as f: - f.write(prefix + '/f3') - - # These depend on the tree handlers - # - #url = url_path_join('notebooks', prefix, 'files', 'f1.txt') - #r = self.request('GET', url) - #self.assertEqual(r.status_code, 200) - #self.assertEqual(r.text, prefix + '/files/f1') - - #url = url_path_join('notebooks', prefix, 'files', 'f2.txt') - #r = self.request('GET', url) - #self.assertEqual(r.status_code, 200) - #self.assertEqual(r.text, prefix + '/files/f2') - - #url = url_path_join('notebooks', prefix, 'files', 'f3.txt') - #r = self.request('GET', url) - #self.assertEqual(r.status_code, 200) - #self.assertEqual(r.text, prefix + '/f3') - diff --git a/jupyter_server/tests/test_gateway.py b/jupyter_server/tests/test_gateway.py deleted file mode 100644 index 8e092ec4b2..0000000000 --- a/jupyter_server/tests/test_gateway.py +++ /dev/null @@ -1,354 +0,0 @@ -"""Test GatewayClient""" -import os -import json -import uuid -from datetime import datetime -from tornado import gen -from tornado.web import HTTPError -from tornado.httpclient import HTTPRequest, HTTPResponse -from ipython_genutils.py3compat import str_to_unicode -from .launchserver import ServerTestBase -from jupyter_server.gateway.managers import GatewayClient - -try: - from unittest.mock import patch, Mock -except ImportError: - from mock import patch, Mock # py2 - -try: - from io import StringIO -except ImportError: - import StringIO - -import nose.tools as nt - - -def generate_kernelspec(name): - argv_stanza = ['python', '-m', 'ipykernel_launcher', '-f', '{connection_file}'] - spec_stanza = {'spec': {'argv': argv_stanza, 'env': {}, 'display_name': name, 'language': 'python', 'interrupt_mode': 'signal', 'metadata': {}}} - kernelspec_stanza = {'name': name, 'spec': spec_stanza, 'resources': {}} - return kernelspec_stanza - - -# We'll mock up two kernelspecs - 
kspec_foo and kspec_bar -kernelspecs = {'default': 'kspec_foo', 'kernelspecs': {'kspec_foo': generate_kernelspec('kspec_foo'), 'kspec_bar': generate_kernelspec('kspec_bar')}} - - -# maintain a dictionary of expected running kernels. Key = kernel_id, Value = model. -running_kernels = dict() - - -def generate_model(name): - """Generate a mocked kernel model. Caller is responsible for adding model to running_kernels dictionary.""" - dt = datetime.utcnow().isoformat() + 'Z' - kernel_id = str(uuid.uuid4()) - model = {'id': kernel_id, 'name': name, 'last_activity': str(dt), 'execution_state': 'idle', 'connections': 1} - return model - - -@gen.coroutine -def mock_gateway_request(url, **kwargs): - method = 'GET' - if kwargs['method']: - method = kwargs['method'] - - request = HTTPRequest(url=url, **kwargs) - - endpoint = str(url) - - # Fetch all kernelspecs - if endpoint.endswith('/api/kernelspecs') and method == 'GET': - response_buf = StringIO(str_to_unicode(json.dumps(kernelspecs))) - response = yield gen.maybe_future(HTTPResponse(request, 200, buffer=response_buf)) - raise gen.Return(response) - - # Fetch named kernelspec - if endpoint.rfind('/api/kernelspecs/') >= 0 and method == 'GET': - requested_kernelspec = endpoint.rpartition('/')[2] - kspecs = kernelspecs.get('kernelspecs') - if requested_kernelspec in kspecs: - response_buf = StringIO(str_to_unicode(json.dumps(kspecs.get(requested_kernelspec)))) - response = yield gen.maybe_future(HTTPResponse(request, 200, buffer=response_buf)) - raise gen.Return(response) - else: - raise HTTPError(404, message='Kernelspec does not exist: %s' % requested_kernelspec) - - # Create kernel - if endpoint.endswith('/api/kernels') and method == 'POST': - json_body = json.loads(kwargs['body']) - name = json_body.get('name') - env = json_body.get('env') - kspec_name = env.get('KERNEL_KSPEC_NAME') - nt.assert_equal(name, kspec_name) # Ensure that KERNEL_ env values get propagated - model = generate_model(name) - 
running_kernels[model.get('id')] = model # Register model as a running kernel - response_buf = StringIO(str_to_unicode(json.dumps(model))) - response = yield gen.maybe_future(HTTPResponse(request, 201, buffer=response_buf)) - raise gen.Return(response) - - # Fetch list of running kernels - if endpoint.endswith('/api/kernels') and method == 'GET': - kernels = [] - for kernel_id in running_kernels.keys(): - model = running_kernels.get(kernel_id) - kernels.append(model) - response_buf = StringIO(str_to_unicode(json.dumps(kernels))) - response = yield gen.maybe_future(HTTPResponse(request, 200, buffer=response_buf)) - raise gen.Return(response) - - # Interrupt or restart existing kernel - if endpoint.rfind('/api/kernels/') >= 0 and method == 'POST': - requested_kernel_id, sep, action = endpoint.rpartition('/api/kernels/')[2].rpartition('/') - - if action == 'interrupt': - if requested_kernel_id in running_kernels: - response = yield gen.maybe_future(HTTPResponse(request, 204)) - raise gen.Return(response) - else: - raise HTTPError(404, message='Kernel does not exist: %s' % requested_kernel_id) - elif action == 'restart': - if requested_kernel_id in running_kernels: - response_buf = StringIO(str_to_unicode(json.dumps(running_kernels.get(requested_kernel_id)))) - response = yield gen.maybe_future(HTTPResponse(request, 204, buffer=response_buf)) - raise gen.Return(response) - else: - raise HTTPError(404, message='Kernel does not exist: %s' % requested_kernel_id) - else: - raise HTTPError(404, message='Bad action detected: %s' % action) - - # Shutdown existing kernel - if endpoint.rfind('/api/kernels/') >= 0 and method == 'DELETE': - requested_kernel_id = endpoint.rpartition('/')[2] - running_kernels.pop(requested_kernel_id) # Simulate shutdown by removing kernel from running set - response = yield gen.maybe_future(HTTPResponse(request, 204)) - raise gen.Return(response) - - # Fetch existing kernel - if endpoint.rfind('/api/kernels/') >= 0 and method == 'GET': - 
requested_kernel_id = endpoint.rpartition('/')[2] - if requested_kernel_id in running_kernels: - response_buf = StringIO(str_to_unicode(json.dumps(running_kernels.get(requested_kernel_id)))) - response = yield gen.maybe_future(HTTPResponse(request, 200, buffer=response_buf)) - raise gen.Return(response) - else: - raise HTTPError(404, message='Kernel does not exist: %s' % requested_kernel_id) - - -mocked_gateway = patch('jupyter_server.gateway.managers.gateway_request', mock_gateway_request) - - -class TestGateway(ServerTestBase): - - mock_gateway_url = 'http://mock-gateway-server:8889' - mock_http_user = 'alice' - - @classmethod - def setup_class(cls): - GatewayClient.clear_instance() - super(TestGateway, cls).setup_class() - - @classmethod - def teardown_class(cls): - GatewayClient.clear_instance() - super(TestGateway, cls).teardown_class() - - @classmethod - def get_patch_env(cls): - test_env = super(TestGateway, cls).get_patch_env() - test_env.update({'JUPYTER_GATEWAY_URL': TestGateway.mock_gateway_url, - 'JUPYTER_GATEWAY_REQUEST_TIMEOUT': '44.4'}) - return test_env - - @classmethod - def get_argv(cls): - argv = super(TestGateway, cls).get_argv() - argv.extend(['--GatewayClient.connect_timeout=44.4', '--GatewayClient.http_user=' + TestGateway.mock_http_user]) - return argv - - def test_gateway_options(self): - nt.assert_equal(self.server.gateway_config.gateway_enabled, True) - nt.assert_equal(self.server.gateway_config.url, TestGateway.mock_gateway_url) - nt.assert_equal(self.server.gateway_config.http_user, TestGateway.mock_http_user) - nt.assert_equal(self.server.gateway_config.connect_timeout, self.server.gateway_config.connect_timeout) - nt.assert_equal(self.server.gateway_config.connect_timeout, 44.4) - - def test_gateway_class_mappings(self): - # Ensure appropriate class mappings are in place. 
- nt.assert_equal(self.server.kernel_manager_class.__name__, 'GatewayKernelManager') - nt.assert_equal(self.server.session_manager_class.__name__, 'GatewaySessionManager') - nt.assert_equal(self.server.kernel_spec_manager_class.__name__, 'GatewayKernelSpecManager') - - def test_gateway_get_kernelspecs(self): - # Validate that kernelspecs come from gateway. - with mocked_gateway: - response = self.request('GET', '/api/kernelspecs') - self.assertEqual(response.status_code, 200) - content = json.loads(response.content.decode('utf-8'), encoding='utf-8') - kspecs = content.get('kernelspecs') - self.assertEqual(len(kspecs), 2) - self.assertEqual(kspecs.get('kspec_bar').get('name'), 'kspec_bar') - - def test_gateway_get_named_kernelspec(self): - # Validate that a specific kernelspec can be retrieved from gateway. - with mocked_gateway: - response = self.request('GET', '/api/kernelspecs/kspec_foo') - self.assertEqual(response.status_code, 200) - kspec_foo = json.loads(response.content.decode('utf-8'), encoding='utf-8') - self.assertEqual(kspec_foo.get('name'), 'kspec_foo') - - response = self.request('GET', '/api/kernelspecs/no_such_spec') - self.assertEqual(response.status_code, 404) - - def test_gateway_session_lifecycle(self): - # Validate session lifecycle functions; create and delete. - - # create - session_id, kernel_id = self.create_session('kspec_foo') - - # ensure kernel still considered running - self.assertTrue(self.is_kernel_running(kernel_id)) - - # interrupt - self.interrupt_kernel(kernel_id) - - # ensure kernel still considered running - self.assertTrue(self.is_kernel_running(kernel_id)) - - # restart - self.restart_kernel(kernel_id) - - # ensure kernel still considered running - self.assertTrue(self.is_kernel_running(kernel_id)) - - # delete - self.delete_session(session_id) - self.assertFalse(self.is_kernel_running(kernel_id)) - - def test_gateway_kernel_lifecycle(self): - # Validate kernel lifecycle functions; create, interrupt, restart and delete. 
- - # create - kernel_id = self.create_kernel('kspec_bar') - - # ensure kernel still considered running - self.assertTrue(self.is_kernel_running(kernel_id)) - - # interrupt - self.interrupt_kernel(kernel_id) - - # ensure kernel still considered running - self.assertTrue(self.is_kernel_running(kernel_id)) - - # restart - self.restart_kernel(kernel_id) - - # ensure kernel still considered running - self.assertTrue(self.is_kernel_running(kernel_id)) - - # delete - self.delete_kernel(kernel_id) - self.assertFalse(self.is_kernel_running(kernel_id)) - - def create_session(self, kernel_name): - """Creates a session for a kernel. The session is created against the server - which then uses the gateway for kernel management. - """ - with mocked_gateway: - nb_path = os.path.join(self.root_dir, 'testgw.ipynb') - kwargs = dict() - kwargs['json'] = {'path': nb_path, 'type': 'notebook', 'kernel': {'name': kernel_name}} - - # add a KERNEL_ value to the current env and we'll ensure that that value exists in the mocked method - os.environ['KERNEL_KSPEC_NAME'] = kernel_name - - # Create the kernel... (also tests get_kernel) - response = self.request('POST', '/api/sessions', **kwargs) - self.assertEqual(response.status_code, 201) - model = json.loads(response.content.decode('utf-8'), encoding='utf-8') - self.assertEqual(model.get('path'), nb_path) - kernel_id = model.get('kernel').get('id') - # ensure its in the running_kernels and name matches. - running_kernel = running_kernels.get(kernel_id) - self.assertEqual(kernel_id, running_kernel.get('id')) - self.assertEqual(model.get('kernel').get('name'), running_kernel.get('name')) - session_id = model.get('id') - - # restore env - os.environ.pop('KERNEL_KSPEC_NAME') - return session_id, kernel_id - - def delete_session(self, session_id): - """Deletes a session corresponding to the given session id. 
- """ - with mocked_gateway: - # Delete the session (and kernel) - response = self.request('DELETE', '/api/sessions/' + session_id) - self.assertEqual(response.status_code, 204) - self.assertEqual(response.reason, 'No Content') - - def is_kernel_running(self, kernel_id): - """Issues request to get the set of running kernels - """ - with mocked_gateway: - # Get list of running kernels - response = self.request('GET', '/api/kernels') - self.assertEqual(response.status_code, 200) - kernels = json.loads(response.content.decode('utf-8'), encoding='utf-8') - self.assertEqual(len(kernels), len(running_kernels)) - for model in kernels: - if model.get('id') == kernel_id: - return True - return False - - def create_kernel(self, kernel_name): - """Issues request to retart the given kernel - """ - with mocked_gateway: - kwargs = dict() - kwargs['json'] = {'name': kernel_name} - - # add a KERNEL_ value to the current env and we'll ensure that that value exists in the mocked method - os.environ['KERNEL_KSPEC_NAME'] = kernel_name - - response = self.request('POST', '/api/kernels', **kwargs) - self.assertEqual(response.status_code, 201) - model = json.loads(response.content.decode('utf-8'), encoding='utf-8') - kernel_id = model.get('id') - # ensure its in the running_kernels and name matches. 
- running_kernel = running_kernels.get(kernel_id) - self.assertEqual(kernel_id, running_kernel.get('id')) - self.assertEqual(model.get('name'), kernel_name) - - # restore env - os.environ.pop('KERNEL_KSPEC_NAME') - return kernel_id - - def interrupt_kernel(self, kernel_id): - """Issues request to interrupt the given kernel - """ - with mocked_gateway: - response = self.request('POST', '/api/kernels/' + kernel_id + '/interrupt') - self.assertEqual(response.status_code, 204) - self.assertEqual(response.reason, 'No Content') - - def restart_kernel(self, kernel_id): - """Issues request to retart the given kernel - """ - with mocked_gateway: - response = self.request('POST', '/api/kernels/' + kernel_id + '/restart') - self.assertEqual(response.status_code, 200) - model = json.loads(response.content.decode('utf-8'), encoding='utf-8') - restarted_kernel_id = model.get('id') - # ensure its in the running_kernels and name matches. - running_kernel = running_kernels.get(restarted_kernel_id) - self.assertEqual(restarted_kernel_id, running_kernel.get('id')) - self.assertEqual(model.get('name'), running_kernel.get('name')) - - def delete_kernel(self, kernel_id): - """Deletes kernel corresponding to the given kernel id. 
- """ - with mocked_gateway: - # Delete the session (and kernel) - response = self.request('DELETE', '/api/kernels/' + kernel_id) - self.assertEqual(response.status_code, 204) - self.assertEqual(response.reason, 'No Content') diff --git a/jupyter_server/tests/test_hist.sqlite b/jupyter_server/tests/test_hist.sqlite deleted file mode 100644 index 49ca431ffb..0000000000 Binary files a/jupyter_server/tests/test_hist.sqlite and /dev/null differ diff --git a/jupyter_server/tests/test_i18n.py b/jupyter_server/tests/test_i18n.py deleted file mode 100644 index 7d89927970..0000000000 --- a/jupyter_server/tests/test_i18n.py +++ /dev/null @@ -1,10 +0,0 @@ -import nose.tools as nt - -from jupyter_server import i18n - -def test_parse_accept_lang_header(): - palh = i18n.parse_accept_lang_header - nt.assert_equal(palh(''), []) - nt.assert_equal(palh('zh-CN,en-GB;q=0.7,en;q=0.3'), - ['en', 'en_GB', 'zh_CN']) - nt.assert_equal(palh('nl,fr;q=0'), ['nl']) diff --git a/jupyter_server/tests/test_serverapp.py b/jupyter_server/tests/test_serverapp.py deleted file mode 100644 index 9b523bc3c8..0000000000 --- a/jupyter_server/tests/test_serverapp.py +++ /dev/null @@ -1,192 +0,0 @@ -"""Test ServerApp""" - -import getpass -import logging -import os -import re -import signal -from subprocess import Popen, PIPE, STDOUT -import sys -from tempfile import NamedTemporaryFile -from unittest.mock import patch - -import nose.tools as nt - -from ipython_genutils.tempdir import TemporaryDirectory - -from traitlets.tests.utils import check_help_all_output -from traitlets import TraitError - -from jupyter_core.application import NoStart -from jupyter_server import serverapp, __version__ -from jupyter_server.auth.security import passwd_check - -ServerApp = serverapp.ServerApp - -from .launchserver import ServerTestBase - - -def test_help_output(): - """jupyter server --help-all works""" - check_help_all_output('jupyter_server') - -def test_server_info_file(): - td = TemporaryDirectory() - svapp = 
ServerApp(runtime_dir=td.name, log=logging.getLogger()) - - def get_servers(): - return list(serverapp.list_running_servers(svapp.runtime_dir)) - - svapp.initialize(argv=[]) - svapp.write_server_info_file() - servers = get_servers() - nt.assert_equal(len(servers), 1) - nt.assert_equal(servers[0]['port'], svapp.port) - nt.assert_equal(servers[0]['url'], svapp.connection_url) - nt.assert_equal(servers[0]['version'], svapp.version) - svapp.remove_server_info_file() - nt.assert_equal(get_servers(), []) - - # The ENOENT error should be silenced. - svapp.remove_server_info_file() - -def test_root_dir(): - with TemporaryDirectory() as td: - app = ServerApp(root_dir=td) - nt.assert_equal(app.root_dir, td) - -def test_no_create_root_dir(): - with TemporaryDirectory() as td: - rootdir = os.path.join(td, 'notebooks') - app = ServerApp() - with nt.assert_raises(TraitError): - app.root_dir = rootdir - -def test_missing_root_dir(): - with TemporaryDirectory() as td: - rootdir = os.path.join(td, 'root', 'dir', 'is', 'missing') - app = ServerApp() - with nt.assert_raises(TraitError): - app.root_dir = rootdir - -def test_invalid_root_dir(): - with NamedTemporaryFile() as tf: - app = ServerApp() - with nt.assert_raises(TraitError): - app.root_dir = tf - -def test_root_dir_with_slash(): - with TemporaryDirectory(suffix="_slash" + os.sep) as td: - app = ServerApp(root_dir=td) - nt.assert_false(app.root_dir.endswith(os.sep)) - -def test_root_dir_root(): - root = os.path.abspath(os.sep) # gets the right value on Windows, Posix - app = ServerApp(root_dir=root) - nt.assert_equal(app.root_dir, root) - -def test_generate_config(): - with TemporaryDirectory() as td: - app = ServerApp(config_dir=td) - app.initialize(['--generate-config', '--allow-root']) - with nt.assert_raises(NoStart): - app.start() - assert os.path.exists(os.path.join(td, 'jupyter_server_config.py')) - -#test if the version testin function works -def test_pep440_version(): - - for version in [ - '4.1.0.b1', - '4.1.b1', - 
'4.2', - 'X.y.z', - '1.2.3.dev1.post2', - ]: - def loc(): - with nt.assert_raises(ValueError): - raise_on_bad_version(version) - yield loc - - for version in [ - '4.1.1', - '4.2.1b3', - ]: - - yield (raise_on_bad_version, version) - -pep440re = re.compile('^(\d+)\.(\d+)\.(\d+((a|b|rc)\d+)?)(\.post\d+)?(\.dev\d*)?$') - -def raise_on_bad_version(version): - if not pep440re.match(version): - raise ValueError("Versions String does apparently not match Pep 440 specification, " - "which might lead to sdist and wheel being seen as 2 different release. " - "E.g: do not use dots for beta/alpha/rc markers.") - -def test_current_version(): - raise_on_bad_version(__version__) - -def test_server_password(): - password = 'secret' - with TemporaryDirectory() as td: - with patch.dict('os.environ', { - 'JUPYTER_CONFIG_DIR': td, - }), patch.object(getpass, 'getpass', return_value=password): - app = serverapp.JupyterPasswordApp(log_level=logging.ERROR) - app.initialize([]) - app.start() - sv = ServerApp() - sv.load_config_file() - nt.assert_not_equal(sv.password, '') - passwd_check(sv.password, password) - -class TestingStopApp(serverapp.JupyterServerStopApp): - """For testing the logic of JupyterServerStopApp.""" - def __init__(self, **kwargs): - super(TestingStopApp, self).__init__(**kwargs) - self.servers_shut_down = [] - - def shutdown_server(self, server): - self.servers_shut_down.append(server) - return True - -def test_server_stop(): - def list_running_servers(runtime_dir): - for port in range(100, 110): - yield { - 'pid': 1000 + port, - 'port': port, - 'base_url': '/', - 'hostname': 'localhost', - 'root_dir': '/', - 'secure': False, - 'token': '', - 'password': False, - 'url': 'http://localhost:%i' % port, - } - - mock_servers = patch('jupyter_server.serverapp.list_running_servers', list_running_servers) - - # test stop with a match - with mock_servers: - app = TestingStopApp() - app.initialize(['105']) - app.start() - nt.assert_equal(len(app.servers_shut_down), 1) - 
nt.assert_equal(app.servers_shut_down[0]['port'], 105) - - # test no match - with mock_servers, patch('os.kill') as os_kill: - app = TestingStopApp() - app.initialize(['999']) - with nt.assert_raises(SystemExit) as exc: - app.start() - nt.assert_equal(exc.exception.code, 1) - nt.assert_equal(len(app.servers_shut_down), 0) - - -class ServerAppTests(ServerTestBase): - def test_list_running_servers(self): - servers = list(serverapp.list_running_servers()) - assert len(servers) >= 1 - assert self.port in {info['port'] for info in servers} diff --git a/jupyter_server/tests/test_utils.py b/jupyter_server/tests/test_utils.py deleted file mode 100644 index 397f9590ae..0000000000 --- a/jupyter_server/tests/test_utils.py +++ /dev/null @@ -1,161 +0,0 @@ -"""Test HTML utils""" - -# Copyright (c) Jupyter Development Team. -# Distributed under the terms of the Modified BSD License. - -import ctypes -import os -import re -import stat -import shutil -import tempfile - -import nose.tools as nt - -from traitlets.tests.utils import check_help_all_output -from jupyter_server.utils import url_escape, url_unescape, is_hidden, is_file_hidden, secure_write -from ipython_genutils.py3compat import cast_unicode -from ipython_genutils.tempdir import TemporaryDirectory -from ipython_genutils.testing.decorators import skip_if_not_win32, skip_win32 - - -def test_help_output(): - """jupyter server --help-all works""" - check_help_all_output('jupyter_server') - -def test_url_escape(): - - # changes path or notebook name with special characters to url encoding - # these tests specifically encode paths with spaces - path = url_escape('/this is a test/for spaces/') - nt.assert_equal(path, '/this%20is%20a%20test/for%20spaces/') - - path = url_escape('notebook with space.ipynb') - nt.assert_equal(path, 'notebook%20with%20space.ipynb') - - path = url_escape('/path with a/notebook and space.ipynb') - nt.assert_equal(path, '/path%20with%20a/notebook%20and%20space.ipynb') - - path = url_escape('/ !@$#%^&* 
/ test %^ notebook @#$ name.ipynb') - nt.assert_equal(path, - '/%20%21%40%24%23%25%5E%26%2A%20/%20test%20%25%5E%20notebook%20%40%23%24%20name.ipynb') - -def test_url_unescape(): - - # decodes a url string to a plain string - # these tests decode paths with spaces - path = url_unescape('/this%20is%20a%20test/for%20spaces/') - nt.assert_equal(path, '/this is a test/for spaces/') - - path = url_unescape('notebook%20with%20space.ipynb') - nt.assert_equal(path, 'notebook with space.ipynb') - - path = url_unescape('/path%20with%20a/notebook%20and%20space.ipynb') - nt.assert_equal(path, '/path with a/notebook and space.ipynb') - - path = url_unescape( - '/%20%21%40%24%23%25%5E%26%2A%20/%20test%20%25%5E%20notebook%20%40%23%24%20name.ipynb') - nt.assert_equal(path, '/ !@$#%^&* / test %^ notebook @#$ name.ipynb') - -def test_is_hidden(): - with TemporaryDirectory() as root: - subdir1 = os.path.join(root, 'subdir') - os.makedirs(subdir1) - nt.assert_equal(is_hidden(subdir1, root), False) - nt.assert_equal(is_file_hidden(subdir1), False) - - subdir2 = os.path.join(root, '.subdir2') - os.makedirs(subdir2) - nt.assert_equal(is_hidden(subdir2, root), True) - nt.assert_equal(is_file_hidden(subdir2), True)# - # root dir is always visible - nt.assert_equal(is_hidden(subdir2, subdir2), False) - - subdir34 = os.path.join(root, 'subdir3', '.subdir4') - os.makedirs(subdir34) - nt.assert_equal(is_hidden(subdir34, root), True) - nt.assert_equal(is_hidden(subdir34), True) - - subdir56 = os.path.join(root, '.subdir5', 'subdir6') - os.makedirs(subdir56) - nt.assert_equal(is_hidden(subdir56, root), True) - nt.assert_equal(is_hidden(subdir56), True) - nt.assert_equal(is_file_hidden(subdir56), False) - nt.assert_equal(is_file_hidden(subdir56, os.stat(subdir56)), False) - -@skip_if_not_win32 -def test_is_hidden_win32(): - with TemporaryDirectory() as root: - root = cast_unicode(root) - subdir1 = os.path.join(root, u'subdir') - os.makedirs(subdir1) - assert not is_hidden(subdir1, root) - r = 
ctypes.windll.kernel32.SetFileAttributesW(subdir1, 0x02) - print(r) - assert is_hidden(subdir1, root) - assert is_file_hidden(subdir1) - -@skip_if_not_win32 -def test_secure_write_win32(): - def fetch_win32_permissions(filename): - '''Extracts file permissions on windows using icacls''' - role_permissions = {} - for index, line in enumerate(os.popen("icacls %s" % filename).read().splitlines()): - if index == 0: - line = line.split(filename)[-1].strip().lower() - match = re.match(r'\s*([^:]+):\(([^\)]*)\)', line) - if match: - usergroup, permissions = match.groups() - usergroup = usergroup.lower().split('\\')[-1] - permissions = set(p.lower() for p in permissions.split(',')) - role_permissions[usergroup] = permissions - elif not line.strip(): - break - return role_permissions - - def check_user_only_permissions(fname): - # Windows has it's own permissions ACL patterns - import win32api - username = win32api.GetUserName().lower() - permissions = fetch_win32_permissions(fname) - print(permissions) # for easier debugging - nt.assert_true(username in permissions) - nt.assert_equal(permissions[username], set(['r', 'w'])) - nt.assert_true('administrators' in permissions) - nt.assert_equal(permissions['administrators'], set(['f'])) - nt.assert_true('everyone' not in permissions) - nt.assert_equal(len(permissions), 2) - - directory = tempfile.mkdtemp() - fname = os.path.join(directory, 'check_perms') - try: - with secure_write(fname) as f: - f.write('test 1') - check_user_only_permissions(fname) - with open(fname, 'r') as f: - nt.assert_equal(f.read(), 'test 1') - finally: - shutil.rmtree(directory) - -@skip_win32 -def test_secure_write_unix(): - directory = tempfile.mkdtemp() - fname = os.path.join(directory, 'check_perms') - try: - with secure_write(fname) as f: - f.write('test 1') - mode = os.stat(fname).st_mode - nt.assert_equal('0600', oct(stat.S_IMODE(mode)).replace('0o', '0')) - with open(fname, 'r') as f: - nt.assert_equal(f.read(), 'test 1') - - # Try changing file 
permissions ahead of time - os.chmod(fname, 0o755) - with secure_write(fname) as f: - f.write('test 2') - mode = os.stat(fname).st_mode - nt.assert_equal('0600', oct(stat.S_IMODE(mode)).replace('0o', '0')) - with open(fname, 'r') as f: - nt.assert_equal(f.read(), 'test 2') - finally: - shutil.rmtree(directory) \ No newline at end of file diff --git a/jupyter_server/tree/tests/handlers.py b/jupyter_server/tree/tests/handlers.py deleted file mode 100644 index ef42527616..0000000000 --- a/jupyter_server/tree/tests/handlers.py +++ /dev/null @@ -1,77 +0,0 @@ -"""Tornado handlers for the tree view.""" - -# Copyright (c) Jupyter Development Team. -# Distributed under the terms of the Modified BSD License. - -from tornado import web -import os -from ..base.handlers import IPythonHandler, path_regex -from ..utils import url_path_join, url_escape - - -class TreeHandler(IPythonHandler): - """Render the tree view, listing notebooks, etc.""" - - def generate_breadcrumbs(self, path): - breadcrumbs = [(url_path_join(self.base_url, 'tree'), '')] - parts = path.split('/') - for i in range(len(parts)): - if parts[i]: - link = url_path_join(self.base_url, 'tree', - url_escape(url_path_join(*parts[:i+1])), - ) - breadcrumbs.append((link, parts[i])) - return breadcrumbs - - def generate_page_title(self, path): - comps = path.split('/') - if len(comps) > 3: - for i in range(len(comps)-2): - comps.pop(0) - page_title = url_path_join(*comps) - if page_title: - return page_title+'/' - else: - return 'Home' - - @web.authenticated - def get(self, path=''): - path = path.strip('/') - cm = self.contents_manager - - if cm.dir_exists(path=path): - if cm.is_hidden(path) and not cm.allow_hidden: - self.log.info("Refusing to serve hidden directory, via 404 Error") - raise web.HTTPError(404) - breadcrumbs = self.generate_breadcrumbs(path) - page_title = self.generate_page_title(path) - self.write(self.render_template('tree.html', - page_title=page_title, - notebook_path=path, - 
breadcrumbs=breadcrumbs, - terminals_available=self.settings['terminals_available'], - server_root=self.settings['server_root_dir'], - )) - elif cm.file_exists(path): - # it's not a directory, we have redirecting to do - model = cm.get(path, content=False) - # redirect to /api/notebooks if it's a notebook, otherwise /api/files - service = 'notebooks' if model['type'] == 'notebook' else 'files' - url = url_path_join( - self.base_url, service, url_escape(path), - ) - self.log.debug("Redirecting %s to %s", self.request.path, url) - self.redirect(url) - else: - raise web.HTTPError(404) - - -#----------------------------------------------------------------------------- -# URL to handler mappings -#----------------------------------------------------------------------------- - - -default_handlers = [ - (r"/tree%s" % path_regex, TreeHandler), - (r"/tree", TreeHandler), - ] diff --git a/setup.py b/setup.py index 9b76f5dcba..190ce5c1a6 100755 --- a/setup.py +++ b/setup.py @@ -83,7 +83,7 @@ 'jupyter_core>=4.4.0', 'jupyter_client>=5.3.1', 'nbformat', - 'nbconvert', + 'nbconvert<6', 'ipykernel', # bless IPython kernel for now 'Send2Trash', 'terminado>=0.8.1', @@ -92,7 +92,7 @@ ], extras_require = { 'test': ['nose', 'coverage', 'requests', 'nose_warnings_filters', - 'nbval', 'nose-exclude', 'selenium', 'pytest', 'pytest-cov'], + 'pytest', 'pytest-cov', 'pytest-tornasync'], 'test:sys_platform == "win32"': ['nose-exclude'], }, python_requires = '>=3.5', diff --git a/setupbase.py b/setupbase.py index 7ce5e66de5..d45df53a39 100644 --- a/setupbase.py +++ b/setupbase.py @@ -107,9 +107,6 @@ def find_package_data(): cwd = os.getcwd() os.chdir('jupyter_server') - os.chdir(os.path.join('tests',)) - js_tests = glob('*.js') + glob('*/*.js') - os.chdir(cwd) package_data = { diff --git a/jupyter_server/auth/tests/__init__.py b/tests/__init__.py similarity index 100% rename from jupyter_server/auth/tests/__init__.py rename to tests/__init__.py diff --git 
a/jupyter_server/extension/tests/__init__.py b/tests/auth/__init__.py similarity index 100% rename from jupyter_server/extension/tests/__init__.py rename to tests/auth/__init__.py diff --git a/tests/auth/test_security.py b/tests/auth/test_security.py new file mode 100644 index 0000000000..85a4ead118 --- /dev/null +++ b/tests/auth/test_security.py @@ -0,0 +1,29 @@ +import pytest + +from jupyter_server.auth.security import passwd, passwd_check, salt_len + + +def test_passwd_structure(): + p = passwd('passphrase') + algorithm, salt, hashed = p.split(':') + assert algorithm == 'sha1' + assert len(salt) == salt_len + assert len(hashed) == 40 + + +def test_roundtrip(): + p = passwd('passphrase') + assert passwd_check(p, 'passphrase') + + +def test_bad(): + p = passwd('passphrase') + assert not passwd_check(p, p) + assert not passwd_check(p, 'a:b:c:d') + assert not passwd_check(p, 'a:b') + + +def test_passwd_check_unicode(): + # GH issue #4524 + phash = u'sha1:23862bc21dd3:7a415a95ae4580582e314072143d9c382c491e4f' + assert passwd_check(phash, u"łe¶ŧ←↓→") \ No newline at end of file diff --git a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 0000000000..ecf8c0040c --- /dev/null +++ b/tests/conftest.py @@ -0,0 +1,198 @@ +import os +import sys +import json +import pytest +import asyncio +from binascii import hexlify + +import urllib.parse +import tornado +from tornado.escape import url_escape + +from traitlets.config import Config + +import jupyter_core.paths +from jupyter_server.serverapp import ServerApp +from jupyter_server.utils import url_path_join + + +pytest_plugins = ("pytest_tornasync") + + +# NOTE: This is a temporary fix for Windows 3.8 +# We have to override the io_loop fixture with an +# asyncio patch. This will probably be removed in +# the future. 
+if sys.platform.startswith("win") and sys.version_info >= (3, 8): + + @pytest.fixture + def asyncio_patch(): + ServerApp()._init_asyncio_patch() + + @pytest.fixture + def io_loop(asyncio_patch): + loop = tornado.ioloop.IOLoop() + loop.make_current() + yield loop + loop.clear_current() + loop.close(all_fds=True) + + +def mkdir(tmp_path, *parts): + path = tmp_path.joinpath(*parts) + if not path.exists(): + path.mkdir(parents=True) + return path + + +def expected_http_error(error, expected_code, expected_message=None): + """Check that the error matches the expected output error.""" + e = error.value + if isinstance(e, tornado.web.HTTPError): + if expected_code != e.status_code: + return False + if expected_message is not None and expected_message != str(e): + return False + return True + elif any([ + isinstance(e, tornado.httpclient.HTTPClientError), + isinstance(e, tornado.httpclient.HTTPError) + ]): + if expected_code != e.code: + return False + if expected_message: + message = json.loads(e.response.body.decode())['message'] + if expected_message != message: + return False + return True + + +config = pytest.fixture(lambda: {}) +home_dir = pytest.fixture(lambda tmp_path: mkdir(tmp_path, 'home')) +data_dir = pytest.fixture(lambda tmp_path: mkdir(tmp_path, 'data')) +config_dir = pytest.fixture(lambda tmp_path: mkdir(tmp_path, 'config')) +runtime_dir = pytest.fixture(lambda tmp_path: mkdir(tmp_path, 'runtime')) +root_dir = pytest.fixture(lambda tmp_path: mkdir(tmp_path, 'root_dir')) +argv = pytest.fixture(lambda: []) + +@pytest.fixture +def environ( + monkeypatch, + tmp_path, + home_dir, + data_dir, + config_dir, + runtime_dir, + root_dir + ): + monkeypatch.setenv('HOME', str(home_dir)) + monkeypatch.setenv('PYTHONPATH', os.pathsep.join(sys.path)) + monkeypatch.setenv('JUPYTER_NO_CONFIG', '1') + monkeypatch.setenv('JUPYTER_CONFIG_DIR', str(config_dir)) + monkeypatch.setenv('JUPYTER_DATA_DIR', str(data_dir)) + monkeypatch.setenv('JUPYTER_RUNTIME_DIR', str(runtime_dir)) 
+ monkeypatch.setattr(jupyter_core.paths, 'SYSTEM_JUPYTER_PATH', [str(mkdir(tmp_path, 'share', 'jupyter'))]) + monkeypatch.setattr(jupyter_core.paths, 'ENV_JUPYTER_PATH', [str(mkdir(tmp_path, 'env', 'share', 'jupyter'))]) + monkeypatch.setattr(jupyter_core.paths, 'SYSTEM_CONFIG_PATH', [str(mkdir(tmp_path, 'etc', 'jupyter'))]) + monkeypatch.setattr(jupyter_core.paths, 'ENV_CONFIG_PATH', [str(mkdir(tmp_path, 'env', 'etc', 'jupyter'))]) + + +@pytest.fixture +def configurable_serverapp( + environ, + http_port, + tmp_path, + home_dir, + data_dir, + config_dir, + runtime_dir, + root_dir + ): + + def serverapp( + config={}, + argv=[], + environ=environ, + http_port=http_port, + tmp_path=tmp_path, + home_dir=home_dir, + data_dir=data_dir, + config_dir=config_dir, + runtime_dir=runtime_dir, + root_dir=root_dir, + **kwargs): + c = Config(config) + c.NotebookNotary.db_file = ':memory:' + token = hexlify(os.urandom(4)).decode('ascii') + url_prefix = '/' + app = ServerApp.instance( + port=http_port, + port_retries=0, + open_browser=False, + config_dir=str(config_dir), + data_dir=str(data_dir), + runtime_dir=str(runtime_dir), + root_dir=str(root_dir), + base_url=url_prefix, + config=c, + allow_root=True, + token=token, + **kwargs + ) + app.init_signal = lambda : None + app.log.propagate = True + app.log.handlers = [] + # Initialize app without httpserver + app.initialize(argv=argv, new_httpserver=False) + app.log.propagate = True + app.log.handlers = [] + # Start app without ioloop + app.start_app() + return app + + yield serverapp + ServerApp.clear_instance() + + +@pytest.fixture +def serverapp(configurable_serverapp, config, argv): + app = configurable_serverapp(config=config, argv=argv) + yield app + app.remove_server_info_file() + app.remove_browser_open_file() + app.cleanup_kernels() + + +@pytest.fixture +def app(serverapp): + return serverapp.web_app + + +@pytest.fixture +def auth_header(serverapp): + return {'Authorization': 'token {token}'.format(token=serverapp.token)} 
+ + +@pytest.fixture +def http_port(http_server_port): + return http_server_port[-1] + + +@pytest.fixture +def base_url(http_server_port): + return '/' + + +@pytest.fixture +def fetch(http_server_client, auth_header, base_url): + """fetch fixture that handles auth, base_url, and path""" + def client_fetch(*parts, headers={}, params={}, **kwargs): + # Handle URL strings + path_url = url_escape(url_path_join(base_url, *parts), plus=False) + params_url = urllib.parse.urlencode(params) + url = path_url + "?" + params_url + # Add auth keys to header + headers.update(auth_header) + # Make request. + return http_server_client.fetch(url, headers=headers, request_timeout=20, **kwargs) + return client_fetch \ No newline at end of file diff --git a/jupyter_server/nbconvert/tests/__init__.py b/tests/extension/__init__.py similarity index 100% rename from jupyter_server/nbconvert/tests/__init__.py rename to tests/extension/__init__.py diff --git a/tests/extension/conftest.py b/tests/extension/conftest.py new file mode 100644 index 0000000000..ffd342c41d --- /dev/null +++ b/tests/extension/conftest.py @@ -0,0 +1,27 @@ +import pytest +from traitlets import Unicode + +from jupyter_server.extension.application import ExtensionApp +from jupyter_server.extension.handler import ExtensionHandler + +# --------------- Build a mock extension -------------- + +class MockExtensionHandler(ExtensionHandler): + + def get(self): + self.finish(self.config.mock_trait) + + +class MockExtension(ExtensionApp): + extension_name = 'mock' + mock_trait = Unicode('mock trait', config=True) + + def initialize_handlers(self): + self.handlers.append(('/mock', MockExtensionHandler)) + + +@pytest.fixture +def extended_serverapp(serverapp): + m = MockExtension() + m.initialize(serverapp) + return m \ No newline at end of file diff --git a/tests/extension/test_api.py b/tests/extension/test_api.py new file mode 100644 index 0000000000..8b23a01d63 --- /dev/null +++ b/tests/extension/test_api.py @@ -0,0 +1,43 @@ 
+import pytest + +from jupyter_server.serverapp import ServerApp +from .conftest import MockExtension + +# ------------------ Start tests ------------------- + +async def test_handler(fetch, extended_serverapp): + r = await fetch( + 'mock', + method='GET' + ) + assert r.code == 200 + assert r.body.decode() == 'mock trait' + + +async def test_handler_setting(fetch, serverapp): + # Configure trait in Mock Extension. + m = MockExtension(mock_trait='test mock trait') + m.initialize(serverapp) + + # Test that the extension trait was picked up by the webapp. + r = await fetch( + 'mock', + method='GET' + ) + assert r.code == 200 + assert r.body.decode() == 'test mock trait' + + +async def test_handler_argv(fetch, serverapp): + # Configure trait in Mock Extension. + m = MockExtension() + argv = ['--MockExtension.mock_trait="test mock trait"'] + m.initialize(serverapp, argv=argv) + + # Test that the extension trait was picked up by the webapp. + r = await fetch( + 'mock', + method='GET' + ) + assert r.code == 200 + assert r.body.decode() == 'test mock trait' diff --git a/tests/extension/test_app.py b/tests/extension/test_app.py new file mode 100644 index 0000000000..5ff33be540 --- /dev/null +++ b/tests/extension/test_app.py @@ -0,0 +1,59 @@ +import pytest + +from jupyter_server.serverapp import ServerApp +from jupyter_server.extension.application import ExtensionApp + +from .conftest import MockExtension + + +def test_instance_creation(): + mock_extension = MockExtension() + assert mock_extension.static_paths == [] + assert mock_extension.template_paths == [] + assert mock_extension.settings == {} + assert mock_extension.handlers == [] + + +def test_initialize(serverapp): + mock_extension = MockExtension() + mock_extension.initialize(serverapp) + # Check that settings and handlers were added to the mock extension. 
+ assert isinstance(mock_extension.serverapp, ServerApp) + assert len(mock_extension.settings) > 0 + assert len(mock_extension.handlers) > 0 + + +traits = [ + ('static_paths', ['test']), + ('template_paths', ['test']), + ('custom_display_url', '/test_custom_url'), + ('default_url', '/test_url') +] + + +@pytest.mark.parametrize( + 'trait_name,trait_value', + traits +) +def test_instance_creation_with_instance_args(trait_name, trait_value): + kwarg = {} + kwarg.setdefault(trait_name, trait_value) + mock_extension = MockExtension(**kwarg) + assert getattr(mock_extension, trait_name) == trait_value + + +@pytest.mark.parametrize( + 'trait_name,trait_value', + traits +) +def test_instance_creation_with_argv(serverapp, trait_name, trait_value): + kwarg = {} + kwarg.setdefault(trait_name, trait_value) + + argv = [ + '--MockExtension.{name}={value}'.format(name=trait_name, value=trait_value) + ] + + mock_extension = MockExtension() + mock_extension.initialize(serverapp, argv=argv) + assert getattr(mock_extension, trait_name) == trait_value diff --git a/jupyter_server/services/api/tests/__init__.py b/tests/nbconvert/__init__.py similarity index 100% rename from jupyter_server/services/api/tests/__init__.py rename to tests/nbconvert/__init__.py diff --git a/tests/nbconvert/test_handlers.py b/tests/nbconvert/test_handlers.py new file mode 100644 index 0000000000..12a7256d26 --- /dev/null +++ b/tests/nbconvert/test_handlers.py @@ -0,0 +1,153 @@ +# coding: utf-8 +import io +import json +import os +from os.path import join as pjoin +import shutil + +import tornado + +from nbformat import writes +from nbformat.v4 import ( + new_notebook, new_markdown_cell, new_code_cell, new_output, +) + +from ipython_genutils.testing.decorators import onlyif_cmds_exist + +from base64 import encodebytes + +import pytest + +from ..conftest import expected_http_error + + +png_green_pixel = encodebytes(b'\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00' 
+b'\x00\x00\x01\x00\x00x00\x01\x08\x02\x00\x00\x00\x90wS\xde\x00\x00\x00\x0cIDAT' +b'\x08\xd7c\x90\xfb\xcf\x00\x00\x02\\\x01\x1e.~d\x87\x00\x00\x00\x00IEND\xaeB`\x82' +).decode('ascii') + + +@pytest.fixture +def notebook(root_dir): + # Build sub directory. + if not root_dir.joinpath('foo').is_dir(): + subdir = root_dir / 'foo' + subdir.mkdir() + + # Build a notebook programmatically. + nb = new_notebook() + nb.cells.append(new_markdown_cell(u'Created by test ³')) + cc1 = new_code_cell(source=u'print(2*6)') + cc1.outputs.append(new_output(output_type="stream", text=u'12')) + cc1.outputs.append(new_output(output_type="execute_result", + data={'image/png' : png_green_pixel}, + execution_count=1, + )) + nb.cells.append(cc1) + + # Write file to tmp dir. + nbfile = subdir / 'testnb.ipynb' + nbfile.write_text(writes(nb, version=4), encoding='utf-8') + + +@onlyif_cmds_exist('pandoc') +async def test_from_file(fetch, notebook): + r = await fetch( + 'nbconvert', 'html', 'foo', 'testnb.ipynb', + method='GET', + params={'download': False} + ) + + assert r.code == 200 + assert 'text/html' in r.headers['Content-Type'] + assert 'Created by test' in r.body.decode() + assert 'print' in r.body.decode() + + r = await fetch( + 'nbconvert', 'python', 'foo', 'testnb.ipynb', + method='GET', + params={'download': False} + ) + + assert r.code == 200 + assert 'text/x-python' in r.headers['Content-Type'] + assert 'print(2*6)' in r.body.decode() + + +@onlyif_cmds_exist('pandoc') +async def test_from_file_404(fetch, notebook): + with pytest.raises(tornado.httpclient.HTTPClientError) as e: + await fetch( + 'nbconvert', 'html', 'foo', 'thisdoesntexist.ipynb', + method='GET', + params={'download': False} + ) + assert expected_http_error(e, 404) + + +@onlyif_cmds_exist('pandoc') +async def test_from_file_download(fetch, notebook): + r = await fetch( + 'nbconvert', 'python', 'foo', 'testnb.ipynb', + method='GET', + params={'download': True} + ) + content_disposition = 
r.headers['Content-Disposition'] + assert 'attachment' in content_disposition + assert 'testnb.py' in content_disposition + + +@onlyif_cmds_exist('pandoc') +async def test_from_file_zip(fetch, notebook): + r = await fetch( + 'nbconvert', 'latex', 'foo', 'testnb.ipynb', + method='GET', + params={'download': True} + ) + assert 'application/zip' in r.headers['Content-Type'] + assert '.zip' in r.headers['Content-Disposition'] + + +@onlyif_cmds_exist('pandoc') +async def test_from_post(fetch, notebook): + r = await fetch( + 'api/contents/foo/testnb.ipynb', + method='GET', + ) + nbmodel = json.loads(r.body.decode()) + + r = await fetch( + 'nbconvert', 'html', + method='POST', + body=json.dumps(nbmodel) + ) + assert r.code == 200 + assert 'text/html' in r.headers['Content-Type'] + assert 'Created by test' in r.body.decode() + assert 'print' in r.body.decode() + + r = await fetch( + 'nbconvert', 'python', + method='POST', + body=json.dumps(nbmodel) + ) + assert r.code == 200 + assert u'text/x-python' in r.headers['Content-Type'] + assert 'print(2*6)'in r.body.decode() + + +@onlyif_cmds_exist('pandoc') +async def test_from_post_zip(fetch, notebook): + r = await fetch( + 'api/contents/foo/testnb.ipynb', + method='GET', + ) + nbmodel = json.loads(r.body.decode()) + + r = await fetch( + 'nbconvert', 'latex', + method='POST', + body=json.dumps(nbmodel) + ) + assert 'application/zip' in r.headers['Content-Type'] + assert '.zip' in r.headers['Content-Disposition'] \ No newline at end of file diff --git a/jupyter_server/services/config/tests/__init__.py b/tests/services/__init__.py similarity index 100% rename from jupyter_server/services/config/tests/__init__.py rename to tests/services/__init__.py diff --git a/jupyter_server/services/contents/tests/__init__.py b/tests/services/api/__init__.py similarity index 100% rename from jupyter_server/services/contents/tests/__init__.py rename to tests/services/api/__init__.py diff --git a/tests/services/api/test_api.py 
b/tests/services/api/test_api.py new file mode 100644 index 0000000000..b2c88c3bcd --- /dev/null +++ b/tests/services/api/test_api.py @@ -0,0 +1,14 @@ +import pytest + +from jupyter_server.utils import url_path_join + + +async def test_get_spec(fetch): + response = await fetch( + 'api', 'spec.yaml', + method='GET' + ) + assert response.code == 200 + + + diff --git a/jupyter_server/services/kernels/tests/__init__.py b/tests/services/config/__init__.py similarity index 100% rename from jupyter_server/services/kernels/tests/__init__.py rename to tests/services/config/__init__.py diff --git a/tests/services/config/test_api.py b/tests/services/config/test_api.py new file mode 100644 index 0000000000..356abbc3a1 --- /dev/null +++ b/tests/services/config/test_api.py @@ -0,0 +1,68 @@ +import json +import pytest + +from jupyter_server.utils import url_path_join + + +async def test_create_retrieve_config(fetch): + sample = {'foo': 'bar', 'baz': 73} + response = await fetch( + 'api', 'config', 'example', + method='PUT', + body=json.dumps(sample) + ) + assert response.code == 204 + + response2 = await fetch( + 'api', 'config', 'example', + method='GET', + ) + assert response2.code == 200 + assert json.loads(response2.body.decode()) == sample + + +async def test_modify(fetch): + sample = { + 'foo': 'bar', + 'baz': 73, + 'sub': {'a': 6, 'b': 7}, + 'sub2': {'c': 8} + } + + modified_sample = { + 'foo': None, # should delete foo + 'baz': 75, + 'wib': [1,2,3], + 'sub': {'a': 8, 'b': None, 'd': 9}, + 'sub2': {'c': None} # should delete sub2 + } + + diff = { + 'baz': 75, + 'wib': [1,2,3], + 'sub': {'a': 8, 'd': 9} + } + + await fetch( + 'api', 'config', 'example', + method='PUT', + body=json.dumps(sample) + ) + + response2 = await fetch( + 'api', 'config', 'example', + method='PATCH', + body=json.dumps(modified_sample) + ) + + assert response2.code == 200 + assert json.loads(response2.body.decode()) == diff + + +async def test_get_unknown(fetch): + response = await fetch( + 'api', 
'config', 'nonexistant', + method='GET', + ) + assert response.code == 200 + assert json.loads(response.body.decode()) == {} \ No newline at end of file diff --git a/jupyter_server/services/kernelspecs/tests/__init__.py b/tests/services/contents/__init__.py similarity index 100% rename from jupyter_server/services/kernelspecs/tests/__init__.py rename to tests/services/contents/__init__.py diff --git a/tests/services/contents/test_api.py b/tests/services/contents/test_api.py new file mode 100644 index 0000000000..7dbd3fd759 --- /dev/null +++ b/tests/services/contents/test_api.py @@ -0,0 +1,841 @@ +import sys +import json +import pathlib +import pytest +from urllib.parse import ParseResult, urlunparse + +import tornado + +from nbformat import writes, from_dict +from nbformat.v4 import ( + new_notebook, new_markdown_cell, +) + +from jupyter_server.utils import url_path_join + +from base64 import encodebytes, decodebytes + +from ...conftest import expected_http_error + + +def notebooks_only(dir_model): + return [nb for nb in dir_model['content'] if nb['type']=='notebook'] + +def dirs_only(dir_model): + return [x for x in dir_model['content'] if x['type']=='directory'] + + +dirs = [ + ('', 'inroot'), + ('Directory with spaces in', 'inspace'), + (u'unicodé', 'innonascii'), + ('foo', 'a'), + ('foo', 'b'), + ('foo', 'name with spaces'), + ('foo', u'unicodé'), + ('foo/bar', 'baz'), + ('ordering', 'A'), + ('ordering', 'b'), + ('ordering', 'C'), + (u'å b', u'ç d'), +] + + +@pytest.fixture +def contents_dir(tmp_path, serverapp): + return tmp_path / serverapp.root_dir + + +@pytest.fixture +def contents(contents_dir): + # Create files in temporary directory + for d, name in dirs: + p = contents_dir / d + p.mkdir(parents=True, exist_ok=True) + + # Create a notebook + nb = writes(new_notebook(), version=4) + nbname = p.joinpath('{}.ipynb'.format(name)) + nbname.write_text(nb, encoding='utf-8') + + # Create a text file + txt = '{} text file'.format(name) + txtname = 
p.joinpath('{}.txt'.format(name)) + txtname.write_text(txt, encoding='utf-8') + + # Create a random blob + blob = name.encode('utf-8') + b'\xFF' + blobname = p.joinpath('{}.blob'.format(name)) + blobname.write_bytes(blob) + + +@pytest.fixture +def folders(): + return list(set(item[0] for item in dirs)) + + +@pytest.mark.parametrize('path,name', dirs) +async def test_list_notebooks(fetch, contents, path, name): + response = await fetch( + 'api', 'contents', path, + method='GET', + ) + data = json.loads(response.body.decode()) + nbs = notebooks_only(data) + assert len(nbs) > 0 + assert name+'.ipynb' in [n['name'] for n in nbs] + assert url_path_join(path, name+'.ipynb') in [n['path'] for n in nbs] + + +@pytest.mark.parametrize('path,name', dirs) +async def test_get_dir_no_contents(fetch, contents, path, name): + response = await fetch( + 'api', 'contents', path, + method='GET', + params=dict( + content='0', + ) + ) + model = json.loads(response.body.decode()) + assert model['path'] == path + assert model['type'] == 'directory' + assert 'content' in model + assert model['content'] == None + + +async def test_list_nonexistant_dir(fetch, contents): + with pytest.raises(tornado.httpclient.HTTPClientError): + await fetch( + 'api', 'contents', 'nonexistant', + method='GET', + ) + + +@pytest.mark.parametrize('path,name', dirs) +async def test_get_nb_contents(fetch, contents, path, name): + nbname = name+'.ipynb' + nbpath = (path + '/' + nbname).lstrip('/') + r = await fetch( + 'api', 'contents', nbpath, + method='GET', + params=dict(content='1') + ) + model = json.loads(r.body.decode()) + assert model['name'] == nbname + assert model['path'] == nbpath + assert model['type'] == 'notebook' + assert 'content' in model + assert model['format'] == 'json' + assert 'metadata' in model['content'] + assert isinstance(model['content']['metadata'], dict) + + +@pytest.mark.parametrize('path,name', dirs) +async def test_get_nb_no_contents(fetch, contents, path, name): + nbname = 
name+'.ipynb' + nbpath = (path + '/' + nbname).lstrip('/') + r = await fetch( + 'api', 'contents', nbpath, + method='GET', + params=dict(content='0') + ) + model = json.loads(r.body.decode()) + assert model['name'] == nbname + assert model['path'] == nbpath + assert model['type'] == 'notebook' + assert 'content' in model + assert model['content'] == None + + +async def test_get_nb_invalid(contents_dir, fetch, contents): + nb = { + 'nbformat': 4, + 'metadata': {}, + 'cells': [{ + 'cell_type': 'wrong', + 'metadata': {}, + }], + } + nbpath = u'å b/Validate tést.ipynb' + (contents_dir / nbpath).write_text(json.dumps(nb)) + r = await fetch( + 'api', 'contents', nbpath, + method='GET', + ) + model = json.loads(r.body.decode()) + assert model['path'] == nbpath + assert model['type'] == 'notebook' + assert 'content' in model + assert 'message' in model + assert 'validation failed' in model['message'].lower() + + +async def test_get_contents_no_such_file(fetch): + with pytest.raises(tornado.httpclient.HTTPClientError) as e: + await fetch( + 'api', 'contents', 'foo/q.ipynb', + method='GET', + ) + assert e.value.code == 404 + + +@pytest.mark.parametrize('path,name', dirs) +async def test_get_text_file_contents(fetch, contents, path, name): + txtname = name+'.txt' + txtpath = (path + '/' + txtname).lstrip('/') + r = await fetch( + 'api', 'contents', txtpath, + method='GET', + params=dict(content='1') + ) + model = json.loads(r.body.decode()) + assert model['name'] == txtname + assert model['path'] == txtpath + assert 'content' in model + assert model['format'] == 'text' + assert model['type'] == 'file' + assert model['content'] == '{} text file'.format(name) + + with pytest.raises(tornado.httpclient.HTTPClientError) as e: + await fetch( + 'api', 'contents', 'foo/q.txt', + method='GET', + ) + assert expected_http_error(e, 404) + + with pytest.raises(tornado.httpclient.HTTPClientError) as e: + await fetch( + 'api', 'contents', 'foo/bar/baz.blob', + method='GET', + params=dict( + 
type='file', + format='text' + ) + ) + assert expected_http_error(e, 400) + + + +@pytest.mark.parametrize('path,name', dirs) +async def test_get_binary_file_contents(fetch, contents, path, name): + blobname = name+'.blob' + blobpath = (path + '/' + blobname).lstrip('/') + r = await fetch( + 'api', 'contents', blobpath, + method='GET', + params=dict(content='1') + ) + model = json.loads(r.body.decode()) + assert model['name'] == blobname + assert model['path'] == blobpath + assert 'content' in model + assert model['format'] == 'base64' + assert model['type'] == 'file' + data_out = decodebytes(model['content'].encode('ascii')) + data_in = name.encode('utf-8') + b'\xFF' + assert data_in == data_out + + with pytest.raises(tornado.httpclient.HTTPClientError) as e: + await fetch( + 'api', 'contents', 'foo/q.txt', + method='GET', + ) + assert expected_http_error(e, 404) + + +async def test_get_bad_type(fetch, contents): + with pytest.raises(tornado.httpclient.HTTPClientError) as e: + path = 'unicodé' + type = 'file' + await fetch( + 'api', 'contents', path, + method='GET', + params=dict(type=type) # This should be a directory, and thus throw and error + ) + assert expected_http_error(e, 400, '%s is a directory, not a %s' % (path, type)) + + with pytest.raises(tornado.httpclient.HTTPClientError) as e: + path = 'unicodé/innonascii.ipynb' + type = 'directory' + await fetch( + 'api', 'contents', path, + method='GET', + params=dict(type=type) # This should be a file, and thus throw and error + ) + assert expected_http_error(e, 400, '%s is not a directory' % path) + + +def _check_created(r, contents_dir, path, name, type='notebook'): + fpath = path+'/'+name + assert r.code == 201 + location = '/api/contents/' + tornado.escape.url_escape(fpath, plus=False) + assert r.headers['Location'] == location + model = json.loads(r.body.decode()) + assert model['name'] == name + assert model['path'] == fpath + assert model['type'] == type + path = contents_dir + '/' + fpath + if type == 
'directory': + assert pathlib.Path(path).is_dir() + else: + assert pathlib.Path(path).is_file() + + +async def test_create_untitled(fetch, contents, contents_dir): + path = 'å b' + name = 'Untitled.ipynb' + r = await fetch( + 'api', 'contents', path, + method='POST', + body=json.dumps({'ext': '.ipynb'}) + ) + _check_created(r, str(contents_dir), path, name, type='notebook') + + name = 'Untitled1.ipynb' + r = await fetch( + 'api', 'contents', path, + method='POST', + body=json.dumps({'ext': '.ipynb'}) + ) + _check_created(r, str(contents_dir), path, name, type='notebook') + + path = 'foo/bar' + name = 'Untitled.ipynb' + r = await fetch( + 'api', 'contents', path, + method='POST', + body=json.dumps({'ext': '.ipynb'}) + ) + _check_created(r, str(contents_dir), path, name, type='notebook') + + +async def test_create_untitled_txt(fetch, contents, contents_dir): + name = 'untitled.txt' + path = 'foo/bar' + r = await fetch( + 'api', 'contents', path, + method='POST', + body=json.dumps({'ext': '.txt'}) + ) + _check_created(r, str(contents_dir), path, name, type='file') + + r = await fetch( + 'api', 'contents', path, name, + method='GET' + ) + model = json.loads(r.body.decode()) + assert model['type'] == 'file' + assert model['format'] == 'text' + assert model['content'] == '' + + +async def test_upload(fetch, contents, contents_dir): + nb = new_notebook() + nbmodel = {'content': nb, 'type': 'notebook'} + path = 'å b' + name = 'Upload tést.ipynb' + r = await fetch( + 'api', 'contents', path, name, + method='PUT', + body=json.dumps(nbmodel) + ) + _check_created(r, str(contents_dir), path, name) + + +async def test_mkdir_untitled(fetch, contents, contents_dir): + name = 'Untitled Folder' + path = 'å b' + r = await fetch( + 'api', 'contents', path, + method='POST', + body=json.dumps({'type': 'directory'}) + ) + _check_created(r, str(contents_dir), path, name, type='directory') + + name = 'Untitled Folder 1' + r = await fetch( + 'api', 'contents', path, + method='POST', + 
body=json.dumps({'type': 'directory'}) + ) + _check_created(r, str(contents_dir), path, name, type='directory') + + name = 'Untitled Folder' + path = 'foo/bar' + r = await fetch( + 'api', 'contents', path, + method='POST', + body=json.dumps({'type': 'directory'}) + ) + _check_created(r, str(contents_dir), path, name, type='directory') + + +async def test_mkdir(fetch, contents, contents_dir): + name = 'New ∂ir' + path = 'å b' + r = await fetch( + 'api', 'contents', path, name, + method='PUT', + body=json.dumps({'type': 'directory'}) + ) + _check_created(r, str(contents_dir), path, name, type='directory') + + +async def test_mkdir_hidden_400(fetch): + with pytest.raises(tornado.httpclient.HTTPClientError) as e: + await fetch( + 'api', 'contents', 'å b/.hidden', + method='PUT', + body=json.dumps({'type': 'directory'}) + ) + assert expected_http_error(e, 400) + + +async def test_upload_txt(fetch, contents, contents_dir): + body = 'ünicode téxt' + model = { + 'content' : body, + 'format' : 'text', + 'type' : 'file', + } + path = 'å b' + name = 'Upload tést.txt' + await fetch( + 'api', 'contents', path, name, + method='PUT', + body=json.dumps(model) + ) + + # check roundtrip + r = await fetch( + 'api', 'contents', path, name, + method='GET' + ) + model = json.loads(r.body.decode()) + assert model['type'] == 'file' + assert model['format'] == 'text' + assert model['path'] == path+'/'+name + assert model['content'] == body + + +async def test_upload_b64(fetch, contents, contents_dir): + body = b'\xFFblob' + b64body = encodebytes(body).decode('ascii') + model = { + 'content' : b64body, + 'format' : 'base64', + 'type' : 'file', + } + path = 'å b' + name = 'Upload tést.blob' + await fetch( + 'api', 'contents', path, name, + method='PUT', + body=json.dumps(model) + ) + # check roundtrip + r = await fetch( + 'api', 'contents', path, name, + method='GET' + ) + model = json.loads(r.body.decode()) + assert model['type'] == 'file' + assert model['path'] == path+'/'+name + assert 
model['format'] == 'base64' + decoded = decodebytes(model['content'].encode('ascii')) + assert decoded == body + + +async def test_copy(fetch, contents, contents_dir): + path = 'å b' + name = 'ç d.ipynb' + copy = 'ç d-Copy1.ipynb' + r = await fetch( + 'api', 'contents', path, + method='POST', + body=json.dumps({'copy_from': path+'/'+name}) + ) + _check_created(r, str(contents_dir), path, copy, type='notebook') + + # Copy the same file name + copy2 = 'ç d-Copy2.ipynb' + r = await fetch( + 'api', 'contents', path, + method='POST', + body=json.dumps({'copy_from': path+'/'+name}) + ) + _check_created(r, str(contents_dir), path, copy2, type='notebook') + + # copy a copy. + copy3 = 'ç d-Copy3.ipynb' + r = await fetch( + 'api', 'contents', path, + method='POST', + body=json.dumps({'copy_from': path+'/'+copy2}) + ) + _check_created(r, str(contents_dir), path, copy3, type='notebook') + + +async def test_copy_path(fetch, contents, contents_dir): + path1 = 'foo' + path2 = 'å b' + name = 'a.ipynb' + copy = 'a-Copy1.ipynb' + r = await fetch( + 'api', 'contents', path2, + method='POST', + body=json.dumps({'copy_from': path1+'/'+name}) + ) + _check_created(r, str(contents_dir), path2, name, type='notebook') + + r = await fetch( + 'api', 'contents', path2, + method='POST', + body=json.dumps({'copy_from': path1+'/'+name}) + ) + _check_created(r, str(contents_dir), path2, copy, type='notebook') + + +async def test_copy_put_400(fetch, contents, contents_dir): + with pytest.raises(tornado.httpclient.HTTPClientError) as e: + await fetch( + 'api', 'contents', 'å b/cøpy.ipynb', + method='PUT', + body=json.dumps({'copy_from': 'å b/ç d.ipynb'}) + ) + assert expected_http_error(e, 400) + + +async def test_copy_dir_400(fetch, contents, contents_dir): + with pytest.raises(tornado.httpclient.HTTPClientError) as e: + await fetch( + 'api', 'contents', 'foo', + method='POST', + body=json.dumps({'copy_from': 'å b'}) + ) + assert expected_http_error(e, 400) + + 
+@pytest.mark.parametrize('path,name', dirs) +async def test_delete(fetch, contents, contents_dir, path, name): + nbname = name+'.ipynb' + nbpath = (path + '/' + nbname).lstrip('/') + r = await fetch( + 'api', 'contents', nbpath, + method='DELETE', + ) + assert r.code == 204 + + +async def test_delete_dirs(fetch, contents, folders): + # Iterate over folders + for name in sorted(folders + ['/'], key=len, reverse=True): + r = await fetch( + 'api', 'contents', name, + method='GET' + ) + # Get JSON blobs for each content. + listing = json.loads(r.body.decode())['content'] + # Delete all content + for model in listing: + await fetch( + 'api', 'contents', model['path'], + method='DELETE' + ) + # Make sure all content has been deleted. + r = await fetch( + 'api', 'contents', + method='GET' + ) + model = json.loads(r.body.decode()) + assert model['content'] == [] + + +@pytest.mark.skipif(sys.platform == 'win32', reason="Disabled deleting non-empty dirs on Windows") +async def test_delete_non_empty_dir(fetch, contents): + # Delete a folder + await fetch( + 'api', 'contents', 'å b', + method='DELETE' + ) + # Check that the folder was been deleted. 
+ with pytest.raises(tornado.httpclient.HTTPClientError) as e: + await fetch( + 'api', 'contents', 'å b', + method='GET' + ) + assert expected_http_error(e, 404) + + +async def test_rename(fetch, contents, contents_dir): + path = 'foo' + name = 'a.ipynb' + new_name = 'z.ipynb' + # Rename the file + r = await fetch( + 'api', 'contents', path, name, + method='PATCH', + body=json.dumps({'path': path+'/'+new_name}) + ) + fpath = path+'/'+new_name + assert r.code == 200 + location = '/api/contents/' + fpath + assert r.headers['Location'] == location + model = json.loads(r.body.decode()) + assert model['name'] == new_name + assert model['path'] == fpath + fpath = str(contents_dir / fpath) + assert pathlib.Path(fpath).is_file() + + # Check that the files have changed + r = await fetch( + 'api', 'contents', path, + method='GET' + ) + listing = json.loads(r.body.decode()) + nbnames = [name['name'] for name in listing['content']] + assert 'z.ipynb' in nbnames + assert 'a.ipynb' not in nbnames + + +async def test_checkpoints_follow_file(fetch, contents): + path = 'foo' + name = 'a.ipynb' + + # Read initial file. + r = await fetch( + 'api', 'contents', path, name, + method='GET' + ) + model = json.loads(r.body.decode()) + + # Create a checkpoint of initial state + r = await fetch( + 'api', 'contents', path, name, 'checkpoints', + method='POST', + allow_nonstandard_methods=True + ) + cp1 = json.loads(r.body.decode()) + + # Modify file and save. 
+ nbcontent = model['content'] + nb = from_dict(nbcontent) + hcell = new_markdown_cell('Created by test') + nb.cells.append(hcell) + nbmodel = {'content': nb, 'type': 'notebook'} + r = await fetch( + 'api', 'contents', path, name, + method='PUT', + body=json.dumps(nbmodel) + ) + + # List checkpoints + r = await fetch( + 'api', 'contents', path, name, 'checkpoints', + method='GET', + ) + cps = json.loads(r.body.decode()) + assert cps == [cp1] + + r = await fetch( + 'api', 'contents', path, name, + method='GET' + ) + model = json.loads(r.body.decode()) + nbcontent = model['content'] + nb = from_dict(nbcontent) + assert nb.cells[0].source == "Created by test" + + +async def test_rename_existing(fetch, contents): + with pytest.raises(tornado.httpclient.HTTPClientError) as e: + path = 'foo' + name = 'a.ipynb' + new_name = 'b.ipynb' + # Rename the file + r = await fetch( + 'api', 'contents', path, name, + method='PATCH', + body=json.dumps({'path': path+'/'+new_name}) + ) + assert expected_http_error(e, 409) + + +async def test_save(fetch, contents): + r = await fetch( + 'api', 'contents', 'foo/a.ipynb', + method='GET' + ) + model = json.loads(r.body.decode()) + nbmodel = model['content'] + nb = from_dict(nbmodel) + nb.cells.append(new_markdown_cell('Created by test ³')) + nbmodel = {'content': nb, 'type': 'notebook'} + r = await fetch( + 'api', 'contents', 'foo/a.ipynb', + method='PUT', + body=json.dumps(nbmodel) + ) + # Round trip. 
+ r = await fetch( + 'api', 'contents', 'foo/a.ipynb', + method='GET' + ) + model = json.loads(r.body.decode()) + newnb = from_dict(model['content']) + assert newnb.cells[0].source == 'Created by test ³' + + +async def test_checkpoints(fetch, contents): + path = 'foo/a.ipynb' + resp = await fetch( + 'api', 'contents', path, + method='GET' + ) + model = json.loads(resp.body.decode()) + r = await fetch( + 'api', 'contents', path, 'checkpoints', + method='POST', + allow_nonstandard_methods=True + ) + assert r.code == 201 + cp1 = json.loads(r.body.decode()) + assert set(cp1) == {'id', 'last_modified'} + assert r.headers['Location'].split('/')[-1] == cp1['id'] + + # Modify it. + nbcontent = model['content'] + nb = from_dict(nbcontent) + hcell = new_markdown_cell('Created by test') + nb.cells.append(hcell) + + # Save it. + nbmodel = {'content': nb, 'type': 'notebook'} + resp = await fetch( + 'api', 'contents', path, + method='PUT', + body=json.dumps(nbmodel) + ) + + # List checkpoints + r = await fetch( + 'api', 'contents', path, 'checkpoints', + method='GET' + ) + cps = json.loads(r.body.decode()) + assert cps == [cp1] + + r = await fetch( + 'api', 'contents', path, + method='GET' + ) + nbcontent = json.loads(r.body.decode())['content'] + nb = from_dict(nbcontent) + assert nb.cells[0].source == 'Created by test' + + # Restore Checkpoint cp1 + r = await fetch( + 'api', 'contents', path, 'checkpoints', cp1['id'], + method='POST', + allow_nonstandard_methods=True + ) + assert r.code == 204 + + r = await fetch( + 'api', 'contents', path, + method='GET' + ) + nbcontent = json.loads(r.body.decode())['content'] + nb = from_dict(nbcontent) + assert nb.cells == [] + + # Delete cp1 + r = await fetch( + 'api', 'contents', path, 'checkpoints', cp1['id'], + method='DELETE' + ) + assert r.code == 204 + + r = await fetch( + 'api', 'contents', path, 'checkpoints', + method='GET' + ) + cps = json.loads(r.body.decode()) + assert cps == [] + + +async def test_file_checkpoints(fetch, 
contents): + path = 'foo/a.txt' + resp = await fetch( + 'api', 'contents', path, + method='GET' + ) + orig_content = json.loads(resp.body.decode())['content'] + r = await fetch( + 'api', 'contents', path, 'checkpoints', + method='POST', + allow_nonstandard_methods=True + ) + assert r.code == 201 + cp1 = json.loads(r.body.decode()) + assert set(cp1) == {'id', 'last_modified'} + assert r.headers['Location'].split('/')[-1] == cp1['id'] + + # Modify it. + new_content = orig_content + '\nsecond line' + model = { + 'content': new_content, + 'type': 'file', + 'format': 'text', + } + + # Save it. + resp = await fetch( + 'api', 'contents', path, + method='PUT', + body=json.dumps(model) + ) + + # List checkpoints + r = await fetch( + 'api', 'contents', path, 'checkpoints', + method='GET' + ) + cps = json.loads(r.body.decode()) + assert cps == [cp1] + + r = await fetch( + 'api', 'contents', path, + method='GET' + ) + content = json.loads(r.body.decode())['content'] + assert content == new_content + + # Restore Checkpoint cp1 + r = await fetch( + 'api', 'contents', path, 'checkpoints', cp1['id'], + method='POST', + allow_nonstandard_methods=True + ) + assert r.code == 204 + + r = await fetch( + 'api', 'contents', path, + method='GET' + ) + restored_content = json.loads(r.body.decode())['content'] + assert restored_content == orig_content + + # Delete cp1 + r = await fetch( + 'api', 'contents', path, 'checkpoints', cp1['id'], + method='DELETE' + ) + assert r.code == 204 + + r = await fetch( + 'api', 'contents', path, 'checkpoints', + method='GET' + ) + cps = json.loads(r.body.decode()) + assert cps == [] \ No newline at end of file diff --git a/tests/services/contents/test_config.py b/tests/services/contents/test_config.py new file mode 100644 index 0000000000..a427861dd3 --- /dev/null +++ b/tests/services/contents/test_config.py @@ -0,0 +1,13 @@ +import pytest + +from traitlets.config import Config +from jupyter_server.services.contents.filecheckpoints import 
GenericFileCheckpoints + + +@pytest.fixture +def config(): + return {'FileContentsManager': {'checkpoints_class': GenericFileCheckpoints}} + + +def test_config_did_something(serverapp): + assert isinstance(serverapp.contents_manager.checkpoints, GenericFileCheckpoints) \ No newline at end of file diff --git a/tests/services/contents/test_fileio.py b/tests/services/contents/test_fileio.py new file mode 100644 index 0000000000..3a88d4aaa7 --- /dev/null +++ b/tests/services/contents/test_fileio.py @@ -0,0 +1,138 @@ +import io +import os +import stat +import functools +import decorator + +import pytest + +from ipython_genutils.testing.decorators import skip_win32 as _skip_win32 +from ipython_genutils.testing.decorators import skip_if_not_win32 as _skip_if_not_win32 + +from jupyter_server.services.contents.fileio import atomic_writing + + +@functools.wraps(_skip_win32) +def skip_win32(f): + # Patches the "skip_win32" method to allow pytest fixtures + # in methods wrapped by this decorator. + def inner(f, *args, **kwargs): + decorated_f = _skip_win32(f) + return decorated_f(*args, **kwargs) + return decorator.decorator(inner, f) + + +umask = 0 + + +def test_atomic_writing(tmp_path): + class CustomExc(Exception): pass + + f1 = tmp_path / 'penguin' + f1.write_text('Before') + + if os.name != 'nt': + os.chmod(str(f1), 0o701) + orig_mode = stat.S_IMODE(os.stat(str(f1)).st_mode) + + f2 = tmp_path / 'flamingo' + try: + os.symlink(str(f1), str(f2)) + have_symlink = True + except (AttributeError, NotImplementedError, OSError): + # AttributeError: Python doesn't support it + # NotImplementedError: The system doesn't support it + # OSError: The user lacks the privilege (Windows) + have_symlink = False + + with pytest.raises(CustomExc): + with atomic_writing(str(f1)) as f: + f.write('Failing write') + raise CustomExc + + with io.open(str(f1), 'r') as f: + assert f.read() == 'Before' + + with atomic_writing(str(f1)) as f: + f.write('Overwritten') + + with io.open(str(f1), 'r') as f: 
+ assert f.read() == 'Overwritten' + + if os.name != 'nt': + mode = stat.S_IMODE(os.stat(str(f1)).st_mode) + assert mode == orig_mode + + if have_symlink: + # Check that writing over a file preserves a symlink + with atomic_writing(str(f2)) as f: + f.write(u'written from symlink') + + with io.open(str(f1), 'r') as f: + assert f.read() == 'written from symlink' + + +@pytest.fixture +def handle_umask(): + global umask + umask = os.umask(0) + os.umask(umask) + yield + os.umask(umask) + + +@skip_win32 +def test_atomic_writing_umask(handle_umask, tmp_path): + + os.umask(0o022) + f1 = str(tmp_path / '1') + with atomic_writing(f1) as f: + f.write('1') + mode = stat.S_IMODE(os.stat(f1).st_mode) + assert mode == 0o644 + + os.umask(0o057) + f2 = str(tmp_path / '2') + + with atomic_writing(f2) as f: + f.write('2') + + mode = stat.S_IMODE(os.stat(f2).st_mode) + assert mode == 0o620 + + +def test_atomic_writing_newlines(tmp_path): + path = str(tmp_path / 'testfile') + + lf = u'a\nb\nc\n' + plat = lf.replace(u'\n', os.linesep) + crlf = lf.replace(u'\n', u'\r\n') + + # test default + with io.open(path, 'w') as f: + f.write(lf) + with io.open(path, 'r', newline='') as f: + read = f.read() + assert read == plat + + # test newline=LF + with io.open(path, 'w', newline='\n') as f: + f.write(lf) + with io.open(path, 'r', newline='') as f: + read = f.read() + assert read == lf + + # test newline=CRLF + with atomic_writing(str(path), newline='\r\n') as f: + f.write(lf) + with io.open(path, 'r', newline='') as f: + read = f.read() + assert read == crlf + + # test newline=no convert + text = u'crlf\r\ncr\rlf\n' + with atomic_writing(str(path), newline='') as f: + f.write(text) + with io.open(path, 'r', newline='') as f: + read = f.read() + assert read == text \ No newline at end of file diff --git a/tests/services/contents/test_largefilemanager.py b/tests/services/contents/test_largefilemanager.py new file mode 100644 index 0000000000..bf4e3d24c9 --- /dev/null +++ 
b/tests/services/contents/test_largefilemanager.py @@ -0,0 +1,91 @@ +import pytest +import tornado + +from jupyter_server.services.contents.largefilemanager import LargeFileManager +from ...conftest import expected_http_error + +contents_manager = pytest.fixture(lambda tmp_path: LargeFileManager(root_dir=str(tmp_path))) + + +def test_save(contents_manager): + cm = contents_manager + model = cm.new_untitled(type='notebook') + name = model['name'] + path = model['path'] + + # Get the model with 'content' + full_model = cm.get(path) + # Save the notebook + model = cm.save(full_model, path) + assert isinstance(model, dict) + assert 'name' in model + assert 'path' in model + assert model['name'] == name + assert model['path'] == path + + +@pytest.mark.parametrize( + 'model,err_message', + [ + ( + {'name': 'test', 'path': 'test', 'chunk': 1}, + 'HTTP 400: Bad Request (No file type provided)' + ), + ( + {'name': 'test', 'path': 'test', 'chunk': 1, 'type': 'notebook'}, + 'HTTP 400: Bad Request (File type "notebook" is not supported for large file transfer)' + ), + ( + {'name': 'test', 'path': 'test', 'chunk': 1, 'type': 'file'}, + 'HTTP 400: Bad Request (No file content provided)', + ), + ( + {'name': 'test', 'path': 'test', 'chunk': 2, 'type': 'file', + 'content': u'test', 'format': 'json'}, + "HTTP 400: Bad Request (Must specify format of file contents as 'text' or 'base64')" + ) + ] +) +def test_bad_save(contents_manager, model, err_message): + with pytest.raises(tornado.web.HTTPError) as e: + contents_manager.save(model, model['path']) + assert expected_http_error(e, 400, expected_message=err_message) + + +def test_saving_different_chunks(contents_manager): + cm = contents_manager + model = {'name': 'test', 'path': 'test', 'type': 'file', + 'content': u'test==', 'format': 'text'} + name = model['name'] + path = model['path'] + cm.save(model, path) + + for chunk in (1, 2, -1): + for fm in ('text', 'base64'): + full_model = cm.get(path) + full_model['chunk'] = chunk + 
full_model['format'] = fm + model_res = cm.save(full_model, path) + assert isinstance(model_res, dict) + assert 'name' in model_res + assert 'path' in model_res + assert 'chunk' not in model_res + assert model_res['name'] == name + assert model_res['path'] == path + + +def test_save_in_subdirectory(contents_manager, tmp_path): + cm = contents_manager + sub_dir = tmp_path / 'foo' + sub_dir.mkdir() + model = cm.new_untitled(path='/foo/', type='notebook') + path = model['path'] + model = cm.get(path) + + # Change the name in the model for rename + model = cm.save(model, path) + assert isinstance(model, dict) + assert 'name' in model + assert 'path' in model + assert model['name'] == 'Untitled.ipynb' + assert model['path'] == 'foo/Untitled.ipynb' \ No newline at end of file diff --git a/tests/services/contents/test_manager.py b/tests/services/contents/test_manager.py new file mode 100644 index 0000000000..526e53050e --- /dev/null +++ b/tests/services/contents/test_manager.py @@ -0,0 +1,596 @@ +import os +import sys +import time +import pytest +import functools +from traitlets import TraitError +from tornado.web import HTTPError +from itertools import combinations + + +from nbformat import v4 as nbformat + +from jupyter_server.services.contents.filemanager import FileContentsManager +from ...conftest import expected_http_error + +# -------------- Functions ---------------------------- + +# contents_manager_atomic = pytest.fixture(lambda tmp_path: FileContentsManager(root_dir=str(tmp_path), use_atomic_writing=True)) +# contents_manager_nonatomic = pytest.fixture(lambda tmp_path: FileContentsManager(root_dir=str(tmp_path), use_atomic_writing=False)) + +@pytest.fixture(params=[True, False]) +def contents_manager(request, tmp_path): + return FileContentsManager(root_dir=str(tmp_path), use_atomic_writing=request.param) + + +def _make_dir(contents_manager, api_path): + """ + Make a directory. 
+ """ + os_path = contents_manager._get_os_path(api_path) + try: + os.makedirs(os_path) + except OSError: + print("Directory already exists: %r" % os_path) + + +def symlink(contents_manager, src, dst): + """Make a symlink to src from dst + + src and dst are api_paths + """ + src_os_path = contents_manager._get_os_path(src) + dst_os_path = contents_manager._get_os_path(dst) + print(src_os_path, dst_os_path, os.path.isfile(src_os_path)) + os.symlink(src_os_path, dst_os_path) + + +def add_code_cell(notebook): + output = nbformat.new_output("display_data", {'application/javascript': "alert('hi');"}) + cell = nbformat.new_code_cell("print('hi')", outputs=[output]) + notebook.cells.append(cell) + + +def new_notebook(contents_manager): + cm = contents_manager + model = cm.new_untitled(type='notebook') + name = model['name'] + path = model['path'] + + full_model = cm.get(path) + nb = full_model['content'] + nb['metadata']['counter'] = int(1e6 * time.time()) + add_code_cell(nb) + + cm.save(full_model, path) + return nb, name, path + + +def make_populated_dir(contents_manager, api_path): + cm = contents_manager + _make_dir(cm, api_path) + cm.new(path="/".join([api_path, "nb.ipynb"])) + cm.new(path="/".join([api_path, "file.txt"])) + + +def check_populated_dir_files(contents_manager, api_path): + dir_model = contents_manager.get(api_path) + + assert dir_model['path'] == api_path + assert dir_model['type'] == "directory" + + for entry in dir_model['content']: + if entry['type'] == "directory": + continue + elif entry['type'] == "file": + assert entry['name'] == "file.txt" + complete_path = "/".join([api_path, "file.txt"]) + assert entry["path"] == complete_path + elif entry['type'] == "notebook": + assert entry['name'] == "nb.ipynb" + complete_path = "/".join([api_path, "nb.ipynb"]) + assert entry["path"] == complete_path + +# ----------------- Tests ---------------------------------- + +def test_root_dir(tmp_path): + fm = FileContentsManager(root_dir=str(tmp_path)) + assert 
fm.root_dir == str(tmp_path) + + +def test_missing_root_dir(tmp_path): + root = tmp_path / 'notebook' / 'dir' / 'is' / 'missing' + with pytest.raises(TraitError): + FileContentsManager(root_dir=str(root)) + + +def test_invalid_root_dir(tmp_path): + temp_file = tmp_path / 'file.txt' + temp_file.write_text('') + with pytest.raises(TraitError): + FileContentsManager(root_dir=str(temp_file)) + +def test_get_os_path(tmp_path): + fm = FileContentsManager(root_dir=str(tmp_path)) + path = fm._get_os_path('/path/to/notebook/test.ipynb') + rel_path_list = '/path/to/notebook/test.ipynb'.split('/') + fs_path = os.path.join(fm.root_dir, *rel_path_list) + assert path == fs_path + + fm = FileContentsManager(root_dir=str(tmp_path)) + path = fm._get_os_path('test.ipynb') + fs_path = os.path.join(fm.root_dir, 'test.ipynb') + assert path == fs_path + + fm = FileContentsManager(root_dir=str(tmp_path)) + path = fm._get_os_path('////test.ipynb') + fs_path = os.path.join(fm.root_dir, 'test.ipynb') + assert path == fs_path + + +def test_checkpoint_subdir(tmp_path): + subd = 'sub ∂ir' + cp_name = 'test-cp.ipynb' + fm = FileContentsManager(root_dir=str(tmp_path)) + tmp_path.joinpath(subd).mkdir() + cpm = fm.checkpoints + cp_dir = cpm.checkpoint_path('cp', 'test.ipynb') + cp_subdir = cpm.checkpoint_path('cp', '/%s/test.ipynb' % subd) + assert cp_dir != cp_subdir + assert cp_dir == os.path.join(str(tmp_path), cpm.checkpoint_dir, cp_name) + + +@pytest.mark.skipif( + sys.platform == 'win32' and sys.version_info[0] < 3, + reason="System platform is Windows, version < 3" +) +def test_bad_symlink(tmp_path): + td = str(tmp_path) + + cm = FileContentsManager(root_dir=td) + path = 'test bad symlink' + _make_dir(cm, path) + + file_model = cm.new_untitled(path=path, ext='.txt') + + # create a broken symlink + symlink(cm, "target", '%s/%s' % (path, 'bad symlink')) + model = cm.get(path) + + contents = { + content['name']: content for content in model['content'] + } + assert 'untitled.txt' in contents + 
assert contents['untitled.txt'] == file_model + assert 'bad symlink' in contents + + +@pytest.mark.skipif( + sys.platform == 'win32' and sys.version_info[0] < 3, + reason="System platform is Windows, version < 3" +) +def test_good_symlink(tmp_path): + td = str(tmp_path) + cm = FileContentsManager(root_dir=td) + parent = 'test good symlink' + name = 'good symlink' + path = '{0}/{1}'.format(parent, name) + _make_dir(cm, parent) + + file_model = cm.new(path=parent + '/zfoo.txt') + + # create a good symlink + symlink(cm, file_model['path'], path) + symlink_model = cm.get(path, content=False) + dir_model = cm.get(parent) + assert sorted(dir_model['content'], key=lambda x: x['name']) == [symlink_model, file_model] + + +def test_403(tmp_path): + if hasattr(os, 'getuid'): + if os.getuid() == 0: + raise pytest.skip("Can't test permissions as root") + if sys.platform.startswith('win'): + raise pytest.skip("Can't test permissions on Windows") + + td = str(tmp_path) + cm = FileContentsManager(root_dir=td) + model = cm.new_untitled(type='file') + os_path = cm._get_os_path(model['path']) + + os.chmod(os_path, 0o400) + try: + with cm.open(os_path, 'w') as f: + f.write(u"don't care") + except HTTPError as e: + assert e.status_code == 403 + +def test_escape_root(tmp_path): + td = str(tmp_path) + cm = FileContentsManager(root_dir=td) + # make foo, bar next to root + with open(os.path.join(cm.root_dir, '..', 'foo'), 'w') as f: + f.write('foo') + with open(os.path.join(cm.root_dir, '..', 'bar'), 'w') as f: + f.write('bar') + + with pytest.raises(HTTPError) as e: + cm.get('..') + expected_http_error(e, 404) + + with pytest.raises(HTTPError) as e: + cm.get('foo/../../../bar') + expected_http_error(e, 404) + + with pytest.raises(HTTPError) as e: + cm.delete('../foo') + expected_http_error(e, 404) + + with pytest.raises(HTTPError) as e: + cm.rename('../foo', '../bar') + expected_http_error(e, 404) + + with pytest.raises(HTTPError) as e: + cm.save(model={ + 'type': 'file', + 'content': 
u'', + 'format': 'text', + }, path='../foo') + expected_http_error(e, 404) + + +def test_new_untitled(contents_manager): + cm = contents_manager + # Test in root directory + model = cm.new_untitled(type='notebook') + assert isinstance(model, dict) + assert 'name' in model + assert 'path' in model + assert 'type' in model + assert model['type'] == 'notebook' + assert model['name'] == 'Untitled.ipynb' + assert model['path'] == 'Untitled.ipynb' + + # Test in sub-directory + model = cm.new_untitled(type='directory') + assert isinstance(model, dict) + assert 'name' in model + assert 'path' in model + assert 'type' in model + assert model['type'] == 'directory' + assert model['name'] == 'Untitled Folder' + assert model['path'] == 'Untitled Folder' + sub_dir = model['path'] + + model = cm.new_untitled(path=sub_dir) + assert isinstance(model, dict) + assert 'name' in model + assert 'path' in model + assert 'type' in model + assert model['type'] == 'file' + assert model['name'] == 'untitled' + assert model['path'] == '%s/untitled' % sub_dir + + # Test with a compound extension + model = cm.new_untitled(path=sub_dir, ext='.foo.bar') + assert model['name'] == 'untitled.foo.bar' + model = cm.new_untitled(path=sub_dir, ext='.foo.bar') + assert model['name'] == 'untitled1.foo.bar' + + +def test_modified_date(contents_manager): + cm = contents_manager + # Create a new notebook. + nb, name, path = new_notebook(cm) + model = cm.get(path) + + # Add a cell and save. + add_code_cell(model['content']) + cm.save(model, path) + + # Reload notebook and verify that last_modified incremented. + saved = cm.get(path) + assert saved['last_modified'] >= model['last_modified'] + + # Move the notebook and verify that last_modified stayed the same. + # (The frontend fires a warning if last_modified increases on the + # renamed file.) 
+ new_path = 'renamed.ipynb' + cm.rename(path, new_path) + renamed = cm.get(new_path) + assert renamed['last_modified'] >= saved['last_modified'] + + +def test_get(contents_manager): + cm = contents_manager + # Create a notebook + model = cm.new_untitled(type='notebook') + name = model['name'] + path = model['path'] + + # Check that we 'get' on the notebook we just created + model2 = cm.get(path) + assert isinstance(model2, dict) + assert 'name' in model2 + assert 'path' in model2 + assert model['name'] == name + assert model['path'] == path + + nb_as_file = cm.get(path, content=True, type='file') + assert nb_as_file['path'] == path + assert nb_as_file['type'] == 'file' + assert nb_as_file['format'] == 'text' + assert not isinstance(nb_as_file['content'], dict) + + nb_as_bin_file = cm.get(path, content=True, type='file', format='base64') + assert nb_as_bin_file['format'] == 'base64' + + # Test in sub-directory + sub_dir = '/foo/' + _make_dir(cm, 'foo') + model = cm.new_untitled(path=sub_dir, ext='.ipynb') + model2 = cm.get(sub_dir + name) + assert isinstance(model2, dict) + assert 'name' in model2 + assert 'path' in model2 + assert 'content' in model2 + assert model2['name'] == 'Untitled.ipynb' + assert model2['path'] == '{0}/{1}'.format(sub_dir.strip('/'), name) + + + # Test with a regular file. + file_model_path = cm.new_untitled(path=sub_dir, ext='.txt')['path'] + file_model = cm.get(file_model_path) + expected_model = { + 'content': u'', + 'format': u'text', + 'mimetype': u'text/plain', + 'name': u'untitled.txt', + 'path': u'foo/untitled.txt', + 'type': u'file', + 'writable': True, + } + # Assert expected model is in file_model + for key, value in expected_model.items(): + assert file_model[key] == value + assert 'created' in file_model + assert 'last_modified' in file_model + + # Create a sub-sub directory to test getting directory contents with a + # subdir. 
+ _make_dir(cm, 'foo/bar') + dirmodel = cm.get('foo') + assert dirmodel['type'] == 'directory' + assert isinstance(dirmodel['content'], list) + assert len(dirmodel['content']) == 3 + assert dirmodel['path'] == 'foo' + assert dirmodel['name'] == 'foo' + + # Directory contents should match the contents of each individual entry + # when requested with content=False. + model2_no_content = cm.get(sub_dir + name, content=False) + file_model_no_content = cm.get(u'foo/untitled.txt', content=False) + sub_sub_dir_no_content = cm.get('foo/bar', content=False) + assert sub_sub_dir_no_content['path'] == 'foo/bar' + assert sub_sub_dir_no_content['name'] == 'bar' + + for entry in dirmodel['content']: + # Order isn't guaranteed by the spec, so this is a hacky way of + # verifying that all entries are matched. + if entry['path'] == sub_sub_dir_no_content['path']: + assert entry == sub_sub_dir_no_content + elif entry['path'] == model2_no_content['path']: + assert entry == model2_no_content + elif entry['path'] == file_model_no_content['path']: + assert entry == file_model_no_content + else: + assert False, "Unexpected directory entry: %s" % entry + + with pytest.raises(HTTPError): + cm.get('foo', type='file') + + +def test_update(contents_manager): + cm = contents_manager + # Create a notebook. 
+ model = cm.new_untitled(type='notebook') + name = model['name'] + path = model['path'] + + # Change the name in the model for rename + model['path'] = 'test.ipynb' + model = cm.update(model, path) + assert isinstance(model, dict) + assert 'name' in model + assert 'path' in model + assert model['name'] == 'test.ipynb' + + # Make sure the old name is gone + with pytest.raises(HTTPError): + cm.get(path) + + # Test in sub-directory + # Create a directory and notebook in that directory + sub_dir = '/foo/' + _make_dir(cm, 'foo') + model = cm.new_untitled(path=sub_dir, type='notebook') + path = model['path'] + + # Change the name in the model for rename + d = path.rsplit('/', 1)[0] + new_path = model['path'] = d + '/test_in_sub.ipynb' + model = cm.update(model, path) + assert isinstance(model, dict) + assert 'name' in model + assert 'path' in model + assert model['name'] == 'test_in_sub.ipynb' + assert model['path'] == new_path + + # Make sure the old name is gone + with pytest.raises(HTTPError): + cm.get(path) + + +def test_save(contents_manager): + cm = contents_manager + # Create a notebook + model = cm.new_untitled(type='notebook') + name = model['name'] + path = model['path'] + + # Get the model with 'content' + full_model = cm.get(path) + + # Save the notebook + model = cm.save(full_model, path) + assert isinstance(model, dict) + assert 'name' in model + assert 'path' in model + assert model['name'] == name + assert model['path'] == path + + # Test in sub-directory + # Create a directory and notebook in that directory + sub_dir = '/foo/' + _make_dir(cm, 'foo') + model = cm.new_untitled(path=sub_dir, type='notebook') + name = model['name'] + path = model['path'] + model = cm.get(path) + + # Change the name in the model for rename + model = cm.save(model, path) + assert isinstance(model, dict) + assert 'name' in model + assert 'path' in model + assert model['name'] == 'Untitled.ipynb' + assert model['path'] == 'foo/Untitled.ipynb' + + +def 
test_delete(contents_manager): + cm = contents_manager + # Create a notebook + nb, name, path = new_notebook(cm) + + # Delete the notebook + cm.delete(path) + + # Check that deleting a non-existent path raises an error. + with pytest.raises(HTTPError): + cm.delete(path) + + # Check that a 'get' on the deleted notebook raises and error + with pytest.raises(HTTPError): + cm.get(path) + + +def test_rename(contents_manager): + cm = contents_manager + # Create a new notebook + nb, name, path = new_notebook(cm) + + # Rename the notebook + cm.rename(path, "changed_path") + + # Attempting to get the notebook under the old name raises an error + with pytest.raises(HTTPError): + cm.get(path) + # Fetching the notebook under the new name is successful + assert isinstance(cm.get("changed_path"), dict) + + # Ported tests on nested directory renaming from pgcontents + all_dirs = ['foo', 'bar', 'foo/bar', 'foo/bar/foo', 'foo/bar/foo/bar'] + unchanged_dirs = all_dirs[:2] + changed_dirs = all_dirs[2:] + + for _dir in all_dirs: + make_populated_dir(cm, _dir) + check_populated_dir_files(cm, _dir) + + # Renaming to an existing directory should fail + for src, dest in combinations(all_dirs, 2): + with pytest.raises(HTTPError) as e: + cm.rename(src, dest) + assert expected_http_error(e, 409) + + # Creating a notebook in a non_existant directory should fail + with pytest.raises(HTTPError) as e: + cm.new_untitled("foo/bar_diff", ext=".ipynb") + assert expected_http_error(e, 404) + + cm.rename("foo/bar", "foo/bar_diff") + + # Assert that unchanged directories remain so + for unchanged in unchanged_dirs: + check_populated_dir_files(cm, unchanged) + + # Assert changed directories can no longer be accessed under old names + for changed_dirname in changed_dirs: + with pytest.raises(HTTPError) as e: + cm.get(changed_dirname) + assert expected_http_error(e, 404) + new_dirname = changed_dirname.replace("foo/bar", "foo/bar_diff", 1) + check_populated_dir_files(cm, new_dirname) + + # Created a 
notebook in the renamed directory should work + cm.new_untitled("foo/bar_diff", ext=".ipynb") + + +def test_delete_root(contents_manager): + cm = contents_manager + with pytest.raises(HTTPError) as e: + cm.delete('') + assert expected_http_error(e, 400) + + +def test_copy(contents_manager): + cm = contents_manager + parent = u'å b' + name = u'nb √.ipynb' + path = u'{0}/{1}'.format(parent, name) + _make_dir(cm, parent) + + orig = cm.new(path=path) + # copy with unspecified name + copy = cm.copy(path) + assert copy['name'] == orig['name'].replace('.ipynb', '-Copy1.ipynb') + + # copy with specified name + copy2 = cm.copy(path, u'å b/copy 2.ipynb') + assert copy2['name'] == u'copy 2.ipynb' + assert copy2['path'] == u'å b/copy 2.ipynb' + # copy with specified path + copy2 = cm.copy(path, u'/') + assert copy2['name'] == name + assert copy2['path'] == name + + +def test_mark_trusted_cells(contents_manager): + cm = contents_manager + nb, name, path = new_notebook(cm) + + cm.mark_trusted_cells(nb, path) + for cell in nb.cells: + if cell.cell_type == 'code': + assert not cell.metadata.trusted + + cm.trust_notebook(path) + nb = cm.get(path)['content'] + for cell in nb.cells: + if cell.cell_type == 'code': + assert cell.metadata.trusted + + +def test_check_and_sign(contents_manager): + cm = contents_manager + nb, name, path = new_notebook(cm) + + cm.mark_trusted_cells(nb, path) + cm.check_and_sign(nb, path) + assert not cm.notary.check_signature(nb) + + cm.trust_notebook(path) + nb = cm.get(path)['content'] + cm.mark_trusted_cells(nb, path) + cm.check_and_sign(nb, path) + assert cm.notary.check_signature(nb) diff --git a/jupyter_server/services/nbconvert/tests/__init__.py b/tests/services/kernels/__init__.py similarity index 100% rename from jupyter_server/services/nbconvert/tests/__init__.py rename to tests/services/kernels/__init__.py diff --git a/tests/services/kernels/test_api.py b/tests/services/kernels/test_api.py new file mode 100644 index 0000000000..cfdc6b80c1 --- 
/dev/null +++ b/tests/services/kernels/test_api.py @@ -0,0 +1,254 @@ +import time +import json +import pytest + + +import tornado +import urllib.parse +from tornado.escape import url_escape + +from jupyter_client.kernelspec import NATIVE_KERNEL_NAME + +from jupyter_server.utils import url_path_join +from ...conftest import expected_http_error + + +@pytest.fixture +def ws_fetch(auth_header, http_port): + """fetch fixture that handles auth, base_url, and path""" + def client_fetch(*parts, headers={}, params={}, **kwargs): + # Handle URL strings + path = url_escape(url_path_join(*parts), plus=False) + urlparts = urllib.parse.urlparse('ws://localhost:{}'.format(http_port)) + urlparts = urlparts._replace( + path=path, + query=urllib.parse.urlencode(params) + ) + url = urlparts.geturl() + # Add auth keys to header + headers.update(auth_header) + # Make request. + req = tornado.httpclient.HTTPRequest( + url, + headers=headers, + connect_timeout=120 + ) + return tornado.websocket.websocket_connect(req) + return client_fetch + + +async def test_no_kernels(fetch): + r = await fetch( + 'api', 'kernels', + method='GET' + ) + kernels = json.loads(r.body.decode()) + assert kernels == [] + + +async def test_default_kernels(fetch): + r = await fetch( + 'api', 'kernels', + method='POST', + allow_nonstandard_methods=True + ) + kernel = json.loads(r.body.decode()) + assert r.headers['location'] == '/api/kernels/' + kernel['id'] + assert r.code == 201 + assert isinstance(kernel, dict) + + report_uri = '/api/security/csp-report' + expected_csp = '; '.join([ + "frame-ancestors 'self'", + 'report-uri ' + report_uri, + "default-src 'none'" + ]) + assert r.headers['Content-Security-Policy'] == expected_csp + + +async def test_main_kernel_handler(fetch): + # Start the first kernel + r = await fetch( + 'api', 'kernels', + method='POST', + body=json.dumps({ + 'name': NATIVE_KERNEL_NAME + }) + ) + kernel1 = json.loads(r.body.decode()) + assert r.headers['location'] == '/api/kernels/' + 
kernel1['id'] + assert r.code == 201 + assert isinstance(kernel1, dict) + + report_uri = '/api/security/csp-report' + expected_csp = '; '.join([ + "frame-ancestors 'self'", + 'report-uri ' + report_uri, + "default-src 'none'" + ]) + assert r.headers['Content-Security-Policy'] == expected_csp + + # Check that the kernel is found in the kernel list + r = await fetch( + 'api', 'kernels', + method='GET' + ) + kernel_list = json.loads(r.body.decode()) + assert r.code == 200 + assert isinstance(kernel_list, list) + assert kernel_list[0]['id'] == kernel1['id'] + assert kernel_list[0]['name'] == kernel1['name'] + + # Start a second kernel + r = await fetch( + 'api', 'kernels', + method='POST', + body=json.dumps({ + 'name': NATIVE_KERNEL_NAME + }) + ) + kernel2 = json.loads(r.body.decode()) + assert isinstance(kernel2, dict) + + # Get kernel list again + r = await fetch( + 'api', 'kernels', + method='GET' + ) + kernel_list = json.loads(r.body.decode()) + assert r.code == 200 + assert isinstance(kernel_list, list) + assert len(kernel_list) == 2 + + # Interrupt a kernel + r = await fetch( + 'api', 'kernels', kernel2['id'], 'interrupt', + method='POST', + allow_nonstandard_methods=True + ) + assert r.code == 204 + + # Restart a kernel + r = await fetch( + 'api', 'kernels', kernel2['id'], 'restart', + method='POST', + allow_nonstandard_methods=True + ) + restarted_kernel = json.loads(r.body.decode()) + assert restarted_kernel['id'] == kernel2['id'] + assert restarted_kernel['name'] == kernel2['name'] + + +async def test_kernel_handler(fetch): + # Create a kernel + r = await fetch( + 'api', 'kernels', + method='POST', + body=json.dumps({ + 'name': NATIVE_KERNEL_NAME + }) + ) + kernel_id = json.loads(r.body.decode())['id'] + r = await fetch( + 'api', 'kernels', kernel_id, + method='GET' + ) + kernel = json.loads(r.body.decode()) + assert r.code == 200 + assert isinstance(kernel, dict) + assert 'id' in kernel + assert kernel['id'] == kernel_id + + # Requests a bad kernel id. 
+ bad_id = '111-111-111-111-111' + with pytest.raises(tornado.httpclient.HTTPClientError) as e: + r = await fetch( + 'api', 'kernels', bad_id, + method='GET' + ) + assert expected_http_error(e, 404) + + # Delete kernel with id. + r = await fetch( + 'api', 'kernels', kernel_id, + method='DELETE', + ) + assert r.code == 204 + + # Get list of kernels + r = await fetch( + 'api', 'kernels', + method='GET' + ) + kernel_list = json.loads(r.body.decode()) + assert kernel_list == [] + + # Request to delete a non-existent kernel id + bad_id = '111-111-111-111-111' + with pytest.raises(tornado.httpclient.HTTPClientError) as e: + r = await fetch( + 'api', 'kernels', bad_id, + method='DELETE' + ) + assert expected_http_error(e, 404, 'Kernel does not exist: ' + bad_id) + + +async def test_connection(fetch, ws_fetch, http_port, auth_header): + print('hello') + # Create kernel + r = await fetch( + 'api', 'kernels', + method='POST', + body=json.dumps({ + 'name': NATIVE_KERNEL_NAME + }) + ) + kid = json.loads(r.body.decode())['id'] + + # Get kernel info + r = await fetch( + 'api', 'kernels', kid, + method='GET' + ) + model = json.loads(r.body.decode()) + assert model['connections'] == 0 + + time.sleep(1) + # Open a websocket connection. + ws = await ws_fetch( + 'api', 'kernels', kid, 'channels' + ) + + # Test that it was opened. 
+ r = await fetch( + 'api', 'kernels', kid, + method='GET' + ) + model = json.loads(r.body.decode()) + assert model['connections'] == 1 + + # Close websocket + ws.close() + # give it some time to close on the other side: + for i in range(10): + r = await fetch( + 'api', 'kernels', kid, + method='GET' + ) + model = json.loads(r.body.decode()) + if model['connections'] > 0: + time.sleep(0.1) + else: + break + + r = await fetch( + 'api', 'kernels', kid, + method='GET' + ) + model = json.loads(r.body.decode()) + assert model['connections'] == 0 + + +async def test_config2(serverapp): + assert serverapp.kernel_manager.allowed_message_types == [] + diff --git a/tests/services/kernels/test_config.py b/tests/services/kernels/test_config.py new file mode 100644 index 0000000000..051727fbad --- /dev/null +++ b/tests/services/kernels/test_config.py @@ -0,0 +1,17 @@ +import pytest +from traitlets.config import Config + + +@pytest.fixture +def config(): + return Config({ + 'ServerApp': { + 'MappingKernelManager': { + 'allowed_message_types': ['kernel_info_request'] + } + } + }) + + +def test_config(serverapp): + assert serverapp.kernel_manager.allowed_message_types == ['kernel_info_request'] \ No newline at end of file diff --git a/jupyter_server/services/sessions/tests/__init__.py b/tests/services/kernelspecs/__init__.py similarity index 100% rename from jupyter_server/services/sessions/tests/__init__.py rename to tests/services/kernelspecs/__init__.py diff --git a/tests/services/kernelspecs/test_api.py b/tests/services/kernelspecs/test_api.py new file mode 100644 index 0000000000..0d3a2ba387 --- /dev/null +++ b/tests/services/kernelspecs/test_api.py @@ -0,0 +1,124 @@ +import pytest +import json + +import tornado + +from jupyter_client.kernelspec import NATIVE_KERNEL_NAME + +from ...conftest import expected_http_error + + +sample_kernel_json = { + 'argv':['cat', '{connection_file}'], + 'display_name':'Test kernel', +} +some_resource = u"The very model of a modern major 
general" + + +@pytest.fixture +def kernelspecs(data_dir): + spec_names = ['sample', 'sample 2'] + for name in spec_names: + sample_kernel_dir = data_dir.joinpath('kernels', name) + sample_kernel_dir.mkdir(parents=True) + # Create kernel json file + sample_kernel_file = sample_kernel_dir.joinpath('kernel.json') + sample_kernel_file.write_text(json.dumps(sample_kernel_json)) + # Create resources text + sample_kernel_resources = sample_kernel_dir.joinpath('resource.txt') + sample_kernel_resources.write_text(some_resource) + + +async def test_list_kernelspecs_bad(fetch, kernelspecs, data_dir): + bad_kernel_dir = data_dir.joinpath(data_dir, 'kernels', 'bad') + bad_kernel_dir.mkdir(parents=True) + bad_kernel_json = bad_kernel_dir.joinpath('kernel.json') + bad_kernel_json.write_text('garbage') + + r = await fetch( + 'api', 'kernelspecs', + method='GET' + ) + model = json.loads(r.body.decode()) + assert isinstance(model, dict) + assert model['default'] == NATIVE_KERNEL_NAME + specs = model['kernelspecs'] + assert isinstance(specs, dict) + assert len(specs) > 2 + + +async def test_list_kernelspecs(fetch, kernelspecs): + r = await fetch( + 'api', 'kernelspecs', + method='GET' + ) + model = json.loads(r.body.decode()) + assert isinstance(model, dict) + assert model['default'] == NATIVE_KERNEL_NAME + specs = model['kernelspecs'] + assert isinstance(specs, dict) + assert len(specs) > 2 + + def is_sample_kernelspec(s): + return s['name'] == 'sample' and s['spec']['display_name'] == 'Test kernel' + + def is_default_kernelspec(s): + return s['name'] == NATIVE_KERNEL_NAME and s['spec']['display_name'].startswith("Python") + + assert any(is_sample_kernelspec(s) for s in specs.values()), specs + assert any(is_default_kernelspec(s) for s in specs.values()), specs + + +async def test_get_kernelspecs(fetch, kernelspecs): + r = await fetch( + 'api', 'kernelspecs', 'Sample', + method='GET' + ) + model = json.loads(r.body.decode()) + assert model['name'].lower() == 'sample' + assert 
isinstance(model['spec'], dict) + assert model['spec']['display_name'] == 'Test kernel' + assert isinstance(model['resources'], dict) + + +async def test_get_kernelspec_spaces(fetch, kernelspecs): + r = await fetch( + 'api', 'kernelspecs', 'sample%202', + method='GET' + ) + model = json.loads(r.body.decode()) + assert model['name'].lower() == 'sample 2' + + +async def test_get_nonexistant_kernelspec(fetch, kernelspecs): + with pytest.raises(tornado.httpclient.HTTPClientError) as e: + await fetch( + 'api', 'kernelspecs', 'nonexistant', + method='GET' + ) + assert expected_http_error(e, 404) + + +async def test_get_kernel_resource_file(fetch, kernelspecs): + r = await fetch( + 'kernelspecs', 'sAmple', 'resource.txt', + method='GET' + ) + res = r.body.decode('utf-8') + assert res == some_resource + + +async def test_get_nonexistant_resource(fetch, kernelspecs): + with pytest.raises(tornado.httpclient.HTTPClientError) as e: + await fetch( + 'kernelspecs', 'nonexistant', 'resource.txt', + method='GET' + ) + assert expected_http_error(e, 404) + + with pytest.raises(tornado.httpclient.HTTPClientError) as e: + await fetch( + 'kernelspecs', 'sample', 'nonexistant.txt', + method='GET' + ) + assert expected_http_error(e, 404) \ No newline at end of file diff --git a/jupyter_server/tests/__init__.py b/tests/services/nbconvert/__init__.py similarity index 100% rename from jupyter_server/tests/__init__.py rename to tests/services/nbconvert/__init__.py diff --git a/tests/services/nbconvert/test_api.py b/tests/services/nbconvert/test_api.py new file mode 100644 index 0000000000..c0633a492a --- /dev/null +++ b/tests/services/nbconvert/test_api.py @@ -0,0 +1,13 @@ +import json +import pytest + +async def test_list_formats(fetch): + r = await fetch( + 'api', 'nbconvert', + method='GET' + ) + formats = json.loads(r.body.decode()) + assert isinstance(formats, dict) + assert 'python' in formats + assert 'html' in formats + assert formats['python']['output_mimetype'] == 'text/x-python' \ 
No newline at end of file diff --git a/tests/services/sessions/__init__.py b/tests/services/sessions/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/services/sessions/test_api.py b/tests/services/sessions/test_api.py new file mode 100644 index 0000000000..20d90ded26 --- /dev/null +++ b/tests/services/sessions/test_api.py @@ -0,0 +1,61 @@ +import json +import pytest + + +def get_session_model( + path, + type='notebook', + kernel_name='python', + kernel_id=None + ): + return { + 'path': path, + 'type': type, + 'kernel': { + 'name': kernel_name, + 'id': kernel_id + } + } + + +async def test_create(fetch): + # Make sure no sessions exist. + r = await fetch( + 'api', 'sessions', + method='GET' + ) + sessions = json.loads(r.body.decode()) + assert len(sessions) == 0 + + # Create a session. + model = get_session_model('foo/nb1.ipynb') + r = await fetch( + 'api', 'sessions', + method='POST', + body=json.dumps(model) + ) + assert r.code == 201 + new_session = json.loads(r.body.decode()) + assert 'id' in new_session + assert new_session['path'] == 'foo/nb1.ipynb' + assert new_session['type'] == 'notebook' + assert r.headers['Location'] == '/api/sessions/' + new_session['id'] + + # Check that the new session appears in list. + r = await fetch( + 'api', 'sessions', + method='GET' + ) + sessions = json.loads(r.body.decode()) + assert sessions == [new_session] + + # Retrieve that session. 
+ sid = new_session['id'] + r = await fetch( + 'api', 'sessions', sid, + method='GET' + ) + got = json.loads(r.body.decode()) + assert got == new_session + + diff --git a/tests/test_config_manager.py b/tests/test_config_manager.py new file mode 100644 index 0000000000..4384f69999 --- /dev/null +++ b/tests/test_config_manager.py @@ -0,0 +1,52 @@ +import json +import os +import shutil +import tempfile + +from jupyter_server.config_manager import BaseJSONConfigManager + + +def test_json(tmp_path): + tmpdir = str(tmp_path) + + root_data = dict(a=1, x=2, nest={'a':1, 'x':2}) + with open(os.path.join(tmpdir, 'foo.json'), 'w') as f: + json.dump(root_data, f) + # also make a foo.d/ directory with multiple json files + os.makedirs(os.path.join(tmpdir, 'foo.d')) + with open(os.path.join(tmpdir, 'foo.d', 'a.json'), 'w') as f: + json.dump(dict(a=2, b=1, nest={'a':2, 'b':1}), f) + with open(os.path.join(tmpdir, 'foo.d', 'b.json'), 'w') as f: + json.dump(dict(a=3, b=2, c=3, nest={'a':3, 'b':2, 'c':3}, only_in_b={'x':1}), f) + manager = BaseJSONConfigManager(config_dir=tmpdir, read_directory=False) + data = manager.get('foo') + assert 'a' in data + assert 'x' in data + assert 'b' not in data + assert 'c' not in data + assert data['a'] == 1 + assert 'x' in data['nest'] + # if we write it out, it also shouldn't pick up the subdirectoy + manager.set('foo', data) + data = manager.get('foo') + assert data == root_data + + manager = BaseJSONConfigManager(config_dir=tmpdir, read_directory=True) + data = manager.get('foo') + assert 'a' in data + assert 'b' in data + assert 'c' in data + # files should be read in order foo.d/a.json foo.d/b.json foo.json + assert data['a'] == 1 + assert data['b'] == 2 + assert data['c'] == 3 + assert data['nest']['a'] == 1 + assert data['nest']['b'] == 2 + assert data['nest']['c'] == 3 + assert data['nest']['x'] == 2 + + # when writing out, we don't want foo.d/*.json data to be included in the root foo.json + manager.set('foo', data) + manager = 
BaseJSONConfigManager(config_dir=tmpdir, read_directory=False) + data = manager.get('foo') + assert data == root_data diff --git a/tests/test_extensions.py b/tests/test_extensions.py new file mode 100644 index 0000000000..5cd11ed6bf --- /dev/null +++ b/tests/test_extensions.py @@ -0,0 +1,157 @@ +import sys +import pytest +from collections import OrderedDict + +from types import SimpleNamespace + +from traitlets.tests.utils import check_help_all_output + +from .conftest import mkdir + +from jupyter_core import paths +from jupyter_server.serverapp import ServerApp +from jupyter_server import extensions, extensions_base +from jupyter_server.extensions import toggle_serverextension_python, _get_config_dir +from jupyter_server.config_manager import BaseJSONConfigManager + + +def test_help_output(): + check_help_all_output('jupyter_server.extensions') + check_help_all_output('jupyter_server.extensions', ['enable']) + check_help_all_output('jupyter_server.extensions', ['disable']) + check_help_all_output('jupyter_server.extensions', ['install']) + check_help_all_output('jupyter_server.extensions', ['uninstall']) + + +outer_file = __file__ + + +@pytest.fixture +def environ( + monkeypatch, + tmp_path, + data_dir, + config_dir, + ): + system_data_dir = tmp_path / 'system_data' + system_config_dir = tmp_path / 'system_config' + system_path = [str(system_data_dir)] + system_config_path = [str(system_config_dir)] + + # Set global environments variable + monkeypatch.setenv('JUPYTER_CONFIG_DIR', str(config_dir)) + monkeypatch.setenv('JUPYTER_DATA_DIR', str(data_dir)) + + # Set paths for each extension. 
+ for mod in (paths,): + monkeypatch.setattr(mod, 'SYSTEM_JUPYTER_PATH', system_path) + monkeypatch.setattr(mod, 'ENV_JUPYTER_PATH', []) + for mod in (paths, extensions_base): + monkeypatch.setattr(mod, 'SYSTEM_CONFIG_PATH', system_config_path) + monkeypatch.setattr(mod, 'ENV_CONFIG_PATH', []) + + assert paths.jupyter_config_path() == [str(config_dir)] + system_config_path + assert extensions_base._get_config_dir(user=False) == str(system_config_dir) + assert paths.jupyter_path() == [str(data_dir)] + system_path + + +class MockExtensionModule(object): + __file__ = outer_file + + @staticmethod + def _jupyter_server_extension_paths(): + return [{ + 'module': '_mockdestination/index' + }] + + loaded = False + + def load_jupyter_server_extension(self, app): + self.loaded = True + + +def get_config(user=True): + cm = BaseJSONConfigManager(config_dir=_get_config_dir(user)) + data = cm.get("jupyter_server_config") + return data.get("ServerApp", {}).get("jpserver_extensions", {}) + + +@pytest.fixture +def inject_mock_extension(environ): + def ext(modulename='mockextension'): + sys.modules[modulename] = e = MockExtensionModule() + return e + return ext + + +def test_enable(inject_mock_extension): + inject_mock_extension() + toggle_serverextension_python('mockextension', True) + config = get_config() + assert config['mockextension'] + + +def test_disable(inject_mock_extension): + inject_mock_extension() + toggle_serverextension_python('mockextension', True) + toggle_serverextension_python('mockextension', False) + + config = get_config() + assert not config['mockextension'] + + +def test_merge_config(inject_mock_extension): + # enabled at sys level + mock_sys = inject_mock_extension('mockext_sys') + # enabled at sys, disabled at user + mock_both = inject_mock_extension('mockext_both') + # enabled at user + mock_user = inject_mock_extension('mockext_user') + # enabled at Python + mock_py = inject_mock_extension('mockext_py') + + toggle_serverextension_python('mockext_sys', 
enabled=True, user=False) + toggle_serverextension_python('mockext_user', enabled=True, user=True) + toggle_serverextension_python('mockext_both', enabled=True, user=False) + toggle_serverextension_python('mockext_both', enabled=False, user=True) + + app = ServerApp(jpserver_extensions={'mockext_py': True}) + app.init_server_extension_config() + app.init_server_extensions() + + assert mock_user.loaded + assert mock_sys.loaded + assert mock_py.loaded + assert not mock_both.loaded + + +@pytest.fixture +def ordered_server_extensions(): + mockextension1 = SimpleNamespace() + mockextension2 = SimpleNamespace() + + def load_jupyter_server_extension(obj): + obj.mockI = True + obj.mock_shared = 'I' + + mockextension1.load_jupyter_server_extension = load_jupyter_server_extension + + def load_jupyter_server_extension(obj): + obj.mockII = True + obj.mock_shared = 'II' + + mockextension2.load_jupyter_server_extension = load_jupyter_server_extension + + sys.modules['mockextension2'] = mockextension2 + sys.modules['mockextension1'] = mockextension1 + + +def test_load_ordered(ordered_server_extensions): + app = ServerApp() + app.jpserver_extensions = OrderedDict([('mockextension2',True),('mockextension1',True)]) + + app.init_server_extensions() + + assert app.mockII is True, "Mock II should have been loaded" + assert app.mockI is True, "Mock I should have been loaded" + assert app.mock_shared == 'II', "Mock II should be loaded after Mock I" diff --git a/tests/test_files.py b/tests/test_files.py new file mode 100644 index 0000000000..0185c0e2fa --- /dev/null +++ b/tests/test_files.py @@ -0,0 +1,174 @@ +import os +import pytest +import tornado + +from .conftest import expected_http_error + +from nbformat import writes +from nbformat.v4 import (new_notebook, + new_markdown_cell, new_code_cell, + new_output) + + +async def test_hidden_files(fetch, serverapp, root_dir): + not_hidden = [ + u'å b', + u'å b/ç. 
d', + ] + hidden = [ + u'.å b', + u'å b/.ç d', + ] + dirs = not_hidden + hidden + + for d in dirs: + path = root_dir / d.replace('/', os.sep) + path.mkdir(parents=True, exist_ok=True) + path.joinpath('foo').write_text('foo') + path.joinpath('.foo').write_text('.foo') + + + for d in not_hidden: + path = root_dir / d.replace('/', os.sep) + + r = await fetch( + 'files', d, 'foo', + method='GET' + ) + assert r.body.decode() == 'foo' + + with pytest.raises(tornado.httpclient.HTTPClientError) as e: + r = await fetch( + 'files', d, '.foo', + method='GET' + ) + assert expected_http_error(e, 404) + + + for d in hidden: + path = root_dir / d.replace('/', os.sep) + for foo in ('foo', '.foo'): + with pytest.raises(tornado.httpclient.HTTPClientError) as e: + r = await fetch( + 'files', d, foo, + method='GET' + ) + assert expected_http_error(e, 404) + + serverapp.contents_manager.allow_hidden = True + + for d in not_hidden: + path = root_dir / d.replace('/', os.sep) + + r = await fetch( + 'files', d, 'foo', + method='GET' + ) + assert r.body.decode() == 'foo' + + r = await fetch( + 'files', d, '.foo', + method='GET' + ) + assert r.body.decode() == '.foo' + + for d in hidden: + path = root_dir / d.replace('/', os.sep) + + for foo in ('foo', '.foo'): + r = await fetch( + 'files', d, foo, + method='GET' + ) + assert r.body.decode() == foo + + +async def test_contents_manager(fetch, serverapp, root_dir): + "make sure ContentsManager returns right files (ipynb, bin, txt)." 
+ nb = new_notebook( + cells=[ + new_markdown_cell(u'Created by test ³'), + new_code_cell("print(2*6)", outputs=[ + new_output("stream", text="12"), + ]) + ] + ) + root_dir.joinpath('testnb.ipynb').write_text(writes(nb, version=4), encoding='utf-8') + root_dir.joinpath('test.bin').write_bytes(b'\xff' + os.urandom(5)) + root_dir.joinpath('test.txt').write_text('foobar') + + r = await fetch( + 'files/testnb.ipynb', + method='GET' + ) + assert r.code == 200 + assert 'print(2*6)' in r.body.decode('utf-8') + + r = await fetch( + 'files/test.bin', + method='GET' + ) + assert r.code == 200 + assert r.headers['content-type'] == 'application/octet-stream' + assert r.body[:1] == b'\xff' + assert len(r.body) == 6 + + r = await fetch( + 'files/test.txt', + method='GET' + ) + assert r.code == 200 + assert r.headers['content-type'] == 'text/plain; charset=UTF-8' + assert r.body.decode() == 'foobar' + + +async def test_download(fetch, serverapp, root_dir): + text = 'hello' + root_dir.joinpath('test.txt').write_text(text) + + r = await fetch( + 'files', 'test.txt', + method='GET' + ) + disposition = r.headers.get('Content-Disposition', '') + assert 'attachment' not in disposition + + r = await fetch( + 'files', 'test.txt', + method='GET', + params={'download': True} + ) + disposition = r.headers.get('Content-Disposition', '') + assert 'attachment' in disposition + assert "filename*=utf-8''test.txt" in disposition + + +async def test_old_files_redirect(fetch, serverapp, root_dir): + """pre-2.0 'files/' prefixed links are properly redirected""" + root_dir.joinpath('files').mkdir(parents=True, exist_ok=True) + root_dir.joinpath('sub', 'files').mkdir(parents=True, exist_ok=True) + + + for prefix in ('', 'sub'): + root_dir.joinpath(prefix, 'files', 'f1.txt').write_text(prefix + '/files/f1') + root_dir.joinpath(prefix, 'files', 'f2.txt').write_text(prefix + '/files/f2') + root_dir.joinpath(prefix, 'f2.txt').write_text(prefix + '/f2') + root_dir.joinpath(prefix, 
'f3.txt').write_text(prefix + '/f3') + + # These depend on the tree handlers + # + # def test_download(self): + # rootdir = self.root_dir + + # text = 'hello' + # with open(pjoin(rootdir, 'test.txt'), 'w') as f: + # f.write(text) + + # r = self.request('GET', 'files/test.txt') + # disposition = r.headers.get('Content-Disposition', '') + # self.assertNotIn('attachment', disposition) + + # r = self.request('GET', 'files/test.txt?download=1') + # disposition = r.headers.get('Content-Disposition', '') + # self.assertIn('attachment', disposition) + # self.assertIn("filename*=utf-8''test.txt", disposition) \ No newline at end of file diff --git a/tests/test_gateway.py b/tests/test_gateway.py new file mode 100644 index 0000000000..4ad4d71a68 --- /dev/null +++ b/tests/test_gateway.py @@ -0,0 +1,404 @@ +"""Test GatewayClient""" +import json +import os +import pytest +import tornado +import uuid +from datetime import datetime +from tornado import gen +from tornado.web import HTTPError +from tornado.httpclient import HTTPRequest, HTTPResponse +from ipython_genutils.py3compat import str_to_unicode +from jupyter_server.serverapp import ServerApp +from jupyter_server.gateway.managers import GatewayClient + +from unittest.mock import patch +from io import StringIO +from .conftest import expected_http_error + + +def generate_kernelspec(name): + argv_stanza = ['python', '-m', 'ipykernel_launcher', '-f', '{connection_file}'] + spec_stanza = {'spec': {'argv': argv_stanza, 'env': {}, 'display_name': name, 'language': 'python', 'interrupt_mode': 'signal', 'metadata': {}}} + kernelspec_stanza = {'name': name, 'spec': spec_stanza, 'resources': {}} + return kernelspec_stanza + + +# We'll mock up two kernelspecs - kspec_foo and kspec_bar +kernelspecs = {'default': 'kspec_foo', 'kernelspecs': {'kspec_foo': generate_kernelspec('kspec_foo'), 'kspec_bar': generate_kernelspec('kspec_bar')}} + + +# maintain a dictionary of expected running kernels. Key = kernel_id, Value = model. 
+running_kernels = dict() + + +def generate_model(name): + """Generate a mocked kernel model. Caller is responsible for adding model to running_kernels dictionary.""" + dt = datetime.utcnow().isoformat() + 'Z' + kernel_id = str(uuid.uuid4()) + model = {'id': kernel_id, 'name': name, 'last_activity': str(dt), 'execution_state': 'idle', 'connections': 1} + return model + + +async def mock_gateway_request(url, **kwargs): + method = 'GET' + if kwargs['method']: + method = kwargs['method'] + + request = HTTPRequest(url=url, **kwargs) + + endpoint = str(url) + + # Fetch all kernelspecs + if endpoint.endswith('/api/kernelspecs') and method == 'GET': + response_buf = StringIO(str_to_unicode(json.dumps(kernelspecs))) + response = await gen.maybe_future(HTTPResponse(request, 200, buffer=response_buf)) + return response + + # Fetch named kernelspec + if endpoint.rfind('/api/kernelspecs/') >= 0 and method == 'GET': + requested_kernelspec = endpoint.rpartition('/')[2] + kspecs = kernelspecs.get('kernelspecs') + if requested_kernelspec in kspecs: + response_buf = StringIO(str_to_unicode(json.dumps(kspecs.get(requested_kernelspec)))) + response = await gen.maybe_future(HTTPResponse(request, 200, buffer=response_buf)) + return response + else: + raise HTTPError(404, message='Kernelspec does not exist: %s' % requested_kernelspec) + + # Create kernel + if endpoint.endswith('/api/kernels') and method == 'POST': + json_body = json.loads(kwargs['body']) + name = json_body.get('name') + env = json_body.get('env') + kspec_name = env.get('KERNEL_KSPEC_NAME') + assert name == kspec_name # Ensure that KERNEL_ env values get propagated + model = generate_model(name) + running_kernels[model.get('id')] = model # Register model as a running kernel + response_buf = StringIO(str_to_unicode(json.dumps(model))) + response = await gen.maybe_future(HTTPResponse(request, 201, buffer=response_buf)) + return response + + # Fetch list of running kernels + if endpoint.endswith('/api/kernels') and method 
== 'GET': + kernels = [] + for kernel_id in running_kernels.keys(): + model = running_kernels.get(kernel_id) + kernels.append(model) + response_buf = StringIO(str_to_unicode(json.dumps(kernels))) + response = await gen.maybe_future(HTTPResponse(request, 200, buffer=response_buf)) + return response + + # Interrupt or restart existing kernel + if endpoint.rfind('/api/kernels/') >= 0 and method == 'POST': + requested_kernel_id, sep, action = endpoint.rpartition('/api/kernels/')[2].rpartition('/') + + if action == 'interrupt': + if requested_kernel_id in running_kernels: + response = await gen.maybe_future(HTTPResponse(request, 204)) + return response + else: + raise HTTPError(404, message='Kernel does not exist: %s' % requested_kernel_id) + elif action == 'restart': + if requested_kernel_id in running_kernels: + response_buf = StringIO(str_to_unicode(json.dumps(running_kernels.get(requested_kernel_id)))) + response = await gen.maybe_future(HTTPResponse(request, 204, buffer=response_buf)) + return response + else: + raise HTTPError(404, message='Kernel does not exist: %s' % requested_kernel_id) + else: + raise HTTPError(404, message='Bad action detected: %s' % action) + + # Shutdown existing kernel + if endpoint.rfind('/api/kernels/') >= 0 and method == 'DELETE': + requested_kernel_id = endpoint.rpartition('/')[2] + running_kernels.pop(requested_kernel_id) # Simulate shutdown by removing kernel from running set + response = await gen.maybe_future(HTTPResponse(request, 204)) + return response + + # Fetch existing kernel + if endpoint.rfind('/api/kernels/') >= 0 and method == 'GET': + requested_kernel_id = endpoint.rpartition('/')[2] + if requested_kernel_id in running_kernels: + response_buf = StringIO(str_to_unicode(json.dumps(running_kernels.get(requested_kernel_id)))) + response = await gen.maybe_future(HTTPResponse(request, 200, buffer=response_buf)) + return response + else: + raise HTTPError(404, message='Kernel does not exist: %s' % requested_kernel_id) + + 
+mocked_gateway = patch('jupyter_server.gateway.managers.gateway_request', mock_gateway_request) +mock_gateway_url = 'http://mock-gateway-server:8889' +mock_http_user = 'alice' + + +@pytest.fixture +def init_gateway(monkeypatch): + """Initializes the server for use as a gateway client. """ + # Clear the singleton first since previous tests may not have used a gateway. + GatewayClient.clear_instance() + monkeypatch.setenv('JUPYTER_GATEWAY_URL', mock_gateway_url) + monkeypatch.setenv('JUPYTER_GATEWAY_HTTP_USER', mock_http_user) + monkeypatch.setenv('JUPYTER_GATEWAY_REQUEST_TIMEOUT', '44.4') + monkeypatch.setenv('JUPYTER_GATEWAY_CONNECT_TIMEOUT', '44.4') + yield + GatewayClient.clear_instance() + + +async def test_gateway_env_options(init_gateway, serverapp): + assert serverapp.gateway_config.gateway_enabled is True + assert serverapp.gateway_config.url == mock_gateway_url + assert serverapp.gateway_config.http_user == mock_http_user + assert serverapp.gateway_config.connect_timeout == serverapp.gateway_config.request_timeout + assert serverapp.gateway_config.connect_timeout == 44.4 + + +async def test_gateway_cli_options(configurable_serverapp): + argv = [ + "--gateway-url='" + mock_gateway_url + "'", + "--GatewayClient.http_user='" + mock_http_user + "'", + '--GatewayClient.connect_timeout=44.4', + '--GatewayClient.request_timeout=44.4' + ] + + + GatewayClient.clear_instance() + app = configurable_serverapp(argv=argv) + + assert app.gateway_config.gateway_enabled is True + assert app.gateway_config.url == mock_gateway_url + assert app.gateway_config.http_user == mock_http_user + assert app.gateway_config.connect_timeout == app.gateway_config.request_timeout + assert app.gateway_config.connect_timeout == 44.4 + GatewayClient.clear_instance() + + +async def test_gateway_class_mappings(init_gateway, serverapp): + # Ensure appropriate class mappings are in place. 
+ assert serverapp.kernel_manager_class.__name__ == 'GatewayKernelManager' + assert serverapp.session_manager_class.__name__ == 'GatewaySessionManager' + assert serverapp.kernel_spec_manager_class.__name__ == 'GatewayKernelSpecManager' + + +async def test_gateway_get_kernelspecs(init_gateway, fetch): + # Validate that kernelspecs come from gateway. + with mocked_gateway: + r = await fetch( + 'api', 'kernelspecs', + method='GET' + ) + assert r.code == 200 + content = json.loads(r.body.decode('utf-8')) + kspecs = content.get('kernelspecs') + assert len(kspecs) == 2 + assert kspecs.get('kspec_bar').get('name') == 'kspec_bar' + + +async def test_gateway_get_named_kernelspec(init_gateway, fetch): + # Validate that a specific kernelspec can be retrieved from gateway (and an invalid spec can't) + with mocked_gateway: + r = await fetch( + 'api', 'kernelspecs', 'kspec_foo', + method='GET' + ) + assert r.code == 200 + kspec_foo = json.loads(r.body.decode('utf-8')) + assert kspec_foo.get('name') == 'kspec_foo' + + with pytest.raises(tornado.httpclient.HTTPClientError) as e: + await fetch( + 'api', 'kernelspecs', 'no_such_spec', + method='GET' + ) + assert expected_http_error(e, 404) + + +async def test_gateway_session_lifecycle(init_gateway, root_dir, fetch): + # Validate session lifecycle functions; create and delete. 
+ + # create + session_id, kernel_id = await create_session(root_dir, fetch, 'kspec_foo') + + # ensure kernel still considered running + assert await is_kernel_running(fetch, kernel_id) is True + + # interrupt + await interrupt_kernel(fetch, kernel_id) + + # ensure kernel still considered running + assert await is_kernel_running(fetch, kernel_id) is True + + # restart + await restart_kernel(fetch, kernel_id) + + # ensure kernel still considered running + assert await is_kernel_running(fetch, kernel_id) is True + + # delete + await delete_session(fetch, session_id) + assert await is_kernel_running(fetch, kernel_id) is False + + +async def test_gateway_kernel_lifecycle(init_gateway, fetch): + # Validate kernel lifecycle functions; create, interrupt, restart and delete. + + # create + kernel_id = await create_kernel(fetch, 'kspec_bar') + + # ensure kernel still considered running + assert await is_kernel_running(fetch, kernel_id) is True + + # interrupt + await interrupt_kernel(fetch, kernel_id) + + # ensure kernel still considered running + assert await is_kernel_running(fetch, kernel_id) is True + + # restart + await restart_kernel(fetch, kernel_id) + + # ensure kernel still considered running + assert await is_kernel_running(fetch, kernel_id) is True + + # delete + await delete_kernel(fetch, kernel_id) + assert await is_kernel_running(fetch, kernel_id) is False + + +# +# Test methods below... +# +async def create_session(root_dir, fetch, kernel_name): + """Creates a session for a kernel. The session is created against the server + which then uses the gateway for kernel management. + """ + with mocked_gateway: + nb_path = root_dir / 'testgw.ipynb' + body = json.dumps({'path': str(nb_path), + 'type': 'notebook', + 'kernel': {'name': kernel_name}}) + + # add a KERNEL_ value to the current env and we'll ensure that that value exists in the mocked method + os.environ['KERNEL_KSPEC_NAME'] = kernel_name + + # Create the kernel... 
(also tests get_kernel) + r = await fetch( + 'api', 'sessions', + method='POST', + body=body + ) + assert r.code == 201 + model = json.loads(r.body.decode('utf-8')) + assert model.get('path') == str(nb_path) + kernel_id = model.get('kernel').get('id') + # ensure its in the running_kernels and name matches. + running_kernel = running_kernels.get(kernel_id) + assert kernel_id == running_kernel.get('id') + assert model.get('kernel').get('name') == running_kernel.get('name') + session_id = model.get('id') + + # restore env + os.environ.pop('KERNEL_KSPEC_NAME') + return session_id, kernel_id + + +async def delete_session(fetch, session_id): + """Deletes a session corresponding to the given session id. + """ + with mocked_gateway: + # Delete the session (and kernel) + r = await fetch( + 'api', 'sessions', session_id, + method='DELETE' + ) + assert r.code == 204 + assert r.reason == 'No Content' + + +async def is_kernel_running(fetch, kernel_id): + """Issues request to get the set of running kernels + """ + with mocked_gateway: + # Get list of running kernels + r = await fetch( + 'api', 'kernels', + method='GET' + ) + assert r.code == 200 + kernels = json.loads(r.body.decode('utf-8')) + assert len(kernels) == len(running_kernels) + for model in kernels: + if model.get('id') == kernel_id: + return True + return False + + +async def create_kernel(fetch, kernel_name): + """Issues request to retart the given kernel + """ + with mocked_gateway: + body = json.dumps({'name': kernel_name}) + + # add a KERNEL_ value to the current env and we'll ensure that that value exists in the mocked method + os.environ['KERNEL_KSPEC_NAME'] = kernel_name + + r = await fetch( + 'api', 'kernels', + method='POST', + body=body + ) + assert r.code == 201 + model = json.loads(r.body.decode('utf-8')) + kernel_id = model.get('id') + # ensure its in the running_kernels and name matches. 
+ running_kernel = running_kernels.get(kernel_id) + assert kernel_id == running_kernel.get('id') + assert model.get('name') == kernel_name + + # restore env + os.environ.pop('KERNEL_KSPEC_NAME') + return kernel_id + + +async def interrupt_kernel(fetch, kernel_id): + """Issues request to interrupt the given kernel + """ + with mocked_gateway: + r = await fetch( + 'api', 'kernels', kernel_id, 'interrupt', + method='POST', + allow_nonstandard_methods=True + ) + assert r.code == 204 + assert r.reason == 'No Content' + + +async def restart_kernel(fetch, kernel_id): + """Issues request to retart the given kernel + """ + with mocked_gateway: + r = await fetch( + 'api', 'kernels', kernel_id, 'restart', + method='POST', + allow_nonstandard_methods=True + ) + assert r.code == 200 + model = json.loads(r.body.decode('utf-8')) + restarted_kernel_id = model.get('id') + # ensure its in the running_kernels and name matches. + running_kernel = running_kernels.get(restarted_kernel_id) + assert restarted_kernel_id == running_kernel.get('id') + assert model.get('name') == running_kernel.get('name') + + +async def delete_kernel(fetch, kernel_id): + """Deletes kernel corresponding to the given kernel id. 
+ """ + with mocked_gateway: + # Delete the session (and kernel) + r = await fetch( + 'api', 'kernels', kernel_id, + method='DELETE' + ) + assert r.code == 204 + assert r.reason == 'No Content' diff --git a/jupyter_server/tests/test_paths.py b/tests/test_paths.py similarity index 82% rename from jupyter_server/tests/test_paths.py rename to tests/test_paths.py index 88e6d15896..45f60b2d23 100644 --- a/jupyter_server/tests/test_paths.py +++ b/tests/test_paths.py @@ -1,6 +1,4 @@ - import re -import nose.tools as nt from jupyter_server.base.handlers import path_regex @@ -17,7 +15,7 @@ def test_path_regex(): '/x/foo/bar', '/x/foo/bar.txt', ): - nt.assert_regex(path, path_pat) + assert re.match(path_pat, path) def test_path_regex_bad(): for path in ( @@ -30,4 +28,4 @@ def test_path_regex_bad(): '/y', '/y/x/foo', ): - nt.assert_not_regex(path, path_pat) + assert re.match(path_pat, path) is None diff --git a/jupyter_server/tests/test_serialize.py b/tests/test_serialize.py similarity index 83% rename from jupyter_server/tests/test_serialize.py rename to tests/test_serialize.py index 600928b0af..07947dc549 100644 --- a/jupyter_server/tests/test_serialize.py +++ b/tests/test_serialize.py @@ -2,10 +2,8 @@ import os -import nose.tools as nt - from jupyter_client.session import Session -from ..base.zmqhandlers import ( +from jupyter_server.base.zmqhandlers import ( serialize_binary_message, deserialize_binary_message, ) @@ -15,7 +13,8 @@ def test_serialize_binary(): msg = s.msg('data_pub', content={'a': 'b'}) msg['buffers'] = [ memoryview(os.urandom(3)) for i in range(3) ] bmsg = serialize_binary_message(msg) - nt.assert_is_instance(bmsg, bytes) + assert isinstance(bmsg, bytes) + def test_deserialize_binary(): s = Session() @@ -23,4 +22,4 @@ def test_deserialize_binary(): msg['buffers'] = [ memoryview(os.urandom(2)) for i in range(3) ] bmsg = serialize_binary_message(msg) msg2 = deserialize_binary_message(bmsg) - nt.assert_equal(msg2, msg) + assert msg2 == msg \ No newline at 
end of file diff --git a/tests/test_serverapp.py b/tests/test_serverapp.py new file mode 100644 index 0000000000..8b1ed09bd3 --- /dev/null +++ b/tests/test_serverapp.py @@ -0,0 +1,125 @@ + +import os +import getpass +import pathlib +import pytest +import logging + +from unittest.mock import patch + + +from traitlets import TraitError +from traitlets.tests.utils import check_help_all_output + +from jupyter_core.application import NoStart + + +from jupyter_server.serverapp import ( + ServerApp, + list_running_servers, + JupyterPasswordApp, + JupyterServerStopApp +) +from jupyter_server.auth.security import passwd_check + + +def test_help_output(): + """jupyter server --help-all works""" + check_help_all_output('jupyter_server') + + +def test_server_info_file(tmp_path, configurable_serverapp): + app = configurable_serverapp(log=logging.getLogger()) + + app.write_server_info_file() + servers = list(list_running_servers(app.runtime_dir)) + + assert len(servers) == 1 + sinfo = servers[0] + + assert sinfo['port'] == app.port + assert sinfo['url'] == app.connection_url + assert sinfo['version'] == app.version + + app.remove_server_info_file() + + assert list(list_running_servers(app.runtime_dir)) == [] + app.remove_server_info_file + + +def test_root_dir(tmp_path, configurable_serverapp): + app = configurable_serverapp(root_dir=str(tmp_path)) + assert app.root_dir == str(tmp_path) + + +# Build a list of invalid paths +@pytest.fixture( + params=[ + ('notebooks',), + ('root', 'dir', 'is', 'missing'), + ('test.txt',) + ] +) +def invalid_root_dir(tmp_path, request): + path = tmp_path.joinpath(*request.param) + # If the path is a file, create it. 
+ if os.path.splitext(str(path))[1] != '': + path.write_text('') + return str(path) + + +def test_invalid_root_dir(invalid_root_dir, configurable_serverapp): + app = configurable_serverapp() + with pytest.raises(TraitError): + app.root_dir = invalid_root_dir + +@pytest.fixture( + params=[ + ('/',), + ('first-level',), + ('first-level', 'second-level') + ] +) +def valid_root_dir(tmp_path, request): + path = tmp_path.joinpath(*request.param) + if not path.exists(): + # Create path in temporary directory + path.mkdir(parents=True) + return str(path) + +def test_valid_root_dir(valid_root_dir, configurable_serverapp): + app = configurable_serverapp(root_dir=valid_root_dir) + root_dir = valid_root_dir + # If nested path, the last slash should + # be stripped by the root_dir trait. + if root_dir != '/': + root_dir = valid_root_dir.rstrip('/') + assert app.root_dir == root_dir + + +def test_generate_config(tmp_path, configurable_serverapp): + app = configurable_serverapp(config_dir=str(tmp_path)) + app.initialize(['--generate-config', '--allow-root']) + with pytest.raises(NoStart): + app.start() + assert tmp_path.joinpath('jupyter_server_config.py').exists() + + +def test_server_password(tmp_path, configurable_serverapp): + password = 'secret' + with patch.dict( + 'os.environ', {'JUPYTER_CONFIG_DIR': str(tmp_path)} + ), patch.object(getpass, 'getpass', return_value=password): + app = JupyterPasswordApp(log_level=logging.ERROR) + app.initialize([]) + app.start() + sv = configurable_serverapp() + sv.load_config_file() + assert sv.password != '' + passwd_check(sv.password, password) + + +def test_list_running_servers(serverapp, app): + servers = list(list_running_servers(serverapp.runtime_dir)) + assert len(servers) >= 1 + diff --git a/tests/test_utils.py b/tests/test_utils.py new file mode 100644 index 0000000000..039d86df33 --- /dev/null +++ b/tests/test_utils.py @@ -0,0 +1,86 @@ +import os +import sys +import ctypes +import pytest + +from traitlets.tests.utils import 
check_help_all_output +from jupyter_server.utils import url_escape, url_unescape, is_hidden, is_file_hidden, secure_write +from ipython_genutils.py3compat import cast_unicode +from ipython_genutils.tempdir import TemporaryDirectory +from ipython_genutils.testing.decorators import skip_if_not_win32, skip_win32 + + +def test_help_output(): + check_help_all_output('jupyter_server') + + + +@pytest.mark.parametrize( + 'unescaped,escaped', + [ + ( + '/this is a test/for spaces/', + '/this%20is%20a%20test/for%20spaces/' + ), + ( + 'notebook with space.ipynb', + 'notebook%20with%20space.ipynb' + ), + ( + '/path with a/notebook and space.ipynb', + '/path%20with%20a/notebook%20and%20space.ipynb' + ), + ( + '/ !@$#%^&* / test %^ notebook @#$ name.ipynb', + '/%20%21%40%24%23%25%5E%26%2A%20/%20test%20%25%5E%20notebook%20%40%23%24%20name.ipynb' + ) + ] +) +def test_url_escaping(unescaped, escaped): + # Test escaping. + path = url_escape(unescaped) + assert path == escaped + # Test unescaping. + path = url_unescape(escaped) + assert path == unescaped + + +def test_is_hidden(tmp_path): + root = str(tmp_path) + subdir1_path = tmp_path / 'subdir' + subdir1_path.mkdir() + subdir1 = str(subdir1_path) + assert not is_hidden(subdir1, root) + assert not is_file_hidden(subdir1) + + subdir2_path = tmp_path / '.subdir2' + subdir2_path.mkdir() + subdir2 = str(subdir2_path) + assert is_hidden(subdir2, root) + assert is_file_hidden(subdir2) + + subdir34_path = tmp_path / 'subdir3' / '.subdir4' + subdir34_path.mkdir(parents=True) + subdir34 = str(subdir34_path) + assert is_hidden(subdir34, root) + assert is_hidden(subdir34) + + subdir56_path = tmp_path / '.subdir5' / 'subdir6' + subdir56_path.mkdir(parents=True) + subdir56 = str(subdir56_path) + assert is_hidden(subdir56, root) + assert is_hidden(subdir56) + assert not is_file_hidden(subdir56) + assert not is_file_hidden(subdir56, os.stat(subdir56)) + + +@pytest.mark.skipif(sys.platform != "win32", reason="Test is not windows.") +def 
test_is_hidden_win32(tmp_path): + root = str(tmp_path) + root = cast_unicode(root) + subdir1 = tmp_path / 'subdir' + subdir1.mkdir() + assert not is_hidden(str(subdir1), root) + ctypes.windll.kernel32.SetFileAttributesW(str(subdir1), 0x02) + assert is_hidden(str(subdir1), root) + assert is_file_hidden(str(subdir1)) diff --git a/tests/test_version.py b/tests/test_version.py new file mode 100644 index 0000000000..43f2db1cae --- /dev/null +++ b/tests/test_version.py @@ -0,0 +1,48 @@ +import re +import pytest + +from jupyter_server import __version__ + + +pep440re = re.compile('^(\d+)\.(\d+)\.(\d+((a|b|rc)\d+)?)(\.post\d+)?(\.dev\d*)?$') + +def raise_on_bad_version(version): + if not pep440re.match(version): + raise ValueError("Versions String does apparently not match Pep 440 specification, " + "which might lead to sdist and wheel being seen as 2 different release. " + "E.g: do not use dots for beta/alpha/rc markers.") + +# --------- Meta test to test the versioning tests ------------- + +@pytest.mark.parametrize( + 'version', + [ + '4.1.0.b1', + '4.1.b1', + '4.2', + 'X.y.z', + '1.2.3.dev1.post2', + ] +) +def test_invalid_pep440_versions(version): + with pytest.raises(ValueError): + raise_on_bad_version(version) + + +@pytest.mark.parametrize( + 'version', + [ + '4.1.1', + '4.2.1b3', + ] +) +def test_valid_pep440_versions(version): + assert raise_on_bad_version(version) is None + + +# --------- Test current version -------------- +def test_current_version(): + raise_on_bad_version(__version__) + + +