Script 'mail_helper' called by obssrc
Hello community,

here is the log from the commit of package python-jupyter-server for openSUSE:Factory checked in at 2023-02-06 14:15:22

++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/python-jupyter-server (Old)
 and      /work/SRC/openSUSE:Factory/.python-jupyter-server.new.4462 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Package is "python-jupyter-server"

Mon Feb 6 14:15:22 2023 rev:30 rq:1063312 version:2.2.1

Changes:
--------
--- /work/SRC/openSUSE:Factory/python-jupyter-server/python-jupyter-server.changes  2023-01-16 18:01:43.675767283 +0100
+++ /work/SRC/openSUSE:Factory/.python-jupyter-server.new.4462/python-jupyter-server.changes  2023-02-06 14:15:22.960560406 +0100
@@ -1,0 +2,15 @@
+Sun Feb 5 16:11:49 UTC 2023 - Ben Greiner <c...@bnavigator.de>
+
+- Update to 2.2.1
+  * remove upper bound on anyio #1192 (@minrk)
+- Release 2.2.0
+  ## Enhancements made
+  * Only load enabled extension packages #1180 (@minrk)
+  * Pass in a logger to get_metadata #1176 (@yuvipanda)
+  ## Bugs fixed
+  * Don't assume that resources entries are relative #1182 (@ojarjur)
+  ## Maintenance and upkeep improvements
+  * Updates for client 8 #1188 (@blink1073)
+  * Use repr in logging for exception. #1185 (@Carreau)
+
+-------------------------------------------------------------------

Old:
----
  jupyter_server-2.1.0.tar.gz

New:
----
  jupyter_server-2.2.1.tar.gz

++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Other differences:
------------------
++++++ python-jupyter-server.spec ++++++
--- /var/tmp/diff_new_pack.BJzxOy/_old  2023-02-06 14:15:23.496563077 +0100
+++ /var/tmp/diff_new_pack.BJzxOy/_new  2023-02-06 14:15:23.504563117 +0100
@@ -32,7 +32,7 @@
 %endif
 
 Name: python-jupyter-server%{psuffix}
-Version: 2.1.0
+Version: 2.2.1
 Release: 0
 Summary: The backend to Jupyter web applications
 License: BSD-3-Clause
@@ -51,6 +51,7 @@
 Requires: python >= 3.8
 Requires: python-Jinja2
 Requires: python-Send2Trash
+Requires: python-anyio >= 3.1.0
 Requires: python-argon2-cffi
 Requires: python-jupyter-client >= 7.4.4
 Requires: python-jupyter_events >= 0.4.0
@@ -65,7 +66,6 @@
 Requires: python-traitlets >= 5.6
 Requires: python-websocket-client
 Requires: ((python-jupyter-core >= 4.12 with python-jupyter-core < 5.0) or python-jupyter-core >= 5.1)
-Requires: (python-anyio >= 3.1.0 with python-anyio < 4)
 Provides: python-jupyter_server = %{version}-%{release}
 Obsoletes: python-jupyter_server < %{version}-%{release}
 %if %{with test}

++++++ jupyter_server-2.1.0.tar.gz -> jupyter_server-2.2.1.tar.gz ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/jupyter_server-2.1.0/.github/workflows/python-tests.yml new/jupyter_server-2.2.1/.github/workflows/python-tests.yml
--- old/jupyter_server-2.1.0/.github/workflows/python-tests.yml  2020-02-02 01:00:00.000000000 +0100
+++ new/jupyter_server-2.2.1/.github/workflows/python-tests.yml  2020-02-02 01:00:00.000000000 +0100
@@ -179,6 +179,7 @@
       - build
       - test_docs
       - test_lint
+      - test_examples
       - test_minimum_versions
       - test_prereleases
       - check_links
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/jupyter_server-2.1.0/.pre-commit-config.yaml new/jupyter_server-2.2.1/.pre-commit-config.yaml
--- old/jupyter_server-2.1.0/.pre-commit-config.yaml  2020-02-02 01:00:00.000000000 +0100
+++ new/jupyter_server-2.2.1/.pre-commit-config.yaml  2020-02-02 01:00:00.000000000 +0100
@@ -35,7 +35,7 @@
       - id: black
 
   - repo: https://github.com/charliermarsh/ruff-pre-commit
-    rev: v0.0.206
+    rev: v0.0.236
    hooks:
      - id: ruff
        args: ["--fix"]
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/jupyter_server-2.1.0/CHANGELOG.md new/jupyter_server-2.2.1/CHANGELOG.md
--- old/jupyter_server-2.1.0/CHANGELOG.md  2020-02-02 01:00:00.000000000 +0100
+++ new/jupyter_server-2.2.1/CHANGELOG.md  2020-02-02
01:00:00.000000000 +0100 @@ -4,6 +4,51 @@ <!-- <START NEW CHANGELOG ENTRY> --> +## 2.2.1 + +([Full Changelog](https://github.com/jupyter-server/jupyter_server/compare/v2.2.0...0f9556b48d7699bd2d246222067b1cb215d44c28)) + +### Maintenance and upkeep improvements + +- Delete the extra "or" in front of the second url [#1194](https://github.com/jupyter-server/jupyter_server/pull/1194) ([@jonnygrout](https://github.com/jonnygrout)) +- remove upper bound on anyio [#1192](https://github.com/jupyter-server/jupyter_server/pull/1192) ([@minrk](https://github.com/minrk)) +- Adopt more lint rules [#1189](https://github.com/jupyter-server/jupyter_server/pull/1189) ([@blink1073](https://github.com/blink1073)) + +### Contributors to this release + +([GitHub contributors page for this release](https://github.com/jupyter-server/jupyter_server/graphs/contributors?from=2023-01-31&to=2023-02-02&type=c)) + +[@blink1073](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Ablink1073+updated%3A2023-01-31..2023-02-02&type=Issues) | [@codecov](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Acodecov+updated%3A2023-01-31..2023-02-02&type=Issues) | [@jonnygrout](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Ajonnygrout+updated%3A2023-01-31..2023-02-02&type=Issues) | [@minrk](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Aminrk+updated%3A2023-01-31..2023-02-02&type=Issues) | [@welcome](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Awelcome+updated%3A2023-01-31..2023-02-02&type=Issues) + +<!-- <END NEW CHANGELOG ENTRY> --> + +## 2.2.0 + +([Full Changelog](https://github.com/jupyter-server/jupyter_server/compare/v2.1.0...b6c1edb0b205f8d53f1a2e81abb997bfc693f144)) + +### Enhancements made + +- Only load enabled extension packages [#1180](https://github.com/jupyter-server/jupyter_server/pull/1180) ([@minrk](https://github.com/minrk)) +- Pass in a logger to get_metadata [#1176](https://github.com/jupyter-server/jupyter_server/pull/1176) ([@yuvipanda](https://github.com/yuvipanda)) + +### Bugs fixed + +- Don't assume that resources entries are relative [#1182](https://github.com/jupyter-server/jupyter_server/pull/1182) ([@ojarjur](https://github.com/ojarjur)) + +### Maintenance and upkeep improvements + +- Updates for client 8 [#1188](https://github.com/jupyter-server/jupyter_server/pull/1188) ([@blink1073](https://github.com/blink1073)) +- Use repr in logging for exception. 
[#1185](https://github.com/jupyter-server/jupyter_server/pull/1185) ([@Carreau](https://github.com/Carreau)) +- Update example npm deps [#1184](https://github.com/jupyter-server/jupyter_server/pull/1184) ([@blink1073](https://github.com/blink1073)) +- Fix docs and examples [#1183](https://github.com/jupyter-server/jupyter_server/pull/1183) ([@blink1073](https://github.com/blink1073)) +- Update jupyter client api docs links [#1179](https://github.com/jupyter-server/jupyter_server/pull/1179) ([@blink1073](https://github.com/blink1073)) + +### Contributors to this release + +([GitHub contributors page for this release](https://github.com/jupyter-server/jupyter_server/graphs/contributors?from=2023-01-13&to=2023-01-31&type=c)) + +[@blink1073](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Ablink1073+updated%3A2023-01-13..2023-01-31&type=Issues) | [@Carreau](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3ACarreau+updated%3A2023-01-13..2023-01-31&type=Issues) | [@codecov](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Acodecov+updated%3A2023-01-13..2023-01-31&type=Issues) | [@kevin-bates](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Akevin-bates+updated%3A2023-01-13..2023-01-31&type=Issues) | [@minrk](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Aminrk+updated%3A2023-01-13..2023-01-31&type=Issues) | [@ojarjur](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Aojarjur+updated%3A2023-01-13..2023-01-31&type=Issues) | [@welcome](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Awelcome+updated%3A2023-01-13.. 2023-01-31&type=Issues) | [@yuvipanda](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Ayuvipanda+updated%3A2023-01-13..2023-01-31&type=Issues) + ## 2.1.0 ([Full Changelog](https://github.com/jupyter-server/jupyter_server/compare/v2.0.7...34f509d8da1710039634bc16f5336570c4861bcd)) @@ -26,8 +71,6 @@ [@blink1073](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Ablink1073+updated%3A2023-01-12..2023-01-12&type=Issues) | [@codecov](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Acodecov+updated%3A2023-01-12..2023-01-12&type=Issues) | [@vidartf](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Avidartf+updated%3A2023-01-12..2023-01-12&type=Issues) -<!-- <END NEW CHANGELOG ENTRY> --> - ## 2.0.7 ([Full Changelog](https://github.com/jupyter-server/jupyter_server/compare/v2.0.6...5cce2afcbeeb44581e9b29ab27fef75a12d651ca)) diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/jupyter_server-2.1.0/PKG-INFO new/jupyter_server-2.2.1/PKG-INFO --- old/jupyter_server-2.1.0/PKG-INFO 2020-02-02 01:00:00.000000000 +0100 +++ new/jupyter_server-2.2.1/PKG-INFO 2020-02-02 01:00:00.000000000 +0100 @@ -1,6 +1,6 @@ Metadata-Version: 2.1 Name: jupyter_server -Version: 2.1.0 +Version: 2.2.1 Summary: The backendâi.e. core services, APIs, and REST endpointsâto Jupyter web applications. 
Project-URL: Homepage, https://jupyter-server.readthedocs.io Project-URL: Documentation, https://jupyter-server.readthedocs.io @@ -86,7 +86,7 @@ Classifier: Programming Language :: Python :: 3.10 Classifier: Programming Language :: Python :: 3.11 Requires-Python: >=3.8 -Requires-Dist: anyio<4,>=3.1.0 +Requires-Dist: anyio>=3.1.0 Requires-Dist: argon2-cffi Requires-Dist: jinja2 Requires-Dist: jupyter-client>=7.4.4 diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/jupyter_server-2.1.0/docs/source/developers/architecture.rst new/jupyter_server-2.2.1/docs/source/developers/architecture.rst --- old/jupyter_server-2.1.0/docs/source/developers/architecture.rst 2020-02-02 01:00:00.000000000 +0100 +++ new/jupyter_server-2.2.1/docs/source/developers/architecture.rst 2020-02-02 01:00:00.000000000 +0100 @@ -66,7 +66,7 @@ - **Kernel Manager** manages a single kernel for the Notebook. To know more about Kernel Manager, follow - `the Jupyter Client APIs documentation <https://jupyter-client.readthedocs.io/en/latest/api/manager.html#jupyter_client.KernelManager>`_. + `the Jupyter Client APIs documentation <https://jupyter-client.readthedocs.io/en/latest/api/jupyter_client.html#jupyter_client.manager.AsyncKernelManager>`_. - **Kernel Spec Manager** parses files with JSON specification for a kernels, and provides a list of available kernel configurations. To learn about @@ -96,17 +96,17 @@ #. **Mapping Kernel Manager** starts the kernel create process by using **Multi Kernel Manager** and **Kernel Manager**. You can learn more about **Multi Kernel Manager** in - `the Jupyter Client APIs <https://jupyter-client.readthedocs.io/en/latest/api/manager.html#multikernelmanager-controlling-multiple-kernels>`_. + `the Jupyter Client APIs <https://jupyter-client.readthedocs.io/en/latest/api/jupyter_client.html#jupyter_client.multikernelmanager.AsyncMultiKernelManager>`_. #. **Kernel Manager** uses the provisioner layer to launch a new kernel. #. **Kernel Provisioner** is responsible for launching kernels based on the kernel specification. If the kernel specification doesn't define a provisioner, - it uses `Local Provisioner <https://jupyter-client.readthedocs.io/en/latest/api/provisioners.html#jupyter_client.provisioning.local_provisioner.LocalProvisioner>`_ + it uses `Local Provisioner <https://jupyter-client.readthedocs.io/en/latest/api/jupyter_client.provisioning.html#jupyter_client.provisioning.local_provisioner.LocalProvisioner>`_ to launch the kernel. You can use - `Kernel Provisioner Base <https://jupyter-client.readthedocs.io/en/latest/api/provisioners.html#jupyter_client.provisioning.provisioner_base.KernelProvisionerBase>`_ + `Kernel Provisioner Base <https://jupyter-client.readthedocs.io/en/latest/api/jupyter_client.provisioning.html#jupyter_client.provisioning.provisioner_base.KernelProvisionerBase>`_ and - `Kernel Provisioner Factory <https://jupyter-client.readthedocs.io/en/latest/api/provisioners.html#jupyter_client.provisioning.factory.KernelProvisionerFactory>`_ + `Kernel Provisioner Factory <https://jupyter-client.readthedocs.io/en/latest/api/jupyter_client.provisioning.html#jupyter_client.provisioning.factory.KernelProvisionerFactory>`_ to create custom provisioners. #. **Kernel Spec Manager** gets the kernel specification from the JSON file. 
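
The architecture.rst hunk above only retargets documentation links, but the flow it documents can be sketched briefly: the multi kernel manager starts a kernel, the per-kernel manager resolves the kernel spec, and the actual launch is handed to the spec's provisioner (LocalProvisioner when none is configured). A minimal sketch, assuming jupyter_client >= 7 and an installed "python3" kernel spec; illustrative only, not part of this changeset:

    import asyncio

    from jupyter_client.multikernelmanager import AsyncMultiKernelManager

    async def main():
        # The multi kernel manager tracks many kernels; start_kernel creates a
        # per-kernel AsyncKernelManager, resolves the "python3" kernel spec and
        # delegates the launch to that spec's kernel provisioner.
        mkm = AsyncMultiKernelManager()
        kernel_id = await mkm.start_kernel(kernel_name="python3")
        km = mkm.get_kernel(kernel_id)
        # LocalProvisioner unless the spec's metadata selects another provisioner.
        print(kernel_id, type(km.provisioner).__name__)
        await mkm.shutdown_kernel(kernel_id)

    asyncio.run(main())
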
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/jupyter_server-2.1.0/examples/simple/package.json new/jupyter_server-2.2.1/examples/simple/package.json --- old/jupyter_server-2.1.0/examples/simple/package.json 2020-02-02 01:00:00.000000000 +0100 +++ new/jupyter_server-2.2.1/examples/simple/package.json 2020-02-02 01:00:00.000000000 +0100 @@ -9,10 +9,8 @@ }, "dependencies": {}, "devDependencies": { - "rifraf": "2.0.3", - "webpack": "~4.29.6", - "webpack-cli": "^3.3.0", - "whatwg-fetch": "~2.0.3", - "typescript": "3.6.4" + "webpack": "^5.72.0", + "webpack-cli": "^5.0.0", + "typescript": "~4.7.3" } } diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/jupyter_server-2.1.0/examples/simple/webpack.config.js new/jupyter_server-2.2.1/examples/simple/webpack.config.js --- old/jupyter_server-2.1.0/examples/simple/webpack.config.js 2020-02-02 01:00:00.000000000 +0100 +++ new/jupyter_server-2.2.1/examples/simple/webpack.config.js 2020-02-02 01:00:00.000000000 +0100 @@ -3,6 +3,7 @@ output: { path: require("path").join(__dirname, "simple_ext1", "static"), filename: "bundle.js", + hashFunction: 'sha256' }, mode: "development", }; diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/jupyter_server-2.1.0/jupyter_server/_version.py new/jupyter_server-2.2.1/jupyter_server/_version.py --- old/jupyter_server-2.1.0/jupyter_server/_version.py 2020-02-02 01:00:00.000000000 +0100 +++ new/jupyter_server-2.2.1/jupyter_server/_version.py 2020-02-02 01:00:00.000000000 +0100 @@ -6,7 +6,7 @@ from typing import List # Version string must appear intact for automatic versioning -__version__ = "2.1.0" +__version__ = "2.2.1" # Build up version_info tuple for backwards compatibility pattern = r"(?P<major>\d+).(?P<minor>\d+).(?P<patch>\d+)(?P<rest>.*)" diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/jupyter_server-2.1.0/jupyter_server/auth/__main__.py new/jupyter_server-2.2.1/jupyter_server/auth/__main__.py --- old/jupyter_server-2.1.0/jupyter_server/auth/__main__.py 2020-02-02 01:00:00.000000000 +0100 +++ new/jupyter_server-2.2.1/jupyter_server/auth/__main__.py 2020-02-02 01:00:00.000000000 +0100 @@ -20,7 +20,7 @@ password_repeat = getpass("" if args.quiet else "Repeat password: ") if password1 != password_repeat: warnings.warn("Passwords do not match, try again") - elif len(password1) < 4: + elif len(password1) < 4: # noqa warnings.warn("Please provide at least 4 characters") else: password = password1 diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/jupyter_server-2.1.0/jupyter_server/auth/identity.py new/jupyter_server-2.2.1/jupyter_server/auth/identity.py --- old/jupyter_server-2.1.0/jupyter_server/auth/identity.py 2020-02-02 01:00:00.000000000 +0100 +++ new/jupyter_server-2.2.1/jupyter_server/auth/identity.py 2020-02-02 01:00:00.000000000 +0100 @@ -96,9 +96,8 @@ return User(username=got_user) elif isinstance(got_user, dict): kwargs = {} - if "username" not in got_user: - if "name" in got_user: - kwargs["username"] = got_user["name"] + if "username" not in got_user and "name" in got_user: + kwargs["username"] = got_user["name"] for field in User.__dataclass_fields__: if field in got_user: kwargs[field] = got_user[field] @@ -367,7 +366,7 @@ which values were used for a given cookie). 
""" name = escape.native_str(name) - expires = datetime.datetime.utcnow() - datetime.timedelta(days=365) + expires = datetime.datetime.now(tz=datetime.timezone.utc) - datetime.timedelta(days=365) morsel: Morsel = Morsel() morsel.set(name, "", '""') diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/jupyter_server-2.1.0/jupyter_server/base/handlers.py new/jupyter_server-2.2.1/jupyter_server/base/handlers.py --- old/jupyter_server-2.1.0/jupyter_server/base/handlers.py 2020-02-02 01:00:00.000000000 +0100 +++ new/jupyter_server-2.2.1/jupyter_server/base/handlers.py 2020-02-02 01:00:00.000000000 +0100 @@ -183,7 +183,7 @@ def logged_in(self): """Is a user currently logged in?""" user = self.current_user - return user and not user == "anonymous" + return user and user != "anonymous" @property def login_handler(self): diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/jupyter_server-2.1.0/jupyter_server/config_manager.py new/jupyter_server-2.2.1/jupyter_server/config_manager.py --- old/jupyter_server-2.1.0/jupyter_server/config_manager.py 2020-02-02 01:00:00.000000000 +0100 +++ new/jupyter_server-2.2.1/jupyter_server/config_manager.py 2020-02-02 01:00:00.000000000 +0100 @@ -116,8 +116,7 @@ # Generate the JSON up front, since it could raise an exception, # in order to avoid writing half-finished corrupted data to disk. json_content = json.dumps(data, indent=2) - f = open(filename, "w", encoding="utf-8") - with f: + with open(filename, "w", encoding="utf-8") as f: f.write(json_content) def update(self, section_name, new_data): diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/jupyter_server-2.1.0/jupyter_server/extension/manager.py new/jupyter_server-2.2.1/jupyter_server/extension/manager.py --- old/jupyter_server-2.1.0/jupyter_server/extension/manager.py 2020-02-02 01:00:00.000000000 +0100 +++ new/jupyter_server-2.2.1/jupyter_server/extension/manager.py 2020-02-02 01:00:00.000000000 +0100 @@ -2,7 +2,7 @@ import importlib from tornado.gen import multi -from traitlets import Any, Bool, Dict, HasTraits, Instance, Unicode, default, observe +from traitlets import Any, Bool, Dict, HasTraits, Instance, List, Unicode, default, observe from traitlets import validate as validate_trait from traitlets.config import LoggingConfigurable @@ -147,7 +147,7 @@ return loader(serverapp) -class ExtensionPackage(HasTraits): +class ExtensionPackage(LoggingConfigurable): """An API for interfacing with a Jupyter Server extension package. Usage: @@ -157,22 +157,39 @@ """ name = Unicode(help="Name of the an importable Python package.") - enabled = Bool(False).tag(config=True) + enabled = Bool(False, help="Whether the extension package is enabled.") - def __init__(self, *args, **kwargs): + _linked_points = Dict() + extension_points = Dict() + module = Any(allow_none=True, help="The module for this extension package. None if not enabled") + metadata = List(Dict(), help="Extension metadata loaded from the extension package.") + version = Unicode( + help=""" + The version of this extension package, if it can be found. + Otherwise, an empty string. + """, + ) + + @default("version") + def _load_version(self): + if not self.enabled: + return "" + return getattr(self.module, "__version__", "") + + def __init__(self, **kwargs): """Initialize an extension package.""" - # Store extension points that have been linked. 
- self._linked_points = {} - super().__init__(*args, **kwargs) - - _linked_points: dict = {} - - @validate_trait("name") - def _validate_name(self, proposed): - name = proposed["value"] - self._extension_points = {} + super().__init__(**kwargs) + if self.enabled: + self._load_metadata() + + def _load_metadata(self): + """Import package and load metadata + + Only used if extension package is enabled + """ + name = self.name try: - self._module, self._metadata = get_metadata(name) + self.module, self.metadata = get_metadata(name, logger=self.log) except ImportError as e: msg = ( f"The module '{name}' could not be found ({e}). Are you " @@ -180,37 +197,14 @@ ) raise ExtensionModuleNotFound(msg) from None # Create extension point interfaces for each extension path. - for m in self._metadata: + for m in self.metadata: point = ExtensionPoint(metadata=m) - self._extension_points[point.name] = point + self.extension_points[point.name] = point return name - @property - def module(self): - """Extension metadata loaded from the extension package.""" - return self._module - - @property - def version(self) -> str: - """Get the version of this package, if it's given. Otherwise, return an empty string""" - return getattr(self._module, "__version__", "") - - @property - def metadata(self): - """Extension metadata loaded from the extension package.""" - return self._metadata - - @property - def extension_points(self): - """A dictionary of extension points.""" - return self._extension_points - def validate(self): """Validate all extension points in this package.""" - for extension in self.extension_points.values(): - if not extension.validate(): - return False - return True + return all(extension.validate() for extension in self.extension_points.values()) def link_point(self, point_name, serverapp): """Link an extension point.""" @@ -363,8 +357,9 @@ except Exception as e: if self.serverapp and self.serverapp.reraise_server_extension_failures: raise - self.log.warning("%s | extension failed loading with message: %s", name, e) - self.log.exception("%s | stack trace", name) + self.log.warning( + "%s | extension failed loading with message: %r", name, e, exc_info=True + ) else: self.log.info("%s | extension was successfully loaded.", name) @@ -381,7 +376,7 @@ """ # Sort the extension names to enforce deterministic linking # order. - for name in self.sorted_extensions.keys(): + for name in self.sorted_extensions: self.link_extension(name) def load_all_extensions(self): @@ -390,7 +385,7 @@ """ # Sort the extension names to enforce deterministic loading # order. 
- for name in self.sorted_extensions.keys(): + for name in self.sorted_extensions: self.load_extension(name) async def stop_all_extensions(self): diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/jupyter_server-2.1.0/jupyter_server/gateway/gateway_client.py new/jupyter_server-2.2.1/jupyter_server/gateway/gateway_client.py --- old/jupyter_server-2.1.0/jupyter_server/gateway/gateway_client.py 2020-02-02 01:00:00.000000000 +0100 +++ new/jupyter_server-2.2.1/jupyter_server/gateway/gateway_client.py 2020-02-02 01:00:00.000000000 +0100 @@ -7,7 +7,7 @@ import os import typing as ty from abc import ABC, ABCMeta, abstractmethod -from datetime import datetime +from datetime import datetime, timezone from email.utils import parsedate_to_datetime from http.cookies import SimpleCookie from socket import gaierror @@ -96,9 +96,8 @@ def _url_validate(self, proposal): value = proposal["value"] # Ensure value, if present, starts with 'http' - if value is not None and len(value) > 0: - if not str(value).lower().startswith("http"): - raise TraitError("GatewayClient url must start with 'http': '%r'" % value) + if value is not None and len(value) > 0 and not str(value).lower().startswith("http"): + raise TraitError("GatewayClient url must start with 'http': '%r'" % value) return value ws_url = Unicode( @@ -115,18 +114,16 @@ @default("ws_url") def _ws_url_default(self): default_value = os.environ.get(self.ws_url_env) - if default_value is None: - if self.gateway_enabled: - default_value = self.url.lower().replace("http", "ws") + if default_value is None and self.gateway_enabled: + default_value = self.url.lower().replace("http", "ws") return default_value @validate("ws_url") def _ws_url_validate(self, proposal): value = proposal["value"] # Ensure value, if present, starts with 'ws' - if value is not None and len(value) > 0: - if not str(value).lower().startswith("ws"): - raise TraitError("GatewayClient ws_url must start with 'ws': '%r'" % value) + if value is not None and len(value) > 0 and not str(value).lower().startswith("ws"): + raise TraitError("GatewayClient ws_url must start with 'ws': '%r'" % value) return value kernels_endpoint_default_value = "/api/kernels" @@ -599,7 +596,7 @@ if not self.accept_cookies: return - store_time = datetime.now() + store_time = datetime.now(tz=timezone.utc) for key, item in cookie.items(): # Convert "expires" arg into "max-age" to facilitate expiration management. # As "max-age" has precedence, ignore "expires" when "max-age" exists. 
@@ -611,7 +608,7 @@ def _clear_expired_cookies(self) -> None: """Clear expired cookies.""" - check_time = datetime.now() + check_time = datetime.now(tz=timezone.utc) expired_keys = [] for key, (morsel, store_time) in self._cookies.items(): diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/jupyter_server-2.1.0/jupyter_server/gateway/managers.py new/jupyter_server-2.2.1/jupyter_server/gateway/managers.py --- old/jupyter_server-2.1.0/jupyter_server/gateway/managers.py 2020-02-02 01:00:00.000000000 +0100 +++ new/jupyter_server-2.2.1/jupyter_server/gateway/managers.py 2020-02-02 01:00:00.000000000 +0100 @@ -72,9 +72,8 @@ """ self.log.info(f"Request start kernel: kernel_id={kernel_id}, path='{path}'") - if kernel_id is None: - if path is not None: - kwargs["cwd"] = self.cwd_for_path(path) + if kernel_id is None and path is not None: + kwargs["cwd"] = self.cwd_for_path(path) km = self.kernel_manager_factory(parent=self, log=self.log) await km.start_kernel(kernel_id=kernel_id, **kwargs) @@ -144,7 +143,7 @@ The purpose of this shutdown is to restart the kernel (True) """ km = self.get_kernel(kernel_id) - await km.shutdown_kernel(now=now, restart=restart) + await ensure_async(km.shutdown_kernel(now=now, restart=restart)) self.remove_kernel(kernel_id) async def restart_kernel(self, kernel_id, now=False, **kwargs): @@ -156,7 +155,7 @@ The id of the kernel to restart. """ km = self.get_kernel(kernel_id) - await km.restart_kernel(now=now, **kwargs) + await ensure_async(km.restart_kernel(now=now, **kwargs)) async def interrupt_kernel(self, kernel_id, **kwargs): """Interrupt a kernel by its kernel uuid. @@ -167,14 +166,14 @@ The id of the kernel to interrupt. """ km = self.get_kernel(kernel_id) - await km.interrupt_kernel() + await ensure_async(km.interrupt_kernel()) async def shutdown_all(self, now=False): """Shutdown all kernels.""" kids = list(self._kernels) for kernel_id in kids: km = self.get_kernel(kernel_id) - await km.shutdown_kernel(now=now) + await ensure_async(km.shutdown_kernel(now=now)) self.remove_kernel(kernel_id) async def cull_kernels(self): @@ -218,13 +217,16 @@ for resource_name in resources: original_path = resources[resource_name] split_eg_base_url = str.rsplit(original_path, sep="/kernelspecs/", maxsplit=1) - new_path = url_path_join(self.parent.base_url, "kernelspecs", split_eg_base_url[1]) - kernel_specs["kernelspecs"][kernel_name]["resources"][resource_name] = new_path - if original_path != new_path: - self.log.debug( - f"Replaced original kernel resource path {original_path} with new " - f"path {kernel_specs['kernelspecs'][kernel_name]['resources'][resource_name]}" + if len(split_eg_base_url) > 1: + new_path = url_path_join( + self.parent.base_url, "kernelspecs", split_eg_base_url[1] ) + kernel_specs["kernelspecs"][kernel_name]["resources"][resource_name] = new_path + if original_path != new_path: + self.log.debug( + f"Replaced original kernel resource path {original_path} with new " + f"path {kernel_specs['kernelspecs'][kernel_name]['resources'][resource_name]}" + ) return kernel_specs def _get_kernelspecs_endpoint_url(self, kernel_name=None): @@ -280,7 +282,7 @@ try: response = await gateway_request(kernel_spec_url, method="GET") except web.HTTPError as error: - if error.status_code == 404: + if error.status_code == 404: # noqa[PLR2004] # Convert not found to KeyError since that's what the Notebook handler expects # message is not used, but might as well make it useful for troubleshooting msg = f"kernelspec {kernel_name} not found on 
Gateway server at: {GatewayClient.instance().url}" @@ -309,7 +311,7 @@ try: response = await gateway_request(kernel_spec_resource_url, method="GET") except web.HTTPError as error: - if error.status_code == 404: + if error.status_code == 404: # noqa[PLR2004] kernel_spec_resource = None else: raise @@ -404,7 +406,7 @@ response = await gateway_request(self.kernel_url, method="GET") except web.HTTPError as error: - if error.status_code == 404: + if error.status_code == 404: # noqa[PLR2004] self.log.warning("Kernel not found at: %s" % self.kernel_url) model = None else: @@ -492,7 +494,7 @@ response = await gateway_request(self.kernel_url, method="DELETE") self.log.debug("Shutdown kernel response: %d %s", response.code, response.reason) except web.HTTPError as error: - if error.status_code == 404: + if error.status_code == 404: # noqa[PLR2004] self.log.debug("Shutdown kernel response: kernel not found (ignored)") else: raise diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/jupyter_server-2.1.0/jupyter_server/i18n/__init__.py new/jupyter_server-2.2.1/jupyter_server/i18n/__init__.py --- old/jupyter_server-2.1.0/jupyter_server/i18n/__init__.py 2020-02-02 01:00:00.000000000 +0100 +++ new/jupyter_server-2.2.1/jupyter_server/i18n/__init__.py 2020-02-02 01:00:00.000000000 +0100 @@ -42,10 +42,7 @@ lang, qvalue = m.group("lang", "qvalue") # Browser header format is zh-CN, gettext uses zh_CN lang = lang.replace("-", "_") - if qvalue is None: - qvalue = 1.0 - else: - qvalue = float(qvalue) + qvalue = 1.0 if qvalue is None else float(qvalue) if qvalue == 0: continue # 0 means not accepted by_q[qvalue].append(lang) @@ -59,7 +56,7 @@ def load(language, domain="nbjs"): """Load translations from an nbjs.json file""" try: - f = open(pjoin(I18N_DIR, language, "LC_MESSAGES", "nbjs.json"), encoding="utf-8") + f = open(pjoin(I18N_DIR, language, "LC_MESSAGES", "nbjs.json"), encoding="utf-8") # noqa except OSError as e: if e.errno != errno.ENOENT: raise diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/jupyter_server-2.1.0/jupyter_server/log.py new/jupyter_server-2.2.1/jupyter_server/log.py --- old/jupyter_server-2.1.0/jupyter_server/log.py 2020-02-02 01:00:00.000000000 +0100 +++ new/jupyter_server-2.2.1/jupyter_server/log.py 2020-02-02 01:00:00.000000000 +0100 @@ -28,12 +28,12 @@ except AttributeError: logger = access_log - if status < 300 or status == 304: + if status < 300 or status == 304: # noqa[PLR2004] # Successes (or 304 FOUND) are debug-level log_method = logger.debug - elif status < 400: + elif status < 400: # noqa[PLR2004] log_method = logger.info - elif status < 500: + elif status < 500: # noqa[PLR2004] log_method = logger.warning else: log_method = logger.error @@ -53,21 +53,15 @@ user = handler.current_user except Exception: user = None - if user: - if isinstance(user, User): - username = user.username - else: - username = "unknown" - else: - username = "" + username = (user.username if isinstance(user, User) else "unknown") if user else "" ns["username"] = username msg = "{status} {method} {uri} ({username}@{ip}) {request_time:.2f}ms" - if status >= 400: + if status >= 400: # noqa[PLR2004] # log bad referers ns["referer"] = request.headers.get("Referer", "None") msg = msg + " referer={referer}" - if status >= 500 and status != 502: + if status >= 500 and status != 502: # noqa[PLR2004] # Log a subset of the headers if it caused an error. 
headers = {} for header in ["Host", "Accept", "Referer", "User-Agent"]: diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/jupyter_server-2.1.0/jupyter_server/nbconvert/handlers.py new/jupyter_server-2.2.1/jupyter_server/nbconvert/handlers.py --- old/jupyter_server-2.1.0/jupyter_server/nbconvert/handlers.py 2020-02-02 01:00:00.000000000 +0100 +++ new/jupyter_server-2.2.1/jupyter_server/nbconvert/handlers.py 2020-02-02 01:00:00.000000000 +0100 @@ -135,7 +135,7 @@ lambda: exporter.from_notebook_node(nb, resources=resource_dict) ) except Exception as e: - self.log.exception("nbconvert failed: %s", e) + self.log.exception("nbconvert failed: %r", e) raise web.HTTPError(500, "nbconvert failed: %s" % e) from e if respond_zip(self, name, output, resources): diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/jupyter_server-2.1.0/jupyter_server/serverapp.py new/jupyter_server-2.2.1/jupyter_server/serverapp.py --- old/jupyter_server-2.1.0/jupyter_server/serverapp.py 2020-02-02 01:00:00.000000000 +0100 +++ new/jupyter_server-2.2.1/jupyter_server/serverapp.py 2020-02-02 01:00:00.000000000 +0100 @@ -308,7 +308,7 @@ jenv_opt: dict = {"autoescape": True} jenv_opt.update(jinja_env_options if jinja_env_options else {}) - env = Environment( + env = Environment( # noqa[S701] loader=FileSystemLoader(template_path), extensions=["jinja2.ext.i18n"], **jenv_opt ) sys_info = get_sys_info() @@ -326,7 +326,8 @@ version_hash = "" else: # reset the cache on server restart - version_hash = datetime.datetime.now().strftime("%Y%m%d%H%M%S") + utc = datetime.timezone.utc + version_hash = datetime.datetime.now(tz=utc).strftime("%Y%m%d%H%M%S") now = utcnow() @@ -465,7 +466,7 @@ new_handlers = [] for handler in handlers: pattern = url_path_join(settings["base_url"], handler[0]) - new_handler = tuple([pattern] + list(handler[1:])) + new_handler = (pattern, *list(handler[1:])) new_handlers.append(new_handler) # add 404 on the end, which will catch everything that falls through new_handlers.append((r"(.*)", Template404)) @@ -1427,7 +1428,7 @@ @property def static_file_path(self): """return extra paths + the default location""" - return self.extra_static_paths + [DEFAULT_STATIC_FILES_PATH] + return [*self.extra_static_paths, DEFAULT_STATIC_FILES_PATH] static_custom_path = List(Unicode(), help=_i18n("""Path to search for custom.js, css""")) @@ -1622,10 +1623,7 @@ @observe("pylab") def _update_pylab(self, change): """when --pylab is specified, display a warning and exit""" - if change["new"] != "warn": - backend = " %s" % change["new"] - else: - backend = "" + backend = " %s" % change["new"] if change["new"] != "warn" else "" self.log.error( _i18n("Support for specifying --pylab on the command line has been removed.") ) @@ -2107,21 +2105,16 @@ else: ip = f"[{self.ip}]" if ":" in self.ip else self.ip netloc = f"{ip}:{self.port}" - if self.certfile: - scheme = "https" - else: - scheme = "http" + scheme = "https" if self.certfile else "http" if not path: path = self.default_url query = None - if include_token: - if self.identity_provider.token: # Don't log full token if it came from config - token = ( - self.identity_provider.token - if self.identity_provider.token_generated - else "..." - ) - query = urllib.parse.urlencode({"token": token}) + # Don't log full token if it came from config + if include_token and self.identity_provider.token: + token = ( + self.identity_provider.token if self.identity_provider.token_generated else "..." 
+ ) + query = urllib.parse.urlencode({"token": token}) # Build the URL Parts to dump. urlparts = urllib.parse.ParseResult( scheme=scheme, netloc=netloc, path=path, query=query or "", params="", fragment="" @@ -2154,7 +2147,7 @@ """Human readable string with URLs for interacting with the running Jupyter Server """ - url = self.public_url + "\n or " + self.local_url + url = self.public_url + "\n " + self.local_url return url @property @@ -2618,7 +2611,7 @@ with secure_write(self.info_file) as f: json.dump(self.server_info(), f, indent=2, sort_keys=True) except OSError as e: - self.log.error(_i18n("Failed to write server-info to %s: %s"), self.info_file, e) + self.log.error(_i18n("Failed to write server-info to %s: %r"), self.info_file, e) def remove_server_info_file(self): """Remove the jpserver-<pid>.json file created for this server. @@ -2760,7 +2753,7 @@ try: browser = webbrowser.get(self.browser or None) except webbrowser.Error as e: - self.log.warning(_i18n("No web browser found: %s.") % e) + self.log.warning(_i18n("No web browser found: %r.") % e) browser = None if not browser: diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/jupyter_server-2.1.0/jupyter_server/services/contents/fileio.py new/jupyter_server-2.2.1/jupyter_server/services/contents/fileio.py --- old/jupyter_server-2.1.0/jupyter_server/services/contents/fileio.py 2020-02-02 01:00:00.000000000 +0100 +++ new/jupyter_server-2.2.1/jupyter_server/services/contents/fileio.py 2020-02-02 01:00:00.000000000 +0100 @@ -104,9 +104,9 @@ if text: # Make sure that text files have Unix linefeeds by default kwargs.setdefault("newline", "\n") - fileobj = open(path, "w", encoding=encoding, **kwargs) + fileobj = open(path, "w", encoding=encoding, **kwargs) # noqa else: - fileobj = open(path, "wb", **kwargs) + fileobj = open(path, "wb", **kwargs) # noqa try: yield fileobj @@ -152,9 +152,9 @@ if text: # Make sure that text files have Unix linefeeds by default kwargs.setdefault("newline", "\n") - fileobj = open(path, "w", encoding=encoding, **kwargs) + fileobj = open(path, "w", encoding=encoding, **kwargs) # noqa else: - fileobj = open(path, "wb", **kwargs) + fileobj = open(path, "wb", **kwargs) # noqa try: yield fileobj @@ -194,9 +194,8 @@ @contextmanager def open(self, os_path, *args, **kwargs): """wrapper around io.open that turns permission errors into 403""" - with self.perm_to_403(os_path): - with open(os_path, *args, **kwargs) as f: - yield f + with self.perm_to_403(os_path), open(os_path, *args, **kwargs) as f: + yield f @contextmanager def atomic_writing(self, os_path, *args, **kwargs): diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/jupyter_server-2.1.0/jupyter_server/services/contents/filemanager.py new/jupyter_server-2.2.1/jupyter_server/services/contents/filemanager.py --- old/jupyter_server-2.1.0/jupyter_server/services/contents/filemanager.py 2020-02-02 01:00:00.000000000 +0100 +++ new/jupyter_server-2.2.1/jupyter_server/services/contents/filemanager.py 2020-02-02 01:00:00.000000000 +0100 @@ -287,7 +287,7 @@ try: os_path = os.path.join(os_dir, name) except UnicodeDecodeError as e: - self.log.warning("failed to decode filename '%s': %s", name, e) + self.log.warning("failed to decode filename '%s': %r", name, e) continue try: @@ -297,7 +297,7 @@ if e.errno == errno.ENOENT: self.log.warning("%s doesn't exist", os_path) elif e.errno != errno.EACCES: # Don't provide clues about protected files - self.log.warning("Error stat-ing %s: %s", os_path, e) 
+ self.log.warning("Error stat-ing %s: %r", os_path, e) continue if ( @@ -309,9 +309,10 @@ continue try: - if self.should_list(name): - if self.allow_hidden or not is_file_hidden(os_path, stat_res=st): - contents.append(self.get(path=f"{path}/{name}", content=False)) + if self.should_list(name) and ( + self.allow_hidden or not is_file_hidden(os_path, stat_res=st) + ): + contents.append(self.get(path=f"{path}/{name}", content=False)) except OSError as e: # ELOOP: recursive symlink, also don't show failure due to permissions if e.errno not in [errno.ELOOP, errno.EACCES]: @@ -596,10 +597,7 @@ """Return the initial API path of a kernel associated with a given notebook""" if self.dir_exists(path): return path - if "/" in path: - parent_dir = path.rsplit("/", 1)[0] - else: - parent_dir = "" + parent_dir = path.rsplit("/", 1)[0] if "/" in path else "" return parent_dir @@ -636,7 +634,7 @@ try: os_path = os.path.join(os_dir, name) except UnicodeDecodeError as e: - self.log.warning("failed to decode filename '%s': %s", name, e) + self.log.warning("failed to decode filename '%s': %r", name, e) continue try: @@ -646,7 +644,7 @@ if e.errno == errno.ENOENT: self.log.warning("%s doesn't exist", os_path) elif e.errno != errno.EACCES: # Don't provide clues about protected files - self.log.warning("Error stat-ing %s: %s", os_path, e) + self.log.warning("Error stat-ing %s: %r", os_path, e) continue if ( @@ -658,9 +656,10 @@ continue try: - if self.should_list(name): - if self.allow_hidden or not is_file_hidden(os_path, stat_res=st): - contents.append(await self.get(path=f"{path}/{name}", content=False)) + if self.should_list(name) and ( + self.allow_hidden or not is_file_hidden(os_path, stat_res=st) + ): + contents.append(await self.get(path=f"{path}/{name}", content=False)) except OSError as e: # ELOOP: recursive symlink, also don't show failure due to permissions if e.errno not in [errno.ELOOP, errno.EACCES]: @@ -954,8 +953,5 @@ """Return the initial API path of a kernel associated with a given notebook""" if await self.dir_exists(path): return path - if "/" in path: - parent_dir = path.rsplit("/", 1)[0] - else: - parent_dir = "" + parent_dir = path.rsplit("/", 1)[0] if "/" in path else "" return parent_dir diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/jupyter_server-2.1.0/jupyter_server/services/contents/manager.py new/jupyter_server-2.2.1/jupyter_server/services/contents/manager.py --- old/jupyter_server-2.1.0/jupyter_server/services/contents/manager.py 2020-02-02 01:00:00.000000000 +0100 +++ new/jupyter_server-2.2.1/jupyter_server/services/contents/manager.py 2020-02-02 01:00:00.000000000 +0100 @@ -528,10 +528,7 @@ suffix = dot + ext for i in itertools.count(): - if i: - insert_i = f"{insert}{i}" - else: - insert_i = "" + insert_i = f"{insert}{i}" if i else "" name = "{basename}{insert}{suffix}".format( basename=basename, insert=insert_i, suffix=suffix ) @@ -920,10 +917,7 @@ suffix = dot + ext for i in itertools.count(): - if i: - insert_i = f"{insert}{i}" - else: - insert_i = "" + insert_i = f"{insert}{i}" if i else "" name = "{basename}{insert}{suffix}".format( basename=basename, insert=insert_i, suffix=suffix ) diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/jupyter_server-2.1.0/jupyter_server/services/kernels/connection/base.py new/jupyter_server-2.2.1/jupyter_server/services/kernels/connection/base.py --- old/jupyter_server-2.1.0/jupyter_server/services/kernels/connection/base.py 2020-02-02 
01:00:00.000000000 +0100 +++ new/jupyter_server-2.2.1/jupyter_server/services/kernels/connection/base.py 2020-02-02 01:00:00.000000000 +0100 @@ -95,7 +95,7 @@ offsets.append(len(msg) + offsets[-1]) offset_number = len(offsets).to_bytes(8, byteorder="little") offsets = [offset.to_bytes(8, byteorder="little") for offset in offsets] - bin_msg = b"".join([offset_number] + offsets + [channel] + msg_list) + bin_msg = b"".join([offset_number, *offsets] + [channel] + msg_list) return bin_msg diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/jupyter_server-2.1.0/jupyter_server/services/kernels/connection/channels.py new/jupyter_server-2.2.1/jupyter_server/services/kernels/connection/channels.py --- old/jupyter_server-2.1.0/jupyter_server/services/kernels/connection/channels.py 2020-02-02 01:00:00.000000000 +0100 +++ new/jupyter_server-2.2.1/jupyter_server/services/kernels/connection/channels.py 2020-02-02 01:00:00.000000000 +0100 @@ -692,10 +692,7 @@ # Increment the bytes and message count self._iopub_window_msg_count += 1 - if msg_type == "stream": - byte_count = sum(len(x) for x in msg_list) - else: - byte_count = 0 + byte_count = sum(len(x) for x in msg_list) if msg_type == "stream" else 0 self._iopub_window_byte_count += byte_count # Queue a removal of the byte and message count for a time in the diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/jupyter_server-2.1.0/jupyter_server/services/kernels/kernelmanager.py new/jupyter_server-2.2.1/jupyter_server/services/kernels/kernelmanager.py --- old/jupyter_server-2.1.0/jupyter_server/services/kernels/kernelmanager.py 2020-02-02 01:00:00.000000000 +0100 +++ new/jupyter_server-2.2.1/jupyter_server/services/kernels/kernelmanager.py 2020-02-02 01:00:00.000000000 +0100 @@ -256,7 +256,7 @@ # see https://github.com/jupyter-server/jupyter_server/issues/1165 # this assignment is technically incorrect, but might need a change of API # in jupyter_client. - start_kernel = _async_start_kernel + start_kernel = _async_start_kernel # type:ignore[assignment] async def _finish_kernel_start(self, kernel_id): """Handle a kernel that finishes starting.""" @@ -581,29 +581,30 @@ Regardless of that value, set flag that we've been here. 
""" - if not self._initialized_culler and self.cull_idle_timeout > 0: - if self._culler_callback is None: - _ = IOLoop.current() - if self.cull_interval <= 0: # handle case where user set invalid value - self.log.warning( - "Invalid value for 'cull_interval' detected (%s) - using default value (%s).", - self.cull_interval, - self.cull_interval_default, - ) - self.cull_interval = self.cull_interval_default - self._culler_callback = PeriodicCallback( - self.cull_kernels, 1000 * self.cull_interval - ) - self.log.info( - "Culling kernels with idle durations > %s seconds at %s second intervals ...", - self.cull_idle_timeout, + if ( + not self._initialized_culler + and self.cull_idle_timeout > 0 + and self._culler_callback is None + ): + _ = IOLoop.current() + if self.cull_interval <= 0: # handle case where user set invalid value + self.log.warning( + "Invalid value for 'cull_interval' detected (%s) - using default value (%s).", self.cull_interval, + self.cull_interval_default, ) - if self.cull_busy: - self.log.info("Culling kernels even if busy") - if self.cull_connected: - self.log.info("Culling kernels even with connected clients") - self._culler_callback.start() + self.cull_interval = self.cull_interval_default + self._culler_callback = PeriodicCallback(self.cull_kernels, 1000 * self.cull_interval) + self.log.info( + "Culling kernels with idle durations > %s seconds at %s second intervals ...", + self.cull_idle_timeout, + self.cull_interval, + ) + if self.cull_busy: + self.log.info("Culling kernels even if busy") + if self.cull_connected: + self.log.info("Culling kernels even with connected clients") + self._culler_callback.start() self._initialized_culler = True diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/jupyter_server-2.1.0/jupyter_server/services/sessions/sessionmanager.py new/jupyter_server-2.2.1/jupyter_server/services/sessions/sessionmanager.py --- old/jupyter_server-2.1.0/jupyter_server/services/sessions/sessionmanager.py 2020-02-02 01:00:00.000000000 +0100 +++ new/jupyter_server-2.2.1/jupyter_server/services/sessions/sessionmanager.py 2020-02-02 01:00:00.000000000 +0100 @@ -189,7 +189,7 @@ with open(value, "rb") as f: header = f.read(100) - if not header.startswith(b"SQLite format 3") and not header == b"": + if not header.startswith(b"SQLite format 3") and header != b"": # noqa msg = "The given file is not an SQLite database file." raise TraitError(msg) return value @@ -272,11 +272,11 @@ ) -> Dict[str, Any]: """Creates a session and returns its model + Parameters + ---------- name: ModelName(str) Usually the model name, like the filename associated with current kernel. - - """ session_id = self.new_session_id() record = KernelSessionRecord(session_id=session_id) @@ -300,13 +300,13 @@ ) -> Dict[str, str]: """Return the environment variables that need to be set in the kernel + Parameters + ---------- path : str the url path for the given session. name: ModelName(str), optional Here the name is likely to be the name of the associated file with the current kernel at startup time. - - """ if name is not None: cwd = self.kernel_manager.cwd_for_path(path) @@ -324,6 +324,8 @@ ) -> str: """Start a new kernel for a given session. + Parameters + ---------- session_id : str uuid for the session; this method must be given a session_id path : str @@ -335,7 +337,6 @@ the type of the session kernel_name : str the name of the kernel specification to use. The default kernel name will be used if not provided. 
- """ # allow contents manager to specify kernels cwd kernel_path = await ensure_async(self.contents_manager.get_kernel_path(path=path)) @@ -403,7 +404,7 @@ raise TypeError(msg) conditions = [] - for column in kwargs.keys(): + for column in kwargs: if column not in self._columns: msg = f"No such column: {column}" raise TypeError(msg) @@ -453,12 +454,12 @@ return sets = [] - for column in kwargs.keys(): + for column in kwargs: if column not in self._columns: raise TypeError("No such column: %r" % column) sets.append("%s=?" % column) query = "UPDATE session SET %s WHERE session_id=?" % (", ".join(sets)) - self.cursor.execute(query, list(kwargs.values()) + [session_id]) + self.cursor.execute(query, [*list(kwargs.values()), session_id]) async def kernel_culled(self, kernel_id: str) -> bool: """Checks if the kernel is still considered alive and returns true if its not found.""" diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/jupyter_server-2.1.0/jupyter_server/traittypes.py new/jupyter_server-2.2.1/jupyter_server/traittypes.py --- old/jupyter_server-2.1.0/jupyter_server/traittypes.py 2020-02-02 01:00:00.000000000 +0100 +++ new/jupyter_server-2.2.1/jupyter_server/traittypes.py 2020-02-02 01:00:00.000000000 +0100 @@ -68,10 +68,11 @@ try: value = self._resolve_string(value) except ImportError as e: - raise TraitError( - "The '%s' trait of %s instance must be a type, but " - "%r could not be imported" % (self.name, obj, value) - ) from e + emsg = ( + "The '{}' trait of {} instance must be a type, but " + "{!r} could not be imported".format(self.name, obj, value) + ) + raise TraitError(emsg) from e try: if self.subclass_from_klasses(value): return value @@ -88,7 +89,7 @@ klass = klass.__module__ + "." + klass.__name__ result += f"{klass} or " # Strip the last "or" - result = result.strip(" or ") # noqa B005 + result = result.strip(" or ") # noqa if self.allow_none: return result + " or None" return result @@ -199,7 +200,7 @@ else: result += describe("a", klass) result += " or " - result = result.strip(" or ") # noqa B005 + result = result.strip(" or ") # noqa if self.allow_none: result += " or None" return result diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/jupyter_server-2.1.0/jupyter_server/utils.py new/jupyter_server-2.2.1/jupyter_server/utils.py --- old/jupyter_server-2.1.0/jupyter_server/utils.py 2020-02-02 01:00:00.000000000 +0100 +++ new/jupyter_server-2.2.1/jupyter_server/utils.py 2020-02-02 01:00:00.000000000 +0100 @@ -412,7 +412,7 @@ """ parts = name.rsplit(".", 1) - if len(parts) == 2: + if len(parts) == 2: # noqa # called with 'foo.bar....' 
package, obj = parts module = __import__(package, fromlist=[obj]) diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/jupyter_server-2.1.0/pyproject.toml new/jupyter_server-2.2.1/pyproject.toml --- old/jupyter_server-2.1.0/pyproject.toml 2020-02-02 01:00:00.000000000 +0100 +++ new/jupyter_server-2.2.1/pyproject.toml 2020-02-02 01:00:00.000000000 +0100 @@ -27,7 +27,7 @@ ] requires-python = ">=3.8" dependencies = [ - "anyio>=3.1.0,<4", + "anyio>=3.1.0", "argon2-cffi", "jinja2", "jupyter_client>=7.4.4", @@ -120,7 +120,7 @@ dependencies = [ "black[jupyter]==22.12.0", "mdformat>0.7", - "ruff==0.0.206", + "ruff==0.0.236", ] [tool.hatch.envs.lint.scripts] style = [ @@ -160,8 +160,31 @@ target-version = "py38" line-length = 100 select = [ - "A", "B", "C", "E", "EM", "F", "FBT", "I", "N", "Q", "RUF", "S", "T", - "UP", "W", "YTT", + "A", + "B", + "C", + "DTZ", + "E", + "EM", + "F", + "FBT", + "I", + "ICN", + "ISC", + "N", + "PLC", + "PLE", + "PLR", + "PLW", + "Q", + "RUF", + "S", + "SIM", + "T", + "TID", + "UP", + "W", + "YTT", ] ignore = [ # Allow non-abstract empty methods in abstract base classes @@ -192,6 +215,8 @@ "N806", # Exception name `KernelSessionRecordConflict` should be named with an Error suffix "N818", + # SIM105 Use `contextlib.suppress(...)` + "SIM105", ] unfixable = [ # Don't touch print statements @@ -209,9 +234,13 @@ # E402 Module level import not at top of file # T201 `print` found # EM101 Exception must not use a string literal -"tests/*" = ["B011", "F841", "C408", "E402", "T201", "EM101", "EM102", "EM103"] +# PLR2004 Magic value used in comparison +# S108 Probable insecure usage of temporary file or directory +"tests/*" = ["B011", "F841", "C408", "E402", "T201", "EM101", "EM102", "EM103", "PLR2004", "S108"] # Ignore flake 8 errors from shimmed imports "jupyter_server/base/zmqhandlers.py" = ["F401"] +# PLR2004 Magic value used in comparison +"test_handlers.py" = ["PLR2004"] # F821 Undefined name `c` "**/*_config.py" = ["F821"] # F401 `jupyter_server_terminals.TerminalAPIHandler` imported but unused diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/jupyter_server-2.1.0/tests/auth/test_identity.py new/jupyter_server-2.2.1/tests/auth/test_identity.py --- old/jupyter_server-2.1.0/tests/auth/test_identity.py 2020-02-02 01:00:00.000000000 +0100 +++ new/jupyter_server-2.2.1/tests/auth/test_identity.py 2020-02-02 01:00:00.000000000 +0100 @@ -172,12 +172,9 @@ password_required=password_required, ) app.identity_provider = idp - if ok: - ctx = nullcontext() - else: - ctx = pytest.raises(SystemExit) # type:ignore + ctx = nullcontext() if ok else pytest.raises(SystemExit) - with ctx: + with ctx: # type:ignore[attr-defined] idp.validate_security(app, ssl_options=None) diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/jupyter_server-2.1.0/tests/extension/test_launch.py new/jupyter_server-2.2.1/tests/extension/test_launch.py --- old/jupyter_server-2.1.0/tests/extension/test_launch.py 2020-02-02 01:00:00.000000000 +0100 +++ new/jupyter_server-2.2.1/tests/extension/test_launch.py 2020-02-02 01:00:00.000000000 +0100 @@ -32,7 +32,7 @@ def wait_up(url, interval=0.1, check=None): while True: try: - r = requests.get(url) + r = requests.get(url) # noqa except Exception: if check: assert check() @@ -90,7 +90,7 @@ def fetch(port, auth_header): def _get(endpoint): url = f"http://127.0.0.1:{port}" + endpoint - return requests.get(url, headers=auth_header) + return 
requests.get(url, headers=auth_header) # noqa return _get diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/jupyter_server-2.1.0/tests/extension/test_manager.py new/jupyter_server-2.2.1/tests/extension/test_manager.py --- old/jupyter_server-2.1.0/tests/extension/test_manager.py 2020-02-02 01:00:00.000000000 +0100 +++ new/jupyter_server-2.2.1/tests/extension/test_manager.py 2020-02-02 01:00:00.000000000 +0100 @@ -1,5 +1,6 @@ import os -import unittest.mock as mock +import sys +from unittest import mock import pytest from jupyter_core.paths import jupyter_config_path @@ -60,7 +61,7 @@ path1 = metadata_list[0] app = path1["app"] - e = ExtensionPackage(name="tests.extension.mockextensions") + e = ExtensionPackage(name="tests.extension.mockextensions", enabled=True) e.extension_points assert hasattr(e, "extension_points") assert len(e.extension_points) == len(metadata_list) @@ -70,7 +71,9 @@ def test_extension_package_notfound_error(): with pytest.raises(ExtensionModuleNotFound): - ExtensionPackage(name="nonexistent") + ExtensionPackage(name="nonexistent", enabled=True) + # no raise if not enabled + ExtensionPackage(name="nonexistent", enabled=False) def _normalize_path(path_list): @@ -144,3 +147,23 @@ manager.load_extension(name) else: manager.load_extension(name) + + +@pytest.mark.parametrize("has_app", [True, False]) +def test_disable_no_import(jp_serverapp, has_app): + # de-import modules so we can detect if they are re-imported + disabled_ext = "tests.extension.mockextensions.mock1" + enabled_ext = "tests.extension.mockextensions.mock2" + sys.modules.pop(disabled_ext, None) + sys.modules.pop(enabled_ext, None) + + manager = ExtensionManager(serverapp=jp_serverapp if has_app else None) + manager.add_extension(disabled_ext, enabled=False) + manager.add_extension(enabled_ext, enabled=True) + assert disabled_ext not in sys.modules + assert enabled_ext in sys.modules + + ext_pkg = manager.extensions[disabled_ext] + assert ext_pkg.extension_points == {} + assert ext_pkg.version == "" + assert ext_pkg.metadata == [] diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/jupyter_server-2.1.0/tests/services/contents/test_api.py new/jupyter_server-2.2.1/tests/services/contents/test_api.py --- old/jupyter_server-2.1.0/tests/services/contents/test_api.py 2020-02-02 01:00:00.000000000 +0100 +++ new/jupyter_server-2.2.1/tests/services/contents/test_api.py 2020-02-02 01:00:00.000000000 +0100 @@ -665,7 +665,7 @@ async def test_delete_dirs(jp_fetch, contents, folders): # Iterate over folders - for name in sorted(folders + ["/"], key=len, reverse=True): + for name in sorted([*folders, "/"], key=len, reverse=True): r = await jp_fetch("api", "contents", name, method="GET") # Get JSON blobs for each content. 
listing = json.loads(r.body.decode())["content"] diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/jupyter_server-2.1.0/tests/services/contents/test_fileio.py new/jupyter_server-2.2.1/tests/services/contents/test_fileio.py --- old/jupyter_server-2.1.0/tests/services/contents/test_fileio.py 2020-02-02 01:00:00.000000000 +0100 +++ new/jupyter_server-2.2.1/tests/services/contents/test_fileio.py 2020-02-02 01:00:00.000000000 +0100 @@ -41,10 +41,9 @@ # OSError: The user lacks the privilege (Windows) have_symlink = False - with pytest.raises(CustomExc): - with atomic_writing(str(f1)) as f: - f.write("Failing write") - raise CustomExc + with pytest.raises(CustomExc), atomic_writing(str(f1)) as f: + f.write("Failing write") + raise CustomExc with open(str(f1)) as f: assert f.read() == "Before" diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/jupyter_server-2.1.0/tests/services/contents/test_manager.py new/jupyter_server-2.2.1/tests/services/contents/test_manager.py --- old/jupyter_server-2.1.0/tests/services/contents/test_manager.py 2020-02-02 01:00:00.000000000 +0100 +++ new/jupyter_server-2.2.1/tests/services/contents/test_manager.py 2020-02-02 01:00:00.000000000 +0100 @@ -243,9 +243,8 @@ @pytest.mark.skipif(sys.platform.startswith("win"), reason="Can't test permissions on Windows") async def test_403(jp_file_contents_manager_class, tmp_path): - if hasattr(os, "getuid"): - if os.getuid() == 0: - raise pytest.skip("Can't test permissions as root") + if hasattr(os, "getuid") and os.getuid() == 0: + raise pytest.skip("Can't test permissions as root") td = str(tmp_path) cm = jp_file_contents_manager_class(root_dir=td) diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/jupyter_server-2.1.0/tests/test_gateway.py new/jupyter_server-2.2.1/tests/test_gateway.py --- old/jupyter_server-2.1.0/tests/test_gateway.py 2020-02-02 01:00:00.000000000 +0100 +++ new/jupyter_server-2.2.1/tests/test_gateway.py 2020-02-02 01:00:00.000000000 +0100 @@ -4,7 +4,7 @@ import logging import os import uuid -from datetime import datetime, timedelta +from datetime import datetime, timedelta, timezone from email.utils import format_datetime from http.cookies import SimpleCookie from io import BytesIO @@ -41,7 +41,10 @@ kernelspec_stanza = { "name": name, "spec": spec_stanza, - "resources": {"logo-64x64": f"f/kernelspecs/{name}/logo-64x64.png"}, + "resources": { + "logo-64x64": f"f/kernelspecs/{name}/logo-64x64.png", + "url": "https://example.com/example-url", + }, } return kernelspec_stanza @@ -62,7 +65,7 @@ def generate_model(name): """Generate a mocked kernel model. 
Caller is responsible for adding model to running_kernels dictionary.""" - dt = datetime.utcnow().isoformat() + "Z" + dt = datetime.utcnow().isoformat() + "Z" # noqa kernel_id = str(uuid.uuid4()) model = { "id": kernel_id, @@ -127,7 +130,7 @@ # Fetch list of running kernels if endpoint.endswith("/api/kernels") and method == "GET": kernels = [] - for kernel_id in running_kernels.keys(): + for kernel_id in running_kernels: model = running_kernels.get(kernel_id) kernels.append(model) response_buf = BytesIO(json.dumps(kernels).encode("utf-8")) @@ -344,7 +347,7 @@ GatewayClient.clear_instance() -cookie_expire_time = format_datetime(datetime.now() + timedelta(seconds=180)) +cookie_expire_time = format_datetime(datetime.now(tz=timezone.utc) + timedelta(seconds=180)) @pytest.mark.parametrize( @@ -623,10 +626,7 @@ assert r.code == 200 sessions = json.loads(r.body.decode("utf-8")) assert len(sessions) == len(running_kernels) # Use running_kernels as truth - for model in sessions: - if model.get("id") == session_id: - return True - return False + return any(model.get("id") == session_id for model in sessions) async def create_session(jp_fetch, kernel_name): @@ -677,10 +677,7 @@ assert r.code == 200 kernels = json.loads(r.body.decode("utf-8")) assert len(kernels) == len(running_kernels) - for model in kernels: - if model.get("id") == kernel_id: - return True - return False + return any(model.get("id") == kernel_id for model in kernels) async def create_kernel(jp_fetch, kernel_name):
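
A recurring change across the hunks above replaces naive datetime.utcnow()/datetime.now() calls with timezone-aware datetime.now(tz=timezone.utc), matching the DTZ rules newly selected in pyproject.toml. A minimal sketch of the pattern, illustrative only and not code from the package:

    from datetime import datetime, timezone

    naive = datetime.utcnow()              # old style: tzinfo is None
    aware = datetime.now(tz=timezone.utc)  # new style: explicit UTC tzinfo

    assert naive.tzinfo is None
    assert aware.tzinfo is timezone.utc
    # Cookie-expiry and cache-busting timestamps then compare aware values
    # only, avoiding TypeError from mixing naive and aware datetimes.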