diff --git a/docs/source/conf.py b/docs/source/conf.py index 1aa7613b42..9482f01205 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -319,7 +319,7 @@ # -- Options for link checks ---------------------------------------------- linkcheck_ignore = [ - 'http://127\.0\.0\.1/*' + r'http://127\.0\.0\.1/*' ] diff --git a/notebook/__main__.py b/notebook/__main__.py index ee339caebf..c11733204c 100644 --- a/notebook/__main__.py +++ b/notebook/__main__.py @@ -1,4 +1,3 @@ - if __name__ == '__main__': from notebook import notebookapp as app app.launch_new_instance() diff --git a/notebook/_sysinfo.py b/notebook/_sysinfo.py index 951ffd1014..4abeadcedb 100644 --- a/notebook/_sysinfo.py +++ b/notebook/_sysinfo.py @@ -55,10 +55,10 @@ def pkg_commit_hash(pkg_path): if repo_commit: return 'repository', repo_commit.strip().decode('ascii') else: - return u'', u'' + return '', '' par_path = p.dirname(par_path) - - return u'', u'' + + return '', '' def pkg_info(pkg_path): diff --git a/notebook/auth/__main__.py b/notebook/auth/__main__.py index ff413b534e..26c0a8f2a0 100644 --- a/notebook/auth/__main__.py +++ b/notebook/auth/__main__.py @@ -25,14 +25,17 @@ def set_password(args): } }) if not args.quiet: - print("password stored in config dir: %s" % jupyter_config_dir()) + print(f"password stored in config dir: {jupyter_config_dir()}") def main(argv): parser = argparse.ArgumentParser(argv[0]) subparsers = parser.add_subparsers() parser_password = subparsers.add_parser('password', help='sets a password for your notebook server') - parser_password.add_argument("password", help="password to set, if not given, a password will be queried for (NOTE: this may not be safe)", - nargs="?") + parser_password.add_argument( + "password", + help="password to set, if not given, a password will be queried for (NOTE: this may not be safe)", + nargs="?" 
+ ) parser_password.add_argument("--quiet", help="suppress messages", action="store_true") parser_password.set_defaults(function=set_password) args = parser.parse_args(argv[1:]) diff --git a/notebook/auth/login.py b/notebook/auth/login.py index 16c4e7a356..47cfb79ae0 100644 --- a/notebook/auth/login.py +++ b/notebook/auth/login.py @@ -48,7 +48,7 @@ def _redirect_safe(self, url, default=None): # OR pass our cross-origin check if parsed.netloc: # if full URL, run our cross-origin check: - origin = '%s://%s' % (parsed.scheme, parsed.netloc) + origin = f'{parsed.scheme}://{parsed.netloc}' origin = origin.lower() if self.allow_origin: allow = self.allow_origin == origin @@ -56,7 +56,7 @@ def _redirect_safe(self, url, default=None): allow = bool(self.allow_origin_pat.match(origin)) if not allow: # not allowed, use default - self.log.warning("Not allowing login redirect to %r" % url) + self.log.warning(f"Not allowing login redirect to {url!r}") url = default self.redirect(url) @@ -73,13 +73,13 @@ def hashed_password(self): def passwd_check(self, a, b): return passwd_check(a, b) - + def post(self): - typed_password = self.get_argument('password', default=u'') - new_password = self.get_argument('new_password', default=u'') + typed_password = self.get_argument('password', default='') + new_password = self.get_argument('new_password', default='') + - if self.get_login_available(self.settings): if self.passwd_check(self.hashed_password, typed_password) and not new_password: self.set_login_cookie(self, uuid.uuid4().hex) @@ -89,7 +89,7 @@ def post(self): config_dir = self.settings.get('config_dir') config_file = os.path.join(config_dir, 'jupyter_notebook_config.json') set_password(new_password, config_file=config_file) - self.log.info("Wrote hashed password to %s" % config_file) + self.log.info(f"Wrote hashed password to {config_file}") else: self.set_status(401) self._render(message={'error': 'Invalid credentials'}) @@ -197,7 +197,7 @@ def get_user(cls, handler): @classmethod 
def get_user_token(cls, handler): """Identify the user based on a token in the URL or Authorization header - + Returns: - uuid if authenticated - None if not @@ -245,7 +245,7 @@ def password_from_settings(cls, settings): If there is no configured password, an empty string will be returned. """ - return settings.get('password', u'') + return settings.get('password', '') @classmethod def get_login_available(cls, settings): diff --git a/notebook/auth/security.py b/notebook/auth/security.py index cc9723901b..a043b63e30 100644 --- a/notebook/auth/security.py +++ b/notebook/auth/security.py @@ -5,7 +5,6 @@ from contextlib import contextmanager import getpass import hashlib -import io import json import os import random @@ -71,7 +70,7 @@ def passwd(passphrase=None, algorithm='argon2'): return ':'.join((algorithm, cast_unicode(h, 'ascii'))) else: h = hashlib.new(algorithm) - salt = ('%0' + str(salt_len) + 'x') % random.getrandbits(4 * salt_len) + salt = f"{random.getrandbits(4 * salt_len):0{salt_len}x}" h.update(cast_bytes(passphrase, 'utf-8') + str_to_bytes(salt, 'ascii')) return ':'.join((algorithm, salt, h.hexdigest())) @@ -135,7 +134,7 @@ def passwd_check(hashed_passphrase, passphrase): def persist_config(config_file=None, mode=0o600): """Context manager that can be used to modify a config object - On exit of the context manager, the config will be written back to disk, + On exit of the context manager, the config will be written back to disk, by default with user-only (600) permissions. 
""" @@ -152,20 +151,20 @@ def persist_config(config_file=None, mode=0o600): yield config - with io.open(config_file, 'w', encoding='utf8') as f: + with open(config_file, 'w', encoding='utf8') as f: f.write(cast_unicode(json.dumps(config, indent=2))) try: os.chmod(config_file, mode) except Exception as e: tb = traceback.format_exc() - warnings.warn("Failed to set permissions on %s:\n%s" % (config_file, tb), + warnings.warn(f"Failed to set permissions on {config_file}:\n{tb}", RuntimeWarning) def set_password(password=None, config_file=None): """Ask user for password, store it in notebook json configuration file""" - + hashed_password = passwd(password) with persist_config(config_file) as config: diff --git a/notebook/auth/tests/test_security.py b/notebook/auth/tests/test_security.py index cf748cda82..b042fcbfa9 100644 --- a/notebook/auth/tests/test_security.py +++ b/notebook/auth/tests/test_security.py @@ -18,8 +18,8 @@ def test_bad(): def test_passwd_check_unicode(): # GH issue #4524 - phash = u'sha1:23862bc21dd3:7a415a95ae4580582e314072143d9c382c491e4f' - assert passwd_check(phash, u"łe¶ŧ←↓→") - phash = (u'argon2:$argon2id$v=19$m=10240,t=10,p=8$' - u'qjjDiZUofUVVnrVYxacnbA$l5pQq1bJ8zglGT2uXP6iOg') - assert passwd_check(phash, u"łe¶ŧ←↓→") + phash = 'sha1:23862bc21dd3:7a415a95ae4580582e314072143d9c382c491e4f' + assert passwd_check(phash, "łe¶ŧ←↓→") + phash = ('argon2:$argon2id$v=19$m=10240,t=10,p=8$' + 'qjjDiZUofUVVnrVYxacnbA$l5pQq1bJ8zglGT2uXP6iOg') + assert passwd_check(phash, "łe¶ŧ←↓→") diff --git a/notebook/base/handlers.py b/notebook/base/handlers.py index 216480291d..97c818a342 100755 --- a/notebook/base/handlers.py +++ b/notebook/base/handlers.py @@ -59,7 +59,7 @@ class AuthenticatedHandler(web.RequestHandler): @property def content_security_policy(self): """The default Content-Security-Policy header - + Can be overridden by defining Content-Security-Policy in settings['headers'] """ if 'Content-Security-Policy' in self.settings.get('headers', {}): @@ -132,7 
+132,7 @@ def get_current_user(self): def skip_check_origin(self): """Ask my login_handler if I should skip the origin_check - + For example: in the default LoginHandler, if a request is token-authenticated, origin checking should be skipped. """ @@ -152,11 +152,9 @@ def token_authenticated(self): @property def cookie_name(self): - default_cookie_name = non_alphanum.sub('-', 'username-{}'.format( - self.request.host - )) + default_cookie_name = non_alphanum.sub('-', f'username-{self.request.host}') return self.settings.get('cookie_name', default_cookie_name) - + @property def logged_in(self): """Is a user currently logged in?""" @@ -188,14 +186,14 @@ def login_available(self): class IPythonHandler(AuthenticatedHandler): """IPython-specific extensions to authenticated handling - + Mostly property shortcuts to IPython-specific settings. """ @property def ignore_minified_js(self): """Wether to user bundle in template. (*.min files) - + Mainly use for development and avoid file recompilation """ return self.settings.get('ignore_minified_js', False) @@ -203,7 +201,7 @@ def ignore_minified_js(self): @property def config(self): return self.settings.get('config', None) - + @property def log(self): """use the IPython log by default, falling back on tornado's logger""" @@ -213,23 +211,23 @@ def log(self): def jinja_template_vars(self): """User-supplied values to supply to jinja templates.""" return self.settings.get('jinja_template_vars', {}) - + #--------------------------------------------------------------- # URLs #--------------------------------------------------------------- - + @property def version_hash(self): """The version hash to use for cache hints for static files""" return self.settings.get('version_hash', '') - + @property def mathjax_url(self): url = self.settings.get('mathjax_url', '') if not url or url_is_absolute(url): return url return url_path_join(self.base_url, url) - + @property def mathjax_config(self): return self.settings.get('mathjax_config', 
'TeX-AMS-MML_HTMLorMML-full,Safe') @@ -251,11 +249,11 @@ def contents_js_source(self): self.log.debug("Using contents: %s", self.settings.get('contents_js_source', 'services/contents')) return self.settings.get('contents_js_source', 'services/contents') - + #--------------------------------------------------------------- # Manager objects #--------------------------------------------------------------- - + @property def kernel_manager(self): return self.settings['kernel_manager'] @@ -263,15 +261,15 @@ def kernel_manager(self): @property def contents_manager(self): return self.settings['contents_manager'] - + @property def session_manager(self): return self.settings['session_manager'] - + @property def terminal_manager(self): return self.settings['terminal_manager'] - + @property def kernel_spec_manager(self): return self.settings['kernel_spec_manager'] @@ -283,22 +281,22 @@ def config_manager(self): #--------------------------------------------------------------- # CORS #--------------------------------------------------------------- - + @property def allow_origin(self): """Normal Access-Control-Allow-Origin""" return self.settings.get('allow_origin', '') - + @property def allow_origin_pat(self): """Regular expression version of allow_origin""" return self.settings.get('allow_origin_pat', None) - + @property def allow_credentials(self): """Whether to set Access-Control-Allow-Credentials""" return self.settings.get('allow_credentials', False) - + def set_default_headers(self): """Add CORS headers, if defined""" super().set_default_headers() @@ -320,19 +318,16 @@ def set_default_headers(self): if self.allow_credentials: self.set_header("Access-Control-Allow-Credentials", 'true') - + def set_attachment_header(self, filename): """Set Content-Disposition: attachment header As a method to ensure handling of filename encoding """ escaped_filename = url_escape(filename) - self.set_header('Content-Disposition', - 'attachment;' - " filename*=utf-8''{utf8}" - .format( - 
utf8=escaped_filename, - ) + self.set_header( + 'Content-Disposition', + f"attachment; filename*=utf-8''{escaped_filename}" ) def get_origin(self): @@ -424,7 +419,7 @@ def check_referer(self): return True # apply cross-origin checks to Referer: - origin = "{}://{}".format(referer_url.scheme, referer_url.netloc) + origin = f"{referer_url.scheme}://{referer_url.netloc}" if self.allow_origin: allow = self.allow_origin == origin elif self.allow_origin_pat: @@ -453,7 +448,7 @@ def check_xsrf_cookie(self): if not self.check_referer(): referer = self.request.headers.get('Referer') if referer: - msg = "Blocking Cross Origin request from {}.".format(referer) + msg = f"Blocking Cross Origin request from {referer}." else: msg = "Blocking request from unknown origin" raise web.HTTPError(403, msg) from e @@ -505,16 +500,16 @@ def prepare(self): #--------------------------------------------------------------- # template rendering #--------------------------------------------------------------- - + def get_template(self, name): """Return the jinja template object for a given name""" return self.settings['jinja2_env'].get_template(name) - + def render_template(self, name, **ns): ns.update(self.template_namespace) template = self.get_template(name) return template.render(**ns) - + @property def template_namespace(self): return dict( @@ -537,19 +532,19 @@ def template_namespace(self): self.request.headers.get('Accept-Language', ''))), **self.jinja_template_vars ) - + def get_json_body(self): """Return the body of the request as JSON data.""" if not self.request.body: return None # Do we need to call body.decode('utf-8') here? 
- body = self.request.body.strip().decode(u'utf-8') + body = self.request.body.strip().decode('utf-8') try: model = json.loads(body) except Exception as e: self.log.debug("Bad JSON: %r", body) self.log.error("Couldn't parse JSON", exc_info=True) - raise web.HTTPError(400, u'Invalid JSON in body of request') from e + raise web.HTTPError(400, 'Invalid JSON in body of request') from e return model def write_error(self, status_code, **kwargs): @@ -565,12 +560,12 @@ def write_error(self, status_code, **kwargs): message = exception.log_message % exception.args except Exception: pass - + # construct the custom reason, if defined reason = getattr(exception, 'reason', '') if reason: status_message = reason - + # build template namespace ns = dict( status_code=status_code, @@ -582,7 +577,7 @@ def write_error(self, status_code, **kwargs): self.set_header('Content-Type', 'text/html') # render the template try: - html = self.render_template('%s.html' % status_code, **ns) + html = self.render_template(f'{status_code}.html', **ns) except TemplateNotFound: html = self.render_template('error.html', **ns) @@ -741,15 +736,15 @@ def set_headers(self): # disable browser caching, rely on 304 replies for savings if "v" not in self.request.arguments: self.add_header("Cache-Control", "no-cache") - + def compute_etag(self): return None - + def validate_absolute_path(self, root, absolute_path): """Validate and return the absolute path. - + Requires tornado 3.1 - + Adding to tornado's own handling, forbids the serving of hidden files. """ abs_path = super().validate_absolute_path(root, absolute_path) @@ -762,12 +757,12 @@ def validate_absolute_path(self, root, absolute_path): def json_errors(method): """Decorate methods with this to return GitHub style JSON errors. - + This should be used on any JSON API on any handler method that can raise HTTPErrors. - + This will grab the latest HTTPError exception using sys.exc_info and then: - + 1. Set the HTTP status code based on the HTTPError 2. 
Create and return a JSON body with a message field describing the error in a human readable form. @@ -793,31 +788,31 @@ def wrapper(self, *args, **kwargs): class FileFindHandler(IPythonHandler, web.StaticFileHandler): """subclass of StaticFileHandler for serving files from a search path""" - + # cache search results, don't search for files more than once _static_paths = {} - + def set_headers(self): super().set_headers() # disable browser caching, rely on 304 replies for savings if "v" not in self.request.arguments or \ any(self.request.path.startswith(path) for path in self.no_cache_paths): self.set_header("Cache-Control", "no-cache") - + def initialize(self, path, default_filename=None, no_cache_paths=None): self.no_cache_paths = no_cache_paths or [] - + if isinstance(path, string_types): path = [path] - + self.root = tuple( os.path.abspath(os.path.expanduser(p)) + os.sep for p in path ) self.default_filename = default_filename - + def compute_etag(self): return None - + @classmethod def get_absolute_path(cls, roots, path): """locate a file to serve on our static file search path""" @@ -826,25 +821,25 @@ def get_absolute_path(cls, roots, path): return cls._static_paths[path] try: abspath = os.path.abspath(filefind(path, roots)) - except IOError: + except OSError: # IOError means not found return '' - + cls._static_paths[path] = abspath - + log().debug("Path %s served from %s"%(path, abspath)) return abspath - + def validate_absolute_path(self, root, absolute_path): """check if the file should be served (raises 404, 403, etc.)""" if absolute_path == '': raise web.HTTPError(404) - + for root in self.root: if (absolute_path + os.sep).startswith(root): break - + return super().validate_absolute_path(root, absolute_path) @@ -874,11 +869,11 @@ def get(self): class FilesRedirectHandler(IPythonHandler): """Handler for redirecting relative URLs to the /files/ handler""" - + @staticmethod def redirect_to_files(self, path): """make redirect logic a reusable static method - 
+ so it can be called from other handlers. """ cm = self.contents_manager @@ -903,7 +898,7 @@ def redirect_to_files(self, path): url = url_path_join(self.base_url, 'files', url_escape(path)) self.log.debug("Redirecting %s to %s", self.request.path, url) self.redirect(url) - + def get(self, path=''): return self.redirect_to_files(self, path) diff --git a/notebook/base/zmqhandlers.py b/notebook/base/zmqhandlers.py index c4d4554a89..85937ef6ec 100644 --- a/notebook/base/zmqhandlers.py +++ b/notebook/base/zmqhandlers.py @@ -87,7 +87,7 @@ def deserialize_binary_message(bmsg): WS_PING_INTERVAL = 30000 -class WebSocketMixin(object): +class WebSocketMixin: """Mixin for common websocket options""" ping_callback = None last_ping = 0 @@ -167,7 +167,7 @@ def open(self, *args, **kwargs): self.send_ping, self.ping_interval, ) self.ping_callback.start() - return super(WebSocketMixin, self).open(*args, **kwargs) + return super().open(*args, **kwargs) def send_ping(self): """send a ping to keep the websocket alive""" @@ -249,7 +249,7 @@ def _on_zmq_reply(self, stream, msg_list): try: msg = self._reserialize_reply(msg_list, channel=channel) except Exception: - self.log.critical("Malformed message: %r" % msg_list, exc_info=True) + self.log.critical(f"Malformed message: {msg_list!r}", exc_info=True) return try: diff --git a/notebook/bundler/bundlerextensions.py b/notebook/bundler/bundlerextensions.py index 2ac346f971..2f218ebdec 100644 --- a/notebook/bundler/bundlerextensions.py +++ b/notebook/bundler/bundlerextensions.py @@ -17,14 +17,14 @@ def _get_bundler_metadata(module): """Gets the list of bundlers associated with a Python package. 
- + Returns a tuple of (the module, [{ 'name': 'unique name of the bundler', 'label': 'file menu item label for the bundler', 'module_name': 'dotted package/module name containing the bundler', 'group': 'download or deploy parent menu item' }]) - + Parameters ---------- @@ -34,16 +34,16 @@ def _get_bundler_metadata(module): """ m = import_item(module) if not hasattr(m, '_jupyter_bundlerextension_paths'): - raise KeyError('The Python module {} does not contain a valid bundlerextension'.format(module)) + raise KeyError(f'The Python module {module} does not contain a valid bundlerextension') bundlers = m._jupyter_bundlerextension_paths() return m, bundlers def _set_bundler_state(name, label, module_name, group, state, user=True, sys_prefix=False, logger=None): """Set whether a bundler is enabled or disabled. - + Returns True if the final state is the one requested. - + Parameters ---------- name : string @@ -68,14 +68,10 @@ def _set_bundler_state(name, label, module_name, group, state, config_dir = os.path.join( _get_config_dir(user=user, sys_prefix=sys_prefix), 'nbconfig') cm = BaseJSONConfigManager(config_dir=config_dir) - + if logger: - logger.info("{} {} bundler {}...".format( - "Enabling" if state else "Disabling", - name, - module_name - )) - + logger.info(f"{'Enabling' if state else 'Disabling'} {name} bundler {module_name}...") + if state: cm.update(BUNDLER_SECTION, { BUNDLER_SUBSECTION: { @@ -96,13 +92,13 @@ def _set_bundler_state(name, label, module_name, group, state, return (cm.get(BUNDLER_SECTION) .get(BUNDLER_SUBSECTION, {}) .get(name) is not None) == state - + def _set_bundler_state_python(state, module, user, sys_prefix, logger=None): """Enables or disables bundlers defined in a Python package. - + Returns a list of whether the state was achieved for each bundler. 
- + Parameters ---------- state : Bool @@ -129,9 +125,9 @@ def _set_bundler_state_python(state, module, user, sys_prefix, logger=None): def enable_bundler_python(module, user=True, sys_prefix=False, logger=None): """Enables bundlers defined in a Python package. - + Returns whether each bundle defined in the packaged was enabled or not. - + Parameters ---------- module : str @@ -147,12 +143,12 @@ def enable_bundler_python(module, user=True, sys_prefix=False, logger=None): """ return _set_bundler_state_python(True, module, user, sys_prefix, logger=logger) - + def disable_bundler_python(module, user=True, sys_prefix=False, logger=None): """Disables bundlers defined in a Python package. - + Returns whether each bundle defined in the packaged was enabled or not. - + Parameters ---------- module : str @@ -176,13 +172,13 @@ class ToggleBundlerExtensionApp(BaseExtensionApp): description = "Enable/disable a bundlerextension in configuration." user = Bool(True, config=True, help="Apply the configuration only for the current user (default)") - + _toggle_value = None - + def _config_file_name_default(self): """The default config file name.""" return 'jupyter_notebook_config' - + def toggle_bundler_python(self, module): """Toggle some extensions in an importable Python module. @@ -210,25 +206,25 @@ def start(self): if self.python: self.toggle_bundler_python(self.extra_args[0]) else: - raise NotImplementedError('Cannot install bundlers from non-Python packages') + raise NotImplementedError('Cannot install bundlers from non-Python packages') class EnableBundlerExtensionApp(ToggleBundlerExtensionApp): """An App that enables bundlerextensions""" name = "jupyter bundlerextension enable" description = """ Enable a bundlerextension in frontend configuration. 
- + Usage jupyter bundlerextension enable [--system|--sys-prefix] """ _toggle_value = True - + class DisableBundlerExtensionApp(ToggleBundlerExtensionApp): """An App that disables bundlerextensions""" name = "jupyter bundlerextension disable" description = """ Disable a bundlerextension in frontend configuration. - + Usage jupyter bundlerextension disable [--system|--sys-prefix] """ @@ -240,15 +236,15 @@ class ListBundlerExtensionApp(BaseExtensionApp): name = "jupyter nbextension list" version = __version__ description = "List all nbextensions known by the configuration system" - + def list_nbextensions(self): """List all the nbextensions""" config_dirs = [os.path.join(p, 'nbconfig') for p in jupyter_config_path()] - + print("Known bundlerextensions:") - + for config_dir in config_dirs: - head = u' config dir: {}'.format(config_dir) + head = f' config dir: {config_dir}' head_shown = False cm = BaseJSONConfigManager(parent=self, config_dir=config_dir) @@ -258,18 +254,16 @@ def list_nbextensions(self): # only show heading if there is an nbextension here print(head) head_shown = True - + for bundler_id, info in data['bundlerextensions'].items(): label = info.get('label') module = info.get('module_name') if label is None or module is None: - msg = u' {} {}'.format(bundler_id, RED_DISABLED) + msg = f' {bundler_id} {RED_DISABLED}' else: - msg = u' "{}" from {} {}'.format( - label, module, GREEN_ENABLED - ) + msg = f' "{label}" from {module} {GREEN_ENABLED}' print(msg) - + def start(self): """Perform the App's functions as configured""" self.list_nbextensions() @@ -299,7 +293,7 @@ def start(self): # The above should have called a subcommand and raised NoStart; if we # get here, it didn't, so we should self.log.info a message. 
subcmds = ", ".join(sorted(self.subcommands)) - sys.exit("Please supply at least one subcommand: %s" % subcmds) + sys.exit(f"Please supply at least one subcommand: {subcmds}") main = BundlerExtensionApp.launch_instance diff --git a/notebook/bundler/handlers.py b/notebook/bundler/handlers.py index 868dd78324..213ea8aa84 100644 --- a/notebook/bundler/handlers.py +++ b/notebook/bundler/handlers.py @@ -60,8 +60,7 @@ def get(self, path): try: bundler = self.get_bundler(bundler_id) except KeyError as e: - raise web.HTTPError(400, 'Bundler %s not enabled' % - bundler_id) from e + raise web.HTTPError(400, f'Bundler {bundler_id} not enabled') from e module_name = bundler['module_name'] try: @@ -70,12 +69,11 @@ def get(self, path): except UnicodeEncodeError: # Encode unicode as utf-8 in python2 else import_item fails module_name = module_name.encode('utf-8') - + try: bundler_mod = import_item(module_name) except ImportError as e: - raise web.HTTPError(500, 'Could not import bundler %s ' % - bundler_id) from e + raise web.HTTPError(500, f'Could not import bundler {bundler_id} ') from e # Let the bundler respond in any way it sees fit and assume it will # finish the request diff --git a/notebook/bundler/tarball_bundler.py b/notebook/bundler/tarball_bundler.py index c112841482..74861e7a78 100644 --- a/notebook/bundler/tarball_bundler.py +++ b/notebook/bundler/tarball_bundler.py @@ -20,7 +20,7 @@ def _jupyter_bundlerextension_paths(): def bundle(handler, model): """Create a compressed tarball containing the notebook document. 
- + Parameters ---------- handler : tornado.web.RequestHandler @@ -31,8 +31,8 @@ def bundle(handler, model): notebook_filename = model['name'] notebook_content = nbformat.writes(model['content']).encode('utf-8') notebook_name = os.path.splitext(notebook_filename)[0] - tar_filename = '{}.tar.gz'.format(notebook_name) - + tar_filename = f'{notebook_name}.tar.gz' + info = tarfile.TarInfo(notebook_filename) info.size = len(notebook_content) diff --git a/notebook/bundler/tests/test_bundler_api.py b/notebook/bundler/tests/test_bundler_api.py index 6c251e5e99..0192160241 100644 --- a/notebook/bundler/tests/test_bundler_api.py +++ b/notebook/bundler/tests/test_bundler_api.py @@ -3,7 +3,6 @@ # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. -import io from os.path import join as pjoin from notebook.tests.launchnotebook import NotebookTestBase @@ -14,7 +13,7 @@ from unittest.mock import patch - + def bundle(handler, model): """Bundler test stub. Echo the notebook path.""" handler.finish(model['path']) @@ -25,17 +24,17 @@ class BundleAPITest(NotebookTestBase): def setup_class(cls): """Make a test notebook. Borrowed from nbconvert test. 
Assumes the class teardown will clean it up in the end.""" - super(BundleAPITest, cls).setup_class() + super().setup_class() nbdir = cls.notebook_dir nb = new_notebook() - nb.cells.append(new_markdown_cell(u'Created by test')) - cc1 = new_code_cell(source=u'print(2*6)') - cc1.outputs.append(new_output(output_type="stream", text=u'12')) + nb.cells.append(new_markdown_cell('Created by test')) + cc1 = new_code_cell(source='print(2*6)') + cc1.outputs.append(new_output(output_type="stream", text='12')) nb.cells.append(cc1) - - with io.open(pjoin(nbdir, 'testnb.ipynb'), 'w', + + with open(pjoin(nbdir, 'testnb.ipynb'), 'w', encoding='utf-8') as f: write(nb, f, version=4) @@ -68,7 +67,7 @@ def test_bundler_import_error(self): mock.assert_called_with('fake_bundler') self.assertEqual(resp.status_code, 500) self.assertIn('Could not import bundler fake_bundler', resp.text) - + def test_bundler_invoke(self): """Should respond with 200 and output from test bundler stub""" with patch('notebook.bundler.handlers.BundlerHandler.get_bundler') as mock: diff --git a/notebook/config_manager.py b/notebook/config_manager.py index 137cbd691c..a8e4ee26b5 100644 --- a/notebook/config_manager.py +++ b/notebook/config_manager.py @@ -5,7 +5,6 @@ import errno import glob -import io import json import os import copy @@ -97,7 +96,7 @@ def get(self, section_name, include_root=True): data = {} for path in paths: if os.path.isfile(path): - with io.open(path, encoding='utf-8') as f: + with open(path, encoding='utf-8') as f: recursive_update(data, json.load(f)) return data @@ -117,7 +116,7 @@ def set(self, section_name, data): # in order to avoid writing half-finished corrupted data to disk. 
json_content = json.dumps(data, indent=2) - f = io.open(filename, 'w', encoding='utf-8') + f = open(filename, 'w', encoding='utf-8') with f: f.write(json_content) diff --git a/notebook/edit/handlers.py b/notebook/edit/handlers.py index 9ed9a9c380..2a22337adc 100644 --- a/notebook/edit/handlers.py +++ b/notebook/edit/handlers.py @@ -13,7 +13,7 @@ class EditorHandler(IPythonHandler): def get(self, path): path = path.strip('/') if not self.contents_manager.file_exists(path): - raise web.HTTPError(404, u'File does not exist: %s' % path) + raise web.HTTPError(404, f'File does not exist: {path}') basename = path.rsplit('/', 1)[-1] self.write(self.render_template('edit.html', @@ -24,5 +24,5 @@ def get(self, path): ) default_handlers = [ - (r"/edit%s" % path_regex, EditorHandler), -] \ No newline at end of file + (fr"/edit{path_regex}", EditorHandler), +] diff --git a/notebook/extensions.py b/notebook/extensions.py index 8efa65724b..882ca4cfb4 100644 --- a/notebook/extensions.py +++ b/notebook/extensions.py @@ -65,7 +65,11 @@ class BaseExtensionApp(JupyterApp): def _verbose_changed(self): """Warn about verbosity changes""" import warnings - warnings.warn("`verbose` traits of `{}` has been deprecated, has no effects and will be removed in notebook 5.0.".format(type(self).__name__), DeprecationWarning) + warnings.warn( + f"`verbose` traits of `{type(self).__name__}` has been deprecated, " + f"has no effects and will be removed in notebook 5.0.", + DeprecationWarning + ) def _log_format_default(self): """A default format for messages""" diff --git a/notebook/gateway/handlers.py b/notebook/gateway/handlers.py index d774ba39e2..56ade796d5 100644 --- a/notebook/gateway/handlers.py +++ b/notebook/gateway/handlers.py @@ -101,7 +101,7 @@ def write_message(self, message, binary=False): super().write_message(message, binary=binary) elif self.log.isEnabledFor(logging.DEBUG): msg_summary = WebSocketChannelsHandler._get_message_summary(json_decode(utf8(message))) - 
self.log.debug("Notebook client closed websocket connection - message dropped: {}".format(msg_summary)) + self.log.debug(f"Notebook client closed websocket connection - message dropped: {msg_summary}") def on_close(self): self.log.debug("Closing websocket connection %s", self.request.path) @@ -112,14 +112,16 @@ def on_close(self): def _get_message_summary(message): summary = [] message_type = message['msg_type'] - summary.append('type: {}'.format(message_type)) + summary.append(f'type: {message_type}') if message_type == 'status': - summary.append(', state: {}'.format(message['content']['execution_state'])) + summary.append(f', state: {message["content"]["execution_state"]}') elif message_type == 'error': - summary.append(', {}:{}:{}'.format(message['content']['ename'], - message['content']['evalue'], - message['content']['traceback'])) + summary.append( + f', {message["content"]["ename"]}:' + f'{message["content"]["evalue"]}:' + f'{message["content"]["traceback"]}' + ) else: summary.append(', ...') # don't display potentially sensitive data @@ -146,7 +148,7 @@ def _connect(self, kernel_id): GatewayClient.instance().ws_url, GatewayClient.instance().kernels_endpoint, url_escape(kernel_id), 'channels' ) - self.log.info('Connecting to {}'.format(ws_url)) + self.log.info(f'Connecting to {ws_url}') kwargs = {} kwargs = GatewayClient.instance().load_connection_args(**kwargs) @@ -158,7 +160,7 @@ def _connection_done(self, fut): if not self.disconnected and fut.exception() is None: # prevent concurrent.futures._base.CancelledError self.ws = fut.result() self.retry = 0 - self.log.debug("Connection is ready: ws: {}".format(self.ws)) + self.log.debug(f"Connection is ready: ws: {self.ws}") else: self.log.warning("Websocket connection has been closed via client disconnect or due to error. " "Kernel with ID '{}' may not be terminated on GatewayClient: {}". @@ -172,7 +174,7 @@ def _disconnect(self): elif not self.ws_future.done(): # Cancel pending connection. 
Since future.cancel() is a noop on tornado, we'll track cancellation locally self.ws_future.cancel() - self.log.debug("_disconnect: future cancelled, disconnected: {}".format(self.disconnected)) + self.log.debug(f"_disconnect: future cancelled, disconnected: {self.disconnected}") @gen.coroutine def _read_messages(self, callback): @@ -183,10 +185,10 @@ def _read_messages(self, callback): try: message = yield self.ws.read_message() except Exception as e: - self.log.error("Exception reading message from websocket: {}".format(e)) # , exc_info=True) + self.log.error(f"Exception reading message from websocket: {e}") # , exc_info=True) if message is None: if not self.disconnected: - self.log.warning("Lost connection to Gateway: {}".format(self.kernel_id)) + self.log.warning(f"Lost connection to Gateway: {self.kernel_id}") break callback(message) # pass back to notebook client (see self.on_open and WebSocketChannelsHandler.open) else: # ws cancelled - stop reading @@ -231,7 +233,7 @@ def _write_message(self, message): if not self.disconnected and self.ws is not None: self.ws.write_message(message) except Exception as e: - self.log.error("Exception writing message to websocket: {}".format(e)) # , exc_info=True) + self.log.error(f"Exception writing message to websocket: {e}") # , exc_info=True) def on_close(self): """Web socket closed event.""" @@ -247,8 +249,10 @@ def get(self, kernel_name, path, include_body=True): ksm = self.kernel_spec_manager kernel_spec_res = yield ksm.get_kernel_spec_resource(kernel_name, path) if kernel_spec_res is None: - self.log.warning("Kernelspec resource '{}' for '{}' not found. Gateway may not support" - " resource serving.".format(path, kernel_name)) + self.log.warning( + f"Kernelspec resource '{path}' for '{kernel_name}' not found. " + f"Gateway may not support resource serving." 
+ ) else: self.set_header("Content-Type", mimetypes.guess_type(path)[0]) self.finish(kernel_spec_res) @@ -258,6 +262,6 @@ def get(self, kernel_name, path, include_body=True): from ..services.kernelspecs.handlers import kernel_name_regex default_handlers = [ - (r"/api/kernels/%s/channels" % _kernel_id_regex, WebSocketChannelsHandler), - (r"/kernelspecs/%s/(?P<path>.*)" % kernel_name_regex, GatewayResourceHandler), + (fr"/api/kernels/{_kernel_id_regex}/channels", WebSocketChannelsHandler), + (fr"/kernelspecs/{kernel_name_regex}/(?P<path>.*)", GatewayResourceHandler), ] diff --git a/notebook/gateway/managers.py b/notebook/gateway/managers.py index b2ae3b30a2..0ec2c41fb6 100644 --- a/notebook/gateway/managers.py +++ b/notebook/gateway/managers.py @@ -46,7 +46,7 @@ def _url_validate(self, proposal): # Ensure value, if present, starts with 'http' if value is not None and len(value) > 0: if not str(value).lower().startswith('http'): - raise TraitError("GatewayClient url must start with 'http': '%r'" % value) + raise TraitError(f"GatewayClient url must start with 'http': '{value!r}'") return value ws_url = Unicode(default_value=None, allow_none=True, config=True, @@ -71,7 +71,7 @@ def _ws_url_validate(self, proposal): # Ensure value, if present, starts with 'ws' if value is not None and len(value) > 0: if not str(value).lower().startswith('ws'): - raise TraitError("GatewayClient ws_url must start with 'ws': '%r'" % value) + raise TraitError(f"GatewayClient ws_url must start with 'ws': '{value!r}'") return value kernels_endpoint_default_value = '/api/kernels' @@ -276,7 +276,7 @@ def init_static_args(self): self._static_args['headers'] = json.loads(self.headers) if 'Authorization' not in self._static_args['headers'].keys(): self._static_args['headers'].update({ - 'Authorization': 'token {}'.format(self.auth_token) + 'Authorization': f'token {self.auth_token}' }) self._static_args['connect_timeout'] = self.connect_timeout self._static_args['request_timeout'] = self.request_timeout @@
-322,19 +322,22 @@ async def gateway_request(endpoint, **kwargs): except ConnectionRefusedError as e: raise web.HTTPError( 503, - "Connection refused from Gateway server url '{}'. Check to be sure the" - " Gateway instance is running.".format(GatewayClient.instance().url) + f"Connection refused from Gateway server url '{GatewayClient.instance().url}'. " + f"Check to be sure the Gateway instance is running." ) from e except HTTPError as e: # This can occur if the host is valid (e.g., foo.com) but there's nothing there. - raise web.HTTPError(e.code, "Error attempting to connect to Gateway server url '{}'. " - "Ensure gateway url is valid and the Gateway instance is running.". - format(GatewayClient.instance().url)) from e + raise web.HTTPError( + e.code, + f"Error attempting to connect to Gateway server url '{GatewayClient.instance().url}'. " + f"Ensure gateway url is valid and the Gateway instance is running." + ) from e except gaierror as e: raise web.HTTPError( 404, - "The Gateway server specified in the gateway_url '{}' doesn't appear to be valid. Ensure gateway " - "url is valid and the Gateway instance is running.".format(GatewayClient.instance().url) + f"The Gateway server specified in the gateway_url '{GatewayClient.instance().url}' " + f"doesn't appear to be valid. " + f"Ensure gateway url is valid and the Gateway instance is running." ) from e return response @@ -392,7 +395,7 @@ async def start_kernel(self, kernel_id=None, path=None, **kwargs): kwargs['cwd'] = self.cwd_for_path(path) kernel_name = kwargs.get('kernel_name', 'python3') kernel_url = self._get_kernel_endpoint_url() - self.log.debug("Request new kernel at: %s" % kernel_url) + self.log.debug(f"Request new kernel at: {kernel_url}") # Let KERNEL_USERNAME take precedent over http_user config option. 
if os.environ.get('KERNEL_USERNAME') is None and GatewayClient.instance().http_user: @@ -412,12 +415,12 @@ async def start_kernel(self, kernel_id=None, path=None, **kwargs): ) kernel = json_decode(response.body) kernel_id = kernel['id'] - self.log.info("Kernel started: %s" % kernel_id) - self.log.debug("Kernel args: %r" % kwargs) + self.log.info(f"Kernel started: {kernel_id}") + self.log.debug(f"Kernel args: {kwargs!r}") else: kernel = await self.get_kernel(kernel_id) kernel_id = kernel['id'] - self.log.info("Using existing kernel: %s" % kernel_id) + self.log.info(f"Using existing kernel: {kernel_id}") self._kernels[kernel_id] = kernel return kernel_id @@ -431,12 +434,12 @@ async def get_kernel(self, kernel_id=None, **kwargs): The uuid of the kernel. """ kernel_url = self._get_kernel_endpoint_url(kernel_id) - self.log.debug("Request kernel at: %s" % kernel_url) + self.log.debug(f"Request kernel at: {kernel_url}") try: response = await gateway_request(kernel_url, method='GET') except web.HTTPError as error: if error.status_code == 404: - self.log.warn("Kernel not found at: %s" % kernel_url) + self.log.warn(f"Kernel not found at: {kernel_url}") self.remove_kernel(kernel_id) kernel = None else: @@ -611,7 +614,7 @@ async def get_kernel_spec(self, kernel_name, **kwargs): The name of the kernel. 
""" kernel_spec_url = self._get_kernelspecs_endpoint_url(kernel_name=str(kernel_name)) - self.log.debug("Request kernel spec at: %s" % kernel_spec_url) + self.log.debug(f"Request kernel spec at: {kernel_spec_url}") try: response = await gateway_request(kernel_spec_url, method='GET') except web.HTTPError as error: @@ -640,7 +643,7 @@ async def get_kernel_spec_resource(self, kernel_name, path): The name of the desired resource """ kernel_spec_resource_url = url_path_join(self.base_resource_endpoint, str(kernel_name), str(path)) - self.log.debug("Request kernel spec resource '{}' at: {}".format(path, kernel_spec_resource_url)) + self.log.debug(f"Request kernel spec resource '{path}' at: {kernel_spec_resource_url}") try: response = await gateway_request(kernel_spec_resource_url, method='GET') except web.HTTPError as error: diff --git a/notebook/i18n/__init__.py b/notebook/i18n/__init__.py index 83f65c5fd9..0f836d2eeb 100644 --- a/notebook/i18n/__init__.py +++ b/notebook/i18n/__init__.py @@ -2,7 +2,6 @@ """ from collections import defaultdict import errno -import io import json from os.path import dirname, join as pjoin import re @@ -58,9 +57,9 @@ def parse_accept_lang_header(accept_lang): def load(language, domain='nbjs'): """Load translations from an nbjs.json file""" try: - f = io.open(pjoin(I18N_DIR, language, 'LC_MESSAGES', 'nbjs.json'), + f = open(pjoin(I18N_DIR, language, 'LC_MESSAGES', 'nbjs.json'), encoding='utf-8') - except IOError as e: + except OSError as e: if e.errno != errno.ENOENT: raise return {} diff --git a/notebook/jstest.py b/notebook/jstest.py index 2bb318af31..91c5977b7a 100644 --- a/notebook/jstest.py +++ b/notebook/jstest.py @@ -60,7 +60,7 @@ def run(self): self.buffer.write(chunk) if self.echo: sys.stdout.write(bytes_to_str(chunk)) - + os.close(self.readfd) os.close(self.writefd) @@ -87,7 +87,7 @@ def halt(self): self.join() -class TestController(object): +class TestController: """Run tests in a subprocess """ #: str, test group to be executed. 
@@ -110,7 +110,7 @@ def __init__(self): def setup(self): """Create temporary directories etc. - + This is only called when we know the test group will be run. Things created here may be cleaned up by self.cleanup(). """ @@ -138,11 +138,11 @@ def wait(self): def print_extra_info(self): """Print extra information about this test run. - + If we're running in parallel and showing the concise view, this is only called if the test group fails. Otherwise, it's called before the test group is started. - + The base implementation does nothing, but it can be overridden by subclasses. """ @@ -155,7 +155,7 @@ def cleanup_process(self): return # Process doesn't exist, or is already dead. try: - print('Cleaning up stale PID: %d' % subp.pid) + print(f'Cleaning up stale PID: {subp.pid}') subp.kill() except: # (OSError, WindowsError) ? # This is just a best effort, if we fail or the process was @@ -193,7 +193,7 @@ def all_js_groups(): class JSController(TestController): """Run CasperJS tests """ - + requirements = ['casperjs'] def __init__(self, section, xunit=True, engine='phantomjs', url=None): @@ -210,7 +210,7 @@ def __init__(self, section, xunit=True, engine='phantomjs', url=None): js_test_dir = get_js_test_dir() includes = '--includes=' + os.path.join(js_test_dir,'util.js') test_cases = os.path.join(js_test_dir, self.section) - self.cmd = ['casperjs', 'test', includes, test_cases, '--engine=%s' % self.engine] + self.cmd = ['casperjs', 'test', includes, test_cases, f'--engine={self.engine}'] def setup(self): self.ipydir = TemporaryDirectory() @@ -226,8 +226,8 @@ def setup(self): self.dirs.append(self.home) self.dirs.append(self.config_dir) self.dirs.append(self.nbdir) - os.makedirs(os.path.join(self.nbdir.name, os.path.join(u'sub ∂ir1', u'sub ∂ir 1a'))) - os.makedirs(os.path.join(self.nbdir.name, os.path.join(u'sub ∂ir2', u'sub ∂ir 1b'))) + os.makedirs(os.path.join(self.nbdir.name, os.path.join('sub ∂ir1', 'sub ∂ir 1a'))) + os.makedirs(os.path.join(self.nbdir.name, 
os.path.join('sub ∂ir2', 'sub ∂ir 1b'))) if self.xunit: self.add_xunit() @@ -240,22 +240,22 @@ def setup(self): alive = False if alive: - self.cmd.append("--url=%s" % self.url) + self.cmd.append(f"--url={self.url}") else: - raise Exception('Could not reach "%s".' % self.url) + raise Exception(f'Could not reach "{self.url}".') else: # start the ipython notebook, so we get the port number self.server_port = 0 self._init_server() if self.server_port: - self.cmd.append('--url=http://localhost:%i%s' % (self.server_port, self.base_url)) + self.cmd.append(f'--url=http://localhost:{self.server_port:d}{self.base_url}') else: # don't launch tests if the server didn't start self.cmd = [sys.executable, '-c', 'raise SystemExit(1)'] def add_xunit(self): xunit_file = os.path.abspath(self.section.replace('/','.') + '.xunit.xml') - self.cmd.append('--xunit=%s' % xunit_file) + self.cmd.append(f'--xunit={xunit_file}') def launch(self, buffer_output): # If the engine is SlimerJS, we need to buffer the output because @@ -281,7 +281,7 @@ def wait(self, *pargs, **kwargs): return ret def print_extra_info(self): - print("Running tests with notebook directory %r" % self.nbdir.name) + print(f"Running tests with notebook directory {self.nbdir.name!r}") @property def will_run(self): @@ -295,7 +295,7 @@ def _init_server(self): '--no-browser', '--notebook-dir', self.nbdir.name, '--NotebookApp.token=', - '--NotebookApp.base_url=%s' % self.base_url, + f'--NotebookApp.base_url={self.base_url}', ] # ipc doesn't work on Windows, and darwin has crazy-long temp paths, # which run afoul of ipc's maximum path length. 
@@ -313,11 +313,12 @@ def _init_server(self): ) with patch.dict('os.environ', {'HOME': self.home.name}): runtime_dir = jupyter_runtime_dir() - self.server_info_file = os.path.join(runtime_dir, - 'nbserver-%i.json' % self.server.pid + self.server_info_file = os.path.join( + runtime_dir, + f'nbserver-{self.server.pid}.json' ) self._wait_for_server() - + def _wait_for_server(self): """Wait 30 seconds for the notebook server to start""" for i in range(300): @@ -333,17 +334,18 @@ def _wait_for_server(self): else: return time.sleep(0.1) - print("Notebook server-info file never arrived: %s" % self.server_info_file, + print( + f"Notebook server-info file never arrived: {self.server_info_file}", file=sys.stderr ) - + def _failed_to_start(self): """Notebook server exited prematurely""" captured = self.stream_capturer.get_buffer().decode('utf-8', 'replace') print("Notebook failed to start: ", file=sys.stderr) print(self.server_command) print(captured, file=sys.stderr) - + def _load_server_info(self): """Notebook server started, load connection info from JSON""" with open(self.server_info_file) as f: @@ -374,10 +376,11 @@ def cleanup(self): try: popen_wait(self.server, NOTEBOOK_SHUTDOWN_TIMEOUT) except TimeoutExpired: - print("Notebook server still running (%s)" % self.server_info_file, + print( + f"Notebook server still running ({self.server_info_file})", file=sys.stderr ) - + self.stream_capturer.halt() TestController.cleanup(self) @@ -399,11 +402,11 @@ def prepare_controllers(options): def do_run(controller, buffer_output=True): """Setup and run a test controller. - + If buffer_output is True, no output is displayed, to avoid it appearing interleaved. In this case, the caller is responsible for displaying test output on failure. 
- + Returns ------- controller : TestController @@ -443,7 +446,7 @@ def _add(name, value): _add('Platform', inf['platform']) width = max(len(n) for (n,v) in out) - out = ["{:<{width}}: {}\n".format(n, v, width=width) for (n,v) in out] + out = [f"{n:<{width}}: {v}\n" for (n, v) in out] avail = [] not_avail = [] @@ -468,7 +471,7 @@ def _add(name, value): def run_jstestall(options): """Run the entire Javascript test suite. - + This function constructs TestControllers and runs them in subprocesses. Parameters @@ -552,16 +555,15 @@ def justify(ltext, rtext, width=70, fill='-'): print('_'*70) print('Test suite completed for system with the following information:') print(report()) - took = "Took %.3fs." % t_tests + took = f"Took {t_tests:.3f}s." print('Status: ', end='') if not failed: - print('OK (%d test groups).' % nrunners, took) + print(f'OK ({nrunners} test groups).', took) else: # If anything went wrong, point out what command to rerun manually to # see the actual errors and individual summary failed_sections = [c.section for c in failed] - print('ERROR - {} out of {} test groups failed ({}).'.format(nfail, - nrunners, ', '.join(failed_sections)), took) + print(f'ERROR - {nfail} out of {nrunners} test groups failed ({", ".join(failed_sections)}).', took) print() print('You may wish to rerun these, with:') print(' python -m notebook.jstest', *failed_sections) diff --git a/notebook/kernelspecs/handlers.py b/notebook/kernelspecs/handlers.py index be768b5ab7..b9c6af4b49 100644 --- a/notebook/kernelspecs/handlers.py +++ b/notebook/kernelspecs/handlers.py @@ -15,7 +15,7 @@ def get(self, kernel_name, path, include_body=True): self.root = ksm.get_kernel_spec(kernel_name).resource_dir except KeyError as e: raise web.HTTPError(404, - u'Kernel spec %s not found' % kernel_name) from e + f'Kernel spec {kernel_name} not found') from e self.log.debug("Serving kernel resource from: %s", self.root) return web.StaticFileHandler.get(self, path, include_body=include_body) @@ -24,5 
+24,5 @@ def head(self, kernel_name, path): return self.get(kernel_name, path, include_body=False) default_handlers = [ - (r"/kernelspecs/%s/(?P<path>.*)" % kernel_name_regex, KernelSpecResourceHandler), -] \ No newline at end of file + (fr"/kernelspecs/{kernel_name_regex}/(?P<path>.*)", KernelSpecResourceHandler), +] diff --git a/notebook/log.py b/notebook/log.py index f7e393770b..a495db0641 100644 --- a/notebook/log.py +++ b/notebook/log.py @@ -29,20 +29,20 @@ def log_request(handler, log=access_log, log_json=False): log_method = log.warning else: log_method = log.error - + request_time = 1000.0 * request.request_time() ns = dict( status=status, method=request.method, ip=request.remote_ip, uri=request.uri, - request_time=float('%.2f' % request_time), + request_time=float(f'{request_time:.2f}'), ) msg = "{status} {method} {uri} ({ip}) {request_time:f}ms" if status >= 400: # log bad referers ns['referer'] = request.headers.get('Referer', 'None') - msg = msg + ' referer={referer}' + msg += ' referer={referer}' if status >= 500 and status != 502: # Log a subset of the headers if it caused an error. headers = {} diff --git a/notebook/nbconvert/handlers.py b/notebook/nbconvert/handlers.py index 24a3f093fb..c5c8de252a 100644 --- a/notebook/nbconvert/handlers.py +++ b/notebook/nbconvert/handlers.py @@ -61,19 +61,19 @@ def get_exporter(format, **kwargs): try: from nbconvert.exporters.base import get_exporter except ImportError as e: - raise web.HTTPError(500, "Could not import nbconvert: %s" % e) from e + raise web.HTTPError(500, f"Could not import nbconvert: {e}") from e try: Exporter = get_exporter(format) except KeyError as e: # should this be 400?
- raise web.HTTPError(404, u"No exporter for format: %s" % format) from e + raise web.HTTPError(404, f"No exporter for format: {format}") from e try: return Exporter(**kwargs) except Exception as e: app_log.exception("Could not construct Exporter: %s", Exporter) - raise web.HTTPError(500, "Could not construct Exporter: %s" % e) from e + raise web.HTTPError(500, f"Could not construct Exporter: {e}") from e class NbconvertFileHandler(IPythonHandler): @@ -132,7 +132,7 @@ def get(self, format, path): ) except Exception as e: self.log.exception("nbconvert failed: %s", e) - raise web.HTTPError(500, "nbconvert failed: %s" % e) from e + raise web.HTTPError(500, f"nbconvert failed: {e}") from e if respond_zip(self, name, output, resources): return @@ -145,7 +145,7 @@ def get(self, format, path): # MIME type if exporter.output_mimetype: self.set_header('Content-Type', - '%s; charset=utf-8' % exporter.output_mimetype) + f'{exporter.output_mimetype}; charset=utf-8') self.set_header('Cache-Control', 'no-store, no-cache, must-revalidate, max-age=0') self.finish(output) @@ -173,7 +173,7 @@ def post(self, format): "config_dir": self.application.settings['config_dir'], }) except Exception as e: - raise web.HTTPError(500, "nbconvert failed: %s" % e) from e + raise web.HTTPError(500, f"nbconvert failed: {e}") from e if respond_zip(self, name, output, resources): return @@ -181,7 +181,7 @@ def post(self, format): # MIME type if exporter.output_mimetype: self.set_header('Content-Type', - '%s; charset=utf-8' % exporter.output_mimetype) + f'{exporter.output_mimetype}; charset=utf-8') self.finish(output) @@ -194,7 +194,6 @@ def post(self, format): default_handlers = [ - (r"/nbconvert/%s" % _format_regex, NbconvertPostHandler), - (r"/nbconvert/%s%s" % (_format_regex, path_regex), - NbconvertFileHandler), + (fr"/nbconvert/{_format_regex}", NbconvertPostHandler), + (fr"/nbconvert/{_format_regex}{path_regex}", NbconvertFileHandler), ] diff --git 
a/notebook/nbconvert/tests/test_nbconvert_handlers.py b/notebook/nbconvert/tests/test_nbconvert_handlers.py index e5af13c0c4..93b83738c1 100644 --- a/notebook/nbconvert/tests/test_nbconvert_handlers.py +++ b/notebook/nbconvert/tests/test_nbconvert_handlers.py @@ -1,10 +1,8 @@ -import io import json import os from os.path import join as pjoin import shutil -import requests import pytest from notebook.utils import url_path_join from notebook.tests.launchnotebook import NotebookTestBase, assert_http_error @@ -13,8 +11,6 @@ new_notebook, new_markdown_cell, new_code_cell, new_output, ) -from ipython_genutils.testing.decorators import onlyif_cmds_exist - from base64 import encodebytes @@ -25,7 +21,7 @@ def cmd_exists(cmd): return True -class NbconvertAPI(object): +class NbconvertAPI: """Wrapper for nbconvert API calls.""" def __init__(self, request): self.request = request @@ -72,16 +68,16 @@ def cleanup_dir(): nb = new_notebook() - nb.cells.append(new_markdown_cell(u'Created by test ³')) - cc1 = new_code_cell(source=u'print(2*6)') - cc1.outputs.append(new_output(output_type="stream", text=u'12')) + nb.cells.append(new_markdown_cell('Created by test ³')) + cc1 = new_code_cell(source='print(2*6)') + cc1.outputs.append(new_output(output_type="stream", text='12')) cc1.outputs.append(new_output(output_type="execute_result", data={'image/png' : png_green_pixel}, execution_count=1, )) nb.cells.append(cc1) - with io.open(pjoin(nbdir, 'foo', 'testnb.ipynb'), 'w', + with open(pjoin(nbdir, 'foo', 'testnb.ipynb'), 'w', encoding='utf-8') as f: write(nb, f, version=4) @@ -94,13 +90,13 @@ def cleanup_dir(): def test_from_file(self): r = self.nbconvert_api.from_file('html', 'foo', 'testnb.ipynb') self.assertEqual(r.status_code, 200) - self.assertIn(u'text/html', r.headers['Content-Type']) - self.assertIn(u'Created by test', r.text) - self.assertIn(u'print', r.text) + self.assertIn('text/html', r.headers['Content-Type']) + self.assertIn('Created by test', r.text) + 
self.assertIn('print', r.text) r = self.nbconvert_api.from_file('python', 'foo', 'testnb.ipynb') - self.assertIn(u'text/x-python', r.headers['Content-Type']) - self.assertIn(u'print(2*6)', r.text) + self.assertIn('text/x-python', r.headers['Content-Type']) + self.assertIn('print(2*6)', r.text) @pytest.mark.skipif( not cmd_exists('pandoc'), @@ -126,8 +122,8 @@ def test_from_file_download(self): ) def test_from_file_zip(self): r = self.nbconvert_api.from_file('latex', 'foo', 'testnb.ipynb', download=True) - self.assertIn(u'application/zip', r.headers['Content-Type']) - self.assertIn(u'.zip', r.headers['Content-Disposition']) + self.assertIn('application/zip', r.headers['Content-Type']) + self.assertIn('.zip', r.headers['Content-Disposition']) @pytest.mark.skipif( not cmd_exists('pandoc'), @@ -138,13 +134,13 @@ def test_from_post(self): r = self.nbconvert_api.from_post(format='html', nbmodel=nbmodel) self.assertEqual(r.status_code, 200) - self.assertIn(u'text/html', r.headers['Content-Type']) - self.assertIn(u'Created by test', r.text) - self.assertIn(u'print', r.text) + self.assertIn('text/html', r.headers['Content-Type']) + self.assertIn('Created by test', r.text) + self.assertIn('print', r.text) r = self.nbconvert_api.from_post(format='python', nbmodel=nbmodel) - self.assertIn(u'text/x-python', r.headers['Content-Type']) - self.assertIn(u'print(2*6)', r.text) + self.assertIn('text/x-python', r.headers['Content-Type']) + self.assertIn('print(2*6)', r.text) @pytest.mark.skipif( not cmd_exists('pandoc'), @@ -154,5 +150,5 @@ def test_from_post_zip(self): nbmodel = self.request('GET', 'api/contents/foo/testnb.ipynb').json() r = self.nbconvert_api.from_post(format='latex', nbmodel=nbmodel) - self.assertIn(u'application/zip', r.headers['Content-Type']) - self.assertIn(u'.zip', r.headers['Content-Disposition']) + self.assertIn('application/zip', r.headers['Content-Type']) + self.assertIn('.zip', r.headers['Content-Disposition']) diff --git a/notebook/nbextensions.py 
b/notebook/nbextensions.py index aac752cce9..3619f791ef 100644 --- a/notebook/nbextensions.py +++ b/notebook/nbextensions.py @@ -35,7 +35,7 @@ def check_nbextension(files, user=False, prefix=None, nbextensions_dir=None, sys_prefix=False): """Check whether nbextension files have been installed - + Returns True if all files are found, False if any are missing. Parameters @@ -58,11 +58,11 @@ def check_nbextension(files, user=False, prefix=None, nbextensions_dir=None, sys # make sure nbextensions dir exists if not os.path.exists(nbext): return False - + if isinstance(files, string_types): # one file given, turn it into a list files = [files] - + return all(os.path.exists(pjoin(nbext, f)) for f in files) @@ -72,14 +72,14 @@ def install_nbextension(path, overwrite=False, symlink=False, logger=None, sys_prefix=False ): """Install a Javascript extension for the notebook - + Stages files and/or directories into the nbextensions directory. By default, this compares modification time, and only stages files that need updating. If `overwrite` is specified, matching files are purged before proceeding. - + Parameters ---------- - + path : path to file, directory, zip or tarball archive, or URL to install By default, the file will be installed with its base name, so '/path/to/foo' will install to 'nbextensions/foo'. See the destination argument below to change this. 
@@ -116,14 +116,14 @@ def install_nbextension(path, overwrite=False, symlink=False, nbext = _get_nbextension_dir(user=user, sys_prefix=sys_prefix, prefix=prefix, nbextensions_dir=nbextensions_dir) # make sure nbextensions dir exists ensure_dir_exists(nbext) - + # forcing symlink parameter to False if os.symlink does not exist (e.g., on Windows machines running python 2) if not hasattr(os, 'symlink'): symlink = False - + if isinstance(path, (list, tuple)): raise TypeError("path must be a string pointing to a single extension to install; call this function multiple times to install multiple extensions") - + path = cast_unicode_py2(path) if path.startswith(('https://', 'http://')): @@ -134,7 +134,7 @@ def install_nbextension(path, overwrite=False, symlink=False, filename = urlparse(path).path.split('/')[-1] local_path = os.path.join(td, filename) if logger: - logger.info("Downloading: %s -> %s" % (path, local_path)) + logger.info(f"Downloading: {path} -> {local_path}") urlretrieve(path, local_path) # now install from the local copy full_dest = install_nbextension(local_path, overwrite=overwrite, symlink=symlink, @@ -145,7 +145,7 @@ def install_nbextension(path, overwrite=False, symlink=False, if destination: raise ValueError("Cannot give destination for archives") if logger: - logger.info("Extracting: %s -> %s" % (path, nbext)) + logger.info(f"Extracting: {path} -> {nbext}") if path.endswith('.zip'): archive = zipfile.ZipFile(path) @@ -162,7 +162,7 @@ def install_nbextension(path, overwrite=False, symlink=False, full_dest = normpath(pjoin(nbext, destination)) if overwrite and os.path.lexists(full_dest): if logger: - logger.info("Removing: %s" % full_dest) + logger.info(f"Removing: {full_dest}") if os.path.isdir(full_dest) and not os.path.islink(full_dest): shutil.rmtree(full_dest) else: @@ -172,7 +172,7 @@ def install_nbextension(path, overwrite=False, symlink=False, path = os.path.abspath(path) if not os.path.exists(full_dest): if logger: - logger.info("Symlinking: 
%s -> %s" % (full_dest, path)) + logger.info(f"Symlinking: {full_dest} -> {path}") os.symlink(path, full_dest) elif os.path.isdir(path): path = pjoin(os.path.abspath(path), '') # end in path separator @@ -180,7 +180,7 @@ def install_nbextension(path, overwrite=False, symlink=False, dest_dir = pjoin(full_dest, parent[len(path):]) if not os.path.exists(dest_dir): if logger: - logger.info("Making directory: %s" % dest_dir) + logger.info(f"Making directory: {dest_dir}") os.makedirs(dest_dir) for file_name in files: src = pjoin(parent, file_name) @@ -210,7 +210,7 @@ def install_nbextension_python(module, overwrite=False, symlink=False, dest = nbext['dest'] if logger: - logger.info("Installing %s -> %s" % (src, dest)) + logger.info(f"Installing {src} -> {dest}") full_dest = install_nbextension( src, overwrite=overwrite, symlink=symlink, user=user, sys_prefix=sys_prefix, prefix=prefix, nbextensions_dir=nbextensions_dir, @@ -222,16 +222,16 @@ def install_nbextension_python(module, overwrite=False, symlink=False, return full_dests -def uninstall_nbextension(dest, require=None, user=False, sys_prefix=False, prefix=None, +def uninstall_nbextension(dest, require=None, user=False, sys_prefix=False, prefix=None, nbextensions_dir=None, logger=None): """Uninstall a Javascript extension of the notebook - - Removes staged files and/or directories in the nbextensions directory and + + Removes staged files and/or directories in the nbextensions directory and removes the extension from the frontend config. - + Parameters ---------- - + dest : str path to file, directory, zip or tarball archive, or URL to install name the nbextension is installed to. 
For example, if destination is 'foo', then @@ -256,12 +256,12 @@ def uninstall_nbextension(dest, require=None, user=False, sys_prefix=False, pref full_dest = pjoin(nbext, dest) if os.path.lexists(full_dest): if logger: - logger.info("Removing: %s" % full_dest) + logger.info(f"Removing: {full_dest}") if os.path.isdir(full_dest) and not os.path.islink(full_dest): shutil.rmtree(full_dest) else: os.remove(full_dest) - + # Look through all of the config sections making sure that the nbextension # doesn't exist. config_dir = os.path.join(_get_config_dir(user=user, sys_prefix=sys_prefix), 'nbconfig') @@ -281,7 +281,7 @@ def _find_uninstall_nbextension(filename, logger=None): path = pjoin(nbext, filename) if os.path.lexists(path): if logger: - logger.info("Removing: %s" % path) + logger.info(f"Removing: {path}") if os.path.isdir(path) and not os.path.islink(path): shutil.rmtree(path) else: @@ -295,7 +295,7 @@ def uninstall_nbextension_python(module, user=False, sys_prefix=False, prefix=None, nbextensions_dir=None, logger=None): """Uninstall an nbextension bundled in a Python package. 
- + See parameters of `install_nbextension_python` """ m, nbexts = _get_nbextension_metadata(module) @@ -303,8 +303,8 @@ def uninstall_nbextension_python(module, dest = nbext['dest'] require = nbext['require'] if logger: - logger.info("Uninstalling {} {}".format(dest, require)) - uninstall_nbextension(dest, require, user=user, sys_prefix=sys_prefix, + logger.info(f"Uninstalling {dest} {require}") + uninstall_nbextension(dest, require, user=user, sys_prefix=sys_prefix, prefix=prefix, nbextensions_dir=nbextensions_dir, logger=logger) @@ -335,11 +335,7 @@ def _set_nbextension_state(section, require, state, _get_config_dir(user=user, sys_prefix=sys_prefix), 'nbconfig') cm = BaseJSONConfigManager(config_dir=config_dir) if logger: - logger.info("{} {} extension {}...".format( - "Enabling" if state else "Disabling", - section, - require - )) + logger.info(f"{'Enabling' if state else 'Disabling'} {section} extension {require}...") cm.update(section, {"load_extensions": {require: state}}) validate_nbextension(require, logger=logger) @@ -408,7 +404,7 @@ def enable_nbextension(section, require, user=True, sys_prefix=False, def disable_nbextension(section, require, user=True, sys_prefix=False, logger=None): """Disable a named nbextension - + Returns True if the final state is the one requested. Parameters @@ -478,7 +474,7 @@ def enable_nbextension_python(module, user=True, sys_prefix=False, def disable_nbextension_python(module, user=True, sys_prefix=False, logger=None): """Disable some nbextensions associated with a Python module. - + Returns True if the final state is the one requested. Parameters @@ -516,27 +512,27 @@ def validate_nbextension(require, logger=None): js_exists = False for exts in jupyter_path('nbextensions'): # Does the Javascript entrypoint actually exist on disk? 
- js = u"{}.js".format(os.path.join(exts, *require.split("/"))) + js = f"{os.path.join(exts, *require.split('/'))}.js" js_exists = os.path.exists(js) if js_exists: break - require_tmpl = u" - require? {} {}" + require_tmpl = " - require? {} {}" if js_exists: infos.append(require_tmpl.format(GREEN_OK, require)) else: warnings.append(require_tmpl.format(RED_X, require)) - + if logger: if warnings: - logger.warning(u" - Validating: problems found:") + logger.warning(" - Validating: problems found:") for msg in warnings: logger.warning(msg) for msg in infos: logger.info(msg) else: - logger.info(u" - Validating: {}".format(GREEN_OK)) - + logger.info(f" - Validating: {GREEN_OK}") + return warnings @@ -567,19 +563,19 @@ def validate_nbextension_python(spec, full_dest, logger=None): section = spec.get("section", None) if section in NBCONFIG_SECTIONS: - infos.append(u" {} section: {}".format(GREEN_OK, section)) + infos.append(f" {GREEN_OK} section: {section}") else: - warnings.append(u" {} section: {}".format(RED_X, section)) + warnings.append(f" {RED_X} section: {section}") require = spec.get("require", None) if require is not None: require_path = os.path.join( full_dest[0:-len(spec["dest"])], - u"{}.js".format(require)) + f"{require}.js") if os.path.exists(require_path): - infos.append(u" {} require: {}".format(GREEN_OK, require_path)) + infos.append(f" {GREEN_OK} require: {require_path}") else: - warnings.append(u" {} require: {}".format(RED_X, require_path)) + warnings.append(f" {RED_X} require: {require_path}") if logger: if warnings: @@ -588,9 +584,9 @@ def validate_nbextension_python(spec, full_dest, logger=None): logger.warning(msg) for msg in infos: logger.info(msg) - logger.warning(u"Full spec: {}".format(spec)) + logger.warning(f"Full spec: {spec}") else: - logger.info(u"- Validating: {}".format(GREEN_OK)) + logger.info(f"- Validating: {GREEN_OK}") return warnings @@ -633,24 +629,24 @@ def validate_nbextension_python(spec, full_dest, logger=None): class 
InstallNBExtensionApp(BaseExtensionApp): """Entry point for installing notebook extensions""" description = """Install Jupyter notebook extensions - + Usage - + jupyter nbextension install path|url [--user|--sys-prefix] - + This copies a file or a folder into the Jupyter nbextensions directory. If a URL is given, it will be downloaded. If an archive is given, it will be extracted into nbextensions. If the requested files are already up to date, no action is taken unless --overwrite is specified. """ - + examples = """ jupyter nbextension install /path/to/myextension """ aliases = aliases flags = flags - + overwrite = Bool(False, config=True, help="Force overwrite of existing files") symlink = Bool(False, config=True, help="Create symlinks instead of copying files") @@ -662,7 +658,7 @@ class InstallNBExtensionApp(BaseExtensionApp): def _config_file_name_default(self): """The default config file name.""" return 'jupyter_notebook_config' - + def install_extensions(self): """Perform the installation of nbextension(s)""" if len(self.extra_args)>1: @@ -675,7 +671,7 @@ def install_extensions(self): else: install = install_nbextension kwargs = {'destination': self.destination} - + full_dests = install(self.extra_args[0], overwrite=self.overwrite, symlink=self.symlink, @@ -689,14 +685,12 @@ def install_extensions(self): if full_dests: self.log.info( - u"\nTo initialize this nbextension in the browser every time" - " the notebook (or other app) loads:\n\n" - " jupyter nbextension enable {}{}{}{}\n".format( - self.extra_args[0] if self.python else "", - " --user" if self.user else "", - " --py" if self.python else "", - " --sys-prefix" if self.sys_prefix else "" - ) + f"\nTo initialize this nbextension in the browser every time" + f" the notebook (or other app) loads:\n\n" + f" jupyter nbextension enable {self.extra_args[0] if self.python else ''}" + f"{' --user' if self.user else ''}" + f"{' --py' if self.python else ''}" + f"{' --sys-prefix' if self.sys_prefix else ''}\n" ) 
def start(self): @@ -714,9 +708,9 @@ class UninstallNBExtensionApp(BaseExtensionApp): """Entry point for uninstalling notebook extensions""" version = __version__ description = """Uninstall Jupyter notebook extensions - + Usage - + jupyter nbextension uninstall path/url path/url/entrypoint jupyter nbextension uninstall --py pythonPackageName @@ -734,12 +728,12 @@ class UninstallNBExtensionApp(BaseExtensionApp): It will uninstall nbextensions listed in that module, but not the module itself (which you should uninstall using a package manager such as pip). """ - + examples = """ jupyter nbextension uninstall dest/dir dest/dir/extensionjs jupyter nbextension uninstall --py extensionPyPackage """ - + aliases = { "prefix" : "UninstallNBExtensionApp.prefix", "nbextensions" : "UninstallNBExtensionApp.nbextensions_dir", @@ -748,7 +742,7 @@ class UninstallNBExtensionApp(BaseExtensionApp): flags = BaseExtensionApp.flags.copy() flags['system'] = ({'UninstallNBExtensionApp': {'system': True}}, "Uninstall specifically from systemwide installation directory") - + prefix = Unicode('', config=True, help="Installation prefix. Overrides --user, --sys-prefix and --system" ) @@ -759,7 +753,7 @@ class UninstallNBExtensionApp(BaseExtensionApp): system = Bool(False, config=True, help="Uninstall specifically from systemwide installation directory" ) - + def _config_file_name_default(self): """The default config file name.""" return 'jupyter_notebook_config' @@ -800,7 +794,7 @@ def find_uninstall_extension(self): changed = _find_uninstall_nbextension(name, logger=self.log) if not changed: - print("No installed extension %r found." 
% name) + print(f"No installed extension {name!r} found.") if self.require: for section in NBCONFIG_SECTIONS: @@ -837,13 +831,13 @@ class ToggleNBExtensionApp(BaseExtensionApp): user = Bool(True, config=True, help="Apply the configuration only for the current user (default)") aliases = {'section': 'ToggleNBExtensionApp.section'} - + _toggle_value = None def _config_file_name_default(self): """The default config file name.""" return 'jupyter_notebook_config' - + def toggle_nbextension_python(self, module): """Toggle some extensions in an importable Python module. @@ -878,7 +872,7 @@ def toggle_nbextension(self, require): return toggle(self.section, require, user=self.user, sys_prefix=self.sys_prefix, logger=self.log) - + def start(self): if not self.extra_args: sys.exit('Please specify an nbextension/package to enable or disable') @@ -895,7 +889,7 @@ class EnableNBExtensionApp(ToggleNBExtensionApp): name = "jupyter nbextension enable" description = """ Enable an nbextension in frontend configuration. - + Usage jupyter nbextension enable [--system|--sys-prefix] """ @@ -907,7 +901,7 @@ class DisableNBExtensionApp(ToggleNBExtensionApp): name = "jupyter nbextension disable" description = """ Disable an nbextension in frontend configuration. 
- + Usage jupyter nbextension disable [--system|--sys-prefix] """ @@ -919,15 +913,15 @@ class ListNBExtensionsApp(BaseExtensionApp): name = "jupyter nbextension list" version = __version__ description = "List all nbextensions known by the configuration system" - + def list_nbextensions(self): """List all the nbextensions""" config_dirs = [os.path.join(p, 'nbconfig') for p in jupyter_config_path()] - + print("Known nbextensions:") - + for config_dir in config_dirs: - head = u' config dir: {}'.format(config_dir) + head = f' config dir: {config_dir}' head_shown = False cm = BaseJSONConfigManager(parent=self, config_dir=config_dir) @@ -938,15 +932,13 @@ def list_nbextensions(self): # only show heading if there is an nbextension here print(head) head_shown = True - print(u' {} section'.format(section)) - + print(f' {section} section') + for require, enabled in data['load_extensions'].items(): - print(u' {} {}'.format( - require, - GREEN_ENABLED if enabled else RED_DISABLED)) + print(f' {require} {GREEN_ENABLED if enabled else RED_DISABLED}') if enabled: validate_nbextension(require, logger=self.log) - + def start(self): """Perform the App's functions as configured""" self.list_nbextensions() @@ -982,7 +974,7 @@ def start(self): # The above should have called a subcommand and raised NoStart; if we # get here, it didn't, so we should self.log.info a message. 
subcmds = ", ".join(sorted(self.subcommands)) - sys.exit("Please supply at least one subcommand: %s" % subcmds) + sys.exit(f"Please supply at least one subcommand: {subcmds}") main = NBExtensionApp.launch_instance @@ -1012,10 +1004,10 @@ def _should_copy(src, dest, logger=None): # we add a fudge factor to work around a bug in python 2.x # that was fixed in python 3.x: https://bugs.python.org/issue12904 if logger: - logger.warn("Out of date: %s" % dest) + logger.warn(f"Out of date: {dest}") return True if logger: - logger.info("Up to date: %s" % dest) + logger.info(f"Up to date: {dest}") return False @@ -1034,7 +1026,7 @@ def _maybe_copy(src, dest, logger=None): """ if _should_copy(src, dest, logger=logger): if logger: - logger.info("Copying: %s -> %s" % (src, dest)) + logger.info(f"Copying: {src} -> {dest}") shutil.copy2(src, dest) @@ -1051,7 +1043,7 @@ def _safe_is_tarfile(path): """ try: return tarfile.is_tarfile(path) - except IOError: + except OSError: return False @@ -1076,15 +1068,16 @@ def _get_nbextension_dir(user=False, sys_prefix=False, prefix=None, nbextensions ('nbextensions_dir', nbextensions_dir), ('sys_prefix', sys_prefix), ] - conflicting_set = ['{}={!r}'.format(n, v) for n, v in conflicting if v] + conflicting_set = [f'{n}={v!r}' for n, v in conflicting if v] if len(conflicting_set) > 1: raise ArgumentConflict( - "cannot specify more than one of user, sys_prefix, prefix, or nbextensions_dir, but got: {}" - .format(', '.join(conflicting_set))) + f"cannot specify more than one of user, sys_prefix, prefix, or nbextensions_dir, " + f"but got: {', '.join(conflicting_set)}" + ) if user: - nbext = pjoin(jupyter_data_dir(), u'nbextensions') + nbext = pjoin(jupyter_data_dir(), 'nbextensions') elif sys_prefix: - nbext = pjoin(ENV_JUPYTER_PATH[0], u'nbextensions') + nbext = pjoin(ENV_JUPYTER_PATH[0], 'nbextensions') elif prefix: nbext = pjoin(prefix, 'share', 'jupyter', 'nbextensions') elif nbextensions_dir: @@ -1113,8 +1106,10 @@ def 
_get_nbextension_metadata(module): """ m = import_item(module) if not hasattr(m, '_jupyter_nbextension_paths'): - raise KeyError('The Python module {} is not a valid nbextension, ' - 'it is missing the `_jupyter_nbextension_paths()` method.'.format(module)) + raise KeyError( + f'The Python module {module} is not a valid nbextension, ' + f'it is missing the `_jupyter_nbextension_paths()` method.' + ) nbexts = m._jupyter_nbextension_paths() return m, nbexts diff --git a/notebook/notebook/handlers.py b/notebook/notebook/handlers.py index 638931d6be..a725810054 100644 --- a/notebook/notebook/handlers.py +++ b/notebook/notebook/handlers.py @@ -46,13 +46,12 @@ def get_frontend_exporters(): # Ensure export_from_notebook is explicitly defined & not inherited if ux_name is not None and ux_name != super_uxname: - display = _('{} ({})'.format(ux_name, - exporter_instance.file_extension)) + display = _(f'{ux_name} ({exporter_instance.file_extension})') frontend_exporters.append(ExporterInfo(name, display)) # Ensure default_exporters are in frontend_exporters if not already # This protects against nbconvert versions lower than 5.5 - names = set(exporter.name.lower() for exporter in frontend_exporters) + names = {exporter.name.lower() for exporter in frontend_exporters} for exporter in default_exporters: if exporter.name not in names: frontend_exporters.append(exporter) @@ -74,11 +73,11 @@ class NotebookHandler(IPythonHandler): @web.authenticated @gen.coroutine def get(self, path): - """get renders the notebook template if a name is given, or + """get renders the notebook template if a name is given, or redirects to the '/files/' handler if the name is not given.""" path = path.strip('/') cm = self.contents_manager - + # will raise 404 on not found try: model = yield maybe_future(cm.get(path, content=False)) @@ -109,6 +108,6 @@ def get(self, path): default_handlers = [ - (r"/notebooks%s" % path_regex, NotebookHandler), + (fr"/notebooks{path_regex}", NotebookHandler), ] diff 
--git a/notebook/notebookapp.py b/notebook/notebookapp.py index e07bfcf507..a274f4bb38 100755 --- a/notebook/notebookapp.py +++ b/notebook/notebookapp.py @@ -14,7 +14,6 @@ import hmac import importlib import inspect -import io import ipaddress import json import logging @@ -115,7 +114,6 @@ url_path_join, urldecode_unix_socket_path, urlencode_unix_socket, - urlencode_unix_socket_path, urljoin, ) from .traittypes import TypeFromClasses @@ -217,13 +215,13 @@ def init_settings(self, jupyter_app, kernel_manager, contents_manager, env.install_gettext_translations(nbui, newstyle=False) if dev_mode: - DEV_NOTE_NPM = """It looks like you're running the notebook from source. + DEV_NOTE_NPM = f"""It looks like you're running the notebook from source. If you're working on the Javascript of the notebook, try running - %s + npm run build:watch in another terminal window to have the system incrementally - watch and build the notebook's JavaScript for you, as you make changes.""" % 'npm run build:watch' + watch and build the notebook's JavaScript for you, as you make changes.""" log.info(DEV_NOTE_NPM) if sys_info['commit_source'] == 'repository': @@ -426,7 +424,7 @@ def _config_file_default(self): def start(self): from .auth.security import set_password set_password(config_file=self.config_file) - self.log.info("Wrote hashed password to %s" % self.config_file) + self.log.info(f"Wrote hashed password to {self.config_file}") def shutdown_server(server_info, timeout=5, log=None): @@ -505,9 +503,9 @@ class NbserverStopApp(JupyterApp): description="Stop currently running notebook server." port = Integer(DEFAULT_NOTEBOOK_PORT, config=True, - help="Port of the server to be killed. Default %s" % DEFAULT_NOTEBOOK_PORT) + help=f"Port of the server to be killed. 
Default {DEFAULT_NOTEBOOK_PORT}") - sock = Unicode(u'', config=True, + sock = Unicode('', config=True, help="UNIX socket of the server to be killed.") def parse_command_line(self, argv=None): @@ -523,7 +521,7 @@ def shutdown_server(self, server): return shutdown_server(server, log=self.log) def _shutdown_or_exit(self, target_endpoint, server): - print("Shutting down server on %s..." % target_endpoint) + print(f"Shutting down server on {target_endpoint}...") server_stopped = self.shutdown_server(server) if not server_stopped and sys.platform.startswith('win'): # the pid check on Windows appears to be unreliable, so fetch another @@ -533,13 +531,13 @@ def _shutdown_or_exit(self, target_endpoint, server): if server not in servers: server_stopped = True if not server_stopped: - sys.exit("Could not stop server on %s" % target_endpoint) + sys.exit(f"Could not stop server on {target_endpoint}") @staticmethod def _maybe_remove_unix_socket(socket_path): try: os.unlink(socket_path) - except (OSError, IOError): + except OSError: pass def start(self): @@ -563,7 +561,7 @@ def start(self): else: current_endpoint = self.sock or self.port print( - "There is currently no server running on {}".format(current_endpoint), + f"There is currently no server running on {current_endpoint}", file=sys.stderr ) print("Ports/sockets currently in use:", file=sys.stderr) @@ -753,7 +751,7 @@ def _default_log_datefmt(self): @default('log_format') def _default_log_format(self): """override default log format to include time""" - return u"%(color)s[%(levelname)1.1s %(asctime)s.%(msecs).03d %(name)s]%(end_color)s %(message)s" + return "%(color)s[%(levelname)1.1s %(asctime)s.%(msecs).03d %(name)s]%(end_color)s %(message)s" ignore_minified_js = Bool(False, config=True, @@ -834,7 +832,7 @@ def _default_ip(self): s = socket.socket() try: s.bind(('localhost', 0)) - except socket.error as e: + except OSError as e: self.log.warning(_("Cannot bind to localhost, using 127.0.0.1 as default ip\n%s"), e) return 
'127.0.0.1' else: @@ -844,11 +842,11 @@ def _default_ip(self): @validate('ip') def _validate_ip(self, proposal): value = proposal['value'] - if value == u'*': - value = u'' + if value == '*': + value = '' return value - custom_display_url = Unicode(u'', config=True, + custom_display_url = Unicode('', config=True, help=_("""Override URL shown to users. Replace actual URL, including protocol, address, port and base URL, @@ -883,7 +881,7 @@ def port_retries_default(self): return int(os.getenv(self.port_retries_env, self.port_retries_default_value)) - sock = Unicode(u'', config=True, + sock = Unicode('', config=True, help=_("The UNIX socket the notebook server will listen on.") ) @@ -914,15 +912,15 @@ def _validate_sock_mode(self, proposal): return value - certfile = Unicode(u'', config=True, + certfile = Unicode('', config=True, help=_("""The full path to an SSL/TLS certificate file.""") ) - keyfile = Unicode(u'', config=True, + keyfile = Unicode('', config=True, help=_("""The full path to a private key file for usage with SSL/TLS.""") ) - client_ca = Unicode(u'', config=True, + client_ca = Unicode('', config=True, help=_("""The full path to a certificate authority certificate for SSL/TLS client authentication.""") ) @@ -947,7 +945,7 @@ def _default_cookie_secret_file(self): @default('cookie_secret') def _default_cookie_secret(self): if os.path.exists(self.cookie_secret_file): - with io.open(self.cookie_secret_file, 'rb') as f: + with open(self.cookie_secret_file, 'rb') as f: key = f.read() else: key = encodebytes(os.urandom(32)) @@ -960,7 +958,7 @@ def _write_cookie_secret_file(self, secret): """write my secret to my secret_file""" self.log.info(_("Writing notebook server cookie secret to %s"), self.cookie_secret_file) try: - with io.open(self.cookie_secret_file, 'wb') as f: + with open(self.cookie_secret_file, 'wb') as f: f.write(secret) except OSError as e: self.log.error(_("Failed to write cookie secret to %s: %s"), @@ -995,12 +993,12 @@ def _token_default(self): 
return os.getenv('JUPYTER_TOKEN') if os.getenv('JUPYTER_TOKEN_FILE'): self._token_generated = False - with io.open(os.getenv('JUPYTER_TOKEN_FILE'), "r") as token_file: + with open(os.getenv('JUPYTER_TOKEN_FILE')) as token_file: return token_file.read() if self.password: # no token if password is enabled self._token_generated = False - return u'' + return '' else: self._token_generated = True return binascii.hexlify(os.urandom(24)).decode('ascii') @@ -1051,7 +1049,7 @@ def _default_min_open_files_limit(self): def _token_changed(self, change): self._token_generated = False - password = Unicode(u'', config=True, + password = Unicode('', config=True, help="""Hashed password to use for web authentication. To generate, type in a python/IPython shell: @@ -1166,7 +1164,7 @@ def _default_allow_remote(self): (NotebookApp.browser) configuration option. """) - browser = Unicode(u'', config=True, + browser = Unicode('', config=True, help="""Specify what command to use to invoke a web browser when opening the notebook. If not specified, the default browser will be determined by the `webbrowser` @@ -1252,7 +1250,7 @@ def _update_webapp_settings(self, change): def _update_enable_mathjax(self, change): """set mathjax url to empty if mathjax is disabled""" if not change['new']: - self.mathjax_url = u'' + self.mathjax_url = '' base_url = Unicode('/', config=True, help='''The base URL for the notebook server. 
@@ -1351,7 +1349,7 @@ def nbextensions_path(self): @default('mathjax_url') def _default_mathjax_url(self): if not self.enable_mathjax: - return u'' + return '' static_url_prefix = self.tornado_settings.get("static_url_prefix", "static") return url_path_join(static_url_prefix, 'components', 'MathJax', 'MathJax.js') @@ -1360,7 +1358,7 @@ def _update_mathjax_url(self, change): new = change['new'] if new and not self.enable_mathjax: # enable_mathjax=False overrides mathjax_url - self.mathjax_url = u'' + self.mathjax_url = '' else: self.log.info(_("Using MathJax: %s"), new) @@ -1701,7 +1699,7 @@ def init_resources(self): if hard < soft: hard = soft self.log.debug( - 'Raising open file limit: soft {}->{}; hard {}->{}'.format(old_soft, soft, old_hard, hard) + f'Raising open file limit: soft {old_soft}->{soft}; hard {old_hard}->{hard}' ) resource.setrlimit(resource.RLIMIT_NOFILE, (soft, hard)) @@ -1803,7 +1801,7 @@ def _bind_http_server_unix(self): try: sock = bind_unix_socket(self.sock, mode=int(self.sock_mode.encode(), 8)) self.http_server.add_socket(sock) - except socket.error as e: + except OSError as e: if e.errno == errno.EADDRINUSE: self.log.warning(_('The socket %s is already in use.') % self.sock) return False @@ -1820,7 +1818,7 @@ def _bind_http_server_tcp(self): for port in random_ports(self.port, self.port_retries+1): try: self.http_server.listen(port, self.ip) - except socket.error as e: + except OSError as e: eacces = (errno.EACCES, getattr(errno, 'WSAEACCES', errno.EACCES)) if sys.platform == 'cygwin': # Cygwin has a bug that causes EPERM to be returned in this @@ -1886,7 +1884,7 @@ def connection_url(self): return self._tcp_url(ip) def _unix_sock_url(self, token=None): - return '%s%s' % (urlencode_unix_socket(self.sock), self.base_url) + return f'{urlencode_unix_socket(self.sock)}{self.base_url}' def _tcp_url(self, ip, port=None): proto = 'https' if self.certfile else 'http' @@ -2411,7 +2409,7 @@ def list_running_servers(runtime_dir=None): for file_name in 
os.listdir(runtime_dir): if re.match('nbserver-(.+).json', file_name): - with io.open(os.path.join(runtime_dir, file_name), encoding='utf-8') as f: + with open(os.path.join(runtime_dir, file_name), encoding='utf-8') as f: info = json.load(f) # Simple check whether that process is really still running diff --git a/notebook/prometheus/log_functions.py b/notebook/prometheus/log_functions.py index a67a252ade..f82e0f970e 100644 --- a/notebook/prometheus/log_functions.py +++ b/notebook/prometheus/log_functions.py @@ -19,6 +19,6 @@ def prometheus_log_method(handler): """ HTTP_REQUEST_DURATION_SECONDS.labels( method=handler.request.method, - handler='{}.{}'.format(handler.__class__.__module__, type(handler).__name__), + handler=f'{handler.__class__.__module__}.{type(handler).__name__}', status_code=handler.get_status() ).observe(handler.request.request_time()) diff --git a/notebook/serverextensions.py b/notebook/serverextensions.py index 72020a24f5..7a56a8fca8 100644 --- a/notebook/serverextensions.py +++ b/notebook/serverextensions.py @@ -59,14 +59,14 @@ def toggle_serverextension_python(import_name, enabled=None, parent=None, if logger: if new_enabled: - logger.info(u"Enabling: %s" % (import_name)) + logger.info(f"Enabling: {import_name}") else: - logger.info(u"Disabling: %s" % (import_name)) + logger.info(f"Disabling: {import_name}") server_extensions[import_name] = new_enabled if logger: - logger.info(u"- Writing config: {}".format(config_dir)) + logger.info(f"- Writing config: {config_dir}") cm.update("jupyter_notebook_config", cfg) @@ -104,13 +104,13 @@ def validate_serverextension(import_name, logger=None): except Exception: logger.warning("Error loading server extension %s", import_name) - import_msg = u" {} is {} importable?" + import_msg = " {} is {} importable?" 
if func is not None: infos.append(import_msg.format(GREEN_OK, import_name)) else: warnings.append(import_msg.format(RED_X, import_name)) - post_mortem = u" {} {} {}" + post_mortem = " {} {} {}" if logger: if warnings: [logger.info(info) for info in infos] @@ -159,13 +159,13 @@ class ToggleServerExtensionApp(BaseExtensionApp): """A base class for enabling/disabling extensions""" name = "jupyter serverextension enable/disable" description = "Enable/disable a server extension using frontend configuration files." - + flags = flags user = Bool(True, config=True, help="Whether to do a user install") sys_prefix = Bool(False, config=True, help="Use the sys.prefix as the prefix") python = Bool(False, config=True, help="Install from a Python package") - + def toggle_server_extension(self, import_name): """Change the status of a named server extension. @@ -215,7 +215,7 @@ class EnableServerExtensionApp(ToggleServerExtensionApp): name = "jupyter serverextension enable" description = """ Enable a serverextension in configuration. - + Usage jupyter serverextension enable [--system|--sys-prefix] """ @@ -227,7 +227,7 @@ class DisableServerExtensionApp(ToggleServerExtensionApp): name = "jupyter serverextension disable" description = """ Disable a serverextension in configuration. 
- + Usage jupyter serverextension disable [--system|--sys-prefix] """ @@ -254,11 +254,9 @@ def list_server_extensions(self): .setdefault("nbserver_extensions", {}) ) if server_extensions: - print(u'config dir: {}'.format(config_dir)) + print(f'config dir: {config_dir}') for import_name, enabled in server_extensions.items(): - print(u' {} {}'.format( - import_name, - GREEN_ENABLED if enabled else RED_DISABLED)) + print(f' {import_name} {GREEN_ENABLED if enabled else RED_DISABLED}') validate_serverextension(import_name, self.log) def start(self): @@ -293,7 +291,7 @@ def start(self): # The above should have called a subcommand and raised NoStart; if we # get here, it didn't, so we should self.log.info a message. subcmds = ", ".join(sorted(self.subcommands)) - sys.exit("Please supply at least one subcommand: %s" % subcmds) + sys.exit(f"Please supply at least one subcommand: {subcmds}") main = ServerExtensionApp.launch_instance @@ -324,7 +322,7 @@ def _get_server_extension_metadata(module): """ m = import_item(module) if not hasattr(m, '_jupyter_server_extension_paths'): - raise KeyError(u'The Python module {} does not include any valid server extensions'.format(module)) + raise KeyError(f'The Python module {module} does not include any valid server extensions') return m, m._jupyter_server_extension_paths() if __name__ == '__main__': diff --git a/notebook/services/config/handlers.py b/notebook/services/config/handlers.py index aae6480757..899eb4cf22 100644 --- a/notebook/services/config/handlers.py +++ b/notebook/services/config/handlers.py @@ -3,9 +3,6 @@ # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. 
import json -import os -import io -import errno from tornado import web from ...base.handlers import APIHandler @@ -35,5 +32,5 @@ def patch(self, section_name): section_name_regex = r"(?P\w+)" default_handlers = [ - (r"/api/config/%s" % section_name_regex, ConfigHandler), + (fr"/api/config/{section_name_regex}", ConfigHandler), ] diff --git a/notebook/services/config/tests/test_config_api.py b/notebook/services/config/tests/test_config_api.py index a4df8f40fc..151f0ea6cc 100644 --- a/notebook/services/config/tests/test_config_api.py +++ b/notebook/services/config/tests/test_config_api.py @@ -2,13 +2,11 @@ import json -import requests - from notebook.utils import url_path_join from notebook.tests.launchnotebook import NotebookTestBase -class ConfigAPI(object): +class ConfigAPI: """Wrapper for notebook API calls.""" def __init__(self, request): self.request = request diff --git a/notebook/services/contents/checkpoints.py b/notebook/services/contents/checkpoints.py index c29a669c22..fd80ef8c0e 100644 --- a/notebook/services/contents/checkpoints.py +++ b/notebook/services/contents/checkpoints.py @@ -53,7 +53,7 @@ def delete_all_checkpoints(self, path): self.delete_checkpoint(checkpoint['id'], path) -class GenericCheckpointsMixin(object): +class GenericCheckpointsMixin: """ Helper for creating Checkpoints subclasses that can be used with any ContentsManager. 
@@ -90,7 +90,7 @@ def create_checkpoint(self, contents_mgr, path): path, ) else: - raise HTTPError(500, u'Unexpected type %s' % type) + raise HTTPError(500, f'Unexpected type {type}') def restore_checkpoint(self, contents_mgr, checkpoint_id, path): """Restore a checkpoint.""" @@ -100,7 +100,7 @@ def restore_checkpoint(self, contents_mgr, checkpoint_id, path): elif type == 'file': model = self.get_file_checkpoint(checkpoint_id, path) else: - raise HTTPError(500, u'Unexpected type %s' % type) + raise HTTPError(500, f'Unexpected type {type}') contents_mgr.save(model, path) # Required Methods diff --git a/notebook/services/contents/filecheckpoints.py b/notebook/services/contents/filecheckpoints.py index 5a9c835749..540091fd50 100644 --- a/notebook/services/contents/filecheckpoints.py +++ b/notebook/services/contents/filecheckpoints.py @@ -50,7 +50,7 @@ def _root_dir_default(self): # ContentsManager-dependent checkpoint API def create_checkpoint(self, contents_mgr, path): """Create a checkpoint.""" - checkpoint_id = u'checkpoint' + checkpoint_id = 'checkpoint' src_path = contents_mgr._get_os_path(path) dest_path = self.checkpoint_path(checkpoint_id, path) self._copy(src_path, dest_path) @@ -107,11 +107,7 @@ def checkpoint_path(self, checkpoint_id, path): parent, name = ('/' + path).rsplit('/', 1) parent = parent.strip('/') basename, ext = os.path.splitext(name) - filename = u"{name}-{checkpoint_id}{ext}".format( - name=basename, - checkpoint_id=checkpoint_id, - ext=ext, - ) + filename = f"{basename}-{checkpoint_id}{ext}" os_path = self._get_os_path(path=parent) cp_dir = os.path.join(os_path, self.checkpoint_dir) with self.perm_to_403(): @@ -133,7 +129,7 @@ def checkpoint_model(self, checkpoint_id, os_path): def no_such_checkpoint(self, path, checkpoint_id): raise HTTPError( 404, - u'Checkpoint does not exist: %s@%s' % (path, checkpoint_id) + f'Checkpoint does not exist: {path}@{checkpoint_id}' ) @@ -146,7 +142,7 @@ def create_file_checkpoint(self, content, format, 
path): """Create a checkpoint from the current content of a file.""" path = path.strip('/') # only the one checkpoint ID: - checkpoint_id = u"checkpoint" + checkpoint_id = "checkpoint" os_checkpoint_path = self.checkpoint_path(checkpoint_id, path) self.log.debug("creating checkpoint for %s", path) with self.perm_to_403(): @@ -159,7 +155,7 @@ def create_notebook_checkpoint(self, nb, path): """Create a checkpoint from the current content of a notebook.""" path = path.strip('/') # only the one checkpoint ID: - checkpoint_id = u"checkpoint" + checkpoint_id = "checkpoint" os_checkpoint_path = self.checkpoint_path(checkpoint_id, path) self.log.debug("creating checkpoint for %s", path) with self.perm_to_403(): diff --git a/notebook/services/contents/fileio.py b/notebook/services/contents/fileio.py index f99504e32b..79482cf9ca 100644 --- a/notebook/services/contents/fileio.py +++ b/notebook/services/contents/fileio.py @@ -7,7 +7,6 @@ from contextlib import contextmanager import errno -import io import os import shutil @@ -102,9 +101,9 @@ def atomic_writing(path, text=True, encoding='utf-8', log=None, **kwargs): if text: # Make sure that text files have Unix linefeeds by default kwargs.setdefault('newline', '\n') - fileobj = io.open(path, 'w', encoding=encoding, **kwargs) + fileobj = open(path, 'w', encoding=encoding, **kwargs) else: - fileobj = io.open(path, 'wb', **kwargs) + fileobj = open(path, 'wb', **kwargs) try: yield fileobj @@ -154,9 +153,9 @@ def _simple_writing(path, text=True, encoding='utf-8', log=None, **kwargs): if text: # Make sure that text files have Unix linefeeds by default kwargs.setdefault('newline', '\n') - fileobj = io.open(path, 'w', encoding=encoding, **kwargs) + fileobj = open(path, 'w', encoding=encoding, **kwargs) else: - fileobj = io.open(path, 'wb', **kwargs) + fileobj = open(path, 'wb', **kwargs) try: yield fileobj @@ -197,7 +196,7 @@ class FileManagerMixin(Configurable): def open(self, os_path, *args, **kwargs): """wrapper around io.open that 
turns permission errors into 403""" with self.perm_to_403(os_path): - with io.open(os_path, *args, **kwargs) as f: + with open(os_path, *args, **kwargs) as f: yield f @contextmanager @@ -218,7 +217,7 @@ def perm_to_403(self, os_path=''): """context manager for turning permission errors into 403.""" try: yield - except (OSError, IOError) as e: + except OSError as e: if e.errno in {errno.EPERM, errno.EACCES}: # make 403 error message without root prefix # this may not work perfectly on unicode paths on Python 2, @@ -226,7 +225,7 @@ def perm_to_403(self, os_path=''): if not os_path: os_path = str_to_unicode(e.filename or 'unknown file') path = to_api_path(os_path, root=self.root_dir) - raise HTTPError(403, u'Permission denied: %s' % path) from e + raise HTTPError(403, f'Permission denied: {path}') from e else: raise @@ -257,7 +256,7 @@ def _get_os_path(self, path): root = os.path.abspath(self.root_dir) os_path = to_os_path(path, root) if not (os.path.abspath(os_path) + os.path.sep).startswith(root): - raise HTTPError(404, "%s is outside root contents directory" % path) + raise HTTPError(404, f"{path} is outside root contents directory") return os_path def _read_notebook(self, os_path, as_version=4): @@ -276,7 +275,7 @@ def _read_notebook(self, os_path, as_version=4): if not self.use_atomic_writing or not os.path.exists(tmp_path): raise HTTPError( 400, - u"Unreadable Notebook: %s %r" % (os_path, e_orig), + f"Unreadable Notebook: {os_path} {e_orig!r}", ) # Move the bad file aside, restore the intermediate, and try again. 
@@ -300,7 +299,7 @@ def _read_file(self, os_path, format): If not specified, try to decode as UTF-8, and fall back to base64 """ if not os.path.isfile(os_path): - raise HTTPError(400, "Cannot read non-file %s" % os_path) + raise HTTPError(400, f"Cannot read non-file {os_path}") with self.open(os_path, 'rb') as f: bcontent = f.read() @@ -314,7 +313,7 @@ def _read_file(self, os_path, format): if format == 'text': raise HTTPError( 400, - "%s is not UTF-8 encoded" % os_path, + f"{os_path} is not UTF-8 encoded", reason='bad format', ) from e return encodebytes(bcontent).decode('ascii'), 'base64' @@ -334,7 +333,7 @@ def _save_file(self, os_path, content, format): bcontent = decodebytes(b64_bytes) except Exception as e: raise HTTPError( - 400, u'Encoding error saving %s: %s' % (os_path, e) + 400, f'Encoding error saving {os_path}: {e}' ) from e with self.atomic_writing(os_path, text=False) as f: diff --git a/notebook/services/contents/filemanager.py b/notebook/services/contents/filemanager.py index 0c9386b2fc..b517ca4702 100644 --- a/notebook/services/contents/filemanager.py +++ b/notebook/services/contents/filemanager.py @@ -5,7 +5,6 @@ from datetime import datetime import errno -import io import os import shutil import stat @@ -60,7 +59,7 @@ def _post_save_script(model, os_path, contents_manager, **kwargs): script, resources = _script_exporter.from_filename(os_path) script_fname = base + resources.get('output_extension', '.txt') log.info("Saving script /%s", to_api_path(script_fname, contents_manager.root_dir)) - with io.open(script_fname, 'w', encoding='utf-8') as f: + with open(script_fname, 'w', encoding='utf-8') as f: f.write(script) @@ -132,8 +131,7 @@ def run_post_save_hook(self, model, os_path): self.post_save_hook(os_path=os_path, model=model, contents_manager=self) except Exception as e: self.log.error("Post-save hook failed on %s", os_path, exc_info=True) - raise web.HTTPError(500, u'Unexpected error while running post hook save: %s' - % e) from e + raise 
web.HTTPError(500, f'Unexpected error while running post hook save: {e}') from e @validate('root_dir') def _validate_root_dir(self, proposal): @@ -143,7 +141,7 @@ def _validate_root_dir(self, proposal): # If we receive a non-absolute path, make it absolute. value = os.path.abspath(value) if not os.path.isdir(value): - raise TraitError("%r is not a directory" % value) + raise TraitError(f"{value!r} is not a directory") return value @default('checkpoints_class') @@ -243,14 +241,14 @@ def _base_model(self, path): """Build the common base of a contents model""" os_path = self._get_os_path(path) info = os.lstat(os_path) - + try: - # size of file + # size of file size = info.st_size except (ValueError, OSError): self.log.warning('Unable to get size.') size = None - + try: last_modified = tz.utcfromtimestamp(info.st_mtime) except (ValueError, OSError): @@ -292,7 +290,7 @@ def _dir_model(self, path, content=True): """ os_path = self._get_os_path(path) - four_o_four = u'directory does not exist: %r' % path + four_o_four = f'directory does not exist: {path!r}' if not os.path.isdir(os_path): raise web.HTTPError(404, four_o_four) @@ -336,7 +334,7 @@ def _dir_model(self, path, content=True): if self.should_list(name): if self.allow_hidden or not is_file_hidden(os_path, stat_res=st): contents.append( - self.get(path='%s/%s' % (path, name), content=False) + self.get(path=f'{path}/{name}', content=False) ) except OSError as e: # ELOOP: recursive symlink @@ -392,14 +390,14 @@ def _notebook_model(self, path, content=True): model = self._base_model(path) model['type'] = 'notebook' os_path = self._get_os_path(path) - + if content: nb = self._read_notebook(os_path, as_version=4) self.mark_trusted_cells(nb, path) model['content'] = nb model['format'] = 'json' self.validate_notebook_model(model) - + return model def get(self, path, content=True, type=None, format=None): @@ -427,32 +425,33 @@ def get(self, path, content=True, type=None, format=None): path = path.strip('/') if not 
self.exists(path): - raise web.HTTPError(404, u'No such file or directory: %s' % path) + raise web.HTTPError(404, f'No such file or directory: {path}') os_path = self._get_os_path(path) if os.path.isdir(os_path): if type not in (None, 'directory'): raise web.HTTPError(400, - u'%s is a directory, not a %s' % (path, type), reason='bad type') + f'{path} is a directory, not a {type}', reason='bad type') model = self._dir_model(path, content=content) elif type == 'notebook' or (type is None and path.endswith('.ipynb')): model = self._notebook_model(path, content=content) else: if type == 'directory': - raise web.HTTPError(400, - u'%s is not a directory' % path, reason='bad type') + raise web.HTTPError( + 400, + f'{path} is not a directory', reason='bad type') model = self._file_model(path, content=content, format=format) return model def _save_directory(self, os_path, model, path=''): """create a directory""" if is_hidden(os_path, self.root_dir) and not self.allow_hidden: - raise web.HTTPError(400, u'Cannot create hidden directory %r' % os_path) + raise web.HTTPError(400, f'Cannot create hidden directory {os_path!r}') if not os.path.exists(os_path): with self.perm_to_403(): os.mkdir(os_path) elif not os.path.isdir(os_path): - raise web.HTTPError(400, u'Not a directory: %s' % (os_path)) + raise web.HTTPError(400, f'Not a directory: {os_path}') else: self.log.debug("Directory %r already exists", os_path) @@ -461,9 +460,9 @@ def save(self, model, path=''): path = path.strip('/') if 'type' not in model: - raise web.HTTPError(400, u'No file type provided') + raise web.HTTPError(400, 'No file type provided') if 'content' not in model and model['type'] != 'directory': - raise web.HTTPError(400, u'No file content provided') + raise web.HTTPError(400, 'No file content provided') os_path = self._get_os_path(path) self.log.debug("Saving %s", os_path) @@ -484,13 +483,12 @@ def save(self, model, path=''): elif model['type'] == 'directory': self._save_directory(os_path, model, path) 
else: - raise web.HTTPError(400, "Unhandled contents type: %s" % model['type']) + raise web.HTTPError(400, f"Unhandled contents type: {model['type']}") except web.HTTPError: raise except Exception as e: - self.log.error(u'Error while saving file: %s %s', path, e, exc_info=True) - raise web.HTTPError(500, u'Unexpected error while saving file: %s %s' % - (path, e)) from e + self.log.error('Error while saving file: %s %s', path, e, exc_info=True) + raise web.HTTPError(500, f'Unexpected error while saving file: {path} {e}') from e validation_message = None if model['type'] == 'notebook': @@ -511,7 +509,7 @@ def delete_file(self, path): os_path = self._get_os_path(path) rm = os.unlink if not os.path.exists(os_path): - raise web.HTTPError(404, u'File or directory does not exist: %s' % os_path) + raise web.HTTPError(404, f'File or directory does not exist: {os_path}') def is_non_empty_dir(os_path): if os.path.isdir(os_path): @@ -527,7 +525,7 @@ def is_non_empty_dir(os_path): if sys.platform == 'win32' and is_non_empty_dir(os_path): # send2trash can really delete files on Windows, so disallow # deleting non-empty files. See Github issue 3631. - raise web.HTTPError(400, u'Directory %s not empty' % os_path) + raise web.HTTPError(400, f'Directory {os_path} not empty') try: self.log.debug("Sending %s to trash", os_path) send2trash(os_path) @@ -538,7 +536,7 @@ def is_non_empty_dir(os_path): if os.path.isdir(os_path): # Don't permanently delete non-empty directories. if is_non_empty_dir(os_path): - raise web.HTTPError(400, u'Directory %s not empty' % os_path) + raise web.HTTPError(400, f'Directory {os_path} not empty') self.log.debug("Removing directory %s", os_path) with self.perm_to_403(): shutil.rmtree(os_path) @@ -563,7 +561,7 @@ def rename_file(self, old_path, new_path): # Should we proceed with the move? 
if os.path.exists(new_os_path) and not samefile(old_os_path, new_os_path): - raise web.HTTPError(409, u'File already exists: %s' % new_path) + raise web.HTTPError(409, f'File already exists: {new_path}') # Move the file try: @@ -572,8 +570,7 @@ def rename_file(self, old_path, new_path): except web.HTTPError: raise except Exception as e: - raise web.HTTPError(500, u'Unknown error renaming file: %s %s' % - (old_path, e)) from e + raise web.HTTPError(500, f'Unknown error renaming file: {old_path} {e}') from e def info_string(self): return _("Serving notebooks from local directory: %s") % self.root_dir @@ -604,6 +601,6 @@ def _validate_path(path): for char in invalid_chars: if char in path: - raise web.HTTPError(400, "Path '{}' contains characters that are invalid for the filesystem. " - "Path names on this filesystem cannot contain any of the following " - "characters: {}".format(path, invalid_chars)) + raise web.HTTPError(400, f"Path '{path}' contains characters that are invalid for the filesystem. 
" + f"Path names on this filesystem cannot contain any of the following " + f"characters: {invalid_chars}") diff --git a/notebook/services/contents/handlers.py b/notebook/services/contents/handlers.py index b3216335bb..1a8f2a2aab 100644 --- a/notebook/services/contents/handlers.py +++ b/notebook/services/contents/handlers.py @@ -45,7 +45,7 @@ def validate_model(model, expect_content): if missing: raise web.HTTPError( 500, - u"Missing Model Keys: {missing}".format(missing=missing), + f"Missing Model Keys: {missing}", ) maybe_none_keys = ['content', 'format'] @@ -54,7 +54,7 @@ def validate_model(model, expect_content): if errors: raise web.HTTPError( 500, - u"Keys unexpectedly None: {keys}".format(keys=errors), + f"Keys unexpectedly None: {errors}", ) else: errors = { @@ -65,7 +65,7 @@ def validate_model(model, expect_content): if errors: raise web.HTTPError( 500, - u"Keys unexpectedly not None: {keys}".format(keys=errors), + f"Keys unexpectedly not None: {errors}", ) @@ -103,14 +103,14 @@ def get(self, path=''): path = path or '' type = self.get_query_argument('type', default=None) if type not in {None, 'directory', 'file', 'notebook'}: - raise web.HTTPError(400, u'Type %r is invalid' % type) + raise web.HTTPError(400, f'Type {type!r} is invalid') format = self.get_query_argument('format', default=None) if format not in {None, 'text', 'base64'}: - raise web.HTTPError(400, u'Format %r is invalid' % format) + raise web.HTTPError(400, f'Format {format!r} is invalid') content = self.get_query_argument('content', default='1') if content not in {'0', '1'}: - raise web.HTTPError(400, u'Content %r is invalid' % content) + raise web.HTTPError(400, f'Content {content!r} is invalid') content = int(content) model = yield maybe_future(self.contents_manager.get( @@ -126,7 +126,7 @@ def patch(self, path=''): cm = self.contents_manager model = self.get_json_body() if model is None: - raise web.HTTPError(400, u'JSON body missing') + raise web.HTTPError(400, 'JSON body missing') 
model = yield maybe_future(cm.update(model, path)) validate_model(model, expect_content=False) self._finish_model(model) @@ -134,10 +134,7 @@ def patch(self, path=''): @gen.coroutine def _copy(self, copy_from, copy_to=None): """Copy a file, optionally specifying a target directory.""" - self.log.info(u"Copying {copy_from} to {copy_to}".format( - copy_from=copy_from, - copy_to=copy_to or '', - )) + self.log.info(f"Copying {copy_from} to {copy_to or ''}") model = yield maybe_future(self.contents_manager.copy(copy_from, copy_to)) self.set_status(201) validate_model(model, expect_content=False) @@ -146,7 +143,7 @@ def _copy(self, copy_from, copy_to=None): @gen.coroutine def _upload(self, model, path): """Handle upload of a new file to path""" - self.log.info(u"Uploading file to %s", path) + self.log.info("Uploading file to %s", path) model = yield maybe_future(self.contents_manager.new(model, path)) self.set_status(201) validate_model(model, expect_content=False) @@ -155,7 +152,7 @@ def _upload(self, model, path): @gen.coroutine def _new_untitled(self, path, type='', ext=''): """Create a new, empty untitled entity""" - self.log.info(u"Creating new %s in %s", type or 'file', path) + self.log.info("Creating new %s in %s", type or 'file', path) model = yield maybe_future(self.contents_manager.new_untitled(path=path, type=type, ext=ext)) self.set_status(201) validate_model(model, expect_content=False) @@ -166,7 +163,7 @@ def _save(self, model, path): """Save an existing file.""" chunk = model.get("chunk", None) if not chunk or chunk == -1: # Avoid tedious log information - self.log.info(u"Saving file at %s", path) + self.log.info("Saving file at %s", path) model = yield maybe_future(self.contents_manager.save(model, path)) validate_model(model, expect_content=False) self._finish_model(model) @@ -193,7 +190,7 @@ def post(self, path=''): dir_exists = yield maybe_future(cm.dir_exists(path)) if not dir_exists: - raise web.HTTPError(404, "No such directory: %s" % path) + raise 
web.HTTPError(404, f"No such directory: {path}") model = self.get_json_body() @@ -323,10 +320,10 @@ def post(self,path=''): _checkpoint_id_regex = r"(?P<checkpoint_id>[\w-]+)" default_handlers = [ - (r"/api/contents%s/checkpoints" % path_regex, CheckpointsHandler), - (r"/api/contents%s/checkpoints/%s" % (path_regex, _checkpoint_id_regex), + (fr"/api/contents{path_regex}/checkpoints", CheckpointsHandler), + (fr"/api/contents{path_regex}/checkpoints/{_checkpoint_id_regex}", ModifyCheckpointsHandler), - (r"/api/contents%s/trust" % path_regex, TrustNotebooksHandler), - (r"/api/contents%s" % path_regex, ContentsHandler), + (fr"/api/contents{path_regex}/trust", TrustNotebooksHandler), + (fr"/api/contents{path_regex}", ContentsHandler), (r"/api/notebooks/?(.*)", NotebooksRedirectHandler), ] diff --git a/notebook/services/contents/largefilemanager.py b/notebook/services/contents/largefilemanager.py index 6779a0b5c2..38aeeef290 100644 --- a/notebook/services/contents/largefilemanager.py +++ b/notebook/services/contents/largefilemanager.py @@ -1,9 +1,7 @@ from notebook.services.contents.filemanager import FileContentsManager -from contextlib import contextmanager from tornado import web -import nbformat import base64 -import os, io +import os class LargeFileManager(FileContentsManager): """Handle large file upload.""" @@ -13,13 +11,13 @@ def save(self, model, path=''): chunk = model.get('chunk', None) if chunk is not None: path = path.strip('/') - + if 'type' not in model: - raise web.HTTPError(400, u'No file type provided') + raise web.HTTPError(400, 'No file type provided') if model['type'] != 'file': - raise web.HTTPError(400, u'File type "{}" is not supported for large file transfer'.format(model['type'])) + raise web.HTTPError(400, f'File type "{model["type"]}" is not supported for large file transfer') if 'content' not in model and model['type'] != 'directory': - raise web.HTTPError(400, u'No file content provided') + raise web.HTTPError(400, 'No file content provided') os_path = 
self._get_os_path(path) @@ -33,8 +31,8 @@ def save(self, model, path=''): except web.HTTPError: raise except Exception as e: - self.log.error(u'Error while saving file: %s %s', path, e, exc_info=True) - raise web.HTTPError(500, u'Unexpected error while saving file: %s %s' % (path, e)) from e + self.log.error('Error while saving file: %s %s', path, e, exc_info=True) + raise web.HTTPError(500, f'Unexpected error while saving file: {path} {e}') from e model = self.get(path, content=False) @@ -60,11 +58,11 @@ def _save_large_file(self, os_path, content, format): bcontent = base64.b64decode(b64_bytes) except Exception as e: raise web.HTTPError( - 400, u'Encoding error saving %s: %s' % (os_path, e) + 400, f'Encoding error saving {os_path}: {e}' ) from e with self.perm_to_403(os_path): if os.path.islink(os_path): os_path = os.path.join(os.path.dirname(os_path), os.readlink(os_path)) - with io.open(os_path, 'ab') as f: + with open(os_path, 'ab') as f: f.write(bcontent) diff --git a/notebook/services/contents/manager.py b/notebook/services/contents/manager.py index b556abc1d3..dd8f848c7e 100644 --- a/notebook/services/contents/manager.py +++ b/notebook/services/contents/manager.py @@ -55,7 +55,7 @@ class ContentsManager(LoggingConfigurable): indicating the root path. """ - + root_dir = Unicode('/', config=True) allow_hidden = Bool(False, config=True, help="Allow access to hidden files") @@ -65,7 +65,7 @@ def _notary_default(self): return sign.NotebookNotary(parent=self) hide_globs = List(Unicode(), [ - u'__pycache__', '*.pyc', '*.pyo', + '__pycache__', '*.pyc', '*.pyo', '.DS_Store', '*.so', '*.dylib', '*~', ], config=True, help=""" Glob patterns to hide in file and directory listings. @@ -302,7 +302,7 @@ def info_string(self): def get_kernel_path(self, path, model=None): """Return the API path for the kernel - + KernelManagers can turn this value into a filesystem path, or ignore it altogether. 
@@ -334,17 +334,16 @@ def increment_filename(self, filename, path='', insert=''): basename, dot, ext = filename.rpartition('.') if ext != 'ipynb': basename, dot, ext = filename.partition('.') - + suffix = dot + ext for i in itertools.count(): if i: - insert_i = '{}{}'.format(insert, i) + insert_i = f'{insert}{i}' else: insert_i = '' - name = u'{basename}{insert}{suffix}'.format(basename=basename, - insert=insert_i, suffix=suffix) - if not self.exists(u'{}/{}'.format(path, name)): + name = f'{basename}{insert_i}{suffix}' + if not self.exists(f'{path}/{name}'): break return name @@ -353,33 +352,32 @@ def validate_notebook_model(self, model): try: validate_nb(model['content']) except ValidationError as e: - model['message'] = u'Notebook validation failed: {}:\n{}'.format( - e.message, json.dumps(e.instance, indent=1, default=lambda obj: ''), - ) + model['message'] = f'Notebook validation failed: {e.message}:\n' \ + f'{json.dumps(e.instance, indent=1, default=lambda obj: "")}' return model - + def new_untitled(self, path='', type='', ext=''): """Create a new untitled file or directory in path - + path must be a directory - + File extension can be specified. - + Use `new` to create files with a fully specified path (including filename). 
""" path = path.strip('/') if not self.dir_exists(path): - raise HTTPError(404, 'No such directory: %s' % path) - + raise HTTPError(404, f'No such directory: {path}') + model = {} if type: model['type'] = type - + if ext == '.ipynb': model.setdefault('type', 'notebook') else: model.setdefault('type', 'file') - + insert = '' if model['type'] == 'directory': untitled = self.untitled_directory @@ -390,26 +388,26 @@ def new_untitled(self, path='', type='', ext=''): elif model['type'] == 'file': untitled = self.untitled_file else: - raise HTTPError(400, "Unexpected model type: %r" % model['type']) - + raise HTTPError(400, f"Unexpected model type: {model['type']!r}") + name = self.increment_filename(untitled + ext, path, insert=insert) - path = u'{0}/{1}'.format(path, name) + path = f'{path}/{name}' return self.new(model, path) - + def new(self, model=None, path=''): """Create a new file or directory and return its model with no content. - + To create a new untitled entity in a directory, use `new_untitled`. """ path = path.strip('/') if model is None: model = {} - + if path.endswith('.ipynb'): model.setdefault('type', 'notebook') else: model.setdefault('type', 'file') - + # no content, not a directory, so fill out new-file model if 'content' not in model and model['type'] != 'directory': if model['type'] == 'notebook': @@ -419,7 +417,7 @@ def new(self, model=None, path=''): model['content'] = '' model['type'] = 'file' model['format'] = 'text' - + model = self.save(model, path) return model @@ -429,7 +427,7 @@ def copy(self, from_path, to_path=None): If to_path not specified, it will be the parent directory of from_path. If to_path is a directory, filename will increment `from_path-Copy#.ext`. Considering multi-part extensions, the Copy# part will be placed before the first dot for all the extensions except `ipynb`. - For easier manual searching in case of notebooks, the Copy# part will be placed before the last dot. 
+ For easier manual searching in case of notebooks, the Copy# part will be placed before the last dot. from_path must be a full path to a file. """ @@ -442,20 +440,20 @@ def copy(self, from_path, to_path=None): else: from_dir = '' from_name = path - + model = self.get(path) model.pop('path', None) model.pop('name', None) if model['type'] == 'directory': raise HTTPError(400, "Can't copy directories") - + if to_path is None: to_path = from_dir if self.dir_exists(to_path): - name = copy_pat.sub(u'.', from_name) + name = copy_pat.sub('.', from_name) to_name = self.increment_filename(name, to_path, insert='-Copy') - to_path = u'{0}/{1}'.format(to_path, to_name) - + to_path = f'{to_path}/{to_name}' + model = self.save(model, to_path) return model diff --git a/notebook/services/contents/tests/test_contents_api.py b/notebook/services/contents/tests/test_contents_api.py index 6e4ad49dbc..592d22365b 100644 --- a/notebook/services/contents/tests/test_contents_api.py +++ b/notebook/services/contents/tests/test_contents_api.py @@ -2,7 +2,6 @@ from contextlib import contextmanager from functools import partial -import io import json import os import shutil @@ -28,10 +27,7 @@ from ipython_genutils import py3compat from ipython_genutils.tempdir import TemporaryDirectory -try: #PY3 - from base64 import encodebytes, decodebytes -except ImportError: #PY2 - from base64 import encodestring as encodebytes, decodestring as decodebytes +from base64 import encodebytes, decodebytes def uniq_stable(elems): @@ -50,7 +46,7 @@ def dirs_only(dir_model): return [x for x in dir_model['content'] if x['type']=='directory'] -class API(object): +class API: """Wrapper for contents API calls.""" def __init__(self, request): self.request = request @@ -128,16 +124,16 @@ class APITest(NotebookTestBase): """Test the kernels web service API""" dirs_nbs = [('', 'inroot'), ('Directory with spaces in', 'inspace'), - (u'unicodé', 'innonascii'), + ('unicodé', 'innonascii'), ('foo', 'a'), ('foo', 'b'), ('foo', 
'name with spaces'), - ('foo', u'unicodé'), + ('foo', 'unicodé'), ('foo/bar', 'baz'), ('ordering', 'A'), ('ordering', 'b'), ('ordering', 'C'), - (u'å b', u'ç d'), + ('å b', 'ç d'), ] hidden_dirs = ['.hidden', '__pycache__'] @@ -151,36 +147,36 @@ def _blob_for_name(name): @staticmethod def _txt_for_name(name): - return u'%s text file' % name - + return f'{name} text file' + def to_os_path(self, api_path): return to_os_path(api_path, root=self.notebook_dir) - + def make_dir(self, api_path): """Create a directory at api_path""" os_path = self.to_os_path(api_path) try: os.makedirs(os_path) except OSError: - print("Directory already exists: %r" % os_path) + print(f"Directory already exists: {os_path!r}") def make_txt(self, api_path, txt): """Make a text file at a given api_path""" os_path = self.to_os_path(api_path) - with io.open(os_path, 'w', encoding='utf-8') as f: + with open(os_path, 'w', encoding='utf-8') as f: f.write(txt) - + def make_blob(self, api_path, blob): """Make a binary file at a given api_path""" os_path = self.to_os_path(api_path) - with io.open(os_path, 'wb') as f: + with open(os_path, 'wb') as f: f.write(blob) - + def make_nb(self, api_path, nb): """Make a notebook file at a given api_path""" os_path = self.to_os_path(api_path) - - with io.open(os_path, 'w', encoding='utf-8') as f: + + with open(os_path, 'w', encoding='utf-8') as f: write(nb, f, version=4) def delete_dir(self, api_path): @@ -192,10 +188,10 @@ def delete_file(self, api_path): """Delete a file at the given path if it exists.""" if self.isfile(api_path): os.unlink(self.to_os_path(api_path)) - + def isfile(self, api_path): return os.path.isfile(self.to_os_path(api_path)) - + def isdir(self, api_path): return os.path.isdir(self.to_os_path(api_path)) @@ -215,18 +211,18 @@ def setUp(self): for d, name in self.dirs_nbs: # create a notebook nb = new_notebook() - nbname = u'{}/{}.ipynb'.format(d, name) + nbname = f'{d}/{name}.ipynb' self.make_nb(nbname, nb) 
self.addCleanup(partial(self.delete_file, nbname)) # create a text file txt = self._txt_for_name(name) - txtname = u'{}/{}.txt'.format(d, name) + txtname = f'{d}/{name}.txt' self.make_txt(txtname, txt) self.addCleanup(partial(self.delete_file, txtname)) blob = self._blob_for_name(name) - blobname = u'{}/{}.blob'.format(d, name) + blobname = f'{d}/{name}.blob' self.make_blob(blobname, blob) self.addCleanup(partial(self.delete_file, blobname)) @@ -241,10 +237,10 @@ def test_list_notebooks(self): self.assertEqual(len(nbs), 1) self.assertEqual(nbs[0]['name'], 'inspace.ipynb') - nbs = notebooks_only(self.api.list(u'/unicodé/').json()) + nbs = notebooks_only(self.api.list('/unicodé/').json()) self.assertEqual(len(nbs), 1) self.assertEqual(nbs[0]['name'], 'innonascii.ipynb') - self.assertEqual(nbs[0]['path'], u'unicodé/innonascii.ipynb') + self.assertEqual(nbs[0]['path'], 'unicodé/innonascii.ipynb') nbs = notebooks_only(self.api.list('/foo/bar/').json()) self.assertEqual(len(nbs), 1) @@ -254,7 +250,7 @@ def test_list_notebooks(self): nbs = notebooks_only(self.api.list('foo').json()) self.assertEqual(len(nbs), 4) nbnames = { normalize('NFC', n['name']) for n in nbs } - expected = [ u'a.ipynb', u'b.ipynb', u'name with spaces.ipynb', u'unicodé.ipynb'] + expected = [ 'a.ipynb', 'b.ipynb', 'name with spaces.ipynb', 'unicodé.ipynb'] expected = { normalize('NFC', name) for name in expected } self.assertEqual(nbnames, expected) @@ -284,7 +280,7 @@ def test_get_nb_contents(self): for d, name in self.dirs_nbs: path = url_path_join(d, name + '.ipynb') nb = self.api.read(path).json() - self.assertEqual(nb['name'], u'%s.ipynb' % name) + self.assertEqual(nb['name'], f'{name}.ipynb') self.assertEqual(nb['path'], path) self.assertEqual(nb['type'], 'notebook') self.assertIn('content', nb) @@ -296,7 +292,7 @@ def test_get_nb_no_content(self): for d, name in self.dirs_nbs: path = url_path_join(d, name + '.ipynb') nb = self.api.read(path, content=False).json() - self.assertEqual(nb['name'], 
u'%s.ipynb' % name) + self.assertEqual(nb['name'], f'{name}.ipynb') self.assertEqual(nb['path'], path) self.assertEqual(nb['type'], 'notebook') self.assertIn('content', nb) @@ -311,7 +307,7 @@ def test_get_nb_invalid(self): 'metadata': {}, }], } - path = u'å b/Validate tést.ipynb' + path = 'å b/Validate tést.ipynb' self.make_txt(path, py3compat.cast_unicode(json.dumps(nb))) model = self.api.read(path).json() self.assertEqual(model['path'], path) @@ -329,7 +325,7 @@ def test_get_text_file_contents(self): for d, name in self.dirs_nbs: path = url_path_join(d, name + '.txt') model = self.api.read(path).json() - self.assertEqual(model['name'], u'%s.txt' % name) + self.assertEqual(model['name'], f'{name}.txt') self.assertEqual(model['path'], path) self.assertIn('content', model) self.assertEqual(model['format'], 'text') @@ -348,7 +344,7 @@ def test_get_binary_file_contents(self): for d, name in self.dirs_nbs: path = url_path_join(d, name + '.blob') model = self.api.read(path).json() - self.assertEqual(model['name'], u'%s.blob' % name) + self.assertEqual(model['name'], f'{name}.blob') self.assertEqual(model['path'], path) self.assertIn('content', model) self.assertEqual(model['format'], 'base64') @@ -364,15 +360,15 @@ def test_get_binary_file_contents(self): def test_get_bad_type(self): with assert_http_error(400): - self.api.read(u'unicodé', type='file') # this is a directory + self.api.read('unicodé', type='file') # this is a directory with assert_http_error(400): - self.api.read(u'unicodé/innonascii.ipynb', type='directory') + self.api.read('unicodé/innonascii.ipynb', type='directory') def _check_created(self, resp, path, type='notebook'): self.assertEqual(resp.status_code, 201) location_header = py3compat.str_to_unicode(resp.headers['Location']) - self.assertEqual(location_header, url_path_join(self.url_prefix, u'api/contents', url_escape(path))) + self.assertEqual(location_header, url_path_join(self.url_prefix, 'api/contents', url_escape(path))) rjson = resp.json() 
self.assertEqual(rjson['name'], path.rsplit('/', 1)[-1]) self.assertEqual(rjson['path'], path) @@ -381,12 +377,12 @@ def _check_created(self, resp, path, type='notebook'): assert isright(path) def test_create_untitled(self): - resp = self.api.create_untitled(path=u'å b') - self._check_created(resp, u'å b/Untitled.ipynb') + resp = self.api.create_untitled(path='å b') + self._check_created(resp, 'å b/Untitled.ipynb') # Second time - resp = self.api.create_untitled(path=u'å b') - self._check_created(resp, u'å b/Untitled1.ipynb') + resp = self.api.create_untitled(path='å b') + self._check_created(resp, 'å b/Untitled1.ipynb') # And two directories down resp = self.api.create_untitled(path='foo/bar') @@ -405,39 +401,39 @@ def test_create_untitled_txt(self): def test_upload(self): nb = new_notebook() nbmodel = {'content': nb, 'type': 'notebook'} - path = u'å b/Upload tést.ipynb' + path = 'å b/Upload tést.ipynb' resp = self.api.upload(path, body=json.dumps(nbmodel)) self._check_created(resp, path) def test_mkdir_untitled(self): - resp = self.api.mkdir_untitled(path=u'å b') - self._check_created(resp, u'å b/Untitled Folder', type='directory') + resp = self.api.mkdir_untitled(path='å b') + self._check_created(resp, 'å b/Untitled Folder', type='directory') # Second time - resp = self.api.mkdir_untitled(path=u'å b') - self._check_created(resp, u'å b/Untitled Folder 1', type='directory') + resp = self.api.mkdir_untitled(path='å b') + self._check_created(resp, 'å b/Untitled Folder 1', type='directory') # And two directories down resp = self.api.mkdir_untitled(path='foo/bar') self._check_created(resp, 'foo/bar/Untitled Folder', type='directory') def test_mkdir(self): - path = u'å b/New ∂ir' + path = 'å b/New ∂ir' resp = self.api.mkdir(path) self._check_created(resp, path, type='directory') def test_mkdir_hidden_400(self): with assert_http_error(400): - resp = self.api.mkdir(u'å b/.hidden') + resp = self.api.mkdir('å b/.hidden') def test_upload_txt(self): - body = u'ünicode téxt' 
+ body = 'ünicode téxt' model = { 'content' : body, 'format' : 'text', 'type' : 'file', } - path = u'å b/Upload tést.txt' + path = 'å b/Upload tést.txt' resp = self.api.upload(path, body=json.dumps(model)) # check roundtrip @@ -455,7 +451,7 @@ def test_upload_b64(self): 'format' : 'base64', 'type' : 'file', } - path = u'å b/Upload tést.blob' + path = 'å b/Upload tést.blob' resp = self.api.upload(path, body=json.dumps(model)) # check roundtrip @@ -473,7 +469,7 @@ def test_upload_v2(self): nb.worksheets.append(ws) ws.cells.append(v2.new_code_cell(input='print("hi")')) nbmodel = {'content': nb, 'type': 'notebook'} - path = u'å b/Upload tést.ipynb' + path = 'å b/Upload tést.ipynb' resp = self.api.upload(path, body=json.dumps(nbmodel)) self._check_created(resp, path) resp = self.api.read(path) @@ -481,38 +477,38 @@ def test_upload_v2(self): self.assertEqual(data['content']['nbformat'], 4) def test_copy(self): - resp = self.api.copy(u'å b/ç d.ipynb', u'å b') - self._check_created(resp, u'å b/ç d-Copy1.ipynb') - - resp = self.api.copy(u'å b/ç d.ipynb', u'å b') - self._check_created(resp, u'å b/ç d-Copy2.ipynb') - + resp = self.api.copy('å b/ç d.ipynb', 'å b') + self._check_created(resp, 'å b/ç d-Copy1.ipynb') + + resp = self.api.copy('å b/ç d.ipynb', 'å b') + self._check_created(resp, 'å b/ç d-Copy2.ipynb') + def test_copy_copy(self): - resp = self.api.copy(u'å b/ç d.ipynb', u'å b') - self._check_created(resp, u'å b/ç d-Copy1.ipynb') - - resp = self.api.copy(u'å b/ç d-Copy1.ipynb', u'å b') - self._check_created(resp, u'å b/ç d-Copy2.ipynb') - + resp = self.api.copy('å b/ç d.ipynb', 'å b') + self._check_created(resp, 'å b/ç d-Copy1.ipynb') + + resp = self.api.copy('å b/ç d-Copy1.ipynb', 'å b') + self._check_created(resp, 'å b/ç d-Copy2.ipynb') + def test_copy_path(self): - resp = self.api.copy(u'foo/a.ipynb', u'å b') - self._check_created(resp, u'å b/a.ipynb') - - resp = self.api.copy(u'foo/a.ipynb', u'å b') - self._check_created(resp, u'å b/a-Copy1.ipynb') + resp = 
self.api.copy('foo/a.ipynb', 'å b') + self._check_created(resp, 'å b/a.ipynb') + + resp = self.api.copy('foo/a.ipynb', 'å b') + self._check_created(resp, 'å b/a-Copy1.ipynb') def test_copy_put_400(self): with assert_http_error(400): - resp = self.api.copy_put(u'å b/ç d.ipynb', u'å b/cøpy.ipynb') + resp = self.api.copy_put('å b/ç d.ipynb', 'å b/cøpy.ipynb') def test_copy_dir_400(self): # can't copy directories with assert_http_error(400): - resp = self.api.copy(u'å b', u'foo') + resp = self.api.copy('å b', 'foo') def test_delete(self): for d, name in self.dirs_nbs: - print('%r, %r' % (d, name)) + print(f'{d!r}, {name!r}') resp = self.api.delete(url_path_join(d, name + '.ipynb')) self.assertEqual(resp.status_code, 204) @@ -537,15 +533,15 @@ def test_delete_non_empty_dir(self): self.skipTest("Disabled deleting non-empty dirs on Windows") # Test that non empty directory can be deleted try: - self.api.delete(u'å b') + self.api.delete('å b') except requests.HTTPError as e: if e.response.status_code == 400: - if not self.can_send2trash(u'å b'): + if not self.can_send2trash('å b'): self.skipTest("Dir can't be sent to trash") raise # Check if directory has actually been deleted with assert_http_error(404): - self.api.list(u'å b') + self.api.list('å b') def test_rename(self): resp = self.api.rename('foo/a.ipynb', 'foo/z.ipynb') @@ -555,7 +551,7 @@ def test_rename(self): assert self.isfile('foo/z.ipynb') nbs = notebooks_only(self.api.list('foo').json()) - nbnames = set(n['name'] for n in nbs) + nbnames = {n['name'] for n in nbs} self.assertIn('z.ipynb', nbnames) self.assertNotIn('a.ipynb', nbnames) @@ -599,7 +595,7 @@ def test_save(self): resp = self.api.read('foo/a.ipynb') nbcontent = json.loads(resp.text)['content'] nb = from_dict(nbcontent) - nb.cells.append(new_markdown_cell(u'Created by test ³')) + nb.cells.append(new_markdown_cell('Created by test ³')) nbmodel = {'content': nb, 'type': 'notebook'} resp = self.api.save('foo/a.ipynb', body=json.dumps(nbmodel)) @@ -607,7 
+603,7 @@ def test_save(self): nbcontent = self.api.read('foo/a.ipynb').json()['content'] newnb = from_dict(nbcontent) self.assertEqual(newnb.cells[0].source, - u'Created by test ³') + 'Created by test ³') def test_checkpoints(self): resp = self.api.read('foo/a.ipynb') diff --git a/notebook/services/contents/tests/test_fileio.py b/notebook/services/contents/tests/test_fileio.py index adc06d97f1..281be6a3d6 100644 --- a/notebook/services/contents/tests/test_fileio.py +++ b/notebook/services/contents/tests/test_fileio.py @@ -3,7 +3,6 @@ # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. -import io as stdlib_io import os.path import unittest import pytest @@ -17,13 +16,14 @@ umask = 0 def test_atomic_writing(): - class CustomExc(Exception): pass + class CustomExc(Exception): + pass with TemporaryDirectory() as td: f1 = os.path.join(td, 'penguin') - with stdlib_io.open(f1, 'w') as f: - f.write(u'Before') - + with open(f1, 'w') as f: + f.write('Before') + if os.name != 'nt': os.chmod(f1, 0o701) orig_mode = stat.S_IMODE(os.stat(f1).st_mode) @@ -40,18 +40,18 @@ class CustomExc(Exception): pass with pytest.raises(CustomExc): with atomic_writing(f1) as f: - f.write(u'Failing write') + f.write('Failing write') raise CustomExc # Because of the exception, the file should not have been modified - with stdlib_io.open(f1, 'r') as f: - assert f.read() == u'Before' + with open(f1) as f: + assert f.read() == 'Before' with atomic_writing(f1) as f: - f.write(u'Overwritten') + f.write('Overwritten') - with stdlib_io.open(f1, 'r') as f: - assert f.read() == u'Overwritten' + with open(f1) as f: + assert f.read() == 'Overwritten' if os.name != 'nt': mode = stat.S_IMODE(os.stat(f1).st_mode) @@ -60,10 +60,10 @@ class CustomExc(Exception): pass if have_symlink: # Check that writing over a file preserves a symlink with atomic_writing(f2) as f: - f.write(u'written from symlink') - - with stdlib_io.open(f1, 'r') as f: - assert f.read() == 
u'written from symlink' + f.write('written from symlink') + + with open(f1) as f: + assert f.read() == 'written from symlink' class TestWithSetUmask(unittest.TestCase): def setUp(self): @@ -71,7 +71,7 @@ def setUp(self): global umask umask = os.umask(0) os.umask(umask) - + def tearDown(self): # restore umask os.umask(umask) @@ -82,14 +82,14 @@ def test_atomic_writing_umask(self): os.umask(0o022) f1 = os.path.join(td, '1') with atomic_writing(f1) as f: - f.write(u'1') + f.write('1') mode = stat.S_IMODE(os.stat(f1).st_mode) assert mode == 0o644 - + os.umask(0o057) f2 = os.path.join(td, '2') with atomic_writing(f2) as f: - f.write(u'2') + f.write('2') mode = stat.S_IMODE(os.stat(f2).st_mode) assert mode == 0o620 @@ -97,36 +97,36 @@ def test_atomic_writing_umask(self): def test_atomic_writing_newlines(): with TemporaryDirectory() as td: path = os.path.join(td, 'testfile') - - lf = u'a\nb\nc\n' - plat = lf.replace(u'\n', os.linesep) - crlf = lf.replace(u'\n', u'\r\n') - + + lf = 'a\nb\nc\n' + plat = lf.replace('\n', os.linesep) + crlf = lf.replace('\n', '\r\n') + # test default - with stdlib_io.open(path, 'w') as f: + with open(path, 'w') as f: f.write(lf) - with stdlib_io.open(path, 'r', newline='') as f: + with open(path, newline='') as f: read = f.read() assert read == plat - + # test newline=LF - with stdlib_io.open(path, 'w', newline='\n') as f: + with open(path, 'w', newline='\n') as f: f.write(lf) - with stdlib_io.open(path, 'r', newline='') as f: + with open(path, newline='') as f: read = f.read() assert read == lf - + # test newline=CRLF with atomic_writing(path, newline='\r\n') as f: f.write(lf) - with stdlib_io.open(path, 'r', newline='') as f: + with open(path, newline='') as f: read = f.read() assert read == crlf - + # test newline=no convert - text = u'crlf\r\ncr\rlf\n' + text = 'crlf\r\ncr\rlf\n' with atomic_writing(path, newline='') as f: f.write(text) - with stdlib_io.open(path, 'r', newline='') as f: + with open(path, newline='') as f: read = f.read() 
assert read == text diff --git a/notebook/services/contents/tests/test_largefilemanager.py b/notebook/services/contents/tests/test_largefilemanager.py index 13d294b9b0..d52c697acb 100644 --- a/notebook/services/contents/tests/test_largefilemanager.py +++ b/notebook/services/contents/tests/test_largefilemanager.py @@ -13,7 +13,7 @@ def _make_dir(contents_manager, api_path): try: os.makedirs(os_path) except OSError: - print("Directory already exists: %r" % os_path) + print(f"Directory already exists: {os_path!r}") class TestLargeFileManager(TestCase): @@ -68,7 +68,7 @@ def test_save(self): try: model = {'name': 'test', 'path': 'test', 'chunk': 2, 'type': 'file', - 'content': u'test', 'format': 'json'} + 'content': 'test', 'format': 'json'} cm.save(model, model['path']) except web.HTTPError as e: self.assertEqual("HTTP 400: Bad Request (Must specify format of file contents as 'text' or 'base64')", @@ -76,7 +76,7 @@ def test_save(self): # Save model for different chunks model = {'name': 'test', 'path': 'test', 'type': 'file', - 'content': u'test==', 'format': 'text'} + 'content': 'test==', 'format': 'text'} name = model['name'] path = model['path'] cm.save(model, path) diff --git a/notebook/services/contents/tests/test_manager.py b/notebook/services/contents/tests/test_manager.py index dfe5d272f5..cac98e893d 100644 --- a/notebook/services/contents/tests/test_manager.py +++ b/notebook/services/contents/tests/test_manager.py @@ -26,14 +26,14 @@ def _make_dir(contents_manager, api_path): try: os.makedirs(os_path) except OSError: - print("Directory already exists: %r" % os_path) + print(f"Directory already exists: {os_path!r}") class TestFileContentsManager(TestCase): @contextmanager def assertRaisesHTTPError(self, status, msg=None): - msg = msg or "Should have raised HTTPError(%i)" % status + msg = msg or f"Should have raised HTTPError({status})" try: yield except HTTPError as e: @@ -87,7 +87,7 @@ def test_get_os_path(self): self.assertEqual(path, fs_path) def 
test_checkpoint_subdir(self): - subd = u'sub ∂ir' + subd = 'sub ∂ir' cp_name = 'test-cp.ipynb' with TemporaryDirectory() as td: root = td @@ -98,7 +98,7 @@ def test_checkpoint_subdir(self): 'cp', 'test.ipynb' ) cp_subdir = cpm.checkpoint_path( - 'cp', '/%s/test.ipynb' % subd + 'cp', f'/{subd}/test.ipynb' ) self.assertNotEqual(cp_dir, cp_subdir) self.assertEqual(cp_dir, os.path.join(root, cpm.checkpoint_dir, cp_name)) @@ -113,7 +113,7 @@ def test_bad_symlink(self): file_model = cm.new_untitled(path=path, ext='.txt') # create a broken symlink - self.symlink(cm, "target", '%s/%s' % (path, 'bad symlink')) + self.symlink(cm, "target", f'{path}/{"bad symlink"}') model = cm.get(path) contents = { @@ -149,7 +149,7 @@ def test_good_symlink(self): cm = FileContentsManager(root_dir=td) parent = 'test good symlink' name = 'good symlink' - path = '{0}/{1}'.format(parent, name) + path = f'{parent}/{name}' _make_dir(cm, parent) file_model = cm.new(path=parent + '/zfoo.txt') @@ -175,7 +175,7 @@ def test_403(self): os.chmod(os_path, 0o400) try: with cm.open(os_path, 'w') as f: - f.write(u"don't care") + f.write("don't care") except HTTPError as e: self.assertEqual(e.status_code, 403) else: @@ -201,7 +201,7 @@ def test_escape_root(self): with self.assertRaisesHTTPError(404): cm.save(model={ 'type': 'file', - 'content': u'', + 'content': '', 'format': 'text', }, path='../foo') @@ -209,7 +209,7 @@ def test_escape_root(self): class TestContentsManager(TestCase): @contextmanager def assertRaisesHTTPError(self, status, msg=None): - msg = msg or "Should have raised HTTPError(%i)" % status + msg = msg or f"Should have raised HTTPError({status})" try: yield except HTTPError as e: @@ -309,7 +309,7 @@ def test_new_untitled(self): self.assertIn('type', model) self.assertEqual(model['type'], 'file') self.assertEqual(model['name'], 'untitled') - self.assertEqual(model['path'], '%s/untitled' % sub_dir) + self.assertEqual(model['path'], f'{sub_dir}/untitled') # Test with a compound extension model 
= cm.new_untitled(path=sub_dir, ext='.foo.bar') @@ -378,19 +378,19 @@ def test_get(self): self.assertIn('path', model2) self.assertIn('content', model2) self.assertEqual(model2['name'], 'Untitled.ipynb') - self.assertEqual(model2['path'], '{0}/{1}'.format(sub_dir.strip('/'), name)) + self.assertEqual(model2['path'], f'{sub_dir.strip("/")}/{name}') # Test with a regular file. file_model_path = cm.new_untitled(path=sub_dir, ext='.txt')['path'] file_model = cm.get(file_model_path) self.assertDictContainsSubset( { - 'content': u'', - 'format': u'text', - 'mimetype': u'text/plain', - 'name': u'untitled.txt', - 'path': u'foo/untitled.txt', - 'type': u'file', + 'content': '', + 'format': 'text', + 'mimetype': 'text/plain', + 'name': 'untitled.txt', + 'path': 'foo/untitled.txt', + 'type': 'file', 'writable': True, }, file_model, @@ -413,7 +413,7 @@ def test_get(self): # Directory contents should match the contents of each individual entry # when requested with content=False. model2_no_content = cm.get(sub_dir + name, content=False) - file_model_no_content = cm.get(u'foo/untitled.txt', content=False) + file_model_no_content = cm.get('foo/untitled.txt', content=False) sub_sub_dir_no_content = cm.get('foo/bar', content=False) self.assertEqual(sub_sub_dir_no_content['path'], 'foo/bar') self.assertEqual(sub_sub_dir_no_content['name'], 'bar') @@ -428,7 +428,7 @@ def test_get(self): elif entry['path'] == file_model_no_content['path']: self.assertEqual(entry, file_model_no_content) else: - self.fail("Unexpected directory entry: %s" % entry()) + self.fail(f"Unexpected directory entry: {entry()}") with self.assertRaises(HTTPError): cm.get('foo', type='file') @@ -582,9 +582,9 @@ def test_delete_root(self): def test_copy(self): cm = self.contents_manager - parent = u'å b' - name = u'nb √.ipynb' - path = u'{0}/{1}'.format(parent, name) + parent = 'å b' + name = 'nb √.ipynb' + path = f'{parent}/{name}' self.make_dir(parent) orig = cm.new(path=path) @@ -593,11 +593,11 @@ def 
test_copy(self): self.assertEqual(copy['name'], orig['name'].replace('.ipynb', '-Copy1.ipynb')) # copy with specified name - copy2 = cm.copy(path, u'å b/copy 2.ipynb') - self.assertEqual(copy2['name'], u'copy 2.ipynb') - self.assertEqual(copy2['path'], u'å b/copy 2.ipynb') + copy2 = cm.copy(path, 'å b/copy 2.ipynb') + self.assertEqual(copy2['name'], 'copy 2.ipynb') + self.assertEqual(copy2['path'], 'å b/copy 2.ipynb') # copy with specified path - copy2 = cm.copy(path, u'/') + copy2 = cm.copy(path, '/') self.assertEqual(copy2['name'], name) self.assertEqual(copy2['path'], name) diff --git a/notebook/services/kernels/handlers.py b/notebook/services/kernels/handlers.py index a2d4ddbab9..d2c5f8ef72 100644 --- a/notebook/services/kernels/handlers.py +++ b/notebook/services/kernels/handlers.py @@ -124,7 +124,7 @@ def rate_limit_window(self): return self.settings.get('rate_limit_window', 1.0) def __repr__(self): - return "%s(%s)" % (self.__class__.__name__, getattr(self, 'kernel_id', 'uninitialized')) + return f"{self.__class__.__name__}({getattr(self, 'kernel_id', 'uninitialized')})" def create_stream(self): km = self.kernel_manager @@ -252,7 +252,7 @@ def nudge(count): if not both_done.done(): log = self.log.warning if count % 10 == 0 else self.log.debug - log("Nudge: attempt %s on kernel %s" % (count, self.kernel_id)) + log(f"Nudge: attempt {count} on kernel {self.kernel_id}") self.session.send(shell_channel, "kernel_info_request") self.session.send(control_channel, "kernel_info_request") nonlocal nudge_handle @@ -323,7 +323,7 @@ def _finish_kernel_info(self, info): protocol_version = info.get('protocol_version', client_protocol_version) if protocol_version != client_protocol_version: self.session.adapt_version = int(protocol_version.split('.')[0]) - self.log.info("Adapting from protocol version {protocol_version} (kernel {kernel_id}) to {client_protocol_version} (client).".format(protocol_version=protocol_version, kernel_id=self.kernel_id, 
client_protocol_version=client_protocol_version)) + self.log.info(f"Adapting from protocol version {protocol_version} (kernel {self.kernel_id}) to {client_protocol_version} (client).") if not self._kernel_info_future.done(): self._kernel_info_future.set_result(info) @@ -384,7 +384,7 @@ def _register_session(self): This is likely due to a client reconnecting from a lost network connection, where the socket on our side has not been cleaned up yet. """ - self.session_key = '%s:%s' % (self.kernel_id, self.session.session) + self.session_key = f'{self.kernel_id}:{self.session.session}' stale_handler = self._open_sessions.get(self.session_key) if stale_handler: self.log.warning("Replacing stale connection: %s", self.session_key) @@ -457,7 +457,7 @@ def on_message(self, msg): am = self.kernel_manager.allowed_message_types mt = msg['header']['msg_type'] if am and mt not in am: - self.log.warning('Received message of type "%s", which is not allowed. Ignoring.' % mt) + self.log.warning(f'Received message of type "{mt}", which is not allowed. Ignoring.') else: stream = self.channels[channel] self.session.send(stream, msg) @@ -504,7 +504,7 @@ def write_stderr(error_message): # Increment the bytes and message count self._iopub_window_msg_count += 1 if msg_type == 'stream': - byte_count = sum([len(x) for x in msg_list]) + byte_count = sum(len(x) for x in msg_list) else: byte_count = 0 self._iopub_window_byte_count += byte_count @@ -522,7 +522,7 @@ def write_stderr(error_message): if self.iopub_msg_rate_limit > 0 and msg_rate > self.iopub_msg_rate_limit: if not self._iopub_msgs_exceeded: self._iopub_msgs_exceeded = True - write_stderr(dedent("""\ + write_stderr(dedent(f"""\ IOPub message rate exceeded. The notebook server will temporarily stop sending output to the client in order to avoid crashing it. @@ -530,9 +530,9 @@ def write_stderr(error_message): `--NotebookApp.iopub_msg_rate_limit`. 
Current values: - NotebookApp.iopub_msg_rate_limit={} (msgs/sec) - NotebookApp.rate_limit_window={} (secs) - """.format(self.iopub_msg_rate_limit, self.rate_limit_window))) + NotebookApp.iopub_msg_rate_limit={self.iopub_msg_rate_limit} (msgs/sec) + NotebookApp.rate_limit_window={self.rate_limit_window} (secs) + """)) else: # resume once we've got some headroom below the limit if self._iopub_msgs_exceeded and msg_rate < (0.8 * self.iopub_msg_rate_limit): @@ -544,7 +544,7 @@ def write_stderr(error_message): if self.iopub_data_rate_limit > 0 and data_rate > self.iopub_data_rate_limit: if not self._iopub_data_exceeded: self._iopub_data_exceeded = True - write_stderr(dedent("""\ + write_stderr(dedent(f"""\ IOPub data rate exceeded. The notebook server will temporarily stop sending output to the client in order to avoid crashing it. @@ -552,9 +552,9 @@ def write_stderr(error_message): `--NotebookApp.iopub_data_rate_limit`. Current values: - NotebookApp.iopub_data_rate_limit={} (bytes/sec) - NotebookApp.rate_limit_window={} (secs) - """.format(self.iopub_data_rate_limit, self.rate_limit_window))) + NotebookApp.iopub_data_rate_limit={self.iopub_data_rate_limit} (bytes/sec) + NotebookApp.rate_limit_window={self.rate_limit_window} (secs) + """)) else: # resume once we've got some headroom below the limit if self._iopub_data_exceeded and data_rate < (0.8 * self.iopub_data_rate_limit): @@ -640,7 +640,7 @@ def on_restart_failed(self): default_handlers = [ (r"/api/kernels", MainKernelHandler), - (r"/api/kernels/%s" % _kernel_id_regex, KernelHandler), - (r"/api/kernels/%s/%s" % (_kernel_id_regex, _kernel_action_regex), KernelActionHandler), - (r"/api/kernels/%s/channels" % _kernel_id_regex, ZMQChannelsHandler), + (fr"/api/kernels/{_kernel_id_regex}", KernelHandler), + (fr"/api/kernels/{_kernel_id_regex}/{_kernel_action_regex}", KernelActionHandler), + (fr"/api/kernels/{_kernel_id_regex}/channels", ZMQChannelsHandler), ] diff --git a/notebook/services/kernels/kernelmanager.py 
b/notebook/services/kernels/kernelmanager.py index 7ed182dfc0..037c42be0e 100644 --- a/notebook/services/kernels/kernelmanager.py +++ b/notebook/services/kernels/kernelmanager.py @@ -33,7 +33,7 @@ try: from jupyter_client.multikernelmanager import AsyncMultiKernelManager except ImportError: - class AsyncMultiKernelManager(object): + class AsyncMultiKernelManager: """Empty class to satisfy unused reference by AsyncMappingKernelManager.""" def __init__(self, **kwargs): pass @@ -71,7 +71,7 @@ def _update_root_dir(self, proposal): # If we receive a non-absolute path, make it absolute. value = os.path.abspath(value) if not exists(value) or not os.path.isdir(value): - raise TraitError("kernel root dir %r is not a directory" % value) + raise TraitError(f"kernel root dir {value!r} is not a directory") return value cull_idle_timeout = Integer(0, config=True, @@ -176,8 +176,8 @@ async def start_kernel(self, kernel_id=None, path=None, **kwargs): kernel_id = await maybe_future(self.pinned_superclass.start_kernel(self, **kwargs)) self._kernel_connections[kernel_id] = 0 self.start_watching_activity(kernel_id) - self.log.info("Kernel started: %s, name: %s" % (kernel_id, self._kernels[kernel_id].kernel_name)) - self.log.debug("Kernel args: %r" % kwargs) + self.log.info(f"Kernel started: {kernel_id}, name: {self._kernels[kernel_id].kernel_name}") + self.log.debug(f"Kernel args: {kwargs!r}") # register callback for failed auto-restart self.add_restart_callback(kernel_id, lambda : self._handle_kernel_died(kernel_id), @@ -192,7 +192,7 @@ async def start_kernel(self, kernel_id=None, path=None, **kwargs): else: self._check_kernel_id(kernel_id) - self.log.info("Using existing kernel: %s" % kernel_id) + self.log.info(f"Using existing kernel: {kernel_id}") # Initialize culling if not already if not self._initialized_culler: @@ -391,7 +391,7 @@ def list_kernels(self): def _check_kernel_id(self, kernel_id): """Check a that a kernel_id exists and raise 404 if not.""" if kernel_id not in self: 
- raise web.HTTPError(404, u'Kernel does not exist: %s' % kernel_id) + raise web.HTTPError(404, f'Kernel does not exist: {kernel_id}') # monitoring activity: @@ -457,8 +457,10 @@ async def cull_kernels(self): try: await self.cull_kernel_if_idle(kernel_id) except Exception as e: - self.log.exception("The following exception was encountered while checking the " - "idle duration of kernel {}: {}".format(kernel_id, e)) + self.log.exception( + f"The following exception was encountered while checking the idle duration of kernel " + f"{kernel_id}: {e}" + ) async def cull_kernel_if_idle(self, kernel_id): try: diff --git a/notebook/services/kernels/tests/test_kernels_api.py b/notebook/services/kernels/tests/test_kernels_api.py index 7895f36a81..38a9dfb00c 100644 --- a/notebook/services/kernels/tests/test_kernels_api.py +++ b/notebook/services/kernels/tests/test_kernels_api.py @@ -1,7 +1,6 @@ """Test the kernels service API.""" import json -import sys import time from requests import HTTPError @@ -24,7 +23,7 @@ async_testing_enabled = False -class KernelAPI(object): +class KernelAPI: """Wrapper for kernel REST API requests""" def __init__(self, request, base_url, headers): self.request = request @@ -204,11 +203,11 @@ class AsyncKernelAPITest(KernelAPITest): def setup_class(cls): if not async_testing_enabled: # Can be removed once jupyter_client >= 6.1 is required. raise SkipTest("AsyncKernelAPITest tests skipped due to down-level jupyter_client!") - super(AsyncKernelAPITest, cls).setup_class() + super().setup_class() @classmethod def get_argv(cls): - argv = super(AsyncKernelAPITest, cls).get_argv() + argv = super().get_argv() # before we extend the argv with the class, ensure that appropriate jupyter_client is available. # if not available, don't set kernel_manager_class, resulting in the repeat of sync-based tests. 
@@ -244,11 +243,11 @@ class KernelCullingTest(NotebookTestBase): @classmethod def get_argv(cls): - argv = super(KernelCullingTest, cls).get_argv() + argv = super().get_argv() # Enable culling with 5s timeout and 1s intervals - argv.extend(['--MappingKernelManager.cull_idle_timeout={}'.format(CULL_TIMEOUT), - '--MappingKernelManager.cull_interval={}'.format(CULL_INTERVAL), + argv.extend([f'--MappingKernelManager.cull_idle_timeout={CULL_TIMEOUT}', + f'--MappingKernelManager.cull_interval={CULL_INTERVAL}', '--MappingKernelManager.cull_connected=False']) return argv diff --git a/notebook/services/kernelspecs/handlers.py b/notebook/services/kernelspecs/handlers.py index a01d307fb2..bc68f367c8 100644 --- a/notebook/services/kernelspecs/handlers.py +++ b/notebook/services/kernelspecs/handlers.py @@ -87,7 +87,7 @@ def get(self, kernel_name): try: spec = yield maybe_future(ksm.get_kernel_spec(kernel_name)) except KeyError as e: - raise web.HTTPError(404, u'Kernel spec %s not found' % kernel_name) from e + raise web.HTTPError(404, f'Kernel spec {kernel_name} not found') from e if is_kernelspec_model(spec): model = spec else: @@ -102,5 +102,5 @@ def get(self, kernel_name): default_handlers = [ (r"/api/kernelspecs", MainKernelSpecHandler), - (r"/api/kernelspecs/%s" % kernel_name_regex, KernelSpecHandler), + (fr"/api/kernelspecs/{kernel_name_regex}", KernelSpecHandler), ] diff --git a/notebook/services/kernelspecs/tests/test_kernelspecs_api.py b/notebook/services/kernelspecs/tests/test_kernelspecs_api.py index 215bfc861b..846743933d 100644 --- a/notebook/services/kernelspecs/tests/test_kernelspecs_api.py +++ b/notebook/services/kernelspecs/tests/test_kernelspecs_api.py @@ -1,17 +1,14 @@ """Test the kernel specs webservice API.""" import errno -import io import json import os import shutil pjoin = os.path.join -import requests - from jupyter_client.kernelspec import NATIVE_KERNEL_NAME -from notebook.utils import url_path_join, url_escape +from notebook.utils import url_path_join 
from notebook.tests.launchnotebook import NotebookTestBase, assert_http_error # Copied from jupyter_client.tests.test_kernelspec so updating that doesn't @@ -20,10 +17,10 @@ 'display_name':'Test kernel', } -some_resource = u"The very model of a modern major general" +some_resource = "The very model of a modern major general" -class KernelSpecAPI(object): +class KernelSpecAPI: """Wrapper for notebook API calls.""" def __init__(self, request): self.request = request @@ -41,7 +38,7 @@ def list(self): def kernel_spec_info(self, name): return self._req('GET', url_path_join('api/kernelspecs', name)) - + def kernel_resource(self, name, path): return self._req('GET', url_path_join('kernelspecs', name, path)) @@ -60,11 +57,11 @@ def create_spec(self, name): except OSError as e: if e.errno != errno.EEXIST: raise - + with open(pjoin(sample_kernel_dir, 'kernel.json'), 'w') as f: json.dump(sample_kernel_json, f) - - with io.open(pjoin(sample_kernel_dir, 'resource.txt'), 'w', + + with open(pjoin(sample_kernel_dir, 'resource.txt'), 'w', encoding='utf-8') as f: f.write(some_resource) @@ -76,10 +73,10 @@ def test_list_kernelspecs_bad(self): except OSError as e: if e.errno != errno.EEXIST: raise - + with open(pjoin(bad_kernel_dir, 'kernel.json'), 'w') as f: f.write("garbage") - + model = self.ks_api.list().json() assert isinstance(model, dict) self.assertEqual(model['default'], NATIVE_KERNEL_NAME) @@ -87,9 +84,9 @@ def test_list_kernelspecs_bad(self): assert isinstance(specs, dict) # 2: the sample kernelspec created in setUp, and the native Python kernel self.assertGreaterEqual(len(specs), 2) - + shutil.rmtree(bad_kernel_dir) - + def test_list_kernelspecs(self): model = self.ks_api.list().json() assert isinstance(model, dict) @@ -123,14 +120,14 @@ def test_get_kernelspec_spaces(self): def test_get_nonexistant_kernelspec(self): with assert_http_error(404): self.ks_api.kernel_spec_info('nonexistant') - + def test_get_kernel_resource_file(self): res = 
self.ks_api.kernel_resource('sAmple', 'resource.txt') self.assertEqual(res.text, some_resource) - + def test_get_nonexistant_resource(self): with assert_http_error(404): self.ks_api.kernel_resource('nonexistant', 'resource.txt') - + with assert_http_error(404): self.ks_api.kernel_resource('sample', 'nonexistant.txt') diff --git a/notebook/services/nbconvert/handlers.py b/notebook/services/nbconvert/handlers.py index 7c65ee0fd9..563381c91b 100644 --- a/notebook/services/nbconvert/handlers.py +++ b/notebook/services/nbconvert/handlers.py @@ -13,7 +13,7 @@ def get(self): try: from nbconvert.exporters import base except ImportError as e: - raise web.HTTPError(500, "Could not import nbconvert: %s" % e) from e + raise web.HTTPError(500, f"Could not import nbconvert: {e}") from e res = {} exporters = base.get_export_names() for exporter_name in exporters: diff --git a/notebook/services/nbconvert/tests/test_nbconvert_api.py b/notebook/services/nbconvert/tests/test_nbconvert_api.py index d6ef9d2ca5..b7e9059dc0 100644 --- a/notebook/services/nbconvert/tests/test_nbconvert_api.py +++ b/notebook/services/nbconvert/tests/test_nbconvert_api.py @@ -1,9 +1,7 @@ -import requests - from notebook.utils import url_path_join from notebook.tests.launchnotebook import NotebookTestBase -class NbconvertAPI(object): +class NbconvertAPI: """Wrapper for nbconvert API calls.""" def __init__(self, request): self.request = request diff --git a/notebook/services/sessions/handlers.py b/notebook/services/sessions/handlers.py index fc4b3d1d38..7c0540fd6b 100644 --- a/notebook/services/sessions/handlers.py +++ b/notebook/services/sessions/handlers.py @@ -76,10 +76,12 @@ def post(self): kernel_id=kernel_id, name=name, type=mtype)) except NoSuchKernel: - msg = ("The '%s' kernel is not available. Please pick another " - "suitable kernel instead, or install that kernel." 
% kernel_name) - status_msg = '%s not found' % kernel_name - self.log.warning('Kernel not found: %s' % kernel_name) + msg = ( + f"The '{kernel_name}' kernel is not available. " + f"Please pick another suitable kernel instead, or install that kernel." + ) + status_msg = f'{kernel_name} not found' + self.log.warning(f'Kernel not found: {kernel_name}') self.set_status(501) self.finish(json.dumps(dict(message=msg, short_message=status_msg))) return @@ -133,7 +135,7 @@ def patch(self, session_id): if model['kernel'].get('id') is not None: kernel_id = model['kernel']['id'] if kernel_id not in km: - raise web.HTTPError(400, "No such kernel: %s" % kernel_id) + raise web.HTTPError(400, f"No such kernel: {kernel_id}") changes['kernel_id'] = kernel_id elif model['kernel'].get('name') is not None: kernel_name = model['kernel']['name'] @@ -174,7 +176,7 @@ def delete(self, session_id): _session_id_regex = r"(?P\w+-\w+-\w+-\w+-\w+)" default_handlers = [ - (r"/api/sessions/%s" % _session_id_regex, SessionHandler), + (fr"/api/sessions/{_session_id_regex}", SessionHandler), (r"/api/sessions", SessionRootHandler) ] diff --git a/notebook/services/sessions/sessionmanager.py b/notebook/services/sessions/sessionmanager.py index 5686332868..206090839a 100644 --- a/notebook/services/sessions/sessionmanager.py +++ b/notebook/services/sessions/sessionmanager.py @@ -171,9 +171,9 @@ def get_session(self, **kwargs): for column in kwargs.keys(): if column not in self._columns: raise TypeError("No such column: %r", column) - conditions.append("%s=?" 
% column) + conditions.append(f"{column}=?") - query = "SELECT * FROM session WHERE %s" % (' AND '.join(conditions)) + query = f"SELECT * FROM session WHERE {' AND '.join(conditions)}" self.cursor.execute(query, list(kwargs.values())) try: @@ -185,14 +185,14 @@ def get_session(self, **kwargs): if row is None: q = [] for key, value in kwargs.items(): - q.append("%s=%r" % (key, value)) + q.append(f"{key}={value!r}") - raise web.HTTPError(404, u'Session not found: %s' % (', '.join(q))) + raise web.HTTPError(404, f'Session not found: {", ".join(q)}') try: model = yield maybe_future(self.row_to_model(row)) except KeyError as e: - raise web.HTTPError(404, u'Session not found: %s' % str(e)) + raise web.HTTPError(404, f'Session not found: {e}') raise gen.Return(model) @gen.coroutine @@ -220,9 +220,9 @@ def update_session(self, session_id, **kwargs): sets = [] for column in kwargs.keys(): if column not in self._columns: - raise TypeError("No such column: %r" % column) - sets.append("%s=?" % column) - query = "UPDATE session SET %s WHERE session_id=?" % (', '.join(sets)) + raise TypeError(f"No such column: {column!r}") + sets.append(f"{column}=?") + query = f"UPDATE session SET {', '.join(sets)} WHERE session_id=?" 
self.cursor.execute(query, list(kwargs.values()) + [session_id]) def kernel_culled(self, kernel_id): diff --git a/notebook/services/sessions/tests/test_sessionmanager.py b/notebook/services/sessions/tests/test_sessionmanager.py index 9af38033c8..b58f8f51d5 100644 --- a/notebook/services/sessions/tests/test_sessionmanager.py +++ b/notebook/services/sessions/tests/test_sessionmanager.py @@ -11,7 +11,7 @@ from notebook.services.contents.manager import ContentsManager from notebook._tz import utcnow, isoformat -class DummyKernel(object): +class DummyKernel: def __init__(self, kernel_name='python'): self.kernel_name = kernel_name @@ -22,11 +22,11 @@ class DummyMKM(MappingKernelManager): """MappingKernelManager interface that doesn't start kernels, for testing""" def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) - self.id_letters = iter(u'ABCDEFGHIJK') + self.id_letters = iter('ABCDEFGHIJK') def _new_id(self): return next(self.id_letters) - + def start_kernel(self, kernel_id=None, path=None, kernel_name='python', **kwargs): kernel_id = kernel_id or self._new_id() k = self._kernels[kernel_id] = DummyKernel(kernel_name=kernel_name) @@ -40,7 +40,7 @@ def shutdown_kernel(self, kernel_id, now=False): class TestSessionManager(TestCase): - + def setUp(self): self.sm = SessionManager( kernel_manager=DummyMKM(), @@ -59,7 +59,7 @@ def co_add(): sessions.append(session) raise gen.Return(sessions) return self.loop.run_sync(co_add) - + def create_session(self, **kwargs): return self.create_sessions(kwargs)[0] @@ -68,8 +68,8 @@ def test_get_session(self): session_id = self.create_session(path='/path/to/test.ipynb', kernel_name='bar')['id'] model = self.loop.run_sync(lambda: sm.get_session(session_id=session_id)) expected = {'id':session_id, - 'path': u'/path/to/test.ipynb', - 'notebook': {'path': u'/path/to/test.ipynb', 'name': None}, + 'path': '/path/to/test.ipynb', + 'notebook': {'path': '/path/to/test.ipynb', 'name': None}, 'type': 'notebook', 'name': None, 
'kernel': { @@ -103,18 +103,18 @@ def test_get_session_dead_kernel(self): def test_list_sessions(self): sm = self.sm sessions = self.create_sessions( - dict(path='/path/to/1/test1.ipynb', kernel_name='python'), - dict(path='/path/to/2/test2.py', type='file', kernel_name='python'), - dict(path='/path/to/3', name='foo', type='console', kernel_name='python'), + {'path': '/path/to/1/test1.ipynb', 'kernel_name': 'python'}, + {'path': '/path/to/2/test2.py', 'type': 'file', 'kernel_name': 'python'}, + {'path': '/path/to/3', 'name': 'foo', 'type': 'console', 'kernel_name': 'python'}, ) - + sessions = self.loop.run_sync(lambda: sm.list_sessions()) expected = [ { 'id':sessions[0]['id'], - 'path': u'/path/to/1/test1.ipynb', + 'path': '/path/to/1/test1.ipynb', 'type': 'notebook', - 'notebook': {'path': u'/path/to/1/test1.ipynb', 'name': None}, + 'notebook': {'path': '/path/to/1/test1.ipynb', 'name': None}, 'name': None, 'kernel': { 'id': 'A', @@ -125,7 +125,7 @@ def test_list_sessions(self): } }, { 'id':sessions[1]['id'], - 'path': u'/path/to/2/test2.py', + 'path': '/path/to/2/test2.py', 'type': 'file', 'name': None, 'kernel': { @@ -137,7 +137,7 @@ def test_list_sessions(self): } }, { 'id':sessions[2]['id'], - 'path': u'/path/to/3', + 'path': '/path/to/3', 'type': 'console', 'name': 'foo', 'kernel': { @@ -163,10 +163,10 @@ def test_list_sessions_dead_kernel(self): expected = [ { 'id': sessions[1]['id'], - 'path': u'/path/to/2/test2.ipynb', + 'path': '/path/to/2/test2.ipynb', 'type': 'notebook', 'name': None, - 'notebook': {'path': u'/path/to/2/test2.ipynb', 'name': None}, + 'notebook': {'path': '/path/to/2/test2.ipynb', 'name': None}, 'kernel': { 'id': 'B', 'name':'python', @@ -185,10 +185,10 @@ def test_update_session(self): self.loop.run_sync(lambda: sm.update_session(session_id, path='/path/to/new_name.ipynb')) model = self.loop.run_sync(lambda: sm.get_session(session_id=session_id)) expected = {'id':session_id, - 'path': u'/path/to/new_name.ipynb', + 'path': 
'/path/to/new_name.ipynb', 'type': 'notebook', 'name': None, - 'notebook': {'path': u'/path/to/new_name.ipynb', 'name': None}, + 'notebook': {'path': '/path/to/new_name.ipynb', 'name': None}, 'kernel': { 'id': 'A', 'name':'julia', @@ -218,10 +218,10 @@ def test_delete_session(self): new_sessions = self.loop.run_sync(lambda: sm.list_sessions()) expected = [{ 'id': sessions[0]['id'], - 'path': u'/path/to/1/test1.ipynb', + 'path': '/path/to/1/test1.ipynb', 'type': 'notebook', 'name': None, - 'notebook': {'path': u'/path/to/1/test1.ipynb', 'name': None}, + 'notebook': {'path': '/path/to/1/test1.ipynb', 'name': None}, 'kernel': { 'id': 'A', 'name':'python', @@ -232,7 +232,7 @@ def test_delete_session(self): }, { 'id': sessions[2]['id'], 'type': 'console', - 'path': u'/path/to/3', + 'path': '/path/to/3', 'name': 'foo', 'kernel': { 'id': 'C', diff --git a/notebook/services/sessions/tests/test_sessions_api.py b/notebook/services/sessions/tests/test_sessions_api.py index cb4bc0bdea..67321e5e34 100644 --- a/notebook/services/sessions/tests/test_sessions_api.py +++ b/notebook/services/sessions/tests/test_sessions_api.py @@ -2,11 +2,9 @@ import errno from functools import partial -import io import os import json import shutil -import sys import time from unittest import SkipTest @@ -25,7 +23,7 @@ pjoin = os.path.join -class SessionAPI(object): +class SessionAPI: """Wrapper for notebook API calls.""" def __init__(self, request): self.request = request @@ -101,7 +99,7 @@ def setUp(self): raise self.addCleanup(partial(shutil.rmtree, subdir, ignore_errors=True)) - with io.open(pjoin(subdir, 'nb1.ipynb'), 'w', encoding='utf-8') as f: + with open(pjoin(subdir, 'nb1.ipynb'), 'w', encoding='utf-8') as f: nb = new_notebook() write(nb, f, version=4) @@ -130,7 +128,7 @@ def test_create(self): self.assertIn('id', newsession) self.assertEqual(newsession['path'], 'foo/nb1.ipynb') self.assertEqual(newsession['type'], 'notebook') - self.assertEqual(resp.headers['Location'], self.url_prefix + 
'api/sessions/{0}'.format(newsession['id'])) + self.assertEqual(resp.headers['Location'], f'{self.url_prefix}api/sessions/{newsession["id"]}') sessions = self.sess_api.list().json() self.assertEqual(sessions, [newsession]) @@ -174,7 +172,7 @@ def test_create_with_kernel_id(self): self.assertIn('id', newsession) self.assertEqual(newsession['path'], 'foo/nb1.ipynb') self.assertEqual(newsession['kernel']['id'], kernel['id']) - self.assertEqual(resp.headers['Location'], self.url_prefix + 'api/sessions/{0}'.format(newsession['id'])) + self.assertEqual(resp.headers['Location'], f'{self.url_prefix}api/sessions/{newsession["id"]}') sessions = self.sess_api.list().json() self.assertEqual(sessions, [newsession]) @@ -273,11 +271,11 @@ class AsyncSessionAPITest(SessionAPITest): def setup_class(cls): if not async_testing_enabled: # Can be removed once jupyter_client >= 6.1 is required. raise SkipTest("AsyncSessionAPITest tests skipped due to down-level jupyter_client!") - super(AsyncSessionAPITest, cls).setup_class() + super().setup_class() @classmethod def get_argv(cls): - argv = super(AsyncSessionAPITest, cls).get_argv() + argv = super().get_argv() # Before we extend the argv with the class, ensure that appropriate jupyter_client is available. # if not available, don't set kernel_manager_class, resulting in the repeat of sync-based tests. 
diff --git a/notebook/terminal/__init__.py b/notebook/terminal/__init__.py index 57e74c9bf4..401663702e 100644 --- a/notebook/terminal/__init__.py +++ b/notebook/terminal/__init__.py @@ -5,7 +5,7 @@ from ..utils import check_version if not check_version(terminado.__version__, '0.8.3'): - raise ImportError("terminado >= 0.8.3 required, found %s" % terminado.__version__) + raise ImportError(f"terminado >= 0.8.3 required, found {terminado.__version__}") from ipython_genutils.py3compat import which from notebook.utils import url_path_join as ujoin diff --git a/notebook/terminal/handlers.py b/notebook/terminal/handlers.py index 0e026d00ab..99a592d484 100644 --- a/notebook/terminal/handlers.py +++ b/notebook/terminal/handlers.py @@ -15,8 +15,12 @@ class TerminalHandler(IPythonHandler): """Render the terminal interface.""" @web.authenticated def get(self, term_name): - self.write(self.render_template('terminal.html', - ws_path="terminals/websocket/%s" % term_name)) + self.write( + self.render_template( + 'terminal.html', + ws_path=f"terminals/websocket/{term_name}", + ) + ) class NamedTerminalHandler(IPythonHandler): @@ -35,7 +39,7 @@ class NewTerminalHandler(IPythonHandler): def get(self, term_name): if term_name == 'new': raise web.HTTPError(400, "Terminal name 'new' is reserved.") - new_path = self.request.path.replace("new/{}".format(term_name), term_name) + new_path = self.request.path.replace(f"new/{term_name}", term_name) if term_name in self.terminal_manager.terminals: self.set_header('Location', new_path) self.set_status(302) @@ -50,7 +54,7 @@ class TermSocket(WebSocketMixin, IPythonHandler, terminado.TermSocket): def origin_check(self): """Terminado adds redundant origin_check - + Tornado already calls check_origin, so don't do anything here. 
""" return True diff --git a/notebook/terminal/terminalmanager.py b/notebook/terminal/terminalmanager.py index ed901310ec..5a4a29e29b 100644 --- a/notebook/terminal/terminalmanager.py +++ b/notebook/terminal/terminalmanager.py @@ -49,7 +49,7 @@ def create(self): def create_with_name(self, name): """Create a new terminal.""" if name in self.terminals: - raise web.HTTPError(409, "A terminal with name '{}' already exists.".format(name)) + raise web.HTTPError(409, f"A terminal with name '{name}' already exists.") term = self.get_terminal(name) return self._finish_create(name, term) @@ -110,7 +110,7 @@ def get_terminal_model(self, name): def _check_terminal(self, name): """Check a that terminal 'name' exists and raise 404 if not.""" if name not in self.terminals: - raise web.HTTPError(404, u'Terminal not found: %s' % name) + raise web.HTTPError(404, f'Terminal not found: {name}') def _initialize_culler(self): """Start culler if 'cull_inactive_timeout' is greater than zero. @@ -139,8 +139,9 @@ async def _cull_terminals(self): try: await self._cull_inactive_terminal(name) except Exception as e: - self.log.exception("The following exception was encountered while checking the " - "activity of terminal {}: {}".format(name, e)) + self.log.exception( + f"The following exception was encountered while checking the activity of terminal {name}: {e}" + ) async def _cull_inactive_terminal(self, name): try: diff --git a/notebook/terminal/tests/test_terminals_api.py b/notebook/terminal/tests/test_terminals_api.py index 9e0cba82da..2eb22174c9 100644 --- a/notebook/terminal/tests/test_terminals_api.py +++ b/notebook/terminal/tests/test_terminals_api.py @@ -9,7 +9,7 @@ from notebook.tests.launchnotebook import NotebookTestBase, assert_http_error -class TerminalAPI(object): +class TerminalAPI: """Wrapper for terminal REST API requests""" def __init__(self, request, base_url, headers): self.request = request diff --git a/notebook/tests/conftest.py b/notebook/tests/conftest.py index 
b9aee32cdc..135e814673 100644 --- a/notebook/tests/conftest.py +++ b/notebook/tests/conftest.py @@ -1,9 +1,7 @@ - - def pytest_addoption(parser): parser.addoption('--integration_tests', action='store_true', dest="integration_tests", default=False, help="enable integration tests") def pytest_configure(config): if not config.option.integration_tests: - setattr(config.option, 'markexpr', 'not integration_tests') \ No newline at end of file + setattr(config.option, 'markexpr', 'not integration_tests') diff --git a/notebook/tests/launchnotebook.py b/notebook/tests/launchnotebook.py index 426ce43ffd..53686e8aac 100644 --- a/notebook/tests/launchnotebook.py +++ b/notebook/tests/launchnotebook.py @@ -69,12 +69,12 @@ def wait_until_dead(cls): cls.notebook_thread.join(timeout=MAX_WAITTIME) if cls.notebook_thread.is_alive(): raise TimeoutError("Undead notebook server") - + @classmethod def auth_headers(cls): headers = {} if cls.token: - headers['Authorization'] = 'token %s' % cls.token + headers['Authorization'] = f'token {cls.token}' return headers @staticmethod @@ -84,7 +84,7 @@ def fetch_url(url): @classmethod def request(cls, verb, path, **kwargs): """Send a request to my server - + with authentication and everything. """ headers = kwargs.setdefault('headers', {}) @@ -219,13 +219,13 @@ def cleanup_zmq(): @classmethod def base_url(cls): - return 'http://localhost:%i%s' % (cls.port, cls.url_prefix) + return f'http://localhost:{cls.port}{cls.url_prefix}' class UNIXSocketNotebookTestBase(NotebookTestBase): # Rely on `/tmp` to avoid any Linux socket length max buffer # issues. Key on PID for process-wise concurrency. 
- sock = '/tmp/.notebook.%i.sock' % os.getpid() + sock = f'/tmp/.notebook.{os.getpid()}.sock' @classmethod def get_bind_args(cls): @@ -233,7 +233,7 @@ def get_bind_args(cls): @classmethod def base_url(cls): - return '%s%s' % (urlencode_unix_socket(cls.sock), cls.url_prefix) + return f'{urlencode_unix_socket(cls.sock)}{cls.url_prefix}' @staticmethod def fetch_url(url): @@ -251,7 +251,7 @@ def assert_http_error(status, msg=None): except requests.HTTPError as e: real_status = e.response.status_code assert real_status == status, \ - "Expected status %d, got %d" % (status, real_status) + f"Expected status {status}, got {real_status}" if msg: assert msg in str(e), e else: diff --git a/notebook/tests/selenium/conftest.py b/notebook/tests/selenium/conftest.py index 64cdfa23bd..63dfff864d 100644 --- a/notebook/tests/selenium/conftest.py +++ b/notebook/tests/selenium/conftest.py @@ -39,8 +39,8 @@ def notebook_server(): info = {} with TemporaryDirectory() as td: nbdir = info['nbdir'] = pjoin(td, 'notebooks') - os.makedirs(pjoin(nbdir, u'sub ∂ir1', u'sub ∂ir 1a')) - os.makedirs(pjoin(nbdir, u'sub ∂ir2', u'sub ∂ir 1b')) + os.makedirs(pjoin(nbdir, 'sub ∂ir1', 'sub ∂ir 1a')) + os.makedirs(pjoin(nbdir, 'sub ∂ir2', 'sub ∂ir 1b')) info['extra_env'] = { 'JUPYTER_CONFIG_DIR': pjoin(td, 'jupyter_config'), @@ -60,7 +60,7 @@ def notebook_server(): print("command=", command) proc = info['popen'] = Popen(command, cwd=nbdir, env=env) info_file_path = pjoin(td, 'jupyter_runtime', - 'nbserver-%i.json' % proc.pid) + f'nbserver-{proc.pid:d}.json') info.update(_wait_for_server(proc, info_file_path)) print("Notebook server info:", info) @@ -91,10 +91,10 @@ def make_sauce_driver(): if capabilities['browserName'] == 'firefox': # Attempt to work around issue where browser loses authentication capabilities['version'] = '57.0' - hub_url = "%s:%s@localhost:4445" % (username, access_key) + hub_url = f"{username}:{access_key}@localhost:4445" print("Connecting remote driver on Sauce Labs") driver = 
Remote(desired_capabilities=capabilities, - command_executor="http://%s/wd/hub" % hub_url) + command_executor=f"http://{hub_url}/wd/hub") return driver diff --git a/notebook/tests/selenium/test_dashboard_nav.py b/notebook/tests/selenium/test_dashboard_nav.py index 8e09979025..f15a634578 100644 --- a/notebook/tests/selenium/test_dashboard_nav.py +++ b/notebook/tests/selenium/test_dashboard_nav.py @@ -1,21 +1,15 @@ import os -from selenium.webdriver.common.by import By -from selenium.webdriver.support.ui import WebDriverWait -from selenium.webdriver.support import expected_conditions as EC - from notebook.utils import url_path_join from notebook.tests.selenium.utils import wait_for_selector pjoin = os.path.join class PageError(Exception): - """Error for an action being incompatible with the current jupyter web page. - - """ + """Error for an action being incompatible with the current jupyter web page.""" def __init__(self, message): self.message = message - + def url_in_tree(browser, url=None): if url is None: @@ -26,7 +20,7 @@ def url_in_tree(browser, url=None): def get_list_items(browser): """Gets list items from a directory listing page - + Raises PageError if not in directory listing page (url has tree in it) """ if not url_in_tree(browser): @@ -42,9 +36,7 @@ def get_list_items(browser): } for a in browser.find_elements_by_class_name('item_link')] def only_dir_links(browser): - """Return only links that point at other directories in the tree - - """ + """Return only links that point at other directories in the tree""" items = get_list_items(browser) return [i for i in items if url_in_tree(browser, i['link']) and i['label'] != '..'] diff --git a/notebook/tests/selenium/test_deletecell.py b/notebook/tests/selenium/test_deletecell.py index 8253b9e63a..0e60adfaa6 100644 --- a/notebook/tests/selenium/test_deletecell.py +++ b/notebook/tests/selenium/test_deletecell.py @@ -1,6 +1,5 @@ - def cell_is_deletable(nb, index): - JS = 'return 
Jupyter.notebook.get_cell({}).is_deletable();'.format(index) + JS = f'return Jupyter.notebook.get_cell({index}).is_deletable();' return nb.browser.execute_script(JS) def remove_all_cells(notebook): @@ -20,11 +19,11 @@ def test_delete_cells(prefill_notebook): notebook.set_cell_metadata(0, 'deletable', 'false') notebook.set_cell_metadata(1, 'deletable', 0 - ) + ) assert not cell_is_deletable(notebook, 0) assert cell_is_deletable(notebook, 1) assert cell_is_deletable(notebook, 2) - + # Try to delete cell a (should not be deleted) notebook.delete_cell(0) assert notebook.get_cells_contents() == [a, b, c] diff --git a/notebook/tests/selenium/test_display_image.py b/notebook/tests/selenium/test_display_image.py index 0ee9f0c800..4e3adfd598 100644 --- a/notebook/tests/selenium/test_display_image.py +++ b/notebook/tests/selenium/test_display_image.py @@ -27,9 +27,9 @@ def validate_img(notebook, cell_index, image_fmt, retina): b64data = b64_image_data[image_fmt] commands = [ - 'b64data = %s' % b64data, + f'b64data = {b64data}', 'data = base64.decodebytes(b64data)', - 'display(Image(data, retina=%s))' % retina + f'display(Image(data, retina={retina}))' ] notebook.append("\n".join(commands)) notebook.execute_cell(cell_index) @@ -40,7 +40,7 @@ def validate_img(notebook, cell_index, image_fmt, retina): src = img_element.get_attribute("src") prefix = src.split(',')[0] - expected_prefix = "data:%s;base64" % image_fmt + expected_prefix = f"data:{image_fmt};base64" assert prefix == expected_prefix expected_size = 1 if retina else 2 diff --git a/notebook/tests/selenium/test_display_isolation.py b/notebook/tests/selenium/test_display_isolation.py index 461f31352b..51ca082bcd 100644 --- a/notebook/tests/selenium/test_display_isolation.py +++ b/notebook/tests/selenium/test_display_isolation.py @@ -28,18 +28,16 @@ def isolated_html(notebook): red = 'rgb(255, 0, 0)' blue = 'rgb(0, 0, 255)' test_str = "
Should turn red from non-isolation
" - notebook.add_and_execute_cell(content="display(HTML(%r))" % test_str) + notebook.add_and_execute_cell(content=f"display(HTML({test_str!r}))") non_isolated = ( - "" % red + - "
Should be red
") - display_ni = "display(HTML(%r), metadata={'isolated':False})" % ( - non_isolated) + f"" + f"
Should be red
") + display_ni = f"display(HTML({non_isolated!r}), metadata={{'isolated':False}})" notebook.add_and_execute_cell(content=display_ni) isolated = ( - "" % blue + - "
Should be blue
") - display_i = "display(HTML(%r), metadata={'isolated':True})" % ( - isolated) + f"" + f"
Should be blue
") + display_i = f"display(HTML({isolated!r}), metadata={{'isolated':True}})" notebook.add_and_execute_cell(content=display_i) iframe = wait_for_tag(notebook.browser, "iframe", single=True) @@ -69,7 +67,7 @@ def isolated_svg(notebook): """ yellow = "rgb(255, 255, 0)" black = "rgb(0, 0, 0)" - svg_1_str = """s1 = ''''''""" % yellow + svg_1_str = f"""s1 = ''''''""" svg_2_str = """s2 = ''''''""" notebook.add_and_execute_cell(content=svg_1_str) diff --git a/notebook/tests/selenium/test_kernel_menu.py b/notebook/tests/selenium/test_kernel_menu.py index 678f39be8e..b68b5b3db3 100644 --- a/notebook/tests/selenium/test_kernel_menu.py +++ b/notebook/tests/selenium/test_kernel_menu.py @@ -1,4 +1,3 @@ -from selenium.common.exceptions import TimeoutException from selenium.webdriver.common.by import By from selenium.webdriver.support import expected_conditions as EC from selenium.webdriver.support.ui import WebDriverWait @@ -56,5 +55,5 @@ def test_menu_items(notebook): wait_for_selector(browser, menu_item, visible=True, single=True).click() WebDriverWait(browser, 10).until( lambda b: notebook.is_kernel_running(), - message="Restart (%r) after shutdown did not start kernel" % menu_item + message=f"Restart ({menu_item!r}) after shutdown did not start kernel" ) diff --git a/notebook/tests/selenium/test_merge_cells.py b/notebook/tests/selenium/test_merge_cells.py index 23c2b87e85..0fb4dd5662 100644 --- a/notebook/tests/selenium/test_merge_cells.py +++ b/notebook/tests/selenium/test_merge_cells.py @@ -19,18 +19,18 @@ def test_merge_cells(prefill_notebook): # Focus on the second cell and merge it with the cell above notebook.focus_cell(1) notebook.browser.execute_script("Jupyter.notebook.merge_cell_above();") - merged_a_b = "%s\n\n%s" % (a, b) + merged_a_b = f"{a}\n\n{b}" assert notebook.get_cells_contents() == [merged_a_b, c, d, e, f] # Focus on the second cell and merge it with the cell below notebook.focus_cell(1) 
notebook.browser.execute_script("Jupyter.notebook.merge_cell_below();") - merged_c_d = "%s\n\n%s" % (c, d) + merged_c_d = f"{c}\n\n{d}" assert notebook.get_cells_contents() == [merged_a_b, merged_c_d, e, f] # Merge everything down to a single cell with selected cells notebook.select_cell_range(0,3) notebook.browser.execute_script("Jupyter.notebook.merge_selected_cells();") - merged_all = "%s\n\n%s\n\n%s\n\n%s" % (merged_a_b, merged_c_d, e, f) + merged_all = f"{merged_a_b}\n\n{merged_c_d}\n\n{e}\n\n{f}" assert notebook.get_cells_contents() == [merged_all] diff --git a/notebook/tests/selenium/test_multiselect_toggle.py b/notebook/tests/selenium/test_multiselect_toggle.py index 14f51d0259..372d83b275 100644 --- a/notebook/tests/selenium/test_multiselect_toggle.py +++ b/notebook/tests/selenium/test_multiselect_toggle.py @@ -1,4 +1,3 @@ - INITIAL_CELLS = ['print("a")', 'print("b")', 'print("c")'] def test_multiselect_toggle(prefill_notebook): notebook = prefill_notebook(INITIAL_CELLS) @@ -24,7 +23,7 @@ def select_cells(): cell_output_states = notebook.browser.execute_script( "return Jupyter.notebook.get_cells().map(c => c.collapsed)") assert cell_output_states == [False] * 3, "ensure that all cells are not collapsed" - + # Test that cells, which start off not scrolled are scrolled after # calling the multiselected scroll toggle. 
select_cells() diff --git a/notebook/tests/selenium/test_prompt_numbers.py b/notebook/tests/selenium/test_prompt_numbers.py index 42e27775ec..38872b8550 100755 --- a/notebook/tests/selenium/test_prompt_numbers.py +++ b/notebook/tests/selenium/test_prompt_numbers.py @@ -1,4 +1,3 @@ - def test_prompt_numbers(prefill_notebook): notebook = prefill_notebook(['print("a")']) diff --git a/notebook/tests/selenium/test_save.py b/notebook/tests/selenium/test_save.py index 3ed7721f31..d566866b06 100644 --- a/notebook/tests/selenium/test_save.py +++ b/notebook/tests/selenium/test_save.py @@ -58,8 +58,7 @@ def test_save(notebook): break hrefs_nonmatch.append(href) else: - raise AssertionError("{!r} not found in {!r}" - .format(escaped_name, hrefs_nonmatch)) + raise AssertionError(f"{escaped_name!r} not found in {hrefs_nonmatch!r}") current_name = notebook.browser.execute_script("return Jupyter.notebook.notebook_name") assert current_name == nbname diff --git a/notebook/tests/selenium/test_save_as_notebook.py b/notebook/tests/selenium/test_save_as_notebook.py index 567087d438..781410ce83 100644 --- a/notebook/tests/selenium/test_save_as_notebook.py +++ b/notebook/tests/selenium/test_save_as_notebook.py @@ -19,8 +19,8 @@ def get_notebook_name(nb): return nb.browser.execute_script(JS) def set_notebook_name(nb, name): - JS = 'Jupyter.notebook.rename("{}")'.format(name) - nb.browser.execute_script(JS) + JS = f'Jupyter.notebook.rename("{name}")' + nb.browser.execute_script(JS) def test_save_notebook_as(notebook): # Set a name for comparison later diff --git a/notebook/tests/selenium/test_save_readonly_as.py b/notebook/tests/selenium/test_save_readonly_as.py index 12e7b720c7..94c066753b 100644 --- a/notebook/tests/selenium/test_save_readonly_as.py +++ b/notebook/tests/selenium/test_save_readonly_as.py @@ -1,4 +1,4 @@ -from notebook.tests.selenium.utils import wait_for_selector, Notebook +from notebook.tests.selenium.utils import wait_for_selector from selenium.webdriver.common.keys 
import Keys from selenium.webdriver.support.ui import WebDriverWait diff --git a/notebook/tests/selenium/utils.py b/notebook/tests/selenium/utils.py index 4407fce39d..d828611cfb 100644 --- a/notebook/tests/selenium/utils.py +++ b/notebook/tests/selenium/utils.py @@ -92,29 +92,29 @@ def multiple_found(driver): class CellTypeError(ValueError): - + def __init__(self, message=""): self.message = message class Notebook: - + def __init__(self, browser): self.browser = browser self._wait_for_start() self.disable_autosave_and_onbeforeunload() - + def __len__(self): return len(self.cells) - + def __getitem__(self, key): return self.cells[key] - + def __setitem__(self, key, item): if isinstance(key, int): self.edit_cell(index=key, content=item, render=False) # TODO: re-add slicing support, handle general python slicing behaviour - # includes: overwriting the entire self.cells object if you do + # includes: overwriting the entire self.cells object if you do # self[:] = [] # elif isinstance(key, slice): # indices = (self.index(cell) for cell in self[key]) @@ -138,20 +138,20 @@ def body(self): @property def cells(self): """Gets all cells once they are visible. - + """ return self.browser.find_elements_by_class_name("cell") - + @property def current_index(self): return self.index(self.current_cell) - + def index(self, cell): return self.cells.index(cell) def disable_autosave_and_onbeforeunload(self): """Disable request to save before closing window and autosave. - + This is most easily done by using js directly. 
""" self.browser.execute_script("window.onbeforeunload = null;") @@ -159,7 +159,7 @@ def disable_autosave_and_onbeforeunload(self): def to_command_mode(self): """Changes us into command mode on currently focused cell - + """ self.body.send_keys(Keys.ESCAPE) self.browser.execute_script("return Jupyter.notebook.handle_command_mode(" @@ -171,7 +171,7 @@ def focus_cell(self, index=0): cell.click() self.to_command_mode() self.current_cell = cell - + def select_cell_range(self, initial_index=0, final_index=0): self.focus_cell(initial_index) self.to_command_mode() @@ -199,9 +199,8 @@ def convert_cell_type(self, index=0, cell_type="code"): elif cell_type == "code": self.current_cell.send_keys("y") else: - raise CellTypeError(("{} is not a valid cell type," - "use 'code', 'markdown', or 'raw'").format(cell_type)) - + raise CellTypeError(f"{cell_type} is not a valid cell type,use 'code', 'markdown', or 'raw'") + self.wait_for_stale_cell(cell) self.focus_cell(index) return self.current_cell @@ -209,7 +208,7 @@ def convert_cell_type(self, index=0, cell_type="code"): def wait_for_stale_cell(self, cell): """ This is needed to switch a cell's mode and refocus it, or to render it. - Warning: there is currently no way to do this when changing between + Warning: there is currently no way to do this when changing between markdown and raw cells. 
""" wait = WebDriverWait(self.browser, 10) @@ -234,15 +233,15 @@ def wait_for_cell_output(self, index=0, timeout=10): ) def set_cell_metadata(self, index, key, value): - JS = 'Jupyter.notebook.get_cell({}).metadata.{} = {}'.format(index, key, value) + JS = f'Jupyter.notebook.get_cell({index}).metadata.{key} = {value}' return self.browser.execute_script(JS) def get_cell_type(self, index=0): - JS = 'return Jupyter.notebook.get_cell({}).cell_type'.format(index) + JS = f'return Jupyter.notebook.get_cell({index}).cell_type' return self.browser.execute_script(JS) - + def set_cell_input_prompt(self, index, prmpt_val): - JS = 'Jupyter.notebook.get_cell({}).set_input_prompt({})'.format(index, prmpt_val) + JS = f'Jupyter.notebook.get_cell({index}).set_input_prompt({prmpt_val})' self.browser.execute_script(JS) def edit_cell(self, cell=None, index=0, content="", render=False): @@ -267,7 +266,7 @@ def edit_cell(self, cell=None, index=0, content="", render=False): def execute_cell(self, cell_or_index=None): if isinstance(cell_or_index, int): index = cell_or_index - elif isinstance(cell_or_index, WebElement): + elif isinstance(cell_or_index, WebElement): index = self.index(cell_or_index) else: raise TypeError("execute_cell only accepts a WebElement or an int") @@ -295,18 +294,18 @@ def delete_cell(self, index): def add_markdown_cell(self, index=-1, content="", render=True): self.add_cell(index, cell_type="markdown") self.edit_cell(index=index, content=content, render=render) - + def append(self, *values, cell_type="code"): for i, value in enumerate(values): if isinstance(value, str): self.add_cell(cell_type=cell_type, content=value) else: - raise TypeError("Don't know how to add cell from %r" % value) - + raise TypeError(f"Don't know how to add cell from {value!r}") + def extend(self, values): self.append(*values) - + def run_all(self): for cell in self: self.execute_cell(cell) @@ -320,7 +319,7 @@ def is_kernel_running(self): ) def clear_cell_output(self, index): - JS = 
'Jupyter.notebook.clear_output({})'.format(index) + JS = f'Jupyter.notebook.clear_output({index})' self.browser.execute_script(JS) @classmethod @@ -336,24 +335,24 @@ def select_kernel(browser, kernel_name='kernel-python3'): wait = WebDriverWait(browser, 10) new_button = wait.until(EC.element_to_be_clickable((By.ID, "new-dropdown-button"))) new_button.click() - kernel_selector = '#{} a'.format(kernel_name) + kernel_selector = f'#{kernel_name} a' kernel = wait_for_selector(browser, kernel_selector, single=True) kernel.click() @contextmanager def new_window(browser): - """Contextmanager for switching to & waiting for a window created. - - This context manager gives you the ability to create a new window inside + """Contextmanager for switching to & waiting for a window created. + + This context manager gives you the ability to create a new window inside the created context and it will switch you to that new window. - + Usage example: - + from notebook.tests.selenium.utils import new_window, Notebook - + ⋮ # something that creates a browser object - + with new_window(browser): select_kernel(browser, kernel_name=kernel_name) nb = Notebook(browser) @@ -400,7 +399,7 @@ def trigger_keystrokes(browser, *keys): browser.send_keys(getattr(Keys, keys[0].upper(), keys[0])) def validate_dualmode_state(notebook, mode, index): - '''Validate the entire dual mode state of the notebook. + '''Validate the entire dual mode state of the notebook. Checks if the specified cell is selected, and the mode and keyboard mode are the same. Depending on the mode given: Command: Checks that no cells are in focus or in edit mode. 
@@ -462,7 +461,7 @@ def is_focused_on(index): assert is_focused_on(None) #no focused cells assert is_only_cell_edit(None) #no cells in edit mode - + elif mode == 'edit': assert is_focused_on(index) #The specified cell is focused diff --git a/notebook/tests/test_files.py b/notebook/tests/test_files.py index b711945756..c7abbc3eba 100644 --- a/notebook/tests/test_files.py +++ b/notebook/tests/test_files.py @@ -1,12 +1,9 @@ """Test the /files/ handler.""" -import io import os -from unicodedata import normalize pjoin = os.path.join -import requests import json from nbformat import write @@ -16,21 +13,20 @@ from notebook.utils import url_path_join from .launchnotebook import NotebookTestBase -from ipython_genutils import py3compat class FilesTest(NotebookTestBase): def test_hidden_files(self): not_hidden = [ - u'å b', - u'å b/ç. d', + 'å b', + 'å b/ç. d', ] hidden = [ - u'.å b', - u'å b/.ç d', + '.å b', + 'å b/.ç d', ] dirs = not_hidden + hidden - + nbdir = self.notebook_dir for d in dirs: path = pjoin(nbdir, d.replace('/', os.sep)) @@ -82,23 +78,23 @@ def test_contents_manager(self): nb = new_notebook( cells=[ - new_markdown_cell(u'Created by test ³'), + new_markdown_cell('Created by test ³'), new_code_cell("print(2*6)", outputs=[ new_output("stream", text="12"), ]) ] ) - with io.open(pjoin(nbdir, 'testnb.ipynb'), 'w', + with open(pjoin(nbdir, 'testnb.ipynb'), 'w', encoding='utf-8') as f: write(nb, f, version=4) - with io.open(pjoin(nbdir, 'test.bin'), 'wb') as f: + with open(pjoin(nbdir, 'test.bin'), 'wb') as f: f.write(b'\xff' + os.urandom(5)) f.close() - with io.open(pjoin(nbdir, 'test.txt'), 'w') as f: - f.write(u'foobar') + with open(pjoin(nbdir, 'test.txt'), 'w') as f: + f.write('foobar') f.close() r = self.request('GET', 'files/testnb.ipynb') @@ -116,14 +112,14 @@ def test_contents_manager(self): self.assertEqual(r.status_code, 200) self.assertEqual(r.headers['content-type'], 'text/plain; charset=UTF-8') self.assertEqual(r.text, 'foobar') - + def 
test_download(self): nbdir = self.notebook_dir - + text = 'hello' with open(pjoin(nbdir, 'test.txt'), 'w') as f: f.write(text) - + r = self.request('GET', 'files/test.txt') disposition = r.headers.get('Content-Disposition', '') self.assertNotIn('attachment', disposition) @@ -132,24 +128,24 @@ def test_download(self): disposition = r.headers.get('Content-Disposition', '') self.assertIn('attachment', disposition) self.assertIn("filename*=utf-8''test.txt", disposition) - + def test_view_html(self): nbdir = self.notebook_dir - + html = '
Test test
' with open(pjoin(nbdir, 'test.html'), 'w') as f: f.write(html) - + r = self.request('GET', 'view/test.html') self.assertEqual(r.status_code, 200) def test_old_files_redirect(self): """pre-2.0 'files/' prefixed links are properly redirected""" nbdir = self.notebook_dir - + os.mkdir(pjoin(nbdir, 'files')) os.makedirs(pjoin(nbdir, 'sub', 'files')) - + for prefix in ('', 'sub'): with open(pjoin(nbdir, prefix, 'files', 'f1.txt'), 'w') as f: f.write(prefix + '/files/f1') diff --git a/notebook/tests/test_gateway.py b/notebook/tests/test_gateway.py index f84ba06369..fc198177ac 100644 --- a/notebook/tests/test_gateway.py +++ b/notebook/tests/test_gateway.py @@ -6,7 +6,6 @@ from io import StringIO from unittest.mock import patch -from tornado import gen from tornado.web import HTTPError from tornado.httpclient import HTTPRequest, HTTPResponse @@ -62,7 +61,7 @@ async def mock_gateway_request(url, **kwargs): response = await maybe_future(HTTPResponse(request, 200, buffer=response_buf)) return response else: - raise HTTPError(404, message='Kernelspec does not exist: %s' % requested_kernelspec) + raise HTTPError(404, message=f'Kernelspec does not exist: {requested_kernelspec}') # Create kernel if endpoint.endswith('/api/kernels') and method == 'POST': @@ -96,16 +95,16 @@ async def mock_gateway_request(url, **kwargs): response = await maybe_future(HTTPResponse(request, 204)) return response else: - raise HTTPError(404, message='Kernel does not exist: %s' % requested_kernel_id) + raise HTTPError(404, message=f'Kernel does not exist: {requested_kernel_id}') elif action == 'restart': if requested_kernel_id in running_kernels: response_buf = StringIO(json.dumps(running_kernels.get(requested_kernel_id))) response = await maybe_future(HTTPResponse(request, 204, buffer=response_buf)) return response else: - raise HTTPError(404, message='Kernel does not exist: %s' % requested_kernel_id) + raise HTTPError(404, message=f'Kernel does not exist: {requested_kernel_id}') else: - raise 
HTTPError(404, message='Bad action detected: %s' % action) + raise HTTPError(404, message=f'Bad action detected: {action}') # Shutdown existing kernel if endpoint.rfind('/api/kernels/') >= 0 and method == 'DELETE': @@ -122,7 +121,7 @@ async def mock_gateway_request(url, **kwargs): response = await maybe_future(HTTPResponse(request, 200, buffer=response_buf)) return response else: - raise HTTPError(404, message='Kernel does not exist: %s' % requested_kernel_id) + raise HTTPError(404, message=f'Kernel does not exist: {requested_kernel_id}') mocked_gateway = patch('notebook.gateway.managers.gateway_request', mock_gateway_request) @@ -136,23 +135,23 @@ class TestGateway(NotebookTestBase): @classmethod def setup_class(cls): GatewayClient.clear_instance() - super(TestGateway, cls).setup_class() + super().setup_class() @classmethod def teardown_class(cls): GatewayClient.clear_instance() - super(TestGateway, cls).teardown_class() + super().teardown_class() @classmethod def get_patch_env(cls): - test_env = super(TestGateway, cls).get_patch_env() + test_env = super().get_patch_env() test_env.update({'JUPYTER_GATEWAY_URL': TestGateway.mock_gateway_url, 'JUPYTER_GATEWAY_CONNECT_TIMEOUT': '44.4'}) return test_env @classmethod def get_argv(cls): - argv = super(TestGateway, cls).get_argv() + argv = super().get_argv() argv.extend(['--GatewayClient.request_timeout=96.0', '--GatewayClient.http_user=' + TestGateway.mock_http_user]) return argv diff --git a/notebook/tests/test_nbextensions.py b/notebook/tests/test_nbextensions.py index 3d9549658a..655b2284e9 100644 --- a/notebook/tests/test_nbextensions.py +++ b/notebook/tests/test_nbextensions.py @@ -69,9 +69,9 @@ def cleanup_tempdirs(): self.src = self.tempdir() self.files = files = [ - pjoin(u'ƒile'), - pjoin(u'∂ir', u'ƒile1'), - pjoin(u'∂ir', u'∂ir2', u'ƒile2'), + pjoin('ƒile'), + pjoin('∂ir', 'ƒile1'), + pjoin('∂ir', '∂ir2', 'ƒile2'), ] for file_name in files: fullpath = os.path.join(self.src, file_name) @@ -107,15 +107,15 @@ def 
cleanup_tempdirs(): def assert_dir_exists(self, path): if not os.path.exists(path): do_exist = os.listdir(os.path.dirname(path)) - self.fail(u"%s should exist (found %s)" % (path, do_exist)) + self.fail(f"{path} should exist (found {do_exist})") def assert_not_dir_exists(self, path): if os.path.exists(path): - self.fail(u"%s should not exist" % path) + self.fail(f"{path} should not exist") def assert_installed(self, relative_path, user=False): if user: - nbext = pjoin(self.data_dir, u'nbextensions') + nbext = pjoin(self.data_dir, 'nbextensions') else: nbext = self.system_nbext self.assert_dir_exists( @@ -124,7 +124,7 @@ def assert_installed(self, relative_path, user=False): def assert_not_installed(self, relative_path, user=False): if user: - nbext = pjoin(self.data_dir, u'nbextensions') + nbext = pjoin(self.data_dir, 'nbextensions') else: nbext = self.system_nbext self.assert_not_dir_exists( @@ -150,17 +150,17 @@ def test_create_nbextensions_user(self): with TemporaryDirectory() as td: install_nbextension(self.src, user=True) self.assert_installed( - pjoin(basename(self.src), u'ƒile'), + pjoin(basename(self.src), 'ƒile'), user=True ) def test_create_nbextensions_system(self): with TemporaryDirectory() as td: - self.system_nbext = pjoin(td, u'nbextensions') + self.system_nbext = pjoin(td, 'nbextensions') with patch.object(nbextensions, 'SYSTEM_JUPYTER_PATH', [td]): install_nbextension(self.src, user=False) self.assert_installed( - pjoin(basename(self.src), u'ƒile'), + pjoin(basename(self.src), 'ƒile'), user=False ) @@ -170,28 +170,28 @@ def test_single_file(self): self.assert_installed(file_name) def test_single_dir(self): - d = u'∂ir' + d = '∂ir' install_nbextension(pjoin(self.src, d)) self.assert_installed(self.files[-1]) def test_single_dir_trailing_slash(self): - d = u'∂ir/' + d = '∂ir/' install_nbextension(pjoin(self.src, d)) self.assert_installed(self.files[-1]) if os.name == 'nt': - d = u'∂ir\\' + d = '∂ir\\' install_nbextension(pjoin(self.src, d)) 
self.assert_installed(self.files[-1]) def test_destination_file(self): file_name = self.files[0] - install_nbextension(pjoin(self.src, file_name), destination = u'ƒiledest') - self.assert_installed(u'ƒiledest') + install_nbextension(pjoin(self.src, file_name), destination = 'ƒiledest') + self.assert_installed('ƒiledest') def test_destination_dir(self): - d = u'∂ir' - install_nbextension(pjoin(self.src, d), destination = u'ƒiledest2') - self.assert_installed(pjoin(u'ƒiledest2', u'∂ir2', u'ƒile2')) + d = '∂ir' + install_nbextension(pjoin(self.src, d), destination = 'ƒiledest2') + self.assert_installed(pjoin('ƒiledest2', '∂ir2', 'ƒile2')) def test_install_nbextension(self): with self.assertRaises(TypeError): @@ -199,7 +199,7 @@ def test_install_nbextension(self): def test_overwrite_file(self): with TemporaryDirectory() as d: - fname = u'ƒ.js' + fname = 'ƒ.js' src = pjoin(d, fname) with open(src, 'w') as f: f.write('first') @@ -216,12 +216,12 @@ def test_overwrite_file(self): def test_overwrite_dir(self): with TemporaryDirectory() as src: base = basename(src) - fname = u'ƒ.js' + fname = 'ƒ.js' touch(pjoin(src, fname)) install_nbextension(src) self.assert_installed(pjoin(base, fname)) os.remove(pjoin(src, fname)) - fname2 = u'∂.js' + fname2 = '∂.js' touch(pjoin(src, fname2)) install_nbextension(src, overwrite=True) self.assert_installed(pjoin(base, fname2)) @@ -229,7 +229,7 @@ def test_overwrite_dir(self): def test_update_file(self): with TemporaryDirectory() as d: - fname = u'ƒ.js' + fname = 'ƒ.js' src = pjoin(d, fname) with open(src, 'w') as f: f.write('first') @@ -247,7 +247,7 @@ def test_update_file(self): def test_skip_old_file(self): with TemporaryDirectory() as d: - fname = u'ƒ.js' + fname = 'ƒ.js' src = pjoin(d, fname) mtime = touch(src) install_nbextension(src) @@ -287,11 +287,11 @@ def _add_file(f, fname, buf): for i,ext in enumerate((".tar.gz", ".tgz", ".tar.bz2")): path = pjoin(self.src, "myjsext" + ext) with tarfile.open(path, 'w') as f: - _add_file(f, 
"b%i.js" % i, b"b();") - _add_file(f, "foo/b%i.js" % i, b"foo();") + _add_file(f, f"b{i}.js", b"b();") + _add_file(f, f"foo/b{i}.js", b"foo();") install_nbextension(path) - self.assert_installed("b%i.js" % i) - self.assert_installed(pjoin("foo", "b%i.js" % i)) + self.assert_installed(f"b{i}.js") + self.assert_installed(pjoin("foo", f"b{i}.js")) def test_install_url(self): def fake_urlretrieve(url, dest): @@ -311,7 +311,7 @@ def fake_urlretrieve(url, dest): def test_check_nbextension(self): with TemporaryDirectory() as d: - f = u'ƒ.js' + f = 'ƒ.js' src = pjoin(d, f) touch(src) install_nbextension(src, user=True) @@ -323,7 +323,7 @@ def test_check_nbextension(self): @pytest.mark.skipif(sys.platform == "win32", reason="do not run on windows") def test_install_symlink(self): with TemporaryDirectory() as d: - f = u'ƒ.js' + f = 'ƒ.js' src = pjoin(d, f) touch(src) install_nbextension(src, symlink=True) @@ -335,8 +335,8 @@ def test_install_symlink(self): @pytest.mark.skipif(sys.platform == "win32", reason="do not run on windows") def test_overwrite_broken_symlink(self): with TemporaryDirectory() as d: - f = u'ƒ.js' - f2 = u'ƒ2.js' + f = 'ƒ.js' + f2 = 'ƒ2.js' src = pjoin(d, f) src2 = pjoin(d, f2) touch(src) @@ -351,8 +351,8 @@ def test_overwrite_broken_symlink(self): @pytest.mark.skipif(sys.platform == "win32", reason="do not run on windows") def test_install_symlink_destination(self): with TemporaryDirectory() as d: - f = u'ƒ.js' - flink = u'ƒlink.js' + f = 'ƒ.js' + flink = 'ƒlink.js' src = pjoin(d, f) touch(src) install_nbextension(src, symlink=True, destination=flink) @@ -367,7 +367,7 @@ def test_install_symlink_bad(self): install_nbextension("http://example.com/foo.js", symlink=True) with TemporaryDirectory() as d: - zf = u'ƒ.zip' + zf = 'ƒ.zip' zsrc = pjoin(d, zf) with zipfile.ZipFile(zsrc, 'w') as z: z.writestr("a.js", b"b();") @@ -377,7 +377,7 @@ def test_install_symlink_bad(self): def test_install_destination_bad(self): with TemporaryDirectory() as d: - zf = 
u'ƒ.zip' + zf = 'ƒ.zip' zsrc = pjoin(d, zf) with zipfile.ZipFile(zsrc, 'w') as z: z.writestr("a.js", b"b();") @@ -387,24 +387,24 @@ def test_install_destination_bad(self): def test_nbextension_enable(self): with TemporaryDirectory() as d: - f = u'ƒ.js' + f = 'ƒ.js' src = pjoin(d, f) touch(src) install_nbextension(src, user=True) - enable_nbextension(section='notebook', require=u'ƒ') + enable_nbextension(section='notebook', require='ƒ') config_dir = os.path.join(_get_config_dir(user=True), 'nbconfig') cm = BaseJSONConfigManager(config_dir=config_dir) - enabled = cm.get('notebook').get('load_extensions', {}).get(u'ƒ', False) + enabled = cm.get('notebook').get('load_extensions', {}).get('ƒ', False) assert enabled def test_nbextension_disable(self): self.test_nbextension_enable() - disable_nbextension(section='notebook', require=u'ƒ') + disable_nbextension(section='notebook', require='ƒ') config_dir = os.path.join(_get_config_dir(user=True), 'nbconfig') cm = BaseJSONConfigManager(config_dir=config_dir) - enabled = cm.get('notebook').get('load_extensions', {}).get(u'ƒ', False) + enabled = cm.get('notebook').get('load_extensions', {}).get('ƒ', False) assert not enabled diff --git a/notebook/tests/test_notebookapp.py b/notebook/tests/test_notebookapp.py index d48501b935..8a87d58c73 100644 --- a/notebook/tests/test_notebookapp.py +++ b/notebook/tests/test_notebookapp.py @@ -4,8 +4,6 @@ import logging import os import re -import signal -from subprocess import Popen, PIPE, STDOUT import sys from tempfile import NamedTemporaryFile @@ -159,7 +157,7 @@ def list_running_servers(runtime_dir): 'secure': False, 'token': '', 'password': False, - 'url': 'http://localhost:%i' % port, + 'url': f'http://localhost:{port}', } mock_servers = patch('notebook.notebookapp.list_running_servers', list_running_servers) @@ -211,7 +209,7 @@ class NotebookAppJSONLoggingTests(NotebookTestBase): """Tests for when json logging is enabled.""" @classmethod def setup_class(cls): - 
super(NotebookAppJSONLoggingTests, cls).setup_class() + super().setup_class() try: import json_logging cls.json_logging_available = True @@ -220,7 +218,7 @@ def setup_class(cls): @classmethod def get_patch_env(cls): - test_env = super(NotebookAppJSONLoggingTests, cls).get_patch_env() + test_env = super().get_patch_env() test_env.update({'JUPYTER_ENABLE_JSON_LOGGING': 'true'}) return test_env diff --git a/notebook/tests/test_notebookapp_integration.py b/notebook/tests/test_notebookapp_integration.py index 328fab05bc..e03ea0203d 100644 --- a/notebook/tests/test_notebookapp_integration.py +++ b/notebook/tests/test_notebookapp_integration.py @@ -1,5 +1,4 @@ import os -import pytest import stat import subprocess import sys @@ -22,7 +21,7 @@ def test_shutdown_sock_server_integration(): encoded_sock_path = urlencode_unix_socket_path(sock) p = subprocess.Popen( - ['jupyter-notebook', '--sock=%s' % sock, '--sock-mode=0700'], + ['jupyter-notebook', f'--sock={sock}', '--sock-mode=0700'], stdout=subprocess.PIPE, stderr=subprocess.PIPE ) @@ -111,12 +110,12 @@ def test_stop_multi_integration(): # Unix socket. sock = UNIXSocketNotebookTestBase.sock p2 = subprocess.Popen( - ['jupyter-notebook', '--sock=%s' % sock] + ['jupyter-notebook', f'--sock={sock}'] ) # Specified port p3 = subprocess.Popen( - ['jupyter-notebook', '--no-browser', '--port=%s' % TEST_PORT] + ['jupyter-notebook', '--no-browser', f'--port={TEST_PORT}'] ) time.sleep(3) @@ -146,12 +145,12 @@ def test_stop_multi_integration(): def test_launch_socket_collision(): """Tests UNIX socket in-use detection for lifecycle correctness.""" sock = UNIXSocketNotebookTestBase.sock - check_msg = 'socket %s is already in use' % sock + check_msg = f'socket {sock} is already in use' _ensure_stopped() # Start a server. 
- cmd = ['jupyter-notebook', '--sock=%s' % sock] + cmd = ['jupyter-notebook', f'--sock={sock}'] p1 = subprocess.Popen(cmd) time.sleep(3) @@ -161,7 +160,7 @@ def test_launch_socket_collision(): except subprocess.CalledProcessError as e: assert check_msg in e.output.decode() else: - raise AssertionError('expected error, instead got %s' % e.output.decode()) + raise AssertionError(f'expected error, instead got {e.output.decode()}') # Stop the background server, ensure it's stopped and wait on the process to exit. subprocess.check_call(['jupyter-notebook', 'stop', sock]) diff --git a/notebook/tests/test_paths.py b/notebook/tests/test_paths.py index 33f44afe4d..fc8dc14847 100644 --- a/notebook/tests/test_paths.py +++ b/notebook/tests/test_paths.py @@ -1,4 +1,3 @@ - import re from notebook.base.handlers import path_regex @@ -6,7 +5,7 @@ from .launchnotebook import NotebookTestBase # build regexps that tornado uses: -path_pat = re.compile('^' + '/x%s' % path_regex + '$') +path_pat = re.compile(f'^/x{path_regex}$') def test_path_regex(): diff --git a/notebook/tests/test_serverextensions.py b/notebook/tests/test_serverextensions.py index 898647926b..b4a94806bc 100644 --- a/notebook/tests/test_serverextensions.py +++ b/notebook/tests/test_serverextensions.py @@ -1,4 +1,3 @@ -import imp import os import site import sys @@ -13,7 +12,7 @@ from jupyter_core import paths from notebook.serverextensions import toggle_serverextension_python -from notebook import nbextensions, serverextensions, extensions +from notebook import nbextensions, extensions from notebook.notebookapp import NotebookApp from notebook.nbextensions import _get_config_dir @@ -31,7 +30,7 @@ def test_help_output(): outer_file = __file__ -class MockExtensionModule(object): +class MockExtensionModule: __file__ = outer_file @staticmethod @@ -41,13 +40,13 @@ def _jupyter_server_extension_paths(): }] loaded = False - + def load_jupyter_server_extension(self, app): self.loaded = True class MockEnvTestCase(TestCase): - + 
def tempdir(self): td = TemporaryDirectory() self.tempdirs.append(td) @@ -64,7 +63,7 @@ def setUp(self): self.system_config_dir = os.path.join(self.test_dir, 'system_config') self.system_path = [self.system_data_dir] self.system_config_path = [self.system_config_dir] - + self.patches = [] p = patch.dict('os.environ', { 'JUPYTER_CONFIG_DIR': self.config_dir, @@ -97,7 +96,7 @@ def setUp(self): self.assertEqual(paths.jupyter_config_path(), [self.config_dir] + self.system_config_path) self.assertEqual(extensions._get_config_dir(user=False), self.system_config_dir) self.assertEqual(paths.jupyter_path(), [self.data_dir] + self.system_path) - + def tearDown(self): for modulename in self._mock_extensions: sys.modules.pop(modulename) diff --git a/notebook/tests/test_utils.py b/notebook/tests/test_utils.py index 51f0e8accc..b6c386a2bc 100644 --- a/notebook/tests/test_utils.py +++ b/notebook/tests/test_utils.py @@ -84,7 +84,7 @@ def test_is_hidden(): def test_is_hidden_win32(): with TemporaryDirectory() as root: root = cast_unicode(root) - subdir1 = os.path.join(root, u'subdir') + subdir1 = os.path.join(root, 'subdir') os.makedirs(subdir1) assert not is_hidden(subdir1, root) r = ctypes.windll.kernel32.SetFileAttributesW(subdir1, 0x02) diff --git a/notebook/traittypes.py b/notebook/traittypes.py index 226657c1f4..53bb125369 100644 --- a/notebook/traittypes.py +++ b/notebook/traittypes.py @@ -75,7 +75,7 @@ class name where an object was defined. if article == "the" or (article is None and not inspect.isclass(value)): if name is not None: - result = "{} {}".format(typename, name) + result = f"{typename} {name}" if article is not None: return add_article(result, True, capital) else: @@ -91,7 +91,7 @@ class name where an object was defined. 
name = value.__func__.__name__ tick_wrap = True elif type(value).__repr__ in (object.__repr__, type.__repr__): - name = "at '%s'" % hex(id(value)) + name = f"at '{id(value):#x}'" verbose = False else: name = repr(value) @@ -107,8 +107,9 @@ class name where an object was defined. return typename return add_article(typename, False, capital) else: - raise ValueError("The 'article' argument should " - "be 'the', 'a', 'an', or None not %r" % article) + raise ValueError( + f"The 'article' argument should be 'the', 'a', 'an', or None not {article!r}" + ) def add_article(name, definite=False, capital=False): @@ -196,8 +197,10 @@ def validate(self, obj, value): try: value = self._resolve_string(value) except ImportError: - raise TraitError("The '%s' trait of %s instance must be a type, but " - "%r could not be imported" % (self.name, obj, value)) + raise TraitError( + f"The '{self.name}' trait of {obj} instance must be a type, " + f"but {value!r} could not be imported" + ) try: if self.subclass_from_klasses(value): return value @@ -283,8 +286,9 @@ class or its subclasses. Our implementation is quite different elif all(inspect.isclass(k) or isinstance(k, str) for k in klasses): self.klasses = klasses else: - raise TraitError('The klasses attribute must be a list of class names or classes' - ' not: %r' % klass) + raise TraitError( + f'The klasses attribute must be a list of class names or classes not: {klass!r}' + ) if (kw is not None) and not isinstance(kw, dict): raise TraitError("The 'kw' argument must be a dict or None.") @@ -294,7 +298,7 @@ class or its subclasses. Our implementation is quite different self.default_args = args self.default_kwargs = kw - super(InstanceFromClasses, self).__init__(**kwargs) + super().__init__(**kwargs) def instance_from_importable_klasses(self, value): "Check that a given class is a subclasses found in the klasses list." 
diff --git a/notebook/tree/handlers.py b/notebook/tree/handlers.py index bb4958d289..62e0840c0c 100644 --- a/notebook/tree/handlers.py +++ b/notebook/tree/handlers.py @@ -73,6 +73,6 @@ def get(self, path=''): default_handlers = [ - (r"/tree%s" % path_regex, TreeHandler), + (fr"/tree{path_regex}", TreeHandler), (r"/tree", TreeHandler), ] diff --git a/notebook/tree/tests/handlers.py b/notebook/tree/tests/handlers.py index ef42527616..7c37e191fa 100644 --- a/notebook/tree/tests/handlers.py +++ b/notebook/tree/tests/handlers.py @@ -72,6 +72,6 @@ def get(self, path=''): default_handlers = [ - (r"/tree%s" % path_regex, TreeHandler), + (fr"/tree{path_regex}", TreeHandler), (r"/tree", TreeHandler), ] diff --git a/notebook/tree/tests/test_tree_handler.py b/notebook/tree/tests/test_tree_handler.py index 801185ae51..b602b9402b 100644 --- a/notebook/tree/tests/test_tree_handler.py +++ b/notebook/tree/tests/test_tree_handler.py @@ -1,13 +1,10 @@ """Test the /tree handlers""" import os -import io from notebook.utils import url_path_join from nbformat import write from nbformat.v4 import new_notebook from urllib.parse import urlparse -import requests - from notebook.tests.launchnotebook import NotebookTestBase class TreeTest(NotebookTestBase): @@ -16,12 +13,12 @@ def setUp(self): d = os.path.join(nbdir, 'foo') os.mkdir(d) - with io.open(os.path.join(d, 'bar.ipynb'), 'w', encoding='utf-8') as f: + with open(os.path.join(d, 'bar.ipynb'), 'w', encoding='utf-8') as f: nb = new_notebook() write(nb, f, version=4) - with io.open(os.path.join(d, 'baz.txt'), 'w', encoding='utf-8') as f: - f.write(u'flamingo') + with open(os.path.join(d, 'baz.txt'), 'w', encoding='utf-8') as f: + f.write('flamingo') self.base_url() diff --git a/notebook/utils.py b/notebook/utils.py index ef6d9e437c..f7b6380dfe 100644 --- a/notebook/utils.py +++ b/notebook/utils.py @@ -79,14 +79,14 @@ def url_escape(path): Turns '/foo bar/' into '/foo%20bar/' """ parts = py3compat.unicode_to_str(path, 
encoding='utf8').split('/') - return u'/'.join([quote(p) for p in parts]) + return '/'.join([quote(p) for p in parts]) def url_unescape(path): """Unescape special characters in a URL path Turns '/foo%20bar/' into '/foo bar/' """ - return u'/'.join([ + return '/'.join([ py3compat.str_to_unicode(unquote(p), encoding='utf8') for p in py3compat.unicode_to_str(path, encoding='utf8').split('/') ]) @@ -382,7 +382,7 @@ def urldecode_unix_socket_path(socket_path): def urlencode_unix_socket(socket_path): """Encodes a UNIX socket URL from a socket path for the `http+unix` URI form.""" - return 'http+unix://%s' % urlencode_unix_socket_path(socket_path) + return f'http+unix://{urlencode_unix_socket_path(socket_path)}' def unix_socket_in_use(socket_path): @@ -393,7 +393,7 @@ def unix_socket_in_use(socket_path): try: sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) sock.connect(socket_path) - except socket.error: + except OSError: return False else: return True diff --git a/notebook/view/handlers.py b/notebook/view/handlers.py index e788964662..2ceed50eb6 100644 --- a/notebook/view/handlers.py +++ b/notebook/view/handlers.py @@ -13,7 +13,7 @@ class ViewHandler(IPythonHandler): def get(self, path): path = path.strip('/') if not self.contents_manager.file_exists(path): - raise web.HTTPError(404, u'File does not exist: %s' % path) + raise web.HTTPError(404, f'File does not exist: {path}') basename = path.rsplit('/', 1)[-1] file_url = url_path_join(self.base_url, 'files', url_escape(path)) @@ -22,5 +22,5 @@ def get(self, path): ) default_handlers = [ - (r"/view%s" % path_regex, ViewHandler), + (fr"/view{path_regex}", ViewHandler), ] diff --git a/setup.py b/setup.py index e255a3129a..628644bbb7 100755 --- a/setup.py +++ b/setup.py @@ -19,7 +19,7 @@ pip_message = 'This may be due to an out of date pip. Make sure you have pip >= 9.0.1.' 
try: import pip - pip_version = tuple([int(x) for x in pip.__version__.split('.')[:3]]) + pip_version = tuple(int(x) for x in pip.__version__.split('.')[:3]) if pip_version < (9, 0, 1) : pip_message = 'Your pip version is out of date, please install pip >= 9.0.1. '\ 'pip {} detected.'.format(pip.__version__) @@ -103,10 +103,10 @@ 'License :: OSI Approved :: BSD License', 'Programming Language :: Python', 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', - 'Programming Language :: Python :: 3.9' + 'Programming Language :: Python :: 3.9', + 'Programming Language :: Python :: 3.10', ], zip_safe = False, install_requires = [ @@ -136,7 +136,7 @@ 'test:sys_platform != "win32"': ['requests-unixsocket'], 'json-logging': ['json-logging'] }, - python_requires = '>=3.6', + python_requires = '>=3.7', entry_points = { 'console_scripts': [ 'jupyter-notebook = notebook.notebookapp:main', diff --git a/setupbase.py b/setupbase.py index 8849a85e3b..fd69363b9f 100644 --- a/setupbase.py +++ b/setupbase.py @@ -99,7 +99,7 @@ def find_package_data(): """ # This is not enough for these things to appear in a sdist. 
# We need to muck with the MANIFEST to get this to work - + # exclude components and less from the walk; # we will build the components separately excludes = [ @@ -119,12 +119,12 @@ def find_package_data(): continue for f in files: static_data.append(pjoin(parent, f)) - + # for verification purposes, explicitly add main.min.js # so that installation will fail if they are missing for app in ['auth', 'edit', 'notebook', 'terminal', 'tree']: static_data.append(pjoin('static', app, 'js', 'main.min.js')) - + components = pjoin("static", "components") # select the components we actually need to install # (there are lots of resources we bundle for sdist-reasons that we don't actually use) @@ -173,10 +173,10 @@ def find_package_data(): mj('config', 'TeX-AMS-MML_HTMLorMML-full.js'), mj('config', 'Safe.js'), ]) - + trees = [] mj_out = mj('jax', 'output') - + if os.path.exists(mj_out): for output in os.listdir(mj_out): path = pjoin(mj_out, output) @@ -210,7 +210,7 @@ def find_package_data(): 'notebook.services.api': ['api.yaml'], 'notebook.i18n': ['*/LC_MESSAGES/*.*'], } - + return package_data @@ -229,7 +229,7 @@ def check_package_data(package_data): def check_package_data_first(command): """decorator for checking package_data before running a given command - + Probably only needs to wrap build_py """ class DecoratedCommand(command): @@ -361,21 +361,21 @@ def run(self): class Bower(Command): description = "fetch static client-side components with bower" - + user_options = [ ('force', 'f', "force fetching of bower dependencies"), ] - + def initialize_options(self): self.force = False - + def finalize_options(self): self.force = bool(self.force) - + bower_dir = pjoin(static, 'components') node_modules = pjoin(repo_root, 'node_modules') sanitizer_dir = pjoin(bower_dir, 'sanitizer') - + def should_run(self): if self.force: return True @@ -402,15 +402,15 @@ def run(self): if not self.should_run(): print("bower dependencies up to date") return - + if self.should_run_npm(): 
print("installing build dependencies with npm") run(['npm', 'install'], cwd=repo_root) os.utime(self.node_modules, None) - + env = os.environ.copy() env['PATH'] = npm_path - + try: run( ['bower', 'install', '--allow-root', '--config.interactive=false'], @@ -453,9 +453,9 @@ def patch_out_bootstrap_bw_print(): class CompileCSS(Command): """Recompile Notebook CSS - + Regenerate the compiled CSS from LESS sources. - + Requires various dev dependencies, such as require and lessc. """ description = "Recompile Notebook CSS" @@ -479,7 +479,7 @@ def run(self): env['PATH'] = npm_path patch_out_bootstrap_bw_print() - + for src, dst in zip(self.sources, self.targets): try: run(['lessc', @@ -498,7 +498,7 @@ def run(self): class CompileJS(Command): """Rebuild Notebook Javascript main.min.js files and translation files. - + Calls require via build-main.js """ description = "Rebuild Notebook Javascript main.min.js files" @@ -514,7 +514,7 @@ def finalize_options(self): apps = ['notebook', 'tree', 'edit', 'terminal', 'auth'] targets = [ pjoin(static, app, 'js', 'main.min.js') for app in apps ] - + def sources(self, name): """Generator yielding .js sources that an application depends on""" yield pjoin(repo_root, 'tools', 'build-main.js') @@ -535,7 +535,7 @@ def sources(self, name): continue for f in files: yield pjoin(parent, f) - + def should_run(self, name, target): if self.force or not os.path.exists(target): return True @@ -582,13 +582,13 @@ class JavascriptVersion(Command): """write the javascript version to notebook javascript""" description = "Write Jupyter version to javascript" user_options = [] - + def initialize_options(self): pass - + def finalize_options(self): pass - + def run(self): nsfile = pjoin(repo_root, "notebook", "static", "base", "js", "namespace.js") with open(nsfile) as f: @@ -597,7 +597,7 @@ def run(self): found = False for line in lines: if line.strip().startswith("Jupyter.version"): - line = ' Jupyter.version = "{0}";\n'.format(version) + line = f' 
Jupyter.version = "{version}";\n' found = True f.write(line) if not found: diff --git a/tools/secure_notebook.py b/tools/secure_notebook.py index 1984d33946..ec74399556 100644 --- a/tools/secure_notebook.py +++ b/tools/secure_notebook.py @@ -3,12 +3,10 @@ script to automatically setup notebook over SSL. Generate cert and keyfiles (rsa 1024) in ~/.ssh/, ask for a password, and add -the corresponding entries in the notebook json configuration file. +the corresponding entries in the notebook json configuration file. """ -import six - from notebook.auth import passwd from traitlets.config.loader import JSONFileConfigLoader, ConfigFileNotFound from jupyter_core.paths import jupyter_config_dir @@ -19,7 +17,6 @@ from OpenSSL import crypto from os.path import exists, join -import io import os import json import traceback @@ -33,7 +30,7 @@ def create_self_signed_cert(cert_dir, keyfile, certfile): """ if exists(join(cert_dir, certfile)) or exists(join(cert_dir, keyfile)): - raise FileExistsError('{} or {} already exist in {}. Aborting.'.format(keyfile, certfile, cert_dir)) + raise FileExistsError(f'{keyfile} or {certfile} already exist in {cert_dir}. 
Aborting.') else: # create a key pair k = crypto.PKey() @@ -54,11 +51,11 @@ def create_self_signed_cert(cert_dir, keyfile, certfile): cert.set_pubkey(k) cert.sign(k, 'sha256') - with io.open(join(cert_dir, certfile), "wt") as f: + with open(join(cert_dir, certfile), "wt") as f: f.write(crypto.dump_certificate(crypto.FILETYPE_PEM, cert).decode('utf8')) os.chmod(join(cert_dir, certfile), 0o600) - - with io.open(join(cert_dir, keyfile), "wt") as f: + + with open(join(cert_dir, keyfile), "wt") as f: f.write(crypto.dump_privatekey(crypto.FILETYPE_PEM, k).decode('utf8')) os.chmod(join(cert_dir, keyfile), 0o600) @@ -68,7 +65,7 @@ def create_self_signed_cert(cert_dir, keyfile, certfile): def persist_config(mode=0o600): """Context manager that can be use to modify a config object - On exit of the context manager, the config will be written back to disk, + On exit of the context manager, the config will be written back to disk, by default with 600 permissions. """ @@ -81,8 +78,8 @@ def persist_config(mode=0o600): yield config filepath = os.path.join(jupyter_config_dir(), 'jupyter_notebook_config.json') - with io.open(filepath, 'w') as f: - f.write(six.u(json.dumps(config, indent=2))) + with open(filepath, 'w') as f: + f.write(json.dumps(config, indent=2)) try: os.chmod(filepath, mode) except Exception: