Mirror of https://github.com/jupyterhub/jupyterhub.git, synced 2025-10-07 18:14:10 +00:00
pyupgrade: run pyupgrade --py36-plus and black on all but tests
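The hunks below were generated mechanically by pyupgrade and black. As a rough illustration only (not taken from the commit; the names `name`, `port`, `Base`, and `Child` are invented for this sketch), these are the kinds of rewrites pyupgrade --py36-plus performs:

    # Illustrative sketch of pyupgrade --py36-plus rewrites; all names are made up.
    name = "jovyan"
    port = 8888

    # %-formatting and str.format() calls become f-strings
    assert "server for %s on port %s" % (name, port) == f"server for {name} on port {port}"

    # set([...]) and set(<generator>) become set literals / comprehensions
    assert set([port]) == {port}

    # u'' prefixes are dropped, super(Cls, self) becomes super(),
    # and socket.error is spelled with its Python 3 alias OSError
    class Base:
        def greet(self):
            return u"hello"

    class Child(Base):
        def greet(self):
            return super().greet()  # instead of super(Child, self).greet()

    assert Child().greet() == "hello"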
@@ -29,5 +29,5 @@ dependencies = package_json['dependencies']
 for dep in dependencies:
     src = join(node_modules, dep)
     dest = join(components, dep)
-    print("%s -> %s" % (src, dest))
+    print(f"{src} -> {dest}")
     shutil.copytree(src, dest)
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 #
 import os
 import sys
@@ -29,9 +28,9 @@ myst_enable_extensions = [
 master_doc = 'index'

 # General information about the project.
-project = u'JupyterHub'
-copyright = u'2016, Project Jupyter team'
-author = u'Project Jupyter team'
+project = 'JupyterHub'
+copyright = '2016, Project Jupyter team'
+author = 'Project Jupyter team'

 # Autopopulate version
 from os.path import dirname
@@ -146,8 +145,8 @@ latex_documents = [
     (
         master_doc,
         'JupyterHub.tex',
-        u'JupyterHub Documentation',
-        u'Project Jupyter team',
+        'JupyterHub Documentation',
+        'Project Jupyter team',
         'manual',
     )
 ]
@@ -164,7 +163,7 @@ latex_documents = [

 # One entry per manual page. List of tuples
 # (source start file, name, description, authors, manual section).
-man_pages = [(master_doc, 'jupyterhub', u'JupyterHub Documentation', [author], 1)]
+man_pages = [(master_doc, 'jupyterhub', 'JupyterHub Documentation', [author], 1)]

 # man_show_urls = False

@@ -178,7 +177,7 @@ texinfo_documents = [
     (
         master_doc,
         'JupyterHub',
-        u'JupyterHub Documentation',
+        'JupyterHub Documentation',
         author,
         'JupyterHub',
         'One line description of project.',
@@ -13,7 +13,7 @@ c.JupyterHub.load_groups = {group_name: ['ellisonbg', 'willingc']}
 c.JupyterHub.services = [
     {
         'name': service_name,
-        'url': 'http://127.0.0.1:{}'.format(service_port),
+        'url': f'http://127.0.0.1:{service_port}',
         'api_token': 'c3a29e5d386fd7c9aa1e8fe9d41c282ec8b',
     }
 ]
@@ -13,7 +13,7 @@ c.JupyterHub.load_groups = {group_name: ['ellisonbg', 'willingc']}
 c.JupyterHub.services = [
     {
         'name': service_name,
-        'url': 'http://127.0.0.1:{}'.format(service_port),
+        'url': f'http://127.0.0.1:{service_port}',
         'command': ['jupyterhub-singleuser', '--debug'],
     }
 ]
@@ -60,7 +60,7 @@ def _check_version(hub_version, singleuser_version, log):
         log_method = log.debug
     else:
         # log warning-level for more significant mismatch, such as 0.8 vs 0.9, etc.
-        key = '%s-%s' % (hub_version, singleuser_version)
+        key = f'{hub_version}-{singleuser_version}'
         global _version_mismatch_warning_logged
         if _version_mismatch_warning_logged.get(key):
             do_log = False  # We already logged this warning so don't log it again.
@@ -190,9 +190,9 @@ class OAuthAuthorizeHandler(OAuthHandler, BaseHandler):
         .. versionadded: 1.1
         """
         # get the oauth client ids for the user's own server(s)
-        own_oauth_client_ids = set(
+        own_oauth_client_ids = {
             spawner.oauth_client_id for spawner in user.spawners.values()
-        )
+        }
         if (
             # it's the user's own server
             oauth_client.identifier in own_oauth_client_ids
@@ -74,9 +74,7 @@ class InfoAPIHandler(APIHandler):

 def _class_info(typ):
     """info about a class (Spawner or Authenticator)"""
-    info = {
-        'class': '{mod}.{name}'.format(mod=typ.__module__, name=typ.__name__)
-    }
+    info = {'class': f'{typ.__module__}.{typ.__name__}'}
     pkg = typ.__module__.split('.')[0]
     try:
         version = sys.modules[pkg].__version__
@@ -210,9 +210,7 @@ class UserListAPIHandler(APIHandler):
             except Exception as e:
                 self.log.error("Failed to create user: %s" % name, exc_info=True)
                 self.users.delete(user)
-                raise web.HTTPError(
-                    400, "Failed to create user %s: %s" % (name, str(e))
-                )
+                raise web.HTTPError(400, f"Failed to create user {name}: {str(e)}")
             else:
                 created.append(user)

@@ -394,7 +392,7 @@ class UserTokenListAPIHandler(APIHandler):
         if not note:
             note = "Requested via api"
             if requester is not user:
-                note += " by %s %s" % (kind, requester.name)
+                note += f" by {kind} {requester.name}"

         token_roles = body.get('roles')
         try:
@@ -434,7 +432,7 @@ class UserTokenAPIHandler(APIHandler):
         Raises 404 if not found for any reason
         (e.g. wrong owner, invalid key format, etc.)
         """
-        not_found = "No such token %s for user %s" % (token_id, user.name)
+        not_found = f"No such token {token_id} for user {user.name}"
         prefix, id_ = token_id[:1], token_id[1:]
         if prefix != 'a':
             raise web.HTTPError(404, not_found)
@@ -508,7 +506,7 @@ class UserServerAPIHandler(APIHandler):
             self.set_status(202)
             return
         elif pending:
-            raise web.HTTPError(400, "%s is pending %s" % (spawner._log_name, pending))
+            raise web.HTTPError(400, f"{spawner._log_name} is pending {pending}")

         if spawner.ready:
             # include notify, so that a server that died is noticed immediately
@@ -554,7 +552,7 @@ class UserServerAPIHandler(APIHandler):
                 raise web.HTTPError(400, "Named servers are not enabled.")
             if server_name not in user.orm_spawners:
                 raise web.HTTPError(
-                    404, "%s has no server named '%s'" % (user_name, server_name)
+                    404, f"{user_name} has no server named '{server_name}'"
                 )
         elif remove:
             raise web.HTTPError(400, "Cannot delete the default server")
@@ -572,7 +570,7 @@ class UserServerAPIHandler(APIHandler):
         if spawner.pending:
             raise web.HTTPError(
                 400,
-                "%s is pending %s, please wait" % (spawner._log_name, spawner.pending),
+                f"{spawner._log_name} is pending {spawner.pending}, please wait",
             )

         stop_future = None
@@ -627,7 +625,7 @@ class SpawnProgressAPIHandler(APIHandler):

     async def send_event(self, event):
         try:
-            self.write('data: {}\n\n'.format(json.dumps(event)))
+            self.write(f'data: {json.dumps(event)}\n\n')
             await self.flush()
         except StreamClosedError:
             self.log.warning("Stream closed while handling %s", self.request.uri)
@@ -681,7 +679,7 @@ class SpawnProgressAPIHandler(APIHandler):
             ready_event = {
                 'progress': 100,
                 'ready': True,
-                'message': "Server ready at {}".format(url),
+                'message': f"Server ready at {url}",
                 'html_message': 'Server ready at <a href="{0}">{0}</a>'.format(url),
                 'url': url,
             }
@@ -784,7 +782,7 @@ class ActivityAPIHandler(APIHandler):
             if server_name not in spawners:
                 raise web.HTTPError(
                     400,
-                    "No such server '{}' for user {}".format(server_name, user.name),
+                    f"No such server '{server_name}' for user {user.name}",
                 )
             # check that each per-server field is a dict
             if not isinstance(server_info, dict):
@@ -2208,7 +2208,7 @@ class JupyterHub(Application):
         """
         # this should be all the subclasses of Expiring
         for cls in (orm.APIToken, orm.OAuthCode):
-            self.log.debug("Purging expired {name}s".format(name=cls.__name__))
+            self.log.debug(f"Purging expired {cls.__name__}s")
             cls.purge_expired(self.db)

     async def init_api_tokens(self):
@@ -2232,7 +2232,7 @@ class JupyterHub(Application):
         if self.domain:
             domain = 'services.' + self.domain
             parsed = urlparse(self.subdomain_host)
-            host = '%s://services.%s' % (parsed.scheme, parsed.netloc)
+            host = f'{parsed.scheme}://services.{parsed.netloc}'
         else:
             domain = host = ''

@@ -2359,14 +2359,12 @@ class JupyterHub(Application):

         def _user_summary(user):
             """user is an orm.User, not a full user"""
-            parts = ['{0: >8}'.format(user.name)]
+            parts = [f'{user.name: >8}']
             if user.admin:
                 parts.append('admin')
             for name, spawner in sorted(user.orm_spawners.items(), key=itemgetter(0)):
                 if spawner.server:
-                    parts.append(
-                        '%s:%s running at %s' % (user.name, name, spawner.server)
-                    )
+                    parts.append(f'{user.name}:{name} running at {spawner.server}')
             return ' '.join(parts)

         async def user_stopped(user, server_name):
@@ -2703,7 +2701,7 @@ class JupyterHub(Application):
             self.log.warning(
                 "Use JupyterHub in config, not JupyterHubApp. Outdated config:\n%s",
                 '\n'.join(
-                    'JupyterHubApp.{key} = {value!r}'.format(key=key, value=value)
+                    f'JupyterHubApp.{key} = {value!r}'
                     for key, value in self.config.JupyterHubApp.items()
                 ),
             )
@@ -2725,7 +2723,7 @@ class JupyterHub(Application):
             mod = sys.modules.get(cls.__module__.split('.')[0])
             version = getattr(mod, '__version__', '')
             if version:
-                version = '-{}'.format(version)
+                version = f'-{version}'
             else:
                 version = ''
             self.log.info(
@@ -3025,11 +3023,7 @@ class JupyterHub(Application):

         # start the service(s)
         for service_name, service in self._service_map.items():
-            msg = (
-                '%s at %s' % (service_name, service.url)
-                if service.url
-                else service_name
-            )
+            msg = f'{service_name} at {service.url}' if service.url else service_name
             if service.managed:
                 self.log.info("Starting managed service %s", msg)
                 try:
@@ -926,7 +926,7 @@ class LocalAuthenticator(Authenticator):
         p.wait()
         if p.returncode:
             err = p.stdout.read().decode('utf8', 'replace')
-            raise RuntimeError("Failed to create system user %s: %s" % (name, err))
+            raise RuntimeError(f"Failed to create system user {name}: {err}")


 class PAMAuthenticator(LocalAuthenticator):
@@ -91,7 +91,7 @@ def backup_db_file(db_file, log=None):
         for i in range(1, 10):
             if not os.path.exists(backup_db_file):
                 break
-            backup_db_file = '{}.{}.{}'.format(db_file, timestamp, i)
+            backup_db_file = f'{db_file}.{timestamp}.{i}'
     #
     if os.path.exists(backup_db_file):
         raise OSError("backup db file already exists: %s" % backup_db_file)
@@ -622,7 +622,7 @@ class BaseHandler(RequestHandler):
         next_url = next_url.replace('\\', '%5C')
         if (next_url + '/').startswith(
             (
-                '%s://%s/' % (self.request.protocol, self.request.host),
+                f'{self.request.protocol}://{self.request.host}/',
                 '//%s/' % self.request.host,
             )
         ) or (
@@ -748,7 +748,7 @@ class BaseHandler(RequestHandler):
         refreshing = user is not None

         if user and username != user.name:
-            raise ValueError("Username doesn't match! %s != %s" % (username, user.name))
+            raise ValueError(f"Username doesn't match! {username} != {user.name}")

         if user is None:
             user = self.find_user(username)
@@ -830,14 +830,14 @@ class BaseHandler(RequestHandler):
         user_server_name = user.name

         if server_name:
-            user_server_name = '%s:%s' % (user.name, server_name)
+            user_server_name = f'{user.name}:{server_name}'

         if server_name in user.spawners and user.spawners[server_name].pending:
             pending = user.spawners[server_name].pending
             SERVER_SPAWN_DURATION_SECONDS.labels(
                 status=ServerSpawnStatus.already_pending
             ).observe(time.perf_counter() - spawn_start_time)
-            raise RuntimeError("%s pending %s" % (user_server_name, pending))
+            raise RuntimeError(f"{user_server_name} pending {pending}")

         # count active servers and pending spawns
         # we could do careful bookkeeping to avoid
@@ -1114,7 +1114,7 @@ class BaseHandler(RequestHandler):
             raise KeyError("User %s has no such spawner %r", user.name, server_name)
         spawner = user.spawners[server_name]
         if spawner.pending:
-            raise RuntimeError("%s pending %s" % (spawner._log_name, spawner.pending))
+            raise RuntimeError(f"{spawner._log_name} pending {spawner.pending}")
         # set user._stop_pending before doing anything async
         # to avoid races
         spawner._stop_pending = True
@@ -239,7 +239,7 @@ class SpawnHandler(BaseHandler):
             raise web.HTTPError(400, "%s is already running" % (spawner._log_name))
         elif spawner.pending:
             raise web.HTTPError(
-                400, "%s is pending %s" % (spawner._log_name, spawner.pending)
+                400, f"{spawner._log_name} is pending {spawner.pending}"
             )

         form_options = {}
@@ -348,9 +348,7 @@ class SpawnPendingHandler(BaseHandler):
             raise web.HTTPError(404, "No such user: %s" % for_user)

         if server_name and server_name not in user.spawners:
-            raise web.HTTPError(
-                404, "%s has no such server %s" % (user.name, server_name)
-            )
+            raise web.HTTPError(404, f"{user.name} has no such server {server_name}")

         spawner = user.spawners[server_name]

@@ -466,7 +464,7 @@ class AdminHandler(BaseHandler):
             admin_access=self.settings.get('admin_access', False),
             allow_named_servers=self.allow_named_servers,
             named_server_limit_per_user=self.named_server_limit_per_user,
-            server_version='{} {}'.format(__version__, self.version_hash),
+            server_version=f'{__version__} {self.version_hash}',
             api_page_limit=self.settings["api_page_default_limit"],
         )
         self.finish(html)
@@ -106,12 +106,12 @@ def _scrub_headers(headers):
         else:
             # no space, hide the whole thing in case there was a mistake
             auth_type = ''
-        headers['Authorization'] = '{} [secret]'.format(auth_type)
+        headers['Authorization'] = f'{auth_type} [secret]'
     if 'Cookie' in headers:
         c = SimpleCookie(headers['Cookie'])
         redacted = []
         for name in c.keys():
-            redacted.append("{}=[secret]".format(name))
+            redacted.append(f"{name}=[secret]")
         headers['Cookie'] = '; '.join(redacted)
     return headers

@@ -185,6 +185,6 @@ def log_request(handler):
     # to get headers from tornado
     location = handler._headers.get('Location')
     if location:
-        ns['location'] = ' -> {}'.format(_scrub_uri(location))
+        ns['location'] = f' -> {_scrub_uri(location)}'
     log_method(msg.format(**ns))
     prometheus_log_method(handler)
@@ -198,6 +198,6 @@ def prometheus_log_method(handler):
     """
     REQUEST_DURATION_SECONDS.labels(
         method=handler.request.method,
-        handler='{}.{}'.format(handler.__class__.__module__, type(handler).__name__),
+        handler=f'{handler.__class__.__module__}.{type(handler).__name__}',
         code=handler.get_status(),
     ).observe(handler.request.request_time())
@@ -148,7 +148,7 @@ class Server(HasTraits):
     def host(self):
         if self.connect_url:
             parsed = urlparse(self.connect_url)
-            return "{proto}://{host}".format(proto=parsed.scheme, host=parsed.netloc)
+            return f"{parsed.scheme}://{parsed.netloc}"

         if ':' in self._connect_ip:
             fmt = "{proto}://[{ip}]:{port}"
@@ -162,7 +162,7 @@ class Server(HasTraits):
     def url(self):
         if self.connect_url:
             return self.connect_url
-        return "{host}{uri}".format(host=self.host, uri=self.base_url)
+        return f"{self.host}{self.base_url}"

     def __repr__(self):
         return "{name}(url={url}, bind_url={bind})".format(
@@ -218,4 +218,4 @@ class Hub(Server):
         return url_path_join(self.url, 'api')

     def __repr__(self):
-        return "<%s %s:%s>" % (self.__class__.__name__, self.ip, self.port)
+        return f"<{self.__class__.__name__} {self.ip}:{self.port}>"
@@ -145,7 +145,7 @@ class Server(Base):
     cookie_name = Column(Unicode(255), default='cookie')

     def __repr__(self):
-        return "<Server(%s:%s)>" % (self.ip, self.port)
+        return f"<Server({self.ip}:{self.port})>"


 # lots of things have roles
@@ -192,7 +192,7 @@ class Role(Base):
     groups = relationship('Group', secondary='group_role_map', backref='roles')

     def __repr__(self):
-        return "<%s %s (%s) - scopes: %s>" % (
+        return "<{} {} ({}) - scopes: {}>".format(
             self.__class__.__name__,
             self.name,
             self.description,
@@ -1039,6 +1039,8 @@ def get_class(resource_name):
     }
     if resource_name not in class_dict:
         raise ValueError(
-            "Kind must be one of %s, not %s" % (", ".join(class_dict), resource_name)
+            "Kind must be one of {}, not {}".format(
+                ", ".join(class_dict), resource_name
+            )
         )
     return class_dict[resource_name]
@@ -170,9 +170,7 @@ class Pagination(Configurable):

         if self.page > 1:
             prev_page = self.page - 1
-            links.append(
-                '<li><a href="?page={prev_page}">«</a></li>'.format(prev_page=prev_page)
-            )
+            links.append(f'<li><a href="?page={prev_page}">«</a></li>')
         else:
             links.append(
                 '<li class="disabled"><span><span aria-hidden="true">«</span></span></li>'
@@ -198,9 +196,7 @@ class Pagination(Configurable):

         if self.page >= 1 and self.page < self.total_pages:
             next_page = self.page + 1
-            links.append(
-                '<li><a href="?page={next_page}">»</a></li>'.format(next_page=next_page)
-            )
+            links.append(f'<li><a href="?page={next_page}">»</a></li>')
         else:
             links.append(
                 '<li class="disabled"><span><span aria-hidden="true">»</span></span></li>'
@@ -509,7 +509,7 @@ class ConfigurableHTTPProxy(Proxy):
         if self.app.internal_ssl:
             proto = 'https'

-        return "{proto}://{url}".format(proto=proto, url=url)
+        return f"{proto}://{url}"

     command = Command(
         'configurable-http-proxy',
@@ -565,7 +565,7 @@ class ConfigurableHTTPProxy(Proxy):
             pid_file = os.path.abspath(self.pid_file)
             self.log.warning("Found proxy pid file: %s", pid_file)
             try:
-                with open(pid_file, "r") as f:
+                with open(pid_file) as f:
                     pid = int(f.read().strip())
             except ValueError:
                 self.log.warning("%s did not appear to contain a pid", pid_file)
@@ -823,7 +823,7 @@ class ConfigurableHTTPProxy(Proxy):
         req = HTTPRequest(
             url,
             method=method,
-            headers={'Authorization': 'token {}'.format(self.auth_token)},
+            headers={'Authorization': f'token {self.auth_token}'},
             body=body,
             connect_timeout=3,  # default: 20s
             request_timeout=10,  # default: 20s
@@ -845,13 +845,13 @@ class ConfigurableHTTPProxy(Proxy):
                 )
                 return False  # a falsy return value make exponential_backoff retry
             else:
-                self.log.error("api_request to proxy failed: {0}".format(e))
+                self.log.error(f"api_request to proxy failed: {e}")
                 # An unhandled error here will help the hub invoke cleanup logic
                 raise

         result = await exponential_backoff(
             _wait_for_api_request,
-            'Repeated api_request to proxy path "{}" failed.'.format(path),
+            f'Repeated api_request to proxy path "{path}" failed.',
             timeout=30,
         )
         return result
@@ -94,7 +94,7 @@ def expand_self_scope(name):
         'read:tokens',
         'access:servers',
     ]
-    return {"{}!user={}".format(scope, name) for scope in scope_list}
+    return {f"{scope}!user={name}" for scope in scope_list}


 def horizontal_filter(func):
@@ -331,7 +331,7 @@ def existing_only(func):
         role = orm.Role.find(db, rolename)
         if entity is None:
             raise ValueError(
-                "%r of kind %r does not exist" % (entity, type(entity).__name__)
+                f"{entity!r} of kind {type(entity).__name__!r} does not exist"
             )
         elif role is None:
             raise ValueError("Role %r does not exist" % rolename)
@@ -434,7 +434,7 @@ def parse_scopes(scope_list):
         if parsed_scopes[base_scope] != Scope.ALL:
             key, _, value = filter_.partition('=')
             if key not in parsed_scopes[base_scope]:
-                parsed_scopes[base_scope][key] = set([value])
+                parsed_scopes[base_scope][key] = {value}
             else:
                 parsed_scopes[base_scope][key].add(value)
     return parsed_scopes
@@ -739,7 +739,7 @@ class HubOAuth(HubAuth):
             cookie_suffix = ''.join(
                 random.choice(string.ascii_letters) for i in range(8)
             )
-            cookie_name = '{}-{}'.format(self.state_cookie_name, cookie_suffix)
+            cookie_name = f'{self.state_cookie_name}-{cookie_suffix}'
             extra_state['cookie_name'] = cookie_name
         else:
             cookie_name = self.state_cookie_name
@@ -560,7 +560,7 @@ class SingleUserNotebookAppMixin(Configurable):
             url=self.hub_activity_url,
             method='POST',
             headers={
-                "Authorization": "token {}".format(self.hub_auth.api_token),
+                "Authorization": f"token {self.hub_auth.api_token}",
                 "Content-Type": "application/json",
             },
             body=json.dumps(
@@ -811,7 +811,7 @@ def _patch_app_base_handlers(app):
         BaseHandler = import_item("notebook.base.handlers.IPythonHandler")
     else:
         raise ValueError(
-            "{}.base_handler_class must be defined".format(app.__class__.__name__)
+            f"{app.__class__.__name__}.base_handler_class must be defined"
         )
     base_handlers.append(BaseHandler)

@@ -97,7 +97,7 @@ class Spawner(LoggingConfigurable):
         Used in logging for consistency with named servers.
         """
         if self.name:
-            return '%s:%s' % (self.user.name, self.name)
+            return f'{self.user.name}:{self.name}'
         else:
             return self.user.name

@@ -1258,7 +1258,7 @@ class Spawner(LoggingConfigurable):
         try:
             r = await exponential_backoff(
                 _wait_for_death,
-                'Process did not die in {timeout} seconds'.format(timeout=timeout),
+                f'Process did not die in {timeout} seconds',
                 start_wait=self.death_interval,
                 timeout=timeout,
             )
@@ -1277,7 +1277,7 @@ def _try_setcwd(path):
             os.chdir(path)
         except OSError as e:
             exc = e  # break exception instance out of except scope
-            print("Couldn't set CWD to %s (%s)" % (path, e), file=sys.stderr)
+            print(f"Couldn't set CWD to {path} ({e})", file=sys.stderr)
             path, _ = os.path.split(path)
         else:
             return
@@ -1423,7 +1423,7 @@ class LocalProcessSpawner(Spawner):

         Local processes only need the process id.
         """
-        super(LocalProcessSpawner, self).load_state(state)
+        super().load_state(state)
         if 'pid' in state:
             self.pid = state['pid']

@@ -1432,14 +1432,14 @@ class LocalProcessSpawner(Spawner):

         Local processes only need the process id.
         """
-        state = super(LocalProcessSpawner, self).get_state()
+        state = super().get_state()
         if self.pid:
             state['pid'] = self.pid
         return state

     def clear_state(self):
         """Clear stored state about this spawner (pid)"""
-        super(LocalProcessSpawner, self).clear_state()
+        super().clear_state()
         self.pid = 0

     def user_env(self, env):
@@ -1488,8 +1488,8 @@ class LocalProcessSpawner(Spawner):
         home = user.pw_dir

         # Create dir for user's certs wherever we're starting
-        hub_dir = "{home}/.jupyterhub".format(home=home)
-        out_dir = "{hub_dir}/jupyterhub-certs".format(hub_dir=hub_dir)
+        hub_dir = f"{home}/.jupyterhub"
+        out_dir = f"{hub_dir}/jupyterhub-certs"
         shutil.rmtree(out_dir, ignore_errors=True)
         os.makedirs(out_dir, 0o700, exist_ok=True)

@@ -328,7 +328,7 @@ class User:
         # self.escaped_name may contain @ which is legal in URLs but not cookie keys
         client_id = 'jupyterhub-user-%s' % quote(self.name)
         if server_name:
-            client_id = '%s-%s' % (client_id, quote(server_name))
+            client_id = f'{client_id}-{quote(server_name)}'

         trusted_alt_names = []
         trusted_alt_names.extend(self.settings.get('trusted_alt_names', []))
@@ -452,7 +452,7 @@ class User:
         """Get the *host* for my server (proto://domain[:port])"""
         # FIXME: escaped_name probably isn't escaped enough in general for a domain fragment
         parsed = urlparse(self.settings['subdomain_host'])
-        h = '%s://%s' % (parsed.scheme, self.domain)
+        h = f'{parsed.scheme}://{self.domain}'
         if parsed.port:
             h += ':%i' % parsed.port
         return h
@@ -464,7 +464,7 @@ class User:
         Full name.domain/path if using subdomains, otherwise just my /base/url
         """
         if self.settings.get('subdomain_host'):
-            return '{host}{path}'.format(host=self.host, path=self.base_url)
+            return f'{self.host}{self.base_url}'
         else:
             return self.base_url

@@ -533,9 +533,7 @@ class User:
             else:
                 # spawn via POST or on behalf of another user.
                 # nothing we can do here but fail
-                raise web.HTTPError(
-                    400, "{}'s authentication has expired".format(self.name)
-                )
+                raise web.HTTPError(400, f"{self.name}'s authentication has expired")

     async def spawn(self, server_name='', options=None, handler=None):
         """Start the user's spawner
@@ -77,7 +77,7 @@ def can_connect(ip, port):
         ip = '127.0.0.1'
     try:
         socket.create_connection((ip, port)).close()
-    except socket.error as e:
+    except OSError as e:
         if e.errno not in {errno.ECONNREFUSED, errno.ETIMEDOUT}:
             app_log.error("Unexpected error connecting to %s:%i %s", ip, port, e)
             return False
@@ -225,7 +225,7 @@ async def wait_for_http_server(url, timeout=10, ssl_context=None):
             else:
                 app_log.debug("Server at %s responded with %s", url, e.code)
                 return e.response
-        except (OSError, socket.error) as e:
+        except OSError as e:
             if e.errno not in {
                 errno.ECONNABORTED,
                 errno.ECONNREFUSED,
@@ -602,7 +602,7 @@ def _parse_accept_header(accept):
             media_params.append(('vendor', vnd))
             # and re-write media_type to something like application/json so
             # it can be used usefully when looking up emitters
-            media_type = '{}/{}'.format(typ, extra)
+            media_type = f'{typ}/{extra}'

             q = 1.0
             for part in parts:
setup.py
@@ -1,12 +1,9 @@
 #!/usr/bin/env python3
-# coding: utf-8
 # Copyright (c) Jupyter Development Team.
 # Distributed under the terms of the Modified BSD License.
 # -----------------------------------------------------------------------------
 # Minimal Python version sanity check (from IPython)
 # -----------------------------------------------------------------------------
-from __future__ import print_function

 import os
 import shutil
 import sys
@@ -239,8 +236,8 @@ class CSS(BaseCommand):
             'lessc',
             '--',
             '--clean-css',
-            '--source-map-basepath={}'.format(static),
-            '--source-map={}'.format(sourcemap),
+            f'--source-map-basepath={static}',
+            f'--source-map={sourcemap}',
             '--source-map-rootpath=../',
             style_less,
             style_css,