Mirror of https://github.com/jupyterhub/jupyterhub.git (synced 2025-10-18 23:42:59 +00:00)

Commit: Removed duplicate files
@@ -45,12 +45,10 @@ from ..metrics import ServerSpawnStatus
from ..metrics import ServerStopStatus
from ..metrics import TOTAL_USERS
from ..objects import Server
from ..scopes import needs_scope
from ..spawner import LocalProcessSpawner
from ..user import User
from ..utils import AnyTimeoutError
from ..utils import get_accepted_mimetype
from ..utils import get_browser_protocol
from ..utils import maybe_future
from ..utils import url_path_join

@@ -73,12 +71,6 @@ SESSION_COOKIE_NAME = 'jupyterhub-session-id'
|
||||
class BaseHandler(RequestHandler):
|
||||
"""Base Handler class with access to common methods and properties."""
|
||||
|
||||
# by default, only accept cookie-based authentication
|
||||
# The APIHandler base class enables token auth
|
||||
# versionadded: 2.0
|
||||
_accept_cookie_auth = True
|
||||
_accept_token_auth = False
|
||||
|
||||
async def prepare(self):
|
||||
"""Identify the user during the prepare stage of each request
|
||||
|
||||
@@ -348,7 +340,6 @@ class BaseHandler(RequestHandler):
|
||||
auth_info['auth_state'] = await user.get_auth_state()
|
||||
return await self.auth_to_user(auth_info, user)
|
||||
|
||||
@functools.lru_cache()
|
||||
def get_token(self):
|
||||
"""get token from authorization header"""
|
||||
token = self.get_auth_token()
|
||||
@@ -419,11 +410,9 @@ class BaseHandler(RequestHandler):
|
||||
async def get_current_user(self):
|
||||
"""get current username"""
|
||||
if not hasattr(self, '_jupyterhub_user'):
|
||||
user = None
|
||||
try:
|
||||
if self._accept_token_auth:
|
||||
user = self.get_current_user_token()
|
||||
if user is None and self._accept_cookie_auth:
user = self.get_current_user_cookie()
|
||||
if user and isinstance(user, User):
|
||||
user = await self.refresh_auth(user)
|
||||
@@ -526,16 +515,10 @@ class BaseHandler(RequestHandler):
|
||||
path=url_path_join(self.base_url, 'services'),
|
||||
**kwargs,
|
||||
)
|
||||
# clear_cookie only accepts a subset of set_cookie's kwargs
|
||||
clear_xsrf_cookie_kwargs = {
|
||||
key: value
|
||||
for key, value in self.settings.get('xsrf_cookie_kwargs', {}).items()
|
||||
if key in {"path", "domain"}
|
||||
}
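# Note (illustrative values): only 'path' and 'domain' survive this filter,
# since tornado's clear_cookie() does not accept the other set_cookie keyword
# arguments, e.g. {"path": "/hub/", "secure": True} -> {"path": "/hub/"}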
|
||||
|
||||
# clear tornado cookie
|
||||
self.clear_cookie(
|
||||
'_xsrf',
|
||||
**clear_xsrf_cookie_kwargs,
|
||||
)
|
||||
# Reset _jupyterhub_user
|
||||
self._jupyterhub_user = None
|
||||
@@ -640,10 +623,12 @@ class BaseHandler(RequestHandler):
|
||||
next_url = self.get_argument('next', default='')
|
||||
# protect against some browsers' buggy handling of backslash as slash
|
||||
next_url = next_url.replace('\\', '%5C')
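# e.g. some browsers treat a crafted '\\evil.example' like the
# protocol-relative '//evil.example', so backslashes are escaped first
# (illustrative hostname, not from the original source)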
|
||||
proto = get_browser_protocol(self.request)
|
||||
host = self.request.host
|
||||
|
||||
if (next_url + '/').startswith(
(
f'{proto}://{host}/',
f'//{host}/',
)
) or (
|
||||
self.subdomain_host
|
||||
and urlparse(next_url).netloc
|
||||
and ("." + urlparse(next_url).netloc).endswith(
|
||||
@@ -777,25 +762,15 @@ class BaseHandler(RequestHandler):
|
||||
# Only set `admin` if the authenticator returned an explicit value.
|
||||
if admin is not None and admin != user.admin:
|
||||
user.admin = admin
|
||||
# always ensure default roles ('user', 'admin' if admin) are assigned
|
||||
# after a successful login
|
||||
roles.assign_default_roles(self.db, entity=user)
|
||||
|
||||
# apply authenticator-managed groups
|
||||
if self.authenticator.manage_groups:
|
||||
group_names = authenticated.get("groups")
|
||||
if group_names is not None:
|
||||
user.sync_groups(group_names)
|
||||
|
||||
roles.assign_default_roles(self.db, entity=user)
|
||||
self.db.commit()
|
||||
# always set auth_state and commit,
|
||||
# because there could be key-rotation or clearing of previous values
|
||||
# going on.
|
||||
if not self.authenticator.enable_auth_state:
|
||||
# auth_state is not enabled. Force None.
|
||||
auth_state = None
|
||||
|
||||
await user.save_auth_state(auth_state)
|
||||
|
||||
return user
|
||||
|
||||
async def login_user(self, data=None):
|
||||
@@ -809,7 +784,6 @@ class BaseHandler(RequestHandler):
|
||||
self.set_login_cookie(user)
|
||||
self.statsd.incr('login.success')
|
||||
self.statsd.timing('login.authenticate.success', auth_timer.ms)
|
||||
|
||||
self.log.info("User logged in: %s", user.name)
|
||||
user._auth_refreshed = time.monotonic()
|
||||
return user
|
||||
@@ -1398,9 +1372,6 @@ class UserUrlHandler(BaseHandler):
|
||||
Note that this only occurs if bob's server is not already running.
|
||||
"""
|
||||
|
||||
# accept token auth for API requests that are probably to non-running servers
|
||||
_accept_token_auth = True
|
||||
|
||||
def _fail_api_request(self, user_name='', server_name=''):
|
||||
"""Fail an API request to a not-running server"""
|
||||
self.log.warning(
|
||||
@@ -1465,24 +1436,54 @@ class UserUrlHandler(BaseHandler):
|
||||
delete = non_get
|
||||
|
||||
@web.authenticated
|
||||
@needs_scope("access:servers")
|
||||
async def get(self, user_name, user_path):
|
||||
if not user_path:
|
||||
user_path = '/'
|
||||
current_user = self.current_user
|
||||
|
||||
if (
|
||||
current_user
|
||||
and current_user.name != user_name
|
||||
and current_user.admin
|
||||
and self.settings.get('admin_access', False)
|
||||
):
|
||||
# allow admins to spawn on behalf of users
|
||||
user = self.find_user(user_name)
|
||||
if user is None:
|
||||
# no such user
|
||||
raise web.HTTPError(404, f"No such user {user_name}")
|
||||
raise web.HTTPError(404, "No such user %s" % user_name)
|
||||
self.log.info(
|
||||
f"User {current_user.name} requesting spawn on behalf of {user.name}"
|
||||
"Admin %s requesting spawn on behalf of %s",
|
||||
current_user.name,
|
||||
user.name,
|
||||
)
|
||||
admin_spawn = True
|
||||
should_spawn = True
|
||||
redirect_to_self = False
|
||||
else:
|
||||
user = current_user
|
||||
admin_spawn = False
|
||||
# For non-admins, spawn if the user requested is the current user
|
||||
# otherwise redirect users to their own server
|
||||
should_spawn = current_user and current_user.name == user_name
|
||||
redirect_to_self = not should_spawn
|
||||
|
||||
if redirect_to_self:
|
||||
# logged in as a different non-admin user, redirect to user's own server
|
||||
# this is only a stop-gap for a common mistake,
|
||||
# because the same request will be a 403
|
||||
# if the requested server is running
|
||||
self.statsd.incr('redirects.user_to_user', 1)
|
||||
self.log.warning(
|
||||
"User %s requested server for %s, which they don't own",
|
||||
current_user.name,
|
||||
user_name,
|
||||
)
|
||||
target = url_path_join(current_user.url, user_path or '')
|
||||
if self.request.query:
|
||||
target = url_concat(target, parse_qsl(self.request.query))
|
||||
self.redirect(target)
|
||||
return
|
||||
|
||||
# If people visit /user/:user_name directly on the Hub,
|
||||
# the redirects will just loop, because the proxy is bypassed.
|
||||
|
File diff suppressed because it is too large
@@ -1,898 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
"""Mixins to regular notebook server to add JupyterHub auth.
|
||||
|
||||
Meant to be compatible with jupyter_server and classic notebook
|
||||
|
||||
Use make_singleuser_app to create a compatible Application class
|
||||
with JupyterHub authentication mixins enabled.
|
||||
"""
|
||||
# Copyright (c) Jupyter Development Team.
|
||||
# Distributed under the terms of the Modified BSD License.
|
||||
import asyncio
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import random
|
||||
import secrets
|
||||
import sys
|
||||
import warnings
|
||||
from datetime import datetime
|
||||
from datetime import timezone
|
||||
from textwrap import dedent
|
||||
from urllib.parse import urlparse
|
||||
|
||||
from jinja2 import ChoiceLoader
|
||||
from jinja2 import FunctionLoader
|
||||
from tornado import ioloop
|
||||
from tornado.httpclient import AsyncHTTPClient
|
||||
from tornado.httpclient import HTTPRequest
|
||||
from tornado.web import RequestHandler
|
||||
from traitlets import Any
|
||||
from traitlets import Bool
|
||||
from traitlets import Bytes
|
||||
from traitlets import CUnicode
|
||||
from traitlets import default
|
||||
from traitlets import import_item
|
||||
from traitlets import Integer
|
||||
from traitlets import observe
|
||||
from traitlets import TraitError
|
||||
from traitlets import Unicode
|
||||
from traitlets import validate
|
||||
from traitlets.config import Configurable
|
||||
|
||||
from .._version import __version__
|
||||
from .._version import _check_version
|
||||
from ..log import log_request
|
||||
from ..services.auth import HubOAuth
|
||||
from ..services.auth import HubOAuthCallbackHandler
|
||||
from ..services.auth import HubOAuthenticated
|
||||
from ..utils import exponential_backoff
|
||||
from ..utils import isoformat
|
||||
from ..utils import make_ssl_context
|
||||
from ..utils import url_path_join
|
||||
|
||||
|
||||
def _bool_env(key):
|
||||
"""Cast an environment variable to bool
|
||||
|
||||
0, empty, or unset is False; all other values are True.
|
||||
"""
|
||||
if os.environ.get(key, "") in {"", "0"}:
|
||||
return False
|
||||
else:
|
||||
return True
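# A minimal usage sketch, assuming the _bool_env helper above; the variable
# names and values are illustrative only.
import os

os.environ["JUPYTERHUB_DEBUG"] = "1"
os.environ["JUPYTERHUB_DISABLE_USER_CONFIG"] = "0"
assert _bool_env("JUPYTERHUB_DEBUG") is True                  # "1" -> True
assert _bool_env("JUPYTERHUB_DISABLE_USER_CONFIG") is False   # "0" -> False
assert _bool_env("SOME_UNSET_EXAMPLE_VAR") is False           # unset -> False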
|
||||
|
||||
|
||||
# Authenticate requests with the Hub
|
||||
|
||||
|
||||
class HubAuthenticatedHandler(HubOAuthenticated):
|
||||
"""Class we are going to patch-in for authentication with the Hub"""
|
||||
|
||||
@property
|
||||
def allow_admin(self):
|
||||
return self.settings.get(
|
||||
'admin_access', os.getenv('JUPYTERHUB_ADMIN_ACCESS') or False
|
||||
)
|
||||
|
||||
@property
|
||||
def hub_auth(self):
|
||||
return self.settings['hub_auth']
|
||||
|
||||
@property
|
||||
def hub_users(self):
|
||||
return {self.settings['user']}
|
||||
|
||||
@property
|
||||
def hub_groups(self):
|
||||
if self.settings['group']:
|
||||
return {self.settings['group']}
|
||||
return set()
|
||||
|
||||
|
||||
class JupyterHubLoginHandlerMixin:
|
||||
"""LoginHandler that hooks up Hub authentication"""
|
||||
|
||||
@staticmethod
|
||||
def login_available(settings):
|
||||
return True
|
||||
|
||||
@staticmethod
|
||||
def is_token_authenticated(handler):
|
||||
"""Is the request token-authenticated?"""
|
||||
if getattr(handler, '_cached_hub_user', None) is None:
|
||||
# ensure get_user has been called, so we know if we're token-authenticated
|
||||
handler.get_current_user()
|
||||
return getattr(handler, '_token_authenticated', False)
|
||||
|
||||
@staticmethod
|
||||
def get_user(handler):
|
||||
"""alternative get_current_user to query the Hub
|
||||
|
||||
This shouldn't be called anymore because HubAuthenticatedHandler
|
||||
should have already overridden get_current_user().
|
||||
|
||||
Kept here to protect against the uncommon circumstance of multiple
BaseHandler classes being in use, where one of them could otherwise miss auth.
|
||||
"""
|
||||
if HubAuthenticatedHandler not in handler.__class__.mro():
|
||||
warnings.warn(
|
||||
f"Expected to see HubAuthenticatedHandler in {handler.__class__}.mro(),"
|
||||
" patching in at call time. Hub authentication is still applied.",
|
||||
RuntimeWarning,
|
||||
stacklevel=2,
|
||||
)
|
||||
# patch HubAuthenticated into the instance
|
||||
handler.__class__ = type(
|
||||
handler.__class__.__name__,
|
||||
(HubAuthenticatedHandler, handler.__class__),
|
||||
{},
|
||||
)
|
||||
# patch into the class itself so this doesn't happen again for the same class
|
||||
patch_base_handler(handler.__class__)
|
||||
return handler.get_current_user()
|
||||
|
||||
@classmethod
|
||||
def validate_security(cls, app, ssl_options=None):
|
||||
"""Prevent warnings about security from base class"""
|
||||
return
|
||||
|
||||
|
||||
class JupyterHubLogoutHandlerMixin:
|
||||
def get(self):
|
||||
self.settings['hub_auth'].clear_cookie(self)
|
||||
self.redirect(
|
||||
self.settings['hub_host']
|
||||
+ url_path_join(self.settings['hub_prefix'], 'logout')
|
||||
)
|
||||
|
||||
|
||||
class OAuthCallbackHandlerMixin(HubOAuthCallbackHandler):
|
||||
"""Mixin IPythonHandler to get the right error pages, etc."""
|
||||
|
||||
@property
|
||||
def hub_auth(self):
|
||||
return self.settings['hub_auth']
|
||||
|
||||
|
||||
# register new hub related command-line aliases
|
||||
aliases = {
|
||||
'user': 'SingleUserNotebookApp.user',
|
||||
'group': 'SingleUserNotebookApp.group',
|
||||
'hub-prefix': 'SingleUserNotebookApp.hub_prefix',
|
||||
'hub-host': 'SingleUserNotebookApp.hub_host',
|
||||
'hub-api-url': 'SingleUserNotebookApp.hub_api_url',
|
||||
'base-url': 'SingleUserNotebookApp.base_url',
|
||||
}
|
||||
flags = {
|
||||
'disable-user-config': (
|
||||
{'SingleUserNotebookApp': {'disable_user_config': True}},
|
||||
"Disable user-controlled configuration of the notebook server.",
|
||||
)
|
||||
}
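# A hypothetical invocation showing how the aliases and flags above are used;
# the exact option values are illustrative, not taken from the source:
#
#   jupyterhub-singleuser --user=alice --group=researchers \
#       --hub-api-url=http://127.0.0.1:8081/hub/api --disable-user-config
#
# which maps onto SingleUserNotebookApp.user, .group, .hub_api_url and
# SingleUserNotebookApp.disable_user_config respectively.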
|
||||
|
||||
|
||||
page_template = """
|
||||
{% extends "templates/page.html" %}
|
||||
|
||||
{% block header_buttons %}
|
||||
{{super()}}
|
||||
|
||||
<span>
|
||||
<a href='{{hub_control_panel_url}}'
|
||||
class='btn btn-default btn-sm navbar-btn pull-right'
|
||||
style='margin-right: 4px; margin-left: 2px;'>
|
||||
Control Panel
|
||||
</a>
|
||||
</span>
|
||||
{% endblock %}
|
||||
|
||||
{% block logo %}
|
||||
<img src='{{logo_url}}' alt='Jupyter Notebook'/>
|
||||
{% endblock logo %}
|
||||
|
||||
{% block script %}
|
||||
{{ super() }}
|
||||
<script type='text/javascript'>
|
||||
function _remove_redirects_param() {
|
||||
// remove ?redirects= param from URL so that
|
||||
// successful page loads don't increment the redirect loop counter
|
||||
if (window.location.search.length <= 1) {
|
||||
return;
|
||||
}
|
||||
var search_parameters = window.location.search.slice(1).split('&');
|
||||
for (var i = 0; i < search_parameters.length; i++) {
|
||||
if (search_parameters[i].split('=')[0] === 'redirects') {
|
||||
// remove 'redirects' from the search parameters
|
||||
search_parameters.splice(i, 1);
|
||||
var new_search = '';
|
||||
if (search_parameters.length) {
|
||||
new_search = '?' + search_parameters.join('&');
|
||||
}
|
||||
var new_url = window.location.origin +
|
||||
window.location.pathname +
|
||||
new_search +
|
||||
window.location.hash;
|
||||
window.history.replaceState({}, "", new_url);
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
_remove_redirects_param();
|
||||
</script>
|
||||
{% endblock script %}
|
||||
"""
|
||||
|
||||
|
||||
def _exclude_home(path_list):
|
||||
"""Filter out any entries in a path list that are in my home directory.
|
||||
|
||||
Used to disable per-user configuration.
|
||||
"""
|
||||
home = os.path.expanduser('~')
|
||||
for p in path_list:
|
||||
if not p.startswith(home):
|
||||
yield p
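# A minimal sketch of _exclude_home, as used when disable_user_config filters
# config paths; the concrete paths below are hypothetical.
import os

paths = [
    "/etc/jupyter",
    os.path.join(os.path.expanduser("~"), ".jupyter"),
    "/usr/local/etc/jupyter",
]
print(list(_exclude_home(paths)))  # -> ['/etc/jupyter', '/usr/local/etc/jupyter']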
|
||||
|
||||
|
||||
class SingleUserNotebookAppMixin(Configurable):
|
||||
"""A Subclass of the regular NotebookApp that is aware of the parent multiuser context."""
|
||||
|
||||
description = dedent(
|
||||
"""
|
||||
Single-user server for JupyterHub. Extends the Jupyter Notebook server.
|
||||
|
||||
Meant to be invoked by JupyterHub Spawners, not directly.
|
||||
"""
|
||||
)
|
||||
|
||||
examples = ""
|
||||
subcommands = {}
|
||||
version = __version__
|
||||
|
||||
# must be set in mixin subclass
|
||||
# make_singleuser_app sets these
|
||||
# aliases = aliases
|
||||
# flags = flags
|
||||
# login_handler_class = JupyterHubLoginHandler
|
||||
# logout_handler_class = JupyterHubLogoutHandler
|
||||
# oauth_callback_handler_class = OAuthCallbackHandler
|
||||
# classes = NotebookApp.classes + [HubOAuth]
|
||||
|
||||
# disable single-user app's localhost checking
|
||||
allow_remote_access = True
|
||||
|
||||
# don't store cookie secrets
|
||||
cookie_secret_file = ''
|
||||
# always generate a new cookie secret on launch
|
||||
# ensures that each spawn clears any cookies from previous session,
|
||||
# triggering OAuth again
|
||||
cookie_secret = Bytes()
|
||||
|
||||
def _cookie_secret_default(self):
|
||||
return secrets.token_bytes(32)
|
||||
|
||||
user = CUnicode().tag(config=True)
|
||||
group = CUnicode().tag(config=True)
|
||||
|
||||
@default('user')
|
||||
def _default_user(self):
|
||||
return os.environ.get('JUPYTERHUB_USER') or ''
|
||||
|
||||
@default('group')
|
||||
def _default_group(self):
|
||||
return os.environ.get('JUPYTERHUB_GROUP') or ''
|
||||
|
||||
@observe('user')
|
||||
def _user_changed(self, change):
|
||||
self.log.name = change.new
|
||||
|
||||
@default("default_url")
|
||||
def _default_url(self):
|
||||
return os.environ.get("JUPYTERHUB_DEFAULT_URL", "/tree/")
|
||||
|
||||
hub_host = Unicode().tag(config=True)
|
||||
|
||||
hub_prefix = Unicode('/hub/').tag(config=True)
|
||||
|
||||
@default('keyfile')
|
||||
def _keyfile_default(self):
|
||||
return os.environ.get('JUPYTERHUB_SSL_KEYFILE') or ''
|
||||
|
||||
@default('certfile')
|
||||
def _certfile_default(self):
|
||||
return os.environ.get('JUPYTERHUB_SSL_CERTFILE') or ''
|
||||
|
||||
@default('client_ca')
|
||||
def _client_ca_default(self):
|
||||
return os.environ.get('JUPYTERHUB_SSL_CLIENT_CA') or ''
|
||||
|
||||
@default('hub_prefix')
|
||||
def _hub_prefix_default(self):
|
||||
base_url = os.environ.get('JUPYTERHUB_BASE_URL') or '/'
|
||||
return base_url + 'hub/'
|
||||
|
||||
hub_api_url = Unicode().tag(config=True)
|
||||
|
||||
@default('hub_api_url')
|
||||
def _hub_api_url_default(self):
|
||||
return os.environ.get('JUPYTERHUB_API_URL') or 'http://127.0.0.1:8081/hub/api'
|
||||
|
||||
# defaults for some configurables that may come from service env variables:
|
||||
@default('base_url')
|
||||
def _base_url_default(self):
|
||||
return os.environ.get('JUPYTERHUB_SERVICE_PREFIX') or '/'
|
||||
|
||||
# Note: this may be removed if notebook module is >= 5.0.0b1
|
||||
@validate('base_url')
|
||||
def _validate_base_url(self, proposal):
|
||||
"""ensure base_url starts and ends with /"""
|
||||
value = proposal.value
|
||||
if not value.startswith('/'):
|
||||
value = '/' + value
|
||||
if not value.endswith('/'):
|
||||
value = value + '/'
|
||||
return value
|
||||
|
||||
@default('port')
|
||||
def _port_default(self):
|
||||
if os.environ.get('JUPYTERHUB_SERVICE_URL'):
|
||||
url = urlparse(os.environ['JUPYTERHUB_SERVICE_URL'])
|
||||
if url.port:
|
||||
return url.port
|
||||
elif url.scheme == 'http':
|
||||
return 80
|
||||
elif url.scheme == 'https':
|
||||
return 443
|
||||
return 8888
|
||||
|
||||
@default('ip')
|
||||
def _ip_default(self):
|
||||
if os.environ.get('JUPYTERHUB_SERVICE_URL'):
|
||||
url = urlparse(os.environ['JUPYTERHUB_SERVICE_URL'])
|
||||
if url.hostname:
|
||||
return url.hostname
|
||||
return '127.0.0.1'
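# Sketch of how the ip/port defaults above are derived (hypothetical value):
# the Spawner provides JUPYTERHUB_SERVICE_URL; without an explicit port the
# scheme decides (http -> 80, https -> 443), and with the variable unset the
# app falls back to 127.0.0.1:8888.
from urllib.parse import urlparse

url = urlparse("http://127.0.0.1:54321/user/alice/")
print(url.hostname, url.port)  # -> 127.0.0.1 54321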
|
||||
|
||||
# disable some single-user configurables
|
||||
token = ''
|
||||
open_browser = False
|
||||
quit_button = False
|
||||
trust_xheaders = True
|
||||
|
||||
port_retries = (
|
||||
0 # disable port-retries, since the Spawner will tell us what port to use
|
||||
)
|
||||
|
||||
disable_user_config = Bool(
|
||||
False,
|
||||
help="""Disable user configuration of single-user server.
|
||||
|
||||
Prevents user-writable files that normally configure the single-user server
|
||||
from being loaded, ensuring admins have full control of configuration.
|
||||
""",
|
||||
).tag(config=True)
|
||||
|
||||
@default("disable_user_config")
|
||||
def _default_disable_user_config(self):
|
||||
return _bool_env("JUPYTERHUB_DISABLE_USER_CONFIG")
|
||||
|
||||
@default("root_dir")
|
||||
def _default_root_dir(self):
|
||||
if os.environ.get("JUPYTERHUB_ROOT_DIR"):
|
||||
proposal = {"value": os.environ["JUPYTERHUB_ROOT_DIR"]}
|
||||
# explicitly call validator, not called on default values
|
||||
return self._notebook_dir_validate(proposal)
|
||||
else:
|
||||
return os.getcwd()
|
||||
|
||||
# notebook_dir is used by the classic notebook server
|
||||
# root_dir is the future in jupyter server
|
||||
@default("notebook_dir")
|
||||
def _default_notebook_dir(self):
|
||||
return self._default_root_dir()
|
||||
|
||||
@validate("notebook_dir", "root_dir")
|
||||
def _notebook_dir_validate(self, proposal):
|
||||
value = os.path.expanduser(proposal['value'])
|
||||
# Strip any trailing slashes
|
||||
# *except* if it's root
|
||||
_, path = os.path.splitdrive(value)
|
||||
if path == os.sep:
|
||||
return value
|
||||
value = value.rstrip(os.sep)
|
||||
if not os.path.isabs(value):
|
||||
# If we receive a non-absolute path, make it absolute.
|
||||
value = os.path.abspath(value)
|
||||
if not os.path.isdir(value):
|
||||
raise TraitError("No such notebook dir: %r" % value)
|
||||
return value
|
||||
|
||||
@default('log_level')
|
||||
def _log_level_default(self):
|
||||
if _bool_env("JUPYTERHUB_DEBUG"):
|
||||
return logging.DEBUG
|
||||
else:
|
||||
return logging.INFO
|
||||
|
||||
@default('log_datefmt')
|
||||
def _log_datefmt_default(self):
|
||||
"""Exclude date from default date format"""
|
||||
return "%Y-%m-%d %H:%M:%S"
|
||||
|
||||
@default('log_format')
|
||||
def _log_format_default(self):
|
||||
"""override default log format to include time"""
|
||||
return "%(color)s[%(levelname)1.1s %(asctime)s.%(msecs).03d %(name)s %(module)s:%(lineno)d]%(end_color)s %(message)s"
|
||||
|
||||
def _confirm_exit(self):
|
||||
# disable the exit confirmation for background notebook processes
|
||||
self.io_loop.add_callback_from_signal(self.io_loop.stop)
|
||||
|
||||
def migrate_config(self):
|
||||
if self.disable_user_config:
|
||||
# disable config-migration when user config is disabled
|
||||
return
|
||||
else:
|
||||
super().migrate_config()
|
||||
|
||||
@property
|
||||
def config_file_paths(self):
|
||||
path = super().config_file_paths
|
||||
|
||||
if self.disable_user_config:
|
||||
# filter out user-writable config dirs if user config is disabled
|
||||
path = list(_exclude_home(path))
|
||||
return path
|
||||
|
||||
@property
|
||||
def nbextensions_path(self):
|
||||
path = super().nbextensions_path
|
||||
|
||||
if self.disable_user_config:
|
||||
path = list(_exclude_home(path))
|
||||
return path
|
||||
|
||||
@validate('static_custom_path')
|
||||
def _validate_static_custom_path(self, proposal):
|
||||
path = proposal['value']
|
||||
if self.disable_user_config:
|
||||
path = list(_exclude_home(path))
|
||||
return path
|
||||
|
||||
# create dynamic default http client,
|
||||
# configured with any relevant ssl config
|
||||
hub_http_client = Any()
|
||||
|
||||
@default('hub_http_client')
|
||||
def _default_client(self):
|
||||
ssl_context = make_ssl_context(
|
||||
self.keyfile, self.certfile, cafile=self.client_ca
|
||||
)
|
||||
AsyncHTTPClient.configure(None, defaults={"ssl_options": ssl_context})
|
||||
return AsyncHTTPClient()
|
||||
|
||||
async def check_hub_version(self):
|
||||
"""Test a connection to my Hub
|
||||
|
||||
- exit if I can't connect at all
|
||||
- check version and warn on sufficient mismatch
|
||||
"""
|
||||
client = self.hub_http_client
|
||||
RETRIES = 5
|
||||
for i in range(1, RETRIES + 1):
|
||||
try:
|
||||
resp = await client.fetch(self.hub_api_url)
|
||||
except Exception:
|
||||
self.log.exception(
|
||||
"Failed to connect to my Hub at %s (attempt %i/%i). Is it running?",
|
||||
self.hub_api_url,
|
||||
i,
|
||||
RETRIES,
|
||||
)
|
||||
await asyncio.sleep(min(2**i, 16))
|
||||
else:
|
||||
break
|
||||
else:
|
||||
self.exit(1)
|
||||
|
||||
hub_version = resp.headers.get('X-JupyterHub-Version')
|
||||
_check_version(hub_version, __version__, self.log)
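# Note: the retry delays above grow exponentially and are capped at 16s,
# i.e. for RETRIES = 5 the waits are 2, 4, 8, 16, 16 seconds.
print([min(2**i, 16) for i in range(1, 6)])  # -> [2, 4, 8, 16, 16]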
|
||||
|
||||
server_name = Unicode()
|
||||
|
||||
@default('server_name')
|
||||
def _server_name_default(self):
|
||||
return os.environ.get('JUPYTERHUB_SERVER_NAME', '')
|
||||
|
||||
hub_activity_url = Unicode(
|
||||
config=True, help="URL for sending JupyterHub activity updates"
|
||||
)
|
||||
|
||||
@default('hub_activity_url')
|
||||
def _default_activity_url(self):
|
||||
return os.environ.get('JUPYTERHUB_ACTIVITY_URL', '')
|
||||
|
||||
hub_activity_interval = Integer(
|
||||
300,
|
||||
config=True,
|
||||
help="""
|
||||
Interval (in seconds) on which to update the Hub
|
||||
with our latest activity.
|
||||
""",
|
||||
)
|
||||
|
||||
@default('hub_activity_interval')
|
||||
def _default_activity_interval(self):
|
||||
env_value = os.environ.get('JUPYTERHUB_ACTIVITY_INTERVAL')
|
||||
if env_value:
|
||||
return int(env_value)
|
||||
else:
|
||||
return 300
|
||||
|
||||
_last_activity_sent = Any(allow_none=True)
|
||||
|
||||
async def notify_activity(self):
|
||||
"""Notify jupyterhub of activity"""
|
||||
client = self.hub_http_client
|
||||
last_activity = self.web_app.last_activity()
|
||||
if not last_activity:
|
||||
self.log.debug("No activity to send to the Hub")
|
||||
return
|
||||
if last_activity:
|
||||
# protect against mixed timezone comparisons
|
||||
if not last_activity.tzinfo:
|
||||
# assume naive timestamps are utc
|
||||
self.log.warning("last activity is using naive timestamps")
|
||||
last_activity = last_activity.replace(tzinfo=timezone.utc)
|
||||
|
||||
if self._last_activity_sent and last_activity < self._last_activity_sent:
|
||||
self.log.debug("No activity since %s", self._last_activity_sent)
|
||||
return
|
||||
|
||||
last_activity_timestamp = isoformat(last_activity)
|
||||
|
||||
async def notify():
|
||||
self.log.debug("Notifying Hub of activity %s", last_activity_timestamp)
|
||||
req = HTTPRequest(
|
||||
url=self.hub_activity_url,
|
||||
method='POST',
|
||||
headers={
|
||||
"Authorization": f"token {self.hub_auth.api_token}",
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
body=json.dumps(
|
||||
{
|
||||
'servers': {
|
||||
self.server_name: {'last_activity': last_activity_timestamp}
|
||||
},
|
||||
'last_activity': last_activity_timestamp,
|
||||
}
|
||||
),
|
||||
)
|
||||
try:
|
||||
await client.fetch(req)
|
||||
except Exception:
|
||||
self.log.exception("Error notifying Hub of activity")
|
||||
return False
|
||||
else:
|
||||
return True
|
||||
|
||||
await exponential_backoff(
|
||||
notify,
|
||||
fail_message="Failed to notify Hub of activity",
|
||||
start_wait=1,
|
||||
max_wait=15,
|
||||
timeout=60,
|
||||
)
|
||||
self._last_activity_sent = last_activity
|
||||
|
||||
async def keep_activity_updated(self):
|
||||
if not self.hub_activity_url or not self.hub_activity_interval:
|
||||
self.log.warning("Activity events disabled")
|
||||
return
|
||||
self.log.info(
|
||||
"Updating Hub with activity every %s seconds", self.hub_activity_interval
|
||||
)
|
||||
while True:
|
||||
try:
|
||||
await self.notify_activity()
|
||||
except Exception as e:
|
||||
self.log.exception("Error notifying Hub of activity")
|
||||
# add 20% jitter to the interval to avoid alignment
|
||||
# of lots of requests from user servers
|
||||
t = self.hub_activity_interval * (1 + 0.2 * (random.random() - 0.5))
|
||||
await asyncio.sleep(t)
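# Note: the jitter factor (1 + 0.2 * (random.random() - 0.5)) lies in
# [0.9, 1.1), i.e. roughly +/-10% around hub_activity_interval, which spreads
# activity reports from many single-user servers over time. A quick check
# with an assumed interval of 300s:
import random

interval = 300
samples = [interval * (1 + 0.2 * (random.random() - 0.5)) for _ in range(1000)]
assert all(270 <= s < 330 for s in samples)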
|
||||
|
||||
def initialize(self, argv=None):
|
||||
# disable trash by default
|
||||
# this can be re-enabled by config
|
||||
self.config.FileContentsManager.delete_to_trash = False
|
||||
return super().initialize(argv)
|
||||
|
||||
def start(self):
|
||||
self.log.info("Starting jupyterhub-singleuser server version %s", __version__)
|
||||
# start by hitting Hub to check version
|
||||
ioloop.IOLoop.current().run_sync(self.check_hub_version)
|
||||
ioloop.IOLoop.current().add_callback(self.keep_activity_updated)
|
||||
super().start()
|
||||
|
||||
def init_hub_auth(self):
|
||||
api_token = None
|
||||
if os.getenv('JPY_API_TOKEN'):
|
||||
# Deprecated env variable (as of 0.7.2)
|
||||
api_token = os.environ['JPY_API_TOKEN']
|
||||
if os.getenv('JUPYTERHUB_API_TOKEN'):
|
||||
api_token = os.environ['JUPYTERHUB_API_TOKEN']
|
||||
|
||||
if not api_token:
|
||||
self.exit(
|
||||
"JUPYTERHUB_API_TOKEN env is required to run jupyterhub-singleuser. Did you launch it manually?"
|
||||
)
|
||||
self.hub_auth = HubOAuth(
|
||||
parent=self,
|
||||
api_token=api_token,
|
||||
api_url=self.hub_api_url,
|
||||
hub_prefix=self.hub_prefix,
|
||||
base_url=self.base_url,
|
||||
keyfile=self.keyfile,
|
||||
certfile=self.certfile,
|
||||
client_ca=self.client_ca,
|
||||
)
|
||||
# smoke check
|
||||
if not self.hub_auth.oauth_client_id:
|
||||
raise ValueError("Missing OAuth client ID")
|
||||
|
||||
def init_webapp(self):
|
||||
# load the hub-related settings into the tornado settings dict
|
||||
self.init_hub_auth()
|
||||
s = self.tornado_settings
|
||||
s['log_function'] = log_request
|
||||
s['user'] = self.user
|
||||
s['group'] = self.group
|
||||
s['hub_prefix'] = self.hub_prefix
|
||||
s['hub_host'] = self.hub_host
|
||||
s['hub_auth'] = self.hub_auth
|
||||
csp_report_uri = s['csp_report_uri'] = self.hub_host + url_path_join(
|
||||
self.hub_prefix, 'security/csp-report'
|
||||
)
|
||||
headers = s.setdefault('headers', {})
|
||||
headers['X-JupyterHub-Version'] = __version__
|
||||
# set CSP header directly to workaround bugs in jupyter/notebook 5.0
|
||||
headers.setdefault(
|
||||
'Content-Security-Policy',
|
||||
';'.join(["frame-ancestors 'self'", "report-uri " + csp_report_uri]),
|
||||
)
|
||||
super().init_webapp()
|
||||
|
||||
# add OAuth callback
|
||||
self.web_app.add_handlers(
|
||||
r".*$",
|
||||
[
|
||||
(
|
||||
urlparse(self.hub_auth.oauth_redirect_uri).path,
|
||||
self.oauth_callback_handler_class,
|
||||
)
|
||||
],
|
||||
)
|
||||
|
||||
# apply X-JupyterHub-Version to *all* request handlers (even redirects)
|
||||
self.patch_default_headers()
|
||||
self.patch_templates()
|
||||
|
||||
def patch_default_headers(self):
|
||||
if hasattr(RequestHandler, '_orig_set_default_headers'):
|
||||
return
|
||||
RequestHandler._orig_set_default_headers = RequestHandler.set_default_headers
|
||||
|
||||
def set_jupyterhub_header(self):
|
||||
self._orig_set_default_headers()
|
||||
self.set_header('X-JupyterHub-Version', __version__)
|
||||
|
||||
RequestHandler.set_default_headers = set_jupyterhub_header
|
||||
|
||||
def patch_templates(self):
|
||||
"""Patch page templates to add Hub-related buttons"""
|
||||
|
||||
self.jinja_template_vars['logo_url'] = self.hub_host + url_path_join(
|
||||
self.hub_prefix, 'logo'
|
||||
)
|
||||
self.jinja_template_vars['hub_host'] = self.hub_host
|
||||
self.jinja_template_vars['hub_prefix'] = self.hub_prefix
|
||||
env = self.web_app.settings['jinja2_env']
|
||||
|
||||
env.globals['hub_control_panel_url'] = self.hub_host + url_path_join(
|
||||
self.hub_prefix, 'home'
|
||||
)
|
||||
|
||||
# patch jinja env loading to modify page template
|
||||
def get_page(name):
|
||||
if name == 'page.html':
|
||||
return page_template
|
||||
|
||||
orig_loader = env.loader
|
||||
env.loader = ChoiceLoader([FunctionLoader(get_page), orig_loader])
|
||||
|
||||
def load_server_extensions(self):
|
||||
# Loading LabApp sets $JUPYTERHUB_API_TOKEN on load, which is incorrect
|
||||
r = super().load_server_extensions()
|
||||
# clear the token in PageConfig at this step
|
||||
# so that cookie auth is used
|
||||
# FIXME: in the future,
|
||||
# it would probably make sense to set page_config.token to the token
|
||||
# from the current request.
|
||||
if 'page_config_data' in self.web_app.settings:
|
||||
self.web_app.settings['page_config_data']['token'] = ''
|
||||
return r
|
||||
|
||||
|
||||
def detect_base_package(App):
|
||||
"""Detect the base package for an App class
|
||||
|
||||
Will return 'notebook' or 'jupyter_server'
|
||||
based on which package App subclasses from.
|
||||
|
||||
Will return None if neither is identified (e.g. fork package, or duck-typing).
|
||||
"""
|
||||
# guess notebook or jupyter_server based on App class inheritance
|
||||
for cls in App.mro():
|
||||
pkg = cls.__module__.split(".", 1)[0]
|
||||
if pkg in {"notebook", "jupyter_server"}:
|
||||
return pkg
|
||||
return None
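# A minimal sketch of detect_base_package; the class below is a stand-in for
# a real application class, not an actual app.
class _FakeServerApp:
    __module__ = "jupyter_server.serverapp"

print(detect_base_package(_FakeServerApp))  # -> 'jupyter_server'
print(detect_base_package(object))          # -> None (only 'builtins' in the MRO)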
|
||||
|
||||
|
||||
def _nice_cls_repr(cls):
|
||||
"""Nice repr of classes, e.g. 'module.submod.Class'
|
||||
|
||||
Also accepts tuples of classes
|
||||
"""
|
||||
return f"{cls.__module__}.{cls.__name__}"
|
||||
|
||||
|
||||
def patch_base_handler(BaseHandler, log=None):
|
||||
"""Patch HubAuthenticated into a base handler class
|
||||
|
||||
so anything inheriting from BaseHandler uses Hub authentication.
|
||||
This works *even after* subclasses have imported and inherited from BaseHandler.
|
||||
|
||||
.. versionadded:: 1.5
|
||||
Made available as an importable utility
|
||||
"""
|
||||
if log is None:
|
||||
log = logging.getLogger()
|
||||
|
||||
if HubAuthenticatedHandler not in BaseHandler.__bases__:
|
||||
new_bases = (HubAuthenticatedHandler,) + BaseHandler.__bases__
|
||||
log.info(
|
||||
"Patching auth into {mod}.{name}({old_bases}) -> {name}({new_bases})".format(
|
||||
mod=BaseHandler.__module__,
|
||||
name=BaseHandler.__name__,
|
||||
old_bases=', '.join(
|
||||
_nice_cls_repr(cls) for cls in BaseHandler.__bases__
|
||||
),
|
||||
new_bases=', '.join(_nice_cls_repr(cls) for cls in new_bases),
|
||||
)
|
||||
)
|
||||
BaseHandler.__bases__ = new_bases
|
||||
# We've now inserted our class as a parent of BaseHandler,
|
||||
# but we also need to ensure BaseHandler *itself* doesn't
|
||||
# override the public tornado API methods we have inserted.
|
||||
# If they are defined in BaseHandler, explicitly replace them with our methods.
|
||||
for name in ("get_current_user", "get_login_url"):
|
||||
if name in BaseHandler.__dict__:
|
||||
log.debug(
|
||||
f"Overriding {BaseHandler}.{name} with HubAuthenticatedHandler.{name}"
|
||||
)
|
||||
method = getattr(HubAuthenticatedHandler, name)
|
||||
setattr(BaseHandler, name, method)
|
||||
return BaseHandler
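# A usage sketch for patch_base_handler, assuming a JupyterHub-provisioned
# environment so the Hub auth settings are available at request time; the
# handler class below is hypothetical.
from tornado.web import RequestHandler

class MyExtensionBaseHandler(RequestHandler):
    """A hypothetical base handler for some server extension."""

patch_base_handler(MyExtensionBaseHandler)
# MyExtensionBaseHandler and everything inheriting from it now resolves the
# current user through HubAuthenticatedHandler.get_current_user().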
|
||||
|
||||
|
||||
def _patch_app_base_handlers(app):
|
||||
"""Patch Hub Authentication into the base handlers of an app
|
||||
|
||||
Patches HubAuthenticatedHandler into:
|
||||
|
||||
- App.base_handler_class (if defined)
|
||||
- jupyter_server's JupyterHandler (if already imported)
|
||||
- notebook's IPythonHandler (if already imported)
|
||||
"""
|
||||
BaseHandler = app_base_handler = getattr(app, "base_handler_class", None)
|
||||
|
||||
base_handlers = []
|
||||
if BaseHandler is not None:
|
||||
base_handlers.append(BaseHandler)
|
||||
|
||||
# patch jupyter_server and notebook handlers if they have been imported
|
||||
for base_handler_name in [
|
||||
"jupyter_server.base.handlers.JupyterHandler",
|
||||
"notebook.base.handlers.IPythonHandler",
|
||||
]:
|
||||
modname, _ = base_handler_name.rsplit(".", 1)
|
||||
if modname in sys.modules:
|
||||
base_handlers.append(import_item(base_handler_name))
|
||||
|
||||
if not base_handlers:
|
||||
pkg = detect_base_package(app.__class__)
|
||||
if pkg == "jupyter_server":
|
||||
BaseHandler = import_item("jupyter_server.base.handlers.JupyterHandler")
|
||||
elif pkg == "notebook":
|
||||
BaseHandler = import_item("notebook.base.handlers.IPythonHandler")
|
||||
else:
|
||||
raise ValueError(
|
||||
f"{app.__class__.__name__}.base_handler_class must be defined"
|
||||
)
|
||||
base_handlers.append(BaseHandler)
|
||||
|
||||
# patch-in HubAuthenticatedHandler to base handler classes
|
||||
for BaseHandler in base_handlers:
|
||||
patch_base_handler(BaseHandler)
|
||||
|
||||
# return the first entry
|
||||
return base_handlers[0]
|
||||
|
||||
|
||||
def make_singleuser_app(App):
|
||||
"""Make and return a singleuser notebook app
|
||||
|
||||
Given an existing notebook or jupyter_server Application class,
mixes in JupyterHub auth.
|
||||
|
||||
Instances of App must have the following attributes defining classes:
|
||||
|
||||
- .login_handler_class
|
||||
- .logout_handler_class
|
||||
- .base_handler_class (only required if not a subclass of the default app
|
||||
in jupyter_server or notebook)
|
||||
|
||||
App should be a subclass of `notebook.notebookapp.NotebookApp`
|
||||
or `jupyter_server.serverapp.ServerApp`.
|
||||
"""
|
||||
|
||||
empty_parent_app = App()
|
||||
log = empty_parent_app.log
|
||||
|
||||
# detect base classes
|
||||
LoginHandler = empty_parent_app.login_handler_class
|
||||
LogoutHandler = empty_parent_app.logout_handler_class
|
||||
BaseHandler = _patch_app_base_handlers(empty_parent_app)
|
||||
|
||||
# create Handler classes from mixins + bases
|
||||
class JupyterHubLoginHandler(JupyterHubLoginHandlerMixin, LoginHandler):
|
||||
pass
|
||||
|
||||
class JupyterHubLogoutHandler(JupyterHubLogoutHandlerMixin, LogoutHandler):
|
||||
pass
|
||||
|
||||
class OAuthCallbackHandler(OAuthCallbackHandlerMixin, BaseHandler):
|
||||
pass
|
||||
|
||||
# create merged aliases & flags
|
||||
merged_aliases = {}
|
||||
merged_aliases.update(empty_parent_app.aliases or {})
|
||||
merged_aliases.update(aliases)
|
||||
|
||||
merged_flags = {}
|
||||
merged_flags.update(empty_parent_app.flags or {})
|
||||
merged_flags.update(flags)
|
||||
# create mixed-in App class, bringing it all together
|
||||
class SingleUserNotebookApp(SingleUserNotebookAppMixin, App):
|
||||
aliases = merged_aliases
|
||||
flags = merged_flags
|
||||
classes = empty_parent_app.classes + [HubOAuth]
|
||||
|
||||
login_handler_class = JupyterHubLoginHandler
|
||||
logout_handler_class = JupyterHubLogoutHandler
|
||||
oauth_callback_handler_class = OAuthCallbackHandler
|
||||
|
||||
def initialize(self, *args, **kwargs):
|
||||
result = super().initialize(*args, **kwargs)
|
||||
# run patch again after initialize, so extensions have already been loaded
|
||||
# probably a no-op most of the time
|
||||
_patch_app_base_handlers(self)
|
||||
return result
|
||||
|
||||
return SingleUserNotebookApp
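# A usage sketch following the docstring above; importing ServerApp is an
# assumption about the deployment (jupyterhub's own singleuser entrypoint
# performs an equivalent wiring).
from jupyter_server.serverapp import ServerApp

SingleUserServerApp = make_singleuser_app(ServerApp)

if __name__ == "__main__":
    SingleUserServerApp.launch_instance()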
|
@@ -1,394 +0,0 @@
|
||||
"""Test the JupyterHub entry point"""
|
||||
import binascii
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
import time
|
||||
from subprocess import check_output
|
||||
from subprocess import PIPE
|
||||
from subprocess import Popen
|
||||
from tempfile import NamedTemporaryFile
|
||||
from tempfile import TemporaryDirectory
|
||||
from unittest.mock import patch
|
||||
|
||||
import pytest
|
||||
import traitlets
|
||||
from distutils.version import LooseVersion as V
|
||||
from traitlets.config import Config
|
||||
|
||||
from .. import orm
|
||||
from ..app import COOKIE_SECRET_BYTES
|
||||
from ..app import JupyterHub
|
||||
from .mocking import MockHub
|
||||
from .test_api import add_user
|
||||
|
||||
|
||||
def test_help_all():
|
||||
out = check_output([sys.executable, '-m', 'jupyterhub', '--help-all']).decode(
|
||||
'utf8', 'replace'
|
||||
)
|
||||
assert '--ip' in out
|
||||
assert '--JupyterHub.ip' in out
|
||||
|
||||
|
||||
@pytest.mark.skipif(V(traitlets.__version__) < V('5'), reason="requires traitlets 5")
|
||||
def test_show_config(tmpdir):
|
||||
tmpdir.chdir()
|
||||
p = Popen(
|
||||
[sys.executable, '-m', 'jupyterhub', '--show-config', '--debug'], stdout=PIPE
|
||||
)
|
||||
p.wait(timeout=10)
|
||||
out = p.stdout.read().decode('utf8', 'replace')
|
||||
assert 'log_level' in out
|
||||
|
||||
p = Popen(
|
||||
[sys.executable, '-m', 'jupyterhub', '--show-config-json', '--debug'],
|
||||
stdout=PIPE,
|
||||
)
|
||||
p.wait(timeout=10)
|
||||
out = p.stdout.read().decode('utf8', 'replace')
|
||||
config = json.loads(out)
|
||||
assert 'JupyterHub' in config
|
||||
assert config["JupyterHub"]["log_level"] == 10
|
||||
|
||||
|
||||
def test_token_app():
|
||||
cmd = [sys.executable, '-m', 'jupyterhub', 'token']
|
||||
out = check_output(cmd + ['--help-all']).decode('utf8', 'replace')
|
||||
with TemporaryDirectory() as td:
|
||||
with open(os.path.join(td, 'jupyterhub_config.py'), 'w') as f:
|
||||
f.write("c.Authenticator.admin_users={'user'}")
|
||||
out = check_output(cmd + ['user'], cwd=td).decode('utf8', 'replace').strip()
|
||||
assert re.match(r'^[a-z0-9]+$', out)
|
||||
|
||||
|
||||
def test_raise_error_on_missing_specified_config():
|
||||
"""
|
||||
Using the -f or --config flag when starting JupyterHub should require the
|
||||
file to be found and exit if it isn't.
|
||||
"""
|
||||
# Use subprocess.Popen rather than subprocess.run so we can poll the process
# ourselves and kill it after a manual timeout if jupyterhub never exits.
|
||||
process = Popen(
|
||||
[sys.executable, '-m', 'jupyterhub', '--config', 'not-available.py']
|
||||
)
|
||||
# wait impatiently for the process to exit like we want it to
|
||||
for i in range(100):
|
||||
time.sleep(0.1)
|
||||
returncode = process.poll()
|
||||
if returncode is not None:
|
||||
break
|
||||
else:
|
||||
process.kill()
|
||||
assert returncode == 1
|
||||
|
||||
|
||||
def test_generate_config():
|
||||
with NamedTemporaryFile(prefix='jupyterhub_config', suffix='.py') as tf:
|
||||
cfg_file = tf.name
|
||||
with open(cfg_file, 'w') as f:
|
||||
f.write("c.A = 5")
|
||||
p = Popen(
|
||||
[sys.executable, '-m', 'jupyterhub', '--generate-config', '-f', cfg_file],
|
||||
stdout=PIPE,
|
||||
stdin=PIPE,
|
||||
)
|
||||
out, _ = p.communicate(b'n')
|
||||
out = out.decode('utf8', 'replace')
|
||||
assert os.path.exists(cfg_file)
|
||||
with open(cfg_file) as f:
|
||||
cfg_text = f.read()
|
||||
assert cfg_text == 'c.A = 5'
|
||||
|
||||
p = Popen(
|
||||
[sys.executable, '-m', 'jupyterhub', '--generate-config', '-f', cfg_file],
|
||||
stdout=PIPE,
|
||||
stdin=PIPE,
|
||||
)
|
||||
out, _ = p.communicate(b'x\ny')
|
||||
out = out.decode('utf8', 'replace')
|
||||
assert os.path.exists(cfg_file)
|
||||
with open(cfg_file) as f:
|
||||
cfg_text = f.read()
|
||||
os.remove(cfg_file)
|
||||
assert cfg_file in out
|
||||
assert 'Spawner.cmd' in cfg_text
|
||||
assert 'Authenticator.allowed_users' in cfg_text
|
||||
|
||||
|
||||
async def test_init_tokens(request):
|
||||
with TemporaryDirectory() as td:
|
||||
db_file = os.path.join(td, 'jupyterhub.sqlite')
|
||||
tokens = {
|
||||
'super-secret-token': 'alyx',
|
||||
'also-super-secret': 'gordon',
|
||||
'boagasdfasdf': 'chell',
|
||||
}
|
||||
kwargs = {'db_url': db_file, 'api_tokens': tokens}
|
||||
ssl_enabled = getattr(request.module, "ssl_enabled", False)
|
||||
if ssl_enabled:
|
||||
kwargs['internal_certs_location'] = td
|
||||
app = MockHub(**kwargs)
|
||||
await app.initialize([])
|
||||
db = app.db
|
||||
for token, username in tokens.items():
|
||||
api_token = orm.APIToken.find(db, token)
|
||||
assert api_token is not None
|
||||
user = api_token.user
|
||||
assert user.name == username
|
||||
|
||||
# simulate second startup, reloading same tokens:
|
||||
app = MockHub(**kwargs)
|
||||
await app.initialize([])
|
||||
db = app.db
|
||||
for token, username in tokens.items():
|
||||
api_token = orm.APIToken.find(db, token)
|
||||
assert api_token is not None
|
||||
user = api_token.user
|
||||
assert user.name == username
|
||||
|
||||
# don't allow failed token insertion to create users:
|
||||
tokens['short'] = 'gman'
|
||||
app = MockHub(**kwargs)
|
||||
with pytest.raises(ValueError):
|
||||
await app.initialize([])
|
||||
assert orm.User.find(app.db, 'gman') is None
|
||||
|
||||
|
||||
def test_write_cookie_secret(tmpdir, request):
|
||||
secret_path = str(tmpdir.join('cookie_secret'))
|
||||
kwargs = {'cookie_secret_file': secret_path}
|
||||
ssl_enabled = getattr(request.module, "ssl_enabled", False)
|
||||
if ssl_enabled:
|
||||
kwargs['internal_certs_location'] = str(tmpdir)
|
||||
hub = MockHub(**kwargs)
|
||||
hub.init_secrets()
|
||||
assert os.path.exists(secret_path)
|
||||
assert os.stat(secret_path).st_mode & 0o600
|
||||
assert not os.stat(secret_path).st_mode & 0o177
|
||||
|
||||
|
||||
def test_cookie_secret_permissions(tmpdir, request):
|
||||
secret_file = tmpdir.join('cookie_secret')
|
||||
secret_path = str(secret_file)
|
||||
secret = os.urandom(COOKIE_SECRET_BYTES)
|
||||
secret_file.write(binascii.b2a_hex(secret))
|
||||
kwargs = {'cookie_secret_file': secret_path}
|
||||
ssl_enabled = getattr(request.module, "ssl_enabled", False)
|
||||
if ssl_enabled:
|
||||
kwargs['internal_certs_location'] = str(tmpdir)
|
||||
hub = MockHub(**kwargs)
|
||||
|
||||
# raise with public secret file
|
||||
os.chmod(secret_path, 0o664)
|
||||
with pytest.raises(SystemExit):
|
||||
hub.init_secrets()
|
||||
|
||||
# ok with same file, proper permissions
|
||||
os.chmod(secret_path, 0o660)
|
||||
hub.init_secrets()
|
||||
assert hub.cookie_secret == secret
|
||||
|
||||
|
||||
def test_cookie_secret_content(tmpdir, request):
|
||||
secret_file = tmpdir.join('cookie_secret')
|
||||
secret_file.write('not base 64: uñiço∂e')
|
||||
secret_path = str(secret_file)
|
||||
os.chmod(secret_path, 0o660)
|
||||
kwargs = {'cookie_secret_file': secret_path}
|
||||
ssl_enabled = getattr(request.module, "ssl_enabled", False)
|
||||
if ssl_enabled:
|
||||
kwargs['internal_certs_location'] = str(tmpdir)
|
||||
hub = MockHub(**kwargs)
|
||||
with pytest.raises(SystemExit):
|
||||
hub.init_secrets()
|
||||
|
||||
|
||||
def test_cookie_secret_env(tmpdir, request):
|
||||
kwargs = {'cookie_secret_file': str(tmpdir.join('cookie_secret'))}
|
||||
ssl_enabled = getattr(request.module, "ssl_enabled", False)
|
||||
if ssl_enabled:
|
||||
kwargs['internal_certs_location'] = str(tmpdir)
|
||||
hub = MockHub(**kwargs)
|
||||
|
||||
with patch.dict(os.environ, {'JPY_COOKIE_SECRET': 'not hex'}):
|
||||
with pytest.raises(ValueError):
|
||||
hub.init_secrets()
|
||||
|
||||
with patch.dict(os.environ, {'JPY_COOKIE_SECRET': 'abc123'}):
|
||||
hub.init_secrets()
|
||||
assert hub.cookie_secret == binascii.a2b_hex('abc123')
|
||||
assert not os.path.exists(hub.cookie_secret_file)
|
||||
|
||||
|
||||
def test_cookie_secret_string_():
|
||||
cfg = Config()
|
||||
|
||||
cfg.JupyterHub.cookie_secret = "not hex"
|
||||
with pytest.raises(ValueError):
|
||||
JupyterHub(config=cfg)
|
||||
|
||||
cfg.JupyterHub.cookie_secret = "abc123"
|
||||
app = JupyterHub(config=cfg)
|
||||
assert app.cookie_secret == binascii.a2b_hex('abc123')
|
||||
|
||||
|
||||
async def test_load_groups(tmpdir, request):
|
||||
to_load = {
|
||||
'blue': ['cyclops', 'rogue', 'wolverine'],
|
||||
'gold': ['storm', 'jean-grey', 'colossus'],
|
||||
}
|
||||
kwargs = {'load_groups': to_load}
|
||||
ssl_enabled = getattr(request.module, "ssl_enabled", False)
|
||||
if ssl_enabled:
|
||||
kwargs['internal_certs_location'] = str(tmpdir)
|
||||
hub = MockHub(**kwargs)
|
||||
hub.init_db()
|
||||
await hub.init_users()
|
||||
await hub.init_groups()
|
||||
db = hub.db
|
||||
blue = orm.Group.find(db, name='blue')
|
||||
assert blue is not None
|
||||
assert sorted(u.name for u in blue.users) == sorted(to_load['blue'])
|
||||
gold = orm.Group.find(db, name='gold')
|
||||
assert gold is not None
|
||||
assert sorted(u.name for u in gold.users) == sorted(to_load['gold'])
|
||||
|
||||
|
||||
async def test_resume_spawners(tmpdir, request):
|
||||
if not os.getenv('JUPYTERHUB_TEST_DB_URL'):
|
||||
p = patch.dict(
|
||||
os.environ,
|
||||
{
|
||||
'JUPYTERHUB_TEST_DB_URL': 'sqlite:///%s'
|
||||
% tmpdir.join('jupyterhub.sqlite')
|
||||
},
|
||||
)
|
||||
p.start()
|
||||
request.addfinalizer(p.stop)
|
||||
|
||||
async def new_hub():
|
||||
kwargs = {}
|
||||
ssl_enabled = getattr(request.module, "ssl_enabled", False)
|
||||
if ssl_enabled:
|
||||
kwargs['internal_certs_location'] = str(tmpdir)
|
||||
app = MockHub(test_clean_db=False, **kwargs)
|
||||
app.config.ConfigurableHTTPProxy.should_start = False
|
||||
app.config.ConfigurableHTTPProxy.auth_token = 'unused'
|
||||
await app.initialize([])
|
||||
return app
|
||||
|
||||
app = await new_hub()
|
||||
db = app.db
|
||||
# spawn a user's server
|
||||
name = 'kurt'
|
||||
user = add_user(db, app, name=name)
|
||||
await user.spawn()
|
||||
proc = user.spawner.proc
|
||||
assert proc is not None
|
||||
|
||||
# stop the Hub without cleaning up servers
|
||||
app.cleanup_servers = False
|
||||
app.stop()
|
||||
|
||||
# proc is still running
|
||||
assert proc.poll() is None
|
||||
|
||||
# resume Hub, should still be running
|
||||
app = await new_hub()
|
||||
db = app.db
|
||||
user = app.users[name]
|
||||
assert user.running
|
||||
assert user.spawner.server is not None
|
||||
|
||||
# stop the Hub without cleaning up servers
|
||||
app.cleanup_servers = False
|
||||
app.stop()
|
||||
|
||||
# stop the server while the Hub is down. BAMF!
|
||||
proc.terminate()
|
||||
proc.wait(timeout=10)
|
||||
assert proc.poll() is not None
|
||||
|
||||
# resume Hub, should be stopped
|
||||
app = await new_hub()
|
||||
db = app.db
|
||||
user = app.users[name]
|
||||
assert not user.running
|
||||
assert user.spawner.server is None
|
||||
assert list(db.query(orm.Server)) == []
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
'hub_config, expected',
|
||||
[
|
||||
({'ip': '0.0.0.0'}, {'bind_url': 'http://0.0.0.0:8000/'}),
|
||||
(
|
||||
{'port': 123, 'base_url': '/prefix'},
|
||||
{'bind_url': 'http://:123/prefix/', 'base_url': '/prefix/'},
|
||||
),
|
||||
({'bind_url': 'http://0.0.0.0:12345/sub'}, {'base_url': '/sub/'}),
|
||||
(
|
||||
# no config, test defaults
|
||||
{},
|
||||
{'base_url': '/', 'bind_url': 'http://:8000', 'ip': '', 'port': 8000},
|
||||
),
|
||||
],
|
||||
)
|
||||
def test_url_config(hub_config, expected):
|
||||
# construct the config object
|
||||
cfg = Config()
|
||||
for key, value in hub_config.items():
|
||||
cfg.JupyterHub[key] = value
|
||||
|
||||
# instantiate the Hub and load config
|
||||
app = JupyterHub(config=cfg)
|
||||
# validate config
|
||||
for key, value in hub_config.items():
|
||||
if key not in expected:
|
||||
assert getattr(app, key) == value
|
||||
|
||||
# validate additional properties
|
||||
for key, value in expected.items():
|
||||
assert getattr(app, key) == value
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"base_url, hub_routespec, expected_routespec, should_warn, bad_prefix",
|
||||
[
|
||||
(None, None, "/", False, False),
|
||||
("/", "/", "/", False, False),
|
||||
("/base", "/base", "/base/", False, False),
|
||||
("/", "/hub", "/hub/", True, False),
|
||||
(None, "hub/api", "/hub/api/", True, False),
|
||||
("/base", "/hub/", "/hub/", True, True),
|
||||
(None, "/hub/api/health", "/hub/api/health/", True, True),
|
||||
],
|
||||
)
|
||||
def test_hub_routespec(
|
||||
base_url, hub_routespec, expected_routespec, should_warn, bad_prefix, caplog
|
||||
):
|
||||
cfg = Config()
|
||||
if base_url:
|
||||
cfg.JupyterHub.base_url = base_url
|
||||
if hub_routespec:
|
||||
cfg.JupyterHub.hub_routespec = hub_routespec
|
||||
with caplog.at_level(logging.WARNING):
|
||||
app = JupyterHub(config=cfg, log=logging.getLogger())
|
||||
app.init_hub()
|
||||
hub = app.hub
|
||||
assert hub.routespec == expected_routespec
|
||||
|
||||
if should_warn:
|
||||
assert "custom route for Hub" in caplog.text
|
||||
assert hub_routespec in caplog.text
|
||||
else:
|
||||
assert "custom route for Hub" not in caplog.text
|
||||
|
||||
if bad_prefix:
|
||||
assert "may not receive" in caplog.text
|
||||
else:
|
||||
assert "may not receive" not in caplog.text
|
@@ -16,8 +16,8 @@ import random
|
||||
import secrets
|
||||
import sys
|
||||
import warnings
|
||||
from datetime import datetime
|
||||
from datetime import timezone
|
||||
from importlib import import_module
|
||||
from textwrap import dedent
|
||||
from urllib.parse import urlparse
|
||||
|
||||
@@ -606,34 +606,10 @@ class SingleUserNotebookAppMixin(Configurable):
|
||||
t = self.hub_activity_interval * (1 + 0.2 * (random.random() - 0.5))
|
||||
await asyncio.sleep(t)
|
||||
|
||||
def _log_app_versions(self):
|
||||
"""Log application versions at startup
|
||||
|
||||
Logs the jupyterhub version and the versions of the single-user server base packages (jupyterlab, jupyter_server, notebook).
|
||||
"""
|
||||
self.log.info(f"Starting jupyterhub single-user server version {__version__}")
|
||||
|
||||
# don't log these package versions
|
||||
seen = {"jupyterhub", "traitlets", "jupyter_core", "builtins"}
|
||||
|
||||
for cls in self.__class__.mro():
|
||||
module_name = cls.__module__.partition(".")[0]
|
||||
if module_name not in seen:
|
||||
seen.add(module_name)
|
||||
try:
|
||||
mod = import_module(module_name)
|
||||
mod_version = getattr(mod, "__version__")
|
||||
except Exception:
|
||||
mod_version = ""
|
||||
self.log.info(
|
||||
f"Extending {cls.__module__}.{cls.__name__} from {module_name} {mod_version}"
|
||||
)
|
||||
|
||||
def initialize(self, argv=None):
|
||||
# disable trash by default
|
||||
# this can be re-enabled by config
|
||||
self.config.FileContentsManager.delete_to_trash = False
|
||||
self._log_app_versions()
|
||||
return super().initialize(argv)
|
||||
|
||||
def start(self):
|
||||
@@ -679,7 +655,6 @@ class SingleUserNotebookAppMixin(Configurable):
|
||||
s['hub_prefix'] = self.hub_prefix
|
||||
s['hub_host'] = self.hub_host
|
||||
s['hub_auth'] = self.hub_auth
|
||||
s['page_config_hook'] = self.page_config_hook
|
||||
csp_report_uri = s['csp_report_uri'] = self.hub_host + url_path_join(
|
||||
self.hub_prefix, 'security/csp-report'
|
||||
)
|
||||
@@ -707,18 +682,6 @@ class SingleUserNotebookAppMixin(Configurable):
|
||||
self.patch_default_headers()
|
||||
self.patch_templates()
|
||||
|
||||
def page_config_hook(self, handler, page_config):
|
||||
"""JupyterLab page config hook
|
||||
|
||||
Adds JupyterHub info to page config.
|
||||
|
||||
Places the JupyterHub API token in PageConfig.token.
|
||||
|
||||
Only has effect on jupyterlab_server >=2.9
|
||||
"""
|
||||
page_config["token"] = self.hub_auth.get_token(handler) or ""
|
||||
return page_config
|
||||
|
||||
def patch_default_headers(self):
|
||||
if hasattr(RequestHandler, '_orig_set_default_headers'):
|
||||
return
|
||||
|
@@ -15,6 +15,7 @@ from unittest.mock import patch

import pytest
import traitlets
from distutils.version import LooseVersion as V
from traitlets.config import Config

from .. import orm
@@ -32,7 +33,7 @@ def test_help_all():
|
||||
assert '--JupyterHub.ip' in out
|
||||
|
||||
|
||||
@pytest.mark.skipif(traitlets.version_info < (5,), reason="requires traitlets 5")
|
||||
@pytest.mark.skipif(V(traitlets.__version__) < V('5'), reason="requires traitlets 5")
|
||||
def test_show_config(tmpdir):
|
||||
tmpdir.chdir()
|
||||
p = Popen(
|
||||
@@ -246,7 +247,6 @@ async def test_load_groups(tmpdir, request):
|
||||
kwargs['internal_certs_location'] = str(tmpdir)
|
||||
hub = MockHub(**kwargs)
|
||||
hub.init_db()
|
||||
await hub.init_role_creation()
|
||||
await hub.init_users()
|
||||
await hub.init_groups()
|
||||
db = hub.db