Merge branch 'master' into remove-unused-imports

This commit is contained in:
Erik Sundell
2020-10-01 12:07:37 +02:00
90 changed files with 2005 additions and 1110 deletions


@@ -19,6 +19,10 @@ jobs:
name: smoke test jupyterhub
command: |
docker run --rm -it jupyterhub/jupyterhub jupyterhub --help
- run:
name: verify static files
command: |
docker run --rm -it -v $PWD/dockerfiles:/io jupyterhub/jupyterhub python3 /io/test.py
docs:
# This is the base environment that Circle will use


@@ -1,33 +0,0 @@
---
name: "\U0001F41B Bug report"
about: Create a report to help us repair something that is currently broken
---
<!-- Thank you for contributing. These HTML comments will not render in the issue, but you can delete them once you've read them if you prefer! -->
### Bug description
<!-- Use this section to clearly and concisely describe the bug. -->
#### Expected behaviour
<!-- Tell us what you thought would happen. -->
#### Actual behaviour
<!-- Tell us what actually happens. Tip: running JupyterHub in `--debug` mode (`jupyterhub --debug`) can also be helpful for troubleshooting. -->
### How to reproduce
<!-- Use this section to describe the steps that a user would take to experience this bug. -->
1. Go to '...'
2. Click on '....'
3. Scroll down to '....'
4. See error
### Your personal set up
<!-- Tell us a little about the system you're using. -->
- OS:
<!-- [e.g. linux, OSX] -->
- Version:
<!-- e.g. jupyterhub --version. --->
- Configuration:
<!-- Be careful not to share any sensitive information. --->


@@ -1,4 +0,0 @@
contact_links:
- name: "\U0001F914 All other questions, including if you're not sure what to do."
url: https://discourse.jupyter.org
about: Search on Discourse for similar questions or ask for help there.


@@ -1,25 +0,0 @@
---
name: "\U0001F680 Feature request"
about: Suggest a new feature or a big change to JupyterHub
---
<!-- Thank you for contributing. These HTML comments will not render in the issue, but you can delete them once you've read them if you prefer! -->
### Proposed change
<!-- Use this section to describe the feature you'd like to be added. -->
### Alternative options
<!-- Use this section to describe alternative options and why you've decided on the proposed feature above. -->
### Who would use this feature?
<!-- Describe the audience for this feature. This information will affect who chooses to work on the feature with you. -->
### How much effort will adding it take?
<!-- Try to estimate how much work adding this feature will require. This information will affect who chooses to work on the feature with you. -->
### Who can do this work?
<!-- What skills are needed? Who can be recruited to add this feature? This information will affect who chooses to work on the feature with you. -->

.gitignore vendored

@@ -27,3 +27,4 @@ htmlcov
.vscode/
.pytest_cache
pip-wheel-metadata
docs/source/reference/metrics.rst


@@ -3,7 +3,7 @@ repos:
rev: v1.9.0
hooks:
- id: reorder-python-imports
- repo: https://github.com/ambv/black
- repo: https://github.com/psf/black
rev: 19.10b0
hooks:
- id: black


@@ -37,6 +37,15 @@ before_install:
install:
- pip install --upgrade pip
- pip install --upgrade --pre -r dev-requirements.txt .
- |
if [[ "$MASTER_DEPENDENCIES" == "True" ]]; then
pip install git+https://github.com/ipython/traitlets#egg=traitlets --force
fi
- |
if [[ "$TEST" == "jupyter_server" ]]; then
pip uninstall notebook --yes
pip install jupyter_server
fi
- pip freeze
# run tests
@@ -87,8 +96,25 @@ jobs:
- PGPASSWORD=hub[test/:?
# The password in url below is url-encoded with: urllib.parse.quote($PGPASSWORD, safe='')
- JUPYTERHUB_TEST_DB_URL=postgresql://jupyterhub:hub%5Btest%2F%3A%3F@127.0.0.1/jupyterhub
- name: python:3.8 + master dependencies
python: 3.8
env:
- PGUSER=jupyterhub
- PGPASSWORD=hub[test/:?
# The password in url below is url-encoded with: urllib.parse.quote($PGPASSWORD, safe='')
- JUPYTERHUB_TEST_DB_URL=postgresql://jupyterhub:hub%5Btest%2F%3A%3F@127.0.0.1/jupyterhub
- MASTER_DEPENDENCIES=True
- name: python:3.8 + jupyter_server
python: 3.8
env:
- TEST=jupyter_server
- JUPYTERHUB_SINGLEUSER_APP=jupyterhub.tests.mockserverapp.MockServerApp
- name: python:nightly
python: nightly
allow_failures:
- name: python:nightly
# https://github.com/jupyterhub/jupyterhub/issues/3141
# The latest traitlets is close to release so it should not fail
# - name: python:3.8 + master dependencies
fast_finish: true


@@ -66,7 +66,7 @@ pre-commit run
which should run any autoformatting on your code
and tell you about any errors it couldn't fix automatically.
You may also install [black integration](https://github.com/ambv/black#editor-integration)
You may also install [black integration](https://github.com/psf/black#editor-integration)
into your text editor to format code automatically.
If you have already committed files before setting up the pre-commit


@@ -21,8 +21,7 @@
# your jupyterhub_config.py will be added automatically
# from your docker directory.
# https://github.com/tianon/docker-brew-ubuntu-core/commit/d4313e13366d24a97bd178db4450f63e221803f1
ARG BASE_IMAGE=ubuntu:bionic-20191029@sha256:6e9f67fa63b0323e9a1e587fd71c561ba48a034504fb804fd26fd8800039835d
ARG BASE_IMAGE=ubuntu:focal-20200729@sha256:6f2fb2f9fb5582f8b587837afd6ea8f37d8d1d9e41168c90f410a6ef15fa8ce5
FROM $BASE_IMAGE AS builder
USER root
@@ -41,16 +40,18 @@ RUN apt-get update \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/*
RUN python3 -m pip install --upgrade setuptools pip wheel
# copy everything except whats in .dockerignore, its a
# compromise between needing to rebuild and maintaining
# what needs to be part of the build
COPY . /src/jupyterhub/
COPY jupyterhub/ /src/jupyterhub/jupyterhub
COPY share/ /src/jupyterhub/share
WORKDIR /src/jupyterhub
RUN python3 -m pip install --upgrade setuptools pip wheel
RUN python3 -m pip wheel -v --wheel-dir wheelhouse .
# Build client component packages (they will be copied into ./share and
# packaged with the built wheel.)
RUN python3 setup.py bdist_wheel
RUN python3 -m pip wheel --wheel-dir wheelhouse dist/*.whl
FROM $BASE_IMAGE
@@ -87,7 +88,6 @@ RUN npm install -g configurable-http-proxy@^4.2.0 \
# install the wheels we built in the first stage
COPY --from=builder /src/jupyterhub/wheelhouse /tmp/wheelhouse
COPY --from=builder /src/jupyterhub/share /src/jupyterhub/share
RUN python3 -m pip install --no-cache /tmp/wheelhouse/*
RUN mkdir -p /srv/jupyterhub/


@@ -74,6 +74,7 @@ for administration of the Hub and its users.
The `nodejs-legacy` package installs the `node` executable and is currently
required for npm to work on Debian/Ubuntu.
- If using the default PAM Authenticator, a [pluggable authentication module (PAM)](https://en.wikipedia.org/wiki/Pluggable_authentication_module).
- TLS certificate and key for HTTPS communication
- Domain name

demo-image/Dockerfile Normal file

@@ -0,0 +1,16 @@
# Demo JupyterHub Docker image
#
# This should only be used for demo or testing and not as a base image to build on.
#
# It includes the notebook package and it uses the DummyAuthenticator and the SimpleLocalProcessSpawner.
ARG BASE_IMAGE=jupyterhub/jupyterhub-onbuild
FROM ${BASE_IMAGE}
# Install the notebook package
RUN python3 -m pip install notebook
# Create a demo user
RUN useradd --create-home demo
RUN chown demo .
USER demo

demo-image/README.md Normal file

@@ -0,0 +1,25 @@
## Demo Dockerfile
This is a demo JupyterHub Docker image to help you get a quick overview of what
JupyterHub is and how it works.
It uses the SimpleLocalProcessSpawner to spawn new user servers and
DummyAuthenticator for authentication.
The DummyAuthenticator allows you to log in with any username & password and the
SimpleLocalProcessSpawner allows starting servers without having to create a
local user for each JupyterHub user.
### Important!
This should only be used for demo or testing purposes!
It shouldn't be used as a base image to build on.
### Try it
1. `cd` to the root of your jupyterhub repo.
2. Build the demo image with `docker build -t jupyterhub-demo demo-image`.
3. Run the demo image with `docker run -d -p 8000:8000 jupyterhub-demo`.
4. Visit http://localhost:8000 and login with any username and password
5. Happy demo-ing :tada:!


@@ -0,0 +1,7 @@
# Configuration file for jupyterhub-demo
c = get_config()
# Use DummyAuthenticator and SimpleSpawner
c.JupyterHub.spawner_class = "simple"
c.JupyterHub.authenticator_class = "dummy"

dockerfiles/test.py Normal file

@@ -0,0 +1,9 @@
import os
from jupyterhub._data import DATA_FILES_PATH
print(f"DATA_FILES_PATH={DATA_FILES_PATH}")
for sub_path in ("templates", "static/components", "static/css/style.min.css"):
path = os.path.join(DATA_FILES_PATH, sub_path)
assert os.path.exists(path), path


@@ -48,6 +48,7 @@ help:
@echo " doctest to run all doctests embedded in the documentation (if enabled)" @echo " doctest to run all doctests embedded in the documentation (if enabled)"
@echo " coverage to run coverage check of the documentation (if enabled)" @echo " coverage to run coverage check of the documentation (if enabled)"
@echo " spelling to run spell check on documentation" @echo " spelling to run spell check on documentation"
@echo " metrics to generate documentation for metrics by inspecting the source code"
clean:
rm -rf $(BUILDDIR)/*
@@ -60,7 +61,12 @@ rest-api: source/_static/rest-api/index.html
source/_static/rest-api/index.html: rest-api.yml node_modules
npm run rest-api
html: rest-api
metrics: source/reference/metrics.rst
source/reference/metrics.rst: generate-metrics.py
python3 generate-metrics.py
html: rest-api metrics
$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
@echo
@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."

docs/generate-metrics.py Normal file

@@ -0,0 +1,57 @@
import os
from os.path import join
from pytablewriter import RstSimpleTableWriter
from pytablewriter.style import Style
import jupyterhub.metrics
HERE = os.path.abspath(os.path.dirname(__file__))
class Generator:
@classmethod
def create_writer(cls, table_name, headers, values):
writer = RstSimpleTableWriter()
writer.table_name = table_name
writer.headers = headers
writer.value_matrix = values
writer.margin = 1
[writer.set_style(header, Style(align="center")) for header in headers]
return writer
def _parse_metrics(self):
table_rows = []
for name in dir(jupyterhub.metrics):
obj = getattr(jupyterhub.metrics, name)
if obj.__class__.__module__.startswith('prometheus_client.'):
for metric in obj.describe():
table_rows.append([metric.type, metric.name, metric.documentation])
return table_rows
def prometheus_metrics(self):
generated_directory = f"{HERE}/source/reference"
if not os.path.exists(generated_directory):
os.makedirs(generated_directory)
filename = f"{generated_directory}/metrics.rst"
table_name = ""
headers = ["Type", "Name", "Description"]
values = self._parse_metrics()
writer = self.create_writer(table_name, headers, values)
title = "List of Prometheus Metrics"
underline = "============================"
content = f"{title}\n{underline}\n{writer.dumps()}"
with open(filename, 'w') as f:
f.write(content)
print(f"Generated {filename}.")
def main():
doc_generator = Generator()
doc_generator.prometheus_metrics()
if __name__ == "__main__":
main()


@@ -1,8 +1,11 @@
-r ../requirements.txt
alabaster_jupyterhub
autodoc-traits
# Temporary fix of #3021. Revert back to released autodoc-traits when
# 0.1.0 released.
https://github.com/jupyterhub/autodoc-traits/archive/75885ee24636efbfebfceed1043459715049cd84.zip
pydata-sphinx-theme
pytablewriter>=0.56
recommonmark>=0.6
sphinx-copybutton
sphinx-jsonschema


@@ -3,7 +3,7 @@ swagger: '2.0'
info:
title: JupyterHub
description: The REST API for JupyterHub
version: 0.9.0dev
version: 1.2.0dev
license:
name: BSD-3-Clause
schemes:
@@ -283,7 +283,10 @@ paths:
required: true
type: string
- name: server_name
description: name given to a named-server
description: |
name given to a named-server.
Note that depending on your JupyterHub infrastructure there are character size limitations on `server_name`. The default spawner with a K8s pod will not allow Jupyter Notebooks to be spawned with a name that contains more than 253 characters (keep in mind that the pod will be spawned with extra characters to identify the user and hub).
in: path
required: true
type: string
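
For illustration only (the hub URL, username, server name, and token below are placeholders, and the `requests` package is assumed), starting a named server through this endpoint looks like:

```python
# Illustrative sketch: start a named server via the REST API. All values
# below (URL, user, server name, token) are placeholders.
import requests

api_url = "http://127.0.0.1:8081/hub/api"
r = requests.post(
    api_url + "/users/somebody/servers/my-server",
    headers={"Authorization": "token <api-token>"},
)
r.raise_for_status()
```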


@@ -1,106 +1,4 @@
/* Added to avoid logo being too squeezed */
.navbar-brand {
height: 4rem !important;
div#helm-chart-schema h2,
div#helm-chart-schema h3,
div#helm-chart-schema h4,
div#helm-chart-schema h5,
div#helm-chart-schema h6 {
font-family: courier new;
}
h3, h3 ~ * {
margin-left: 3% !important;
}
h4, h4 ~ * {
margin-left: 6% !important;
}
h5, h5 ~ * {
margin-left: 9% !important;
}
h6, h6 ~ * {
margin-left: 12% !important;
}
h7, h7 ~ * {
margin-left: 15% !important;
}
img.logo {
width:100%
}
.right-next {
float: right;
max-width: 45%;
overflow: auto;
text-overflow: ellipsis;
white-space: nowrap;
}
.right-next::after{
content: ' »';
}
.left-prev {
float: left;
max-width: 45%;
overflow: auto;
text-overflow: ellipsis;
white-space: nowrap;
}
.left-prev::before{
content: '« ';
}
.prev-next-bottom {
margin-top: 3em;
}
.prev-next-top {
margin-bottom: 1em;
}
/* Sidebar TOC and headers */
div.sphinxsidebarwrapper div {
margin-bottom: .8em;
}
div.sphinxsidebar h3 {
font-size: 1.3em;
padding-top: 0px;
font-weight: 800;
margin-left: 0px !important;
}
div.sphinxsidebar p.caption {
font-size: 1.2em;
margin-bottom: 0px;
margin-left: 0px !important;
font-weight: 900;
color: #767676;
}
div.sphinxsidebar ul {
font-size: .8em;
margin-top: 0px;
padding-left: 3%;
margin-left: 0px !important;
}
div.relations ul {
font-size: 1em;
margin-left: 0px !important;
}
div#searchbox form {
margin-left: 0px !important;
}
/* body elements */
.toctree-wrapper span.caption-text {
color: #767676;
font-style: italic;
font-weight: 300;
}


@@ -1,16 +0,0 @@
{# Custom template for navigation.html
alabaster theme does not provide blocks for titles to
be overridden so this custom theme handles title and
toctree for sidebar
#}
<h3>{{ _('Table of Contents') }}</h3>
{{ toctree(includehidden=theme_sidebar_includehidden, collapse=theme_sidebar_collapse) }}
{% if theme_extra_nav_links %}
<hr />
<ul>
{% for text, uri in theme_extra_nav_links.items() %}
<li class="toctree-l1"><a href="{{ uri }}">{{ text }}</a></li>
{% endfor %}
</ul>
{% endif %}


@@ -1,17 +0,0 @@
{# Custom template for relations.html
alabaster theme does not provide previous/next page by default
#}
<div class="relations">
<h3>Navigation</h3>
<ul>
<li><a href="{{ pathto(master_doc) }}">Documentation Home</a><ul>
{%- if prev %}
<li><a href="{{ prev.link|e }}" title="Previous">Previous topic</a></li>
{%- endif %}
{%- if next %}
<li><a href="{{ next.link|e }}" title="Next">Next topic</a></li>
{%- endif %}
</ul>
</ul>
</div>


@@ -7,6 +7,8 @@ command line for details.
## [Unreleased]
## 1.1
### [1.1.0] 2020-01-17
@@ -116,7 +118,7 @@ Thanks to everyone who has contributed to this release!
- Log JupyterHub version on startup [#2752](https://github.com/jupyterhub/jupyterhub/pull/2752) ([@consideRatio](https://github.com/consideRatio))
- Reduce verbosity for "Failing suspected API request to not-running server" (new) [#2751](https://github.com/jupyterhub/jupyterhub/pull/2751) ([@rkdarst](https://github.com/rkdarst))
- Add missing package for json schema doc build [#2744](https://github.com/jupyterhub/jupyterhub/pull/2744) ([@willingc](https://github.com/willingc))
- blacklist urllib3 versions with encoding bug [#2743](https://github.com/jupyterhub/jupyterhub/pull/2743) ([@minrk](https://github.com/minrk))
- block urllib3 versions with encoding bug [#2743](https://github.com/jupyterhub/jupyterhub/pull/2743) ([@minrk](https://github.com/minrk))
- Remove tornado deprecated/unnecessary AsyncIOMainLoop().install() call [#2740](https://github.com/jupyterhub/jupyterhub/pull/2740) ([@kinow](https://github.com/kinow))
- Fix deprecated call [#2739](https://github.com/jupyterhub/jupyterhub/pull/2739) ([@kinow](https://github.com/kinow))
- Remove duplicate hub and authenticator traitlets from Spawner [#2736](https://github.com/jupyterhub/jupyterhub/pull/2736) ([@eslavich](https://github.com/eslavich))
@@ -231,8 +233,8 @@ whether it was through discussion, testing, documentation, or development.
This hook may transform the return value of `Authenticator.authenticate()`
and return a new authentication dictionary,
e.g. specifying admin privileges, group membership,
or custom white/blacklisting logic.
or custom allowed/blocked logic.
This hook is called *after* existing normalization and whitelist checking.
This hook is called *after* existing normalization and allowed-username checking.
- `Spawner.options_from_form` may now be async
- Added `JupyterHub.shutdown_on_logout` option to trigger shutdown of a user's
servers when they log out.
@@ -418,7 +420,7 @@ and tornado < 5.0.
launching an IPython session connected to your JupyterHub database.
- Include `User.auth_state` in user model on single-user REST endpoints for admins only.
- Include `Server.state` in server model on REST endpoints for admins only.
- Add `Authenticator.blacklist` for blacklisting users instead of whitelisting.
- Add `Authenticator.blacklist` for blocking users instead of allowing.
- Pass `c.JupyterHub.tornado_settings['cookie_options']` down to Spawners
so that cookie options (e.g. `expires_days`) can be set globally for the whole application.
- SIGINFO (`ctrl-t`) handler showing the current status of all running threads,


@@ -19,10 +19,9 @@ extensions = [
'autodoc_traits',
'sphinx_copybutton',
'sphinx-jsonschema',
'recommonmark',
]
templates_path = ['_templates']
# The master toctree document.
master_doc = 'index'
@@ -58,15 +57,67 @@ default_role = 'literal'
import recommonmark
from recommonmark.transform import AutoStructify
# -- Config -------------------------------------------------------------
from jupyterhub.app import JupyterHub
from docutils import nodes
from sphinx.directives.other import SphinxDirective
from contextlib import redirect_stdout
from io import StringIO
# create a temp instance of JupyterHub just to get the output of the generate-config
# and help --all commands.
jupyterhub_app = JupyterHub()
class ConfigDirective(SphinxDirective):
"""Generate the configuration file output for use in the documentation."""
has_content = False
required_arguments = 0
optional_arguments = 0
final_argument_whitespace = False
option_spec = {}
def run(self):
# The generated configuration file for this version
generated_config = jupyterhub_app.generate_config_file()
# post-process output
home_dir = os.environ['HOME']
generated_config = generated_config.replace(home_dir, '$HOME', 1)
par = nodes.literal_block(text=generated_config)
return [par]
class HelpAllDirective(SphinxDirective):
"""Print the output of jupyterhub help --all for use in the documentation."""
has_content = False
required_arguments = 0
optional_arguments = 0
final_argument_whitespace = False
option_spec = {}
def run(self):
# The output of the help command for this version
buffer = StringIO()
with redirect_stdout(buffer):
jupyterhub_app.print_help('--help-all')
all_help = buffer.getvalue()
# post-process output
home_dir = os.environ['HOME']
all_help = all_help.replace(home_dir, '$HOME', 1)
par = nodes.literal_block(text=all_help)
return [par]
def setup(app):
app.add_config_value('recommonmark_config', {'enable_eval_rst': True}, True)
app.add_stylesheet('custom.css')
app.add_css_file('custom.css')
app.add_transform(AutoStructify)
app.add_directive('jupyterhub-generate-config', ConfigDirective)
app.add_directive('jupyterhub-help-all', HelpAllDirective)
source_parsers = {'.md': 'recommonmark.parser.CommonMarkParser'}
source_suffix = ['.rst', '.md']
# source_encoding = 'utf-8-sig'
@@ -165,10 +216,10 @@ intersphinx_mapping = {'https://docs.python.org/3/': None}
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
if on_rtd:
# readthedocs.org uses their theme by default, so no need to specify it
# build rest-api, since RTD doesn't run make
# build both metrics and rest-api, since RTD doesn't run make
from subprocess import check_call as sh
sh(['make', 'rest-api'], cwd=docs)
sh(['make', 'metrics', 'rest-api'], cwd=docs)
# -- Spell checking -------------------------------------------------------


@@ -83,7 +83,6 @@ these will be moved at a future review of the roadmap.
- (prometheus?) API for resource monitoring
- tracking activity on single-user servers instead of the proxy
- notes and activity tracking per API token
- UI for managing named servers
### Later


@@ -4,23 +4,23 @@ The default Authenticator uses [PAM][] to authenticate system users with
their username and password. With the default Authenticator, any user
with an account and password on the system will be allowed to login.
## Create a whitelist of users
## Create a set of allowed users
You can restrict which users are allowed to login with a whitelist,
You can restrict which users are allowed to login with a set,
`Authenticator.whitelist`:
`Authenticator.allowed_users`:
```python
c.Authenticator.whitelist = {'mal', 'zoe', 'inara', 'kaylee'}
c.Authenticator.allowed_users = {'mal', 'zoe', 'inara', 'kaylee'}
```
Users in the whitelist are added to the Hub database when the Hub is
Users in the `allowed_users` set are added to the Hub database when the Hub is
started.
## Configure admins (`admin_users`)
Admin users of JupyterHub, `admin_users`, can add and remove users from
the user `whitelist`. `admin_users` can take actions on other users'
the user `allowed_users` set. `admin_users` can take actions on other users'
behalf, such as stopping and restarting their servers.
A set of initial admin users, `admin_users`, can be configured as follows:
@@ -28,7 +28,7 @@ A set of initial admin users, `admin_users` can configured be as follows:
```python
c.Authenticator.admin_users = {'mal', 'zoe'}
```
Users in the admin list are automatically added to the user `whitelist`,
Users in the admin set are automatically added to the user `allowed_users` set,
if they are not already present.
Each authenticator may have different ways of determining whether a user is an
@@ -53,12 +53,12 @@ sure your users know if admin_access is enabled.**
Users can be added to and removed from the Hub via either the admin
panel or the REST API. When a user is **added**, the user will be
automatically added to the whitelist and database. Restarting the Hub
automatically added to the allowed users set and database. Restarting the Hub
will not require manually updating the whitelist in your config file,
will not require manually updating the allowed users set in your config file,
as the users will be loaded from the database.
After starting the Hub once, it is not sufficient to **remove** a user
from the whitelist in your config file. You must also remove the user
from the allowed users set in your config file. You must also remove the user
from the Hub's database, either by deleting the user from JupyterHub's
admin page, or you can clear the `jupyterhub.sqlite` database and start
fresh.
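
For illustration only (placeholder hub URL, username, and token; the `requests` package is assumed), removing a user through the REST API looks like this:

```python
# Illustrative sketch: remove a user via the REST API. The URL, username,
# and token are placeholders; the admin page or database cleanup work as well.
import requests

api_url = "http://127.0.0.1:8081/hub/api"
r = requests.delete(
    api_url + "/users/somebody",
    headers={"Authorization": "token <admin-api-token>"},
)
r.raise_for_status()
```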


@@ -7,7 +7,7 @@ It can be used in a class of students, a corporate data science group or scienti
research group. It is a multi-user **Hub** that spawns, manages, and proxies multiple
instances of the single-user `Jupyter notebook`_ server.
To make life easier, JupyterHub have distributions. Be sure to
To make life easier, JupyterHub has distributions. Be sure to
take a look at them before continuing with the configuration of the broad
original system of `JupyterHub`_. Today, you can find two main cases:


@@ -26,6 +26,10 @@ Before installing JupyterHub, you will need:
The `nodejs-legacy` package installs the `node` executable and is currently
required for npm to work on Debian/Ubuntu.
- A [pluggable authentication module (PAM)](https://en.wikipedia.org/wiki/Pluggable_authentication_module)
to use the [default Authenticator](./getting-started/authenticators-users-basics.md).
PAM is often available by default on most distributions; if this is not the case, it can be installed
using the operating system's package manager.
using the operating system's package manager.
- TLS certificate and key for HTTPS communication
- Domain name


@@ -52,7 +52,7 @@ c.GitHubOAuthenticator.oauth_callback_url = os.environ['OAUTH_CALLBACK_URL']
c.LocalAuthenticator.create_system_users = True
# specify users and admin
c.Authenticator.whitelist = {'rgbkrk', 'minrk', 'jhamrick'}
c.Authenticator.allowed_users = {'rgbkrk', 'minrk', 'jhamrick'}
c.Authenticator.admin_users = {'jhamrick', 'rgbkrk'}
# uses the default spawner


@@ -83,8 +83,11 @@ server {
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
# websocket headers
proxy_http_version 1.1;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection $connection_upgrade;
proxy_buffering off;
}
# Managing requests to verify letsencrypt host
@@ -139,6 +142,20 @@ Now restart `nginx`, restart the JupyterHub, and enjoy accessing
`https://HUB.DOMAIN.TLD` while serving other content securely on
`https://NO_HUB.DOMAIN.TLD`.
### SELinux permissions for nginx
On distributions with SELinux enabled (e.g. Fedora), one may encounter permission errors
when the nginx service is started.
We need to allow nginx to perform network relay and connect to the jupyterhub port. The
following commands do that:
```bash
semanage port -a -t http_port_t -p tcp 8000
setsebool -P httpd_can_network_relay 1
setsebool -P httpd_can_network_connect 1
```
Replace 8000 with the port your JupyterHub server is running on.
## Apache
@@ -199,8 +216,8 @@ In case of the need to run the jupyterhub under /jhub/ or other location please
httpd.conf amendments:
```bash
RewriteRule /jhub/(.*) ws://127.0.0.1:8000/jhub/$1 [P,L]
RewriteRule /jhub/(.*) ws://127.0.0.1:8000/jhub/$1 [NE,P,L]
RewriteRule /jhub/(.*) http://127.0.0.1:8000/jhub/$1 [P,L]
RewriteRule /jhub/(.*) http://127.0.0.1:8000/jhub/$1 [NE,P,L]
ProxyPass /jhub/ http://127.0.0.1:8000/jhub/
ProxyPassReverse /jhub/ http://127.0.0.1:8000/jhub/


@@ -0,0 +1,30 @@
==============================
Configuration Reference
==============================
.. important::
Make sure the version of JupyterHub for this documentation matches your
installation version, as the output of this command may change between versions.
JupyterHub configuration
------------------------
As explained in the `Configuration Basics <../getting-started/config-basics.html#generate-a-default-config-file>`_
section, the ``jupyterhub_config.py`` can be automatically generated via
.. code-block:: bash
jupyterhub --generate-config
The following contains the output of that command for reference.
.. jupyterhub-generate-config::
JupyterHub help command output
------------------------------
This section contains the output of the command ``jupyterhub --help-all``.
.. jupyterhub-help-all::


@@ -57,7 +57,7 @@ To do this we add to `/etc/sudoers` (use `visudo` for safe editing of sudoers):
For example:
```bash
# comma-separated whitelist of users that can spawn single-user servers
# comma-separated list of users that can spawn single-user servers
# this should include all of your Hub users
Runas_Alias JUPYTER_USERS = rhea, zoe, wash


@@ -16,6 +16,7 @@ what happens under-the-hood when you deploy and configure your JupyterHub.
proxy
separate-proxy
rest
monitoring
database
templates
../events/index
@@ -24,3 +25,4 @@ what happens under-the-hood when you deploy and configure your JupyterHub.
config-ghoauth
config-proxy
config-sudo
config-reference


@@ -0,0 +1,20 @@
Monitoring
==========
This section covers details on monitoring the state of your JupyterHub installation.
JupyterHub exposes the ``/metrics`` endpoint, which returns text describing its current
operational state formatted in a way `Prometheus <https://prometheus.io/docs/introduction/overview/>`_ understands.
Prometheus is a separate open source tool that can be configured to repeatedly poll
JupyterHub's ``/metrics`` endpoint and parse and save its current state.
By doing so, Prometheus can describe JupyterHub's evolving state over time.
This evolving state can then be accessed through Prometheus, which exposes its underlying
storage to those allowed to access it, and presented in dashboards by a
tool like `Grafana <https://grafana.com/docs/grafana/latest/getting-started/what-is-grafana/>`_.
.. toctree::
:maxdepth: 2
metrics
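
As a rough illustration of the polling described above (not part of JupyterHub itself),
the following sketch fetches the metrics endpoint and parses the Prometheus text format.
The URL and token are placeholders, and the ``requests`` and ``prometheus_client``
packages are assumed to be installed.

.. code-block:: python

   # Illustrative sketch: poll JupyterHub's Prometheus endpoint and print each
   # sample. With the default base URL the endpoint is served at /hub/metrics;
   # depending on configuration it may require an API token.
   import requests
   from prometheus_client.parser import text_string_to_metric_families

   resp = requests.get(
       "http://localhost:8000/hub/metrics",
       headers={"Authorization": "token <your-api-token>"},
   )
   resp.raise_for_status()

   for family in text_string_to_metric_families(resp.text):
       for sample in family.samples:
           print(sample.name, sample.labels, sample.value)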


@@ -57,6 +57,9 @@ generating an API token is available from the JupyterHub user interface:
## Add API tokens to the config file
**This is deprecated. We are in no rush to remove this feature,
but please consider if service tokens are right for you.**
You may also add a dictionary of API tokens and usernames to the hub's
configuration file, `jupyterhub_config.py` (note that
the **key** is the 'secret-token' while the **value** is the 'username'):
@@ -67,6 +70,41 @@ c.JupyterHub.api_tokens = {
}
```
### Updating to admin services
The `api_tokens` configuration has been softly deprecated since the introduction of services.
We have no plans to remove it,
but users are encouraged to use service configuration instead.
If you have been using `api_tokens` to create an admin user
and a token for that user to perform some automations,
the services mechanism may be a better fit.
If you have the following configuration:
```python
c.JupyterHub.admin_users = {"service-admin",}
c.JupyterHub.api_tokens = {
"secret-token": "service-admin",
}
```
This can be updated to create an admin service, with the following configuration:
```python
c.JupyterHub.services = [
{
"name": "service-token",
"admin": True,
"api_token": "secret-token",
},
]
```
The token will have the same admin permissions,
but there will no longer be a user account created to house it.
The main noticeable difference is that there will be no notebook server associated with the account
and the service will not show up in the various user list pages and APIs.
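
As a rough sketch (the hub URL and token below are placeholders, and the `requests` package is assumed), such a service token can be used against the Hub's REST API like this:

```python
# Illustrative sketch: use the admin service's token against the Hub REST API.
# "secret-token" and the API URL are placeholders for your own values.
import requests

api_url = "http://127.0.0.1:8081/hub/api"
r = requests.get(
    api_url + "/users",
    headers={"Authorization": "token secret-token"},
)
r.raise_for_status()
print(r.json())
```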
## Make an API request
To authenticate your requests, pass the API token in the request's To authenticate your requests, pass the API token in the request's


@@ -151,6 +151,8 @@ c.JupyterHub.services = [
{
'name': 'my-web-service',
'url': 'https://10.0.1.1:1984',
# any secret >8 characters, you'll use api_token to
# authenticate api requests to the hub from your service
'api_token': 'super-secret',
}
]
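
As an aside (not part of this diff), one simple way to generate a random value for `api_token`:

```python
# Generate a random hex string suitable for use as a service api_token.
import secrets

print(secrets.token_hex(32))
```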
@@ -313,7 +315,7 @@ class MyHandler(HubAuthenticated, web.RequestHandler):
The HubAuth will automatically load the desired configuration from the Service
environment variables.
If you want to limit user access, you can whitelist users through either the
If you want to limit user access, you can specify allowed users through either the
`.hub_users` attribute or `.hub_groups`. These are sets that check against the
username and user group list, respectively. If a user matches neither the user
list nor the group list, they will not be allowed access. If both are left
@@ -331,7 +333,9 @@ and taking note of the following process:
1. retrieve the cookie `jupyterhub-services` from the request.
2. Make an API request `GET /hub/api/authorizations/cookie/jupyterhub-services/cookie-value`,
where cookie-value is the url-encoded value of the `jupyterhub-services` cookie.
This request must be authenticated with a Hub API token in the `Authorization` header.
This request must be authenticated with a Hub API token in the `Authorization` header,
for example using the `api_token` from your [external service's configuration](#externally-managed-services).
For example, with [requests][]:
```python ```python


@@ -27,8 +27,8 @@ Some examples include:
servers using batch systems
- [YarnSpawner](https://github.com/jupyterhub/yarnspawner) for spawning notebook
servers in YARN containers on a Hadoop cluster
- [RemoteSpawner](https://github.com/zonca/remotespawner) to spawn notebooks
and a remote server and tunnel the port via SSH
- [SSHSpawner](https://github.com/NERSC/sshspawner) to spawn notebooks
on a remote server using SSH
## Spawner control methods


@@ -7,8 +7,8 @@ problem and how to resolve it.
[*Behavior*](#behavior)
- JupyterHub proxy fails to start
- sudospawner fails to run
- What is the default behavior when none of the lists (admin, whitelist,
group whitelist) are set?
- What is the default behavior when none of the lists (admin, allowed,
allowed groups) are set?
- JupyterHub Docker container not accessible at localhost
[*Errors*](#errors)
@@ -55,14 +55,14 @@ or add:
to the config file, `jupyterhub_config.py`.
### What is the default behavior when none of the lists (admin, whitelist, group whitelist) are set?
### What is the default behavior when none of the lists (admin, allowed, allowed groups) are set?
When nothing is given for these lists, there will be no admins, and all users
who can authenticate on the system (i.e. all the unix users on the server with
a password) will be allowed to start a server. The whitelist lets you limit
a password) will be allowed to start a server. The allowed username set lets you limit
this to a particular set of users, and the admin_users lets you specify who
this to a particular set of users, and admin_users lets you specify who
among them may use the admin interface (not necessary, unless you need to do
things like inspect other users' servers, or modify the userlist at runtime).
things like inspect other users' servers, or modify the user list at runtime).
### JupyterHub Docker container not accessible at localhost
@@ -152,7 +152,7 @@ You should see a similar 200 message, as above, in the Hub log when you first
visit your single-user notebook server. If you don't see this message in the log, it
may mean that your single-user notebook server isn't connecting to your Hub.
If you see 403 (forbidden) like this, it's a token problem:
If you see 403 (forbidden) like this, it's likely a token problem:
```
403 GET /hub/api/authorizations/cookie/jupyterhub-token-name/[secret] (@10.0.1.4) 4.14ms
@@ -196,6 +196,10 @@ After this, when you start your server via JupyterHub, it will build a
new container. If this was the underlying cause of the issue, you should see
your server again.
##### Proxy settings (403 GET)
When your whole JupyterHub sits behind an organization proxy (*not* a reverse proxy like NGINX as part of your setup and *not* the configurable-http-proxy), the environment variables `HTTP_PROXY`, `HTTPS_PROXY`, `http_proxy` and `https_proxy` might be set. This confuses the jupyterhub-singleuser servers: when connecting to the Hub for authorization they connect via the proxy instead of directly to the Hub on localhost. The proxy might deny the request (403 GET), which makes the single-user server think it has a wrong auth token. To circumvent this, add `<hub_url>,<hub_ip>,localhost,127.0.0.1` to the environment variables `NO_PROXY` and `no_proxy`.
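
One way to apply this from `jupyterhub_config.py` is sketched below; the hostnames are placeholders for your own Hub address.

```python
# Illustrative sketch: pass proxy-bypass variables to spawned single-user
# servers. Replace the placeholders with your Hub's URL and IP.
c.Spawner.environment = {
    "NO_PROXY": "localhost,127.0.0.1,<hub_url>,<hub_ip>",
    "no_proxy": "localhost,127.0.0.1,<hub_url>,<hub_ip>",
}
```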
### Launching Jupyter Notebooks to run as an externally managed JupyterHub service with the `jupyterhub-singleuser` command returns a `JUPYTERHUB_API_TOKEN` error
[JupyterHub services](https://jupyterhub.readthedocs.io/en/stable/reference/services.html) allow processes to interact with JupyterHub's REST API. Example use-cases include:
@@ -328,8 +332,7 @@ notebook servers to default to JupyterLab:
### How do I set up JupyterHub for a workshop (when users are not known ahead of time)?
1. Set up JupyterHub using OAuthenticator for GitHub authentication
2. Configure whitelist to be an empty list in` jupyterhub_config.py`
2. Configure admin list to have workshop leaders be listed with administrator privileges.
3. Configure admin list to have workshop leaders be listed with administrator privileges.
Users will need a GitHub account to login and be authenticated by the Hub.


@@ -1,41 +1,4 @@
# idle-culler example
The idle culler has been moved to its own repository at
[jupyterhub/jupyterhub-idle-culler](https://github.com/jupyterhub/jupyterhub-idle-culler).
# `cull-idle` Example
The `cull_idle_servers.py` file provides a script to cull and shut down idle
single-user notebook servers. This script is used when `cull-idle` is run as
a Service or when it is run manually as a standalone script.
## Configure `cull-idle` to run as a Hub-Managed Service
In `jupyterhub_config.py`, add the following dictionary for the `cull-idle`
Service to the `c.JupyterHub.services` list:
```python
c.JupyterHub.services = [
{
'name': 'cull-idle',
'admin': True,
'command': [sys.executable, 'cull_idle_servers.py', '--timeout=3600'],
}
]
```
where:
- `'admin': True` indicates that the Service has 'admin' permissions, and
- `'command'` indicates that the Service will be managed by the Hub.
## Run `cull-idle` manually as a standalone script
This will run `cull-idle` manually. `cull-idle` can be run as a standalone
script anywhere with access to the Hub, and will periodically check for idle
servers and shut them down via the Hub's REST API. In order to shutdown the
servers, the token given to cull-idle must have admin privileges.
Generate an API token and store it in the `JUPYTERHUB_API_TOKEN` environment
variable. Run `cull_idle_servers.py` manually.
```bash
export JUPYTERHUB_API_TOKEN=$(jupyterhub token)
python3 cull_idle_servers.py [--timeout=900] [--url=http://127.0.0.1:8081/hub/api]
```
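
For readers migrating from the removed example, a rough sketch of registering the replacement as a hub-managed service is shown below; it assumes `pip install jupyterhub-idle-culler`, and the timeout is an example value.

```python
# Illustrative sketch: run jupyterhub-idle-culler as a hub-managed service.
# Requires `pip install jupyterhub-idle-culler`; the timeout is an example value.
import sys

c.JupyterHub.services = [
    {
        "name": "idle-culler",
        "admin": True,
        "command": [
            sys.executable,
            "-m",
            "jupyterhub_idle_culler",
            "--timeout=3600",
        ],
    }
]
```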


@@ -1,401 +0,0 @@
#!/usr/bin/env python3
"""script to monitor and cull idle single-user servers
Caveats:
last_activity is not updated with high frequency,
so cull timeout should be greater than the sum of:
- single-user websocket ping interval (default: 30s)
- JupyterHub.last_activity_interval (default: 5 minutes)
You can run this as a service managed by JupyterHub with this in your config::
c.JupyterHub.services = [
{
'name': 'cull-idle',
'admin': True,
'command': [sys.executable, 'cull_idle_servers.py', '--timeout=3600'],
}
]
Or run it manually by generating an API token and storing it in `JUPYTERHUB_API_TOKEN`:
export JUPYTERHUB_API_TOKEN=$(jupyterhub token)
python3 cull_idle_servers.py [--timeout=900] [--url=http://127.0.0.1:8081/hub/api]
This script uses the same ``--timeout`` and ``--max-age`` values for
culling users and users' servers. If you want a different value for
users and servers, you should add this script to the services list
twice, just with different ``name``s, different values, and one with
the ``--cull-users`` option.
"""
import json
import os
from datetime import datetime
from datetime import timezone
from functools import partial
try:
from urllib.parse import quote
except ImportError:
from urllib import quote
import dateutil.parser
from tornado.gen import coroutine, multi
from tornado.locks import Semaphore
from tornado.log import app_log
from tornado.httpclient import AsyncHTTPClient, HTTPRequest
from tornado.ioloop import IOLoop, PeriodicCallback
from tornado.options import define, options, parse_command_line
def parse_date(date_string):
"""Parse a timestamp
If it doesn't have a timezone, assume utc
Returned datetime object will always be timezone-aware
"""
dt = dateutil.parser.parse(date_string)
if not dt.tzinfo:
# assume naive timestamps are UTC
dt = dt.replace(tzinfo=timezone.utc)
return dt
def format_td(td):
"""
Nicely format a timedelta object
as HH:MM:SS
"""
if td is None:
return "unknown"
if isinstance(td, str):
return td
seconds = int(td.total_seconds())
h = seconds // 3600
seconds = seconds % 3600
m = seconds // 60
seconds = seconds % 60
return "{h:02}:{m:02}:{seconds:02}".format(h=h, m=m, seconds=seconds)
@coroutine
def cull_idle(
url, api_token, inactive_limit, cull_users=False, max_age=0, concurrency=10
):
"""Shutdown idle single-user servers
If cull_users, inactive *users* will be deleted as well.
"""
auth_header = {'Authorization': 'token %s' % api_token}
req = HTTPRequest(url=url + '/users', headers=auth_header)
now = datetime.now(timezone.utc)
client = AsyncHTTPClient()
if concurrency:
semaphore = Semaphore(concurrency)
@coroutine
def fetch(req):
"""client.fetch wrapped in a semaphore to limit concurrency"""
yield semaphore.acquire()
try:
return (yield client.fetch(req))
finally:
yield semaphore.release()
else:
fetch = client.fetch
resp = yield fetch(req)
users = json.loads(resp.body.decode('utf8', 'replace'))
futures = []
@coroutine
def handle_server(user, server_name, server, max_age, inactive_limit):
"""Handle (maybe) culling a single server
"server" is the entire server model from the API.
Returns True if server is now stopped (user removable),
False otherwise.
"""
log_name = user['name']
if server_name:
log_name = '%s/%s' % (user['name'], server_name)
if server.get('pending'):
app_log.warning(
"Not culling server %s with pending %s", log_name, server['pending']
)
return False
# jupyterhub < 0.9 defined 'server.url' once the server was ready
# as an *implicit* signal that the server was ready.
# 0.9 adds a dedicated, explicit 'ready' field.
# By current (0.9) definitions, servers that have no pending
# events and are not ready shouldn't be in the model,
# but let's check just to be safe.
if not server.get('ready', bool(server['url'])):
app_log.warning(
"Not culling not-ready not-pending server %s: %s", log_name, server
)
return False
if server.get('started'):
age = now - parse_date(server['started'])
else:
# started may be undefined on jupyterhub < 0.9
age = None
# check last activity
# last_activity can be None in 0.9
if server['last_activity']:
inactive = now - parse_date(server['last_activity'])
else:
# no activity yet, use start date
# last_activity may be None with jupyterhub 0.9,
# which introduces the 'started' field which is never None
# for running servers
inactive = age
# CUSTOM CULLING TEST CODE HERE
# Add in additional server tests here. Return False to mean "don't
# cull", True means "cull immediately", or, for example, update some
# other variables like inactive_limit.
#
# Here, server['state'] is the result of the get_state method
# on the spawner. This does *not* contain the below by
# default, you may have to modify your spawner to make this
# work. The `user` variable is the user model from the API.
#
# if server['state']['profile_name'] == 'unlimited'
# return False
# inactive_limit = server['state']['culltime']
should_cull = (
inactive is not None and inactive.total_seconds() >= inactive_limit
)
if should_cull:
app_log.info(
"Culling server %s (inactive for %s)", log_name, format_td(inactive)
)
if max_age and not should_cull:
# only check started if max_age is specified
# so that we can still be compatible with jupyterhub 0.8
# which doesn't define the 'started' field
if age is not None and age.total_seconds() >= max_age:
app_log.info(
"Culling server %s (age: %s, inactive for %s)",
log_name,
format_td(age),
format_td(inactive),
)
should_cull = True
if not should_cull:
app_log.debug(
"Not culling server %s (age: %s, inactive for %s)",
log_name,
format_td(age),
format_td(inactive),
)
return False
if server_name:
# culling a named server
delete_url = url + "/users/%s/servers/%s" % (
quote(user['name']),
quote(server['name']),
)
else:
delete_url = url + '/users/%s/server' % quote(user['name'])
req = HTTPRequest(url=delete_url, method='DELETE', headers=auth_header)
resp = yield fetch(req)
if resp.code == 202:
app_log.warning("Server %s is slow to stop", log_name)
# return False to prevent culling user with pending shutdowns
return False
return True
@coroutine
def handle_user(user):
"""Handle one user.
Create a list of their servers, and async exec them. Wait for
that to be done, and if all servers are stopped, possibly cull
the user.
"""
# shutdown servers first.
# Hub doesn't allow deleting users with running servers.
# jupyterhub 0.9 always provides a 'servers' model.
# 0.8 only does this when named servers are enabled.
if 'servers' in user:
servers = user['servers']
else:
# jupyterhub < 0.9 without named servers enabled.
# create servers dict with one entry for the default server
# from the user model.
# only if the server is running.
servers = {}
if user['server']:
servers[''] = {
'last_activity': user['last_activity'],
'pending': user['pending'],
'url': user['server'],
}
server_futures = [
handle_server(user, server_name, server, max_age, inactive_limit)
for server_name, server in servers.items()
]
results = yield multi(server_futures)
if not cull_users:
return
# some servers are still running, cannot cull users
still_alive = len(results) - sum(results)
if still_alive:
app_log.debug(
"Not culling user %s with %i servers still alive",
user['name'],
still_alive,
)
return False
should_cull = False
if user.get('created'):
age = now - parse_date(user['created'])
else:
# created may be undefined on jupyterhub < 0.9
age = None
# check last activity
# last_activity can be None in 0.9
if user['last_activity']:
inactive = now - parse_date(user['last_activity'])
else:
# no activity yet, use start date
# last_activity may be None with jupyterhub 0.9,
# which introduces the 'created' field which is never None
inactive = age
should_cull = (
inactive is not None and inactive.total_seconds() >= inactive_limit
)
if should_cull:
app_log.info("Culling user %s (inactive for %s)", user['name'], inactive)
if max_age and not should_cull:
# only check created if max_age is specified
# so that we can still be compatible with jupyterhub 0.8
# which doesn't define the 'started' field
if age is not None and age.total_seconds() >= max_age:
app_log.info(
"Culling user %s (age: %s, inactive for %s)",
user['name'],
format_td(age),
format_td(inactive),
)
should_cull = True
if not should_cull:
app_log.debug(
"Not culling user %s (created: %s, last active: %s)",
user['name'],
format_td(age),
format_td(inactive),
)
return False
req = HTTPRequest(
url=url + '/users/%s' % user['name'], method='DELETE', headers=auth_header
)
yield fetch(req)
return True
for user in users:
futures.append((user['name'], handle_user(user)))
for (name, f) in futures:
try:
result = yield f
except Exception:
app_log.exception("Error processing %s", name)
else:
if result:
app_log.debug("Finished culling %s", name)
if __name__ == '__main__':
define(
'url',
default=os.environ.get('JUPYTERHUB_API_URL'),
help="The JupyterHub API URL",
)
define('timeout', default=600, help="The idle timeout (in seconds)")
define(
'cull_every',
default=0,
help="The interval (in seconds) for checking for idle servers to cull",
)
define(
'max_age',
default=0,
help="The maximum age (in seconds) of servers that should be culled even if they are active",
)
define(
'cull_users',
default=False,
help="""Cull users in addition to servers.
This is for use in temporary-user cases such as tmpnb.""",
)
define(
'concurrency',
default=10,
help="""Limit the number of concurrent requests made to the Hub.
Deleting a lot of users at the same time can slow down the Hub,
so limit the number of API requests we have outstanding at any given time.
""",
)
parse_command_line()
if not options.cull_every:
options.cull_every = options.timeout // 2
api_token = os.environ['JUPYTERHUB_API_TOKEN']
try:
AsyncHTTPClient.configure("tornado.curl_httpclient.CurlAsyncHTTPClient")
except ImportError as e:
app_log.warning(
"Could not load pycurl: %s\n"
"pycurl is recommended if you have a large number of users.",
e,
)
loop = IOLoop.current()
cull = partial(
cull_idle,
url=options.url,
api_token=api_token,
inactive_limit=options.timeout,
cull_users=options.cull_users,
max_age=options.max_age,
concurrency=options.concurrency,
)
# schedule first cull immediately
# because PeriodicCallback doesn't start until the end of the first interval
loop.add_callback(cull)
# schedule periodic cull
pc = PeriodicCallback(cull, 1e3 * options.cull_every)
pc.start()
try:
loop.start()
except KeyboardInterrupt:
pass
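For reference, a hedged sketch of registering this script as a JupyterHub managed service using the flags defined above; the service name, script path, and flag values are illustrative only.

# jupyterhub_config.py -- illustrative values
import sys

c.JupyterHub.services = [
    {
        'name': 'cull-idle',
        'admin': True,  # the culler needs admin rights to list and delete users/servers
        'command': [
            sys.executable,
            '/srv/jupyterhub/cull_idle_servers.py',  # hypothetical path to this script
            '--timeout=3600',      # cull servers idle for an hour
            '--max_age=28800',     # cull servers older than 8 hours even if still active
            '--cull_users=True',   # also delete the users (temporary-user deployments such as tmpnb)
        ],
    }
]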

View File

@@ -1,11 +0,0 @@
import sys
# run cull-idle as a service
c.JupyterHub.services = [
{
'name': 'cull-idle',
'admin': True,
'command': [sys.executable, 'cull_idle_servers.py', '--timeout=3600'],
}
]

View File

@@ -1,4 +1,7 @@
#!/bin/bash #!/bin/bash
set -exuo pipefail set -exuo pipefail
# build jupyterhub-onbuild image
docker build --build-arg BASE_IMAGE=$DOCKER_REPO:$DOCKER_TAG -t ${DOCKER_REPO}-onbuild:$DOCKER_TAG onbuild docker build --build-arg BASE_IMAGE=$DOCKER_REPO:$DOCKER_TAG -t ${DOCKER_REPO}-onbuild:$DOCKER_TAG onbuild
# build jupyterhub-demo image
docker build --build-arg BASE_IMAGE=${DOCKER_REPO}-onbuild:$DOCKER_TAG -t ${DOCKER_REPO}-demo:$DOCKER_TAG demo-image

View File

@@ -2,8 +2,11 @@
set -exuo pipefail set -exuo pipefail
export ONBUILD=${DOCKER_REPO}-onbuild export ONBUILD=${DOCKER_REPO}-onbuild
export DEMO=${DOCKER_REPO}-demo
export REPOS="${DOCKER_REPO} ${ONBUILD} ${DEMO}"
# push ONBUILD image # push ONBUILD image
docker push $ONBUILD:$DOCKER_TAG docker push $ONBUILD:$DOCKER_TAG
docker push $DEMO:$DOCKER_TAG
function get_hub_version() { function get_hub_version() {
rm -f hub_version rm -f hub_version
@@ -20,25 +23,20 @@ function get_hub_version() {
fi fi
} }
get_hub_version get_hub_version
# when building master, push 0.9.0.dev as well for repo in ${REPOS}; do
docker tag $DOCKER_REPO:$DOCKER_TAG $DOCKER_REPO:$hub_xyz # when building master, push 0.9.0.dev as well
docker push $DOCKER_REPO:$hub_xyz docker tag $repo:$DOCKER_TAG $repo:$hub_xyz
docker tag $ONBUILD:$DOCKER_TAG $ONBUILD:$hub_xyz docker push $repo:$hub_xyz
docker push $ONBUILD:$hub_xyz
# when building 0.9.x, push 0.9 as well # when building 0.9.x, push 0.9 as well
docker tag $DOCKER_REPO:$DOCKER_TAG $DOCKER_REPO:$hub_xy docker tag $repo:$DOCKER_TAG $repo:$hub_xy
docker push $DOCKER_REPO:$hub_xy docker push $repo:$hub_xy
docker tag $ONBUILD:$DOCKER_TAG $ONBUILD:$hub_xy
docker push $ONBUILD:$hub_xyz
# if building a stable release, tag latest as well # if building a stable release, tag latest as well
if [[ "$latest" == "1" ]]; then if [[ "$latest" == "1" ]]; then
docker tag $DOCKER_REPO:$DOCKER_TAG $DOCKER_REPO:latest docker tag $repo:$DOCKER_TAG $repo:latest
docker push $DOCKER_REPO:latest docker push $repo:latest
docker tag $ONBUILD:$DOCKER_TAG $ONBUILD:latest fi
docker push $ONBUILD:latest done
fi

View File

@@ -18,6 +18,15 @@ version_info = (
__version__ = ".".join(map(str, version_info[:3])) + ".".join(version_info[3:]) __version__ = ".".join(map(str, version_info[:3])) + ".".join(version_info[3:])
# Module-level cache so the major/minor mismatch warning is only logged once per mismatch combo.
_version_mismatch_warning_logged = {}
def reset_globals():
"""Used to reset globals between test cases."""
global _version_mismatch_warning_logged
_version_mismatch_warning_logged = {}
def _check_version(hub_version, singleuser_version, log): def _check_version(hub_version, singleuser_version, log):
"""Compare Hub and single-user server versions""" """Compare Hub and single-user server versions"""
@@ -42,19 +51,27 @@ def _check_version(hub_version, singleuser_version, log):
hub_major_minor = V(hub_version).version[:2] hub_major_minor = V(hub_version).version[:2]
singleuser_major_minor = V(singleuser_version).version[:2] singleuser_major_minor = V(singleuser_version).version[:2]
extra = "" extra = ""
do_log = True
if singleuser_major_minor == hub_major_minor: if singleuser_major_minor == hub_major_minor:
# patch-level mismatch or lower, log difference at debug-level # patch-level mismatch or lower, log difference at debug-level
# because this should be fine # because this should be fine
log_method = log.debug log_method = log.debug
else: else:
# log warning-level for more significant mismatch, such as 0.8 vs 0.9, etc. # log warning-level for more significant mismatch, such as 0.8 vs 0.9, etc.
log_method = log.warning key = '%s-%s' % (hub_version, singleuser_version)
extra = " This could cause failure to authenticate and result in redirect loops!" global _version_mismatch_warning_logged
log_method( if _version_mismatch_warning_logged.get(key):
"jupyterhub version %s != jupyterhub-singleuser version %s." + extra, do_log = False # We already logged this warning so don't log it again.
hub_version, else:
singleuser_version, log_method = log.warning
) extra = " This could cause failure to authenticate and result in redirect loops!"
_version_mismatch_warning_logged[key] = True
if do_log:
log_method(
"jupyterhub version %s != jupyterhub-singleuser version %s." + extra,
hub_version,
singleuser_version,
)
else: else:
log.debug( log.debug(
"jupyterhub and jupyterhub-singleuser both on version %s" % hub_version "jupyterhub and jupyterhub-singleuser both on version %s" % hub_version

View File

@@ -201,7 +201,7 @@ class OAuthAuthorizeHandler(OAuthHandler, BaseHandler):
def needs_oauth_confirm(self, user, oauth_client): def needs_oauth_confirm(self, user, oauth_client):
"""Return whether the given oauth client needs to prompt for access for the given user """Return whether the given oauth client needs to prompt for access for the given user
Checks whitelist for oauth clients Checks list for oauth clients that don't need confirmation
(i.e. the user's own server) (i.e. the user's own server)
@@ -214,9 +214,8 @@ class OAuthAuthorizeHandler(OAuthHandler, BaseHandler):
if ( if (
# it's the user's own server # it's the user's own server
oauth_client.identifier in own_oauth_client_ids oauth_client.identifier in own_oauth_client_ids
# or it's in the global whitelist # or it's in the global no-confirm list
or oauth_client.identifier or oauth_client.identifier in self.settings.get('oauth_no_confirm', set())
in self.settings.get('oauth_no_confirm_whitelist', set())
): ):
return False return False
# default: require confirmation # default: require confirmation
@@ -229,7 +228,7 @@ class OAuthAuthorizeHandler(OAuthHandler, BaseHandler):
Render oauth confirmation page: Render oauth confirmation page:
"Server at ... would like permission to ...". "Server at ... would like permission to ...".
Users accessing their own server or a service whitelist Users accessing their own server or a blessed service
will skip confirmation. will skip confirmation.
""" """

View File

@@ -23,6 +23,7 @@ def service_model(service):
'command': service.command, 'command': service.command,
'pid': service.proc.pid if service.proc else 0, 'pid': service.proc.pid if service.proc else 0,
'info': service.info, 'info': service.info,
'display': service.display,
} }

View File

@@ -55,6 +55,7 @@ from traitlets import (
Instance, Instance,
Bytes, Bytes,
Float, Float,
Union,
observe, observe,
default, default,
validate, validate,
@@ -561,10 +562,23 @@ class JupyterHub(Application):
def _url_part_changed(self, change): def _url_part_changed(self, change):
"""propagate deprecated ip/port/base_url config to the bind_url""" """propagate deprecated ip/port/base_url config to the bind_url"""
urlinfo = urlparse(self.bind_url) urlinfo = urlparse(self.bind_url)
urlinfo = urlinfo._replace(netloc='%s:%i' % (self.ip, self.port)) if ':' in self.ip:
fmt = '[%s]:%i'
else:
fmt = '%s:%i'
urlinfo = urlinfo._replace(netloc=fmt % (self.ip, self.port))
urlinfo = urlinfo._replace(path=self.base_url) urlinfo = urlinfo._replace(path=self.base_url)
bind_url = urlunparse(urlinfo) bind_url = urlunparse(urlinfo)
# Warn if both bind_url and ip/port/base_url are set
if bind_url != self.bind_url: if bind_url != self.bind_url:
if self.bind_url != self._bind_url_default():
self.log.warning(
"Both bind_url and ip/port/base_url have been configured. "
"JupyterHub.ip, JupyterHub.port, JupyterHub.base_url are"
" deprecated in JupyterHub 0.9,"
" please use JupyterHub.bind_url instead."
)
self.bind_url = bind_url self.bind_url = bind_url
bind_url = Unicode( bind_url = Unicode(
@@ -727,10 +741,10 @@ class JupyterHub(Application):
help="""The ip or hostname for proxies and spawners to use help="""The ip or hostname for proxies and spawners to use
for connecting to the Hub. for connecting to the Hub.
Use when the bind address (`hub_ip`) is 0.0.0.0 or otherwise different Use when the bind address (`hub_ip`) is 0.0.0.0, :: or otherwise different
from the connect address. from the connect address.
Default: when `hub_ip` is 0.0.0.0, use `socket.gethostname()`, otherwise use `hub_ip`. Default: when `hub_ip` is 0.0.0.0 or ::, use `socket.gethostname()`, otherwise use `hub_ip`.
Note: Some spawners or proxy implementations might not support hostnames. Check your Note: Some spawners or proxy implementations might not support hostnames. Check your
spawner or proxy documentation to see if they have extra requirements. spawner or proxy documentation to see if they have extra requirements.
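A hedged config sketch tying these changes together: the single `bind_url` setting (now IPv6-aware thanks to the bracket formatting above) replaces the deprecated ip/port/base_url trio, and `hub_connect_ip` covers the all-interfaces case. Addresses and paths are illustrative.

# jupyterhub_config.py -- illustrative addresses
c.JupyterHub.bind_url = 'http://[::]:8000/jhub/'   # listen on all IPv6 interfaces under /jhub/

# Deprecated since 0.9; combining these with bind_url now triggers the warning added above:
# c.JupyterHub.ip = '::'
# c.JupyterHub.port = 8000
# c.JupyterHub.base_url = '/jhub/'

c.JupyterHub.hub_connect_ip = 'hub.internal.example'  # hypothetical hostname spawners should use to reach the Hub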
@@ -1301,12 +1315,25 @@ class JupyterHub(Application):
""" """
).tag(config=True) ).tag(config=True)
default_url = Unicode( default_url = Union(
[Unicode(), Callable()],
help=""" help="""
The default URL for users when they arrive (e.g. when user directs to "/") The default URL for users when they arrive (e.g. when user directs to "/")
By default, redirects users to their own server. By default, redirects users to their own server.
"""
Can be a Unicode string (e.g. '/hub/home') or a callable based on the handler object:
::
def default_url_fn(handler):
user = handler.current_user
if user and user.admin:
return '/hub/admin'
return '/hub/home'
c.JupyterHub.default_url = default_url_fn
""",
).tag(config=True) ).tag(config=True)
user_redirect_hook = Callable( user_redirect_hook = Callable(
@@ -1676,22 +1703,22 @@ class JupyterHub(Application):
# the admin_users config variable will never be used after this point. # the admin_users config variable will never be used after this point.
# only the database values will be referenced. # only the database values will be referenced.
whitelist = [ allowed_users = [
self.authenticator.normalize_username(name) self.authenticator.normalize_username(name)
for name in self.authenticator.whitelist for name in self.authenticator.allowed_users
] ]
self.authenticator.whitelist = set(whitelist) # force normalization self.authenticator.allowed_users = set(allowed_users) # force normalization
for username in whitelist: for username in allowed_users:
if not self.authenticator.validate_username(username): if not self.authenticator.validate_username(username):
raise ValueError("username %r is not valid" % username) raise ValueError("username %r is not valid" % username)
if not whitelist: if not allowed_users:
self.log.info( self.log.info(
"Not using whitelist. Any authenticated user will be allowed." "Not using allowed_users. Any authenticated user will be allowed."
) )
# add whitelisted users to the db # add allowed users to the db
for name in whitelist: for name in allowed_users:
user = orm.User.find(db, name) user = orm.User.find(db, name)
if user is None: if user is None:
user = orm.User(name=name) user = orm.User(name=name)
@@ -1701,9 +1728,9 @@ class JupyterHub(Application):
db.commit() db.commit()
# Notify authenticator of all users. # Notify authenticator of all users.
# This ensures Auth whitelist is up-to-date with the database. # This ensures Authenticator.allowed_users is up-to-date with the database.
# This lets whitelist be used to set up initial list, # This lets .allowed_users be used to set up initial list,
# but changes to the whitelist can occur in the database, # but changes to the allowed_users set can occur in the database,
# and persist across sessions. # and persist across sessions.
total_users = 0 total_users = 0
for user in db.query(orm.User): for user in db.query(orm.User):
@@ -1740,9 +1767,9 @@ class JupyterHub(Application):
user.created = user.last_activity or datetime.utcnow() user.created = user.last_activity or datetime.utcnow()
db.commit() db.commit()
# The whitelist set and the users in the db are now the same. # The allowed_users set and the users in the db are now the same.
# From this point on, any user changes should be done simultaneously # From this point on, any user changes should be done simultaneously
# to the whitelist set and user db, unless the whitelist is empty (all users allowed). # to the allowed_users set and user db, unless the allowed set is empty (all users allowed).
TOTAL_USERS.set(total_users) TOTAL_USERS.set(total_users)
@@ -1757,11 +1784,11 @@ class JupyterHub(Application):
for username in usernames: for username in usernames:
username = self.authenticator.normalize_username(username) username = self.authenticator.normalize_username(username)
if not ( if not (
await maybe_future( await maybe_future(self.authenticator.check_allowed(username, None))
self.authenticator.check_whitelist(username, None)
)
): ):
raise ValueError("Username %r is not in whitelist" % username) raise ValueError(
"Username %r is not in Authenticator.allowed_users" % username
)
user = orm.User.find(db, name=username) user = orm.User.find(db, name=username)
if user is None: if user is None:
if not self.authenticator.validate_username(username): if not self.authenticator.validate_username(username):
@@ -1785,11 +1812,14 @@ class JupyterHub(Application):
if kind == 'user': if kind == 'user':
name = self.authenticator.normalize_username(name) name = self.authenticator.normalize_username(name)
if not ( if not (
await maybe_future(self.authenticator.check_whitelist(name, None)) await maybe_future(self.authenticator.check_allowed(name, None))
): ):
raise ValueError("Token name %r is not in whitelist" % name) raise ValueError(
"Token user name %r is not in Authenticator.allowed_users"
% name
)
if not self.authenticator.validate_username(name): if not self.authenticator.validate_username(name):
raise ValueError("Token name %r is not valid" % name) raise ValueError("Token user name %r is not valid" % name)
if kind == 'service': if kind == 'service':
if not any(service["name"] == name for service in self.services): if not any(service["name"] == name for service in self.services):
self.log.warning( self.log.warning(
@@ -1828,17 +1858,27 @@ class JupyterHub(Application):
# purge expired tokens hourly # purge expired tokens hourly
purge_expired_tokens_interval = 3600 purge_expired_tokens_interval = 3600
def purge_expired_tokens(self):
"""purge all expiring token objects from the database
run periodically
"""
# this should be all the subclasses of Expiring
for cls in (orm.APIToken, orm.OAuthAccessToken, orm.OAuthCode):
self.log.debug("Purging expired {name}s".format(name=cls.__name__))
cls.purge_expired(self.db)
async def init_api_tokens(self): async def init_api_tokens(self):
"""Load predefined API tokens (for services) into database""" """Load predefined API tokens (for services) into database"""
await self._add_tokens(self.service_tokens, kind='service') await self._add_tokens(self.service_tokens, kind='service')
await self._add_tokens(self.api_tokens, kind='user') await self._add_tokens(self.api_tokens, kind='user')
purge_expired_tokens = partial(orm.APIToken.purge_expired, self.db)
purge_expired_tokens() self.purge_expired_tokens()
# purge expired tokens hourly # purge expired tokens hourly
# we don't need to be prompt about this # we don't need to be prompt about this
# because expired tokens cannot be used anyway # because expired tokens cannot be used anyway
pc = PeriodicCallback( pc = PeriodicCallback(
purge_expired_tokens, 1e3 * self.purge_expired_tokens_interval self.purge_expired_tokens, 1e3 * self.purge_expired_tokens_interval
) )
pc.start() pc.start()
@@ -2170,14 +2210,14 @@ class JupyterHub(Application):
else: else:
version_hash = datetime.now().strftime("%Y%m%d%H%M%S") version_hash = datetime.now().strftime("%Y%m%d%H%M%S")
oauth_no_confirm_whitelist = set() oauth_no_confirm_list = set()
for service in self._service_map.values(): for service in self._service_map.values():
if service.oauth_no_confirm: if service.oauth_no_confirm:
self.log.warning( self.log.warning(
"Allowing service %s to complete OAuth without confirmation on an authorization web page", "Allowing service %s to complete OAuth without confirmation on an authorization web page",
service.name, service.name,
) )
oauth_no_confirm_whitelist.add(service.oauth_client_id) oauth_no_confirm_list.add(service.oauth_client_id)
settings = dict( settings = dict(
log_function=log_request, log_function=log_request,
@@ -2213,7 +2253,7 @@ class JupyterHub(Application):
default_server_name=self._default_server_name, default_server_name=self._default_server_name,
named_server_limit_per_user=self.named_server_limit_per_user, named_server_limit_per_user=self.named_server_limit_per_user,
oauth_provider=self.oauth_provider, oauth_provider=self.oauth_provider,
oauth_no_confirm_whitelist=oauth_no_confirm_whitelist, oauth_no_confirm_list=oauth_no_confirm_list,
concurrent_spawn_limit=self.concurrent_spawn_limit, concurrent_spawn_limit=self.concurrent_spawn_limit,
spawn_throttle_retry_range=self.spawn_throttle_retry_range, spawn_throttle_retry_range=self.spawn_throttle_retry_range,
active_server_limit=self.active_server_limit, active_server_limit=self.active_server_limit,
@@ -2347,7 +2387,6 @@ class JupyterHub(Application):
if init_spawners_timeout < 0: if init_spawners_timeout < 0:
# negative timeout means forever (previous, most stable behavior) # negative timeout means forever (previous, most stable behavior)
init_spawners_timeout = 86400 init_spawners_timeout = 86400
print(init_spawners_timeout)
init_start_time = time.perf_counter() init_start_time = time.perf_counter()
init_spawners_future = asyncio.ensure_future(self.init_spawners()) init_spawners_future = asyncio.ensure_future(self.init_spawners())
@@ -2716,6 +2755,40 @@ class JupyterHub(Application):
self.log.critical("Received signalnum %s, , initiating shutdown...", signum) self.log.critical("Received signalnum %s, , initiating shutdown...", signum)
raise SystemExit(128 + signum) raise SystemExit(128 + signum)
def _init_asyncio_patch(self):
"""Set default asyncio policy to be compatible with Tornado.
Tornado 6 (at least) is not compatible with the default
asyncio implementation on Windows.
Pick the older SelectorEventLoopPolicy on Windows
if the known-incompatible default policy is in use.
Do this as early as possible to make it a low priority and overrideable.
ref: https://github.com/tornadoweb/tornado/issues/2608
FIXME: If/when tornado supports the defaults in asyncio,
remove and bump tornado requirement for py38.
"""
if sys.platform.startswith("win") and sys.version_info >= (3, 8):
try:
from asyncio import (
WindowsProactorEventLoopPolicy,
WindowsSelectorEventLoopPolicy,
)
except ImportError:
pass
# not affected
else:
if (
type(asyncio.get_event_loop_policy())
is WindowsProactorEventLoopPolicy
):
# WindowsProactorEventLoopPolicy is not compatible with Tornado 6.
# Fallback to the pre-3.8 default of WindowsSelectorEventLoopPolicy.
asyncio.set_event_loop_policy(WindowsSelectorEventLoopPolicy())
_atexit_ran = False _atexit_ran = False
def atexit(self): def atexit(self):
@@ -2723,6 +2796,7 @@ class JupyterHub(Application):
if self._atexit_ran: if self._atexit_ran:
return return
self._atexit_ran = True self._atexit_ran = True
self._init_asyncio_patch()
# run the cleanup step (in a new loop, because the interrupted one is unclean) # run the cleanup step (in a new loop, because the interrupted one is unclean)
asyncio.set_event_loop(asyncio.new_event_loop()) asyncio.set_event_loop(asyncio.new_event_loop())
IOLoop.clear_current() IOLoop.clear_current()
@@ -2772,6 +2846,7 @@ class JupyterHub(Application):
@classmethod @classmethod
def launch_instance(cls, argv=None): def launch_instance(cls, argv=None):
self = cls.instance() self = cls.instance()
self._init_asyncio_patch()
loop = IOLoop.current() loop = IOLoop.current()
task = asyncio.ensure_future(self.launch_instance_async(argv)) task = asyncio.ensure_future(self.launch_instance_async(argv))
try: try:

View File

@@ -7,6 +7,7 @@ import re
import sys import sys
import warnings import warnings
from concurrent.futures import ThreadPoolExecutor from concurrent.futures import ThreadPoolExecutor
from functools import partial
from shutil import which from shutil import which
from subprocess import PIPE from subprocess import PIPE
from subprocess import Popen from subprocess import Popen
@@ -100,41 +101,74 @@ class Authenticator(LoggingConfigurable):
""" """
).tag(config=True) ).tag(config=True)
whitelist = Set( whitelist = Set(help="Deprecated, use `Authenticator.allowed_users`", config=True,)
allowed_users = Set(
help=""" help="""
Whitelist of usernames that are allowed to log in. Set of usernames that are allowed to log in.
Use this with supported authenticators to restrict which users can log in. This is an Use this with supported authenticators to restrict which users can log in. This is an
additional whitelist that further restricts users, beyond whatever restrictions the additional list that further restricts users, beyond whatever restrictions the
authenticator has in place. authenticator has in place.
If empty, does not perform any additional restriction. If empty, does not perform any additional restriction.
.. versionchanged:: 1.2
`Authenticator.whitelist` renamed to `allowed_users`
""" """
).tag(config=True) ).tag(config=True)
blacklist = Set( blocked_users = Set(
help=""" help="""
Blacklist of usernames that are not allowed to log in. Set of usernames that are not allowed to log in.
Use this with supported authenticators to restrict which users can not log in. This is an Use this with supported authenticators to restrict which users can not log in. This is an
additional blacklist that further restricts users, beyond whatever restrictions the additional block list that further restricts users, beyond whatever restrictions the
authenticator has in place. authenticator has in place.
If empty, does not perform any additional restriction. If empty, does not perform any additional restriction.
.. versionadded: 0.9 .. versionadded: 0.9
.. versionchanged:: 1.2
`Authenticator.blacklist` renamed to `blocked_users`
""" """
).tag(config=True) ).tag(config=True)
@observe('whitelist') _deprecated_aliases = {
def _check_whitelist(self, change): "whitelist": ("allowed_users", "1.2"),
"blacklist": ("blocked_users", "1.2"),
}
@observe(*list(_deprecated_aliases))
def _deprecated_trait(self, change):
"""observer for deprecated traits"""
old_attr = change.name
new_attr, version = self._deprecated_aliases.get(old_attr)
new_value = getattr(self, new_attr)
if new_value != change.new:
# only warn if different
# protects backward-compatible config from warnings
# if they set the same value under both names
self.log.warning(
"{cls}.{old} is deprecated in JupyterHub {version}, use {cls}.{new} instead".format(
cls=self.__class__.__name__,
old=old_attr,
new=new_attr,
version=version,
)
)
setattr(self, new_attr, change.new)
@observe('allowed_users')
def _check_allowed_users(self, change):
short_names = [name for name in change['new'] if len(name) <= 1] short_names = [name for name in change['new'] if len(name) <= 1]
if short_names: if short_names:
sorted_names = sorted(short_names) sorted_names = sorted(short_names)
single = ''.join(sorted_names) single = ''.join(sorted_names)
string_set_typo = "set('%s')" % single string_set_typo = "set('%s')" % single
self.log.warning( self.log.warning(
"whitelist contains single-character names: %s; did you mean set([%r]) instead of %s?", "Allowed set contains single-character names: %s; did you mean set([%r]) instead of %s?",
sorted_names[:8], sorted_names[:8],
single, single,
string_set_typo, string_set_typo,
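A hedged migration sketch for the renamed traits (usernames are placeholders); the old spellings still work but trigger the deprecation warning wired up above.

# jupyterhub_config.py
c.Authenticator.allowed_users = {'alice', 'bob'}   # was: c.Authenticator.whitelist
c.Authenticator.blocked_users = {'mallory'}        # was: c.Authenticator.blacklist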
@@ -206,6 +240,7 @@ class Authenticator(LoggingConfigurable):
delete_invalid_users = Bool( delete_invalid_users = Bool(
False, False,
config=True,
help="""Delete any users from the database that do not pass validation help="""Delete any users from the database that do not pass validation
When JupyterHub starts, `.add_user` will be called When JupyterHub starts, `.add_user` will be called
@@ -260,39 +295,74 @@ class Authenticator(LoggingConfigurable):
def __init__(self, **kwargs): def __init__(self, **kwargs):
super().__init__(**kwargs) super().__init__(**kwargs)
for method_name in ( self._init_deprecated_methods()
'check_whitelist',
'check_blacklist', def _init_deprecated_methods(self):
'check_group_whitelist', # handles deprecated signature *and* name
# with correct subclass override priority!
for old_name, new_name in (
('check_whitelist', 'check_allowed'),
('check_blacklist', 'check_blocked_users'),
('check_group_whitelist', 'check_allowed_groups'),
): ):
original_method = getattr(self, method_name, None) old_method = getattr(self, old_name, None)
if original_method is None: if old_method is None:
# no such method (check_group_whitelist is optional) # no such method (check_group_whitelist is optional)
continue continue
signature = inspect.signature(original_method)
if 'authentication' not in signature.parameters: # allow old name to have higher priority
# if and only if it's defined in a later subclass
# than the new name
for cls in self.__class__.mro():
has_old_name = old_name in cls.__dict__
has_new_name = new_name in cls.__dict__
if has_new_name:
break
if has_old_name and not has_new_name:
warnings.warn(
"{0}.{1} should be renamed to {0}.{2} for JupyterHub >= 1.2".format(
cls.__name__, old_name, new_name
),
DeprecationWarning,
)
# use old name instead of new
# if old name is overridden in subclass
def _new_calls_old(old_name, *args, **kwargs):
return getattr(self, old_name)(*args, **kwargs)
setattr(self, new_name, partial(_new_calls_old, old_name))
break
# deprecate pre-1.0 method signatures
signature = inspect.signature(old_method)
if 'authentication' not in signature.parameters and not any(
param.kind == inspect.Parameter.VAR_KEYWORD
for param in signature.parameters.values()
):
# adapt to pre-1.0 signature for compatibility # adapt to pre-1.0 signature for compatibility
warnings.warn( warnings.warn(
""" """
{0}.{1} does not support the authentication argument, {0}.{1} does not support the authentication argument,
added in JupyterHub 1.0. added in JupyterHub 1.0. and is renamed to {2} in JupyterHub 1.2.
It should have the signature: It should have the signature:
def {1}(self, username, authentication=None): def {2}(self, username, authentication=None):
... ...
Adapting for compatibility. Adapting for compatibility.
""".format( """.format(
self.__class__.__name__, method_name self.__class__.__name__, old_name, new_name
), ),
DeprecationWarning, DeprecationWarning,
) )
def wrapped_method(username, authentication=None, **kwargs): def wrapped_method(
original_method, username, authentication=None, **kwargs
):
return original_method(username, **kwargs) return original_method(username, **kwargs)
setattr(self, method_name, wrapped_method) setattr(self, old_name, partial(wrapped_method, old_method))
async def run_post_auth_hook(self, handler, authentication): async def run_post_auth_hook(self, handler, authentication):
""" """
@@ -326,39 +396,45 @@ class Authenticator(LoggingConfigurable):
username = self.username_map.get(username, username) username = self.username_map.get(username, username)
return username return username
def check_whitelist(self, username, authentication=None): def check_allowed(self, username, authentication=None):
"""Check if a username is allowed to authenticate based on whitelist configuration """Check if a username is allowed to authenticate based on configuration
Return True if username is allowed, False otherwise. Return True if username is allowed, False otherwise.
No whitelist means any username is allowed. No allowed_users set means any username is allowed.
Names are normalized *before* being checked against the whitelist. Names are normalized *before* being checked against the allowed set.
.. versionchanged:: 1.0 .. versionchanged:: 1.0
Signature updated to accept authentication data and any future changes Signature updated to accept authentication data and any future changes
"""
if not self.whitelist:
# No whitelist means any name is allowed
return True
return username in self.whitelist
def check_blacklist(self, username, authentication=None): .. versionchanged:: 1.2
"""Check if a username is blocked to authenticate based on blacklist configuration Renamed check_whitelist to check_allowed
"""
if not self.allowed_users:
# No allowed set means any name is allowed
return True
return username in self.allowed_users
def check_blocked_users(self, username, authentication=None):
"""Check if a username is blocked to authenticate based on Authenticator.blocked configuration
Return True if username is allowed, False otherwise. Return True if username is allowed, False otherwise.
No blacklist means any username is allowed. No block list means any username is allowed.
Names are normalized *before* being checked against the blacklist. Names are normalized *before* being checked against the block list.
.. versionadded: 0.9 .. versionadded: 0.9
.. versionchanged:: 1.0 .. versionchanged:: 1.0
Signature updated to accept authentication data as second argument Signature updated to accept authentication data as second argument
.. versionchanged:: 1.2
Renamed check_blacklist to check_blocked_users
""" """
if not self.blacklist: if not self.blocked_users:
# No blacklist means any name is allowed # No block list means any name is allowed
return True return True
return username not in self.blacklist return username not in self.blocked_users
async def get_authenticated_user(self, handler, data): async def get_authenticated_user(self, handler, data):
"""Authenticate the user who is attempting to log in """Authenticate the user who is attempting to log in
@@ -367,7 +443,7 @@ class Authenticator(LoggingConfigurable):
This calls `authenticate`, which should be overridden in subclasses, This calls `authenticate`, which should be overridden in subclasses,
normalizes the username if any normalization should be done, normalizes the username if any normalization should be done,
and then validates the name in the whitelist. and then validates the name in the allowed set.
This is the outer API for authenticating a user. This is the outer API for authenticating a user.
Subclasses should not override this method. Subclasses should not override this method.
@@ -375,7 +451,7 @@ class Authenticator(LoggingConfigurable):
The various stages can be overridden separately: The various stages can be overridden separately:
- `authenticate` turns formdata into a username - `authenticate` turns formdata into a username
- `normalize_username` normalizes the username - `normalize_username` normalizes the username
- `check_whitelist` checks against the user whitelist - `check_allowed` checks against the allowed usernames
.. versionchanged:: 0.8 .. versionchanged:: 0.8
return dict instead of username return dict instead of username
@@ -389,7 +465,7 @@ class Authenticator(LoggingConfigurable):
else: else:
authenticated = {'name': authenticated} authenticated = {'name': authenticated}
authenticated.setdefault('auth_state', None) authenticated.setdefault('auth_state', None)
# Leave the default as None, but reevaluate later post-whitelist # Leave the default as None, but reevaluate later post-allowed-check
authenticated.setdefault('admin', None) authenticated.setdefault('admin', None)
# normalize the username # normalize the username
@@ -400,20 +476,18 @@ class Authenticator(LoggingConfigurable):
self.log.warning("Disallowing invalid username %r.", username) self.log.warning("Disallowing invalid username %r.", username)
return return
blacklist_pass = await maybe_future( blocked_pass = await maybe_future(
self.check_blacklist(username, authenticated) self.check_blocked_users(username, authenticated)
)
whitelist_pass = await maybe_future(
self.check_whitelist(username, authenticated)
) )
allowed_pass = await maybe_future(self.check_allowed(username, authenticated))
if blacklist_pass: if blocked_pass:
pass pass
else: else:
self.log.warning("User %r in blacklist. Stop authentication", username) self.log.warning("User %r blocked. Stop authentication", username)
return return
if whitelist_pass: if allowed_pass:
if authenticated['admin'] is None: if authenticated['admin'] is None:
authenticated['admin'] = await maybe_future( authenticated['admin'] = await maybe_future(
self.is_admin(handler, authenticated) self.is_admin(handler, authenticated)
@@ -423,7 +497,7 @@ class Authenticator(LoggingConfigurable):
return authenticated return authenticated
else: else:
self.log.warning("User %r not in whitelist.", username) self.log.warning("User %r not allowed.", username)
return return
async def refresh_user(self, user, handler=None): async def refresh_user(self, user, handler=None):
@@ -479,7 +553,7 @@ class Authenticator(LoggingConfigurable):
It must return the username on successful authentication, It must return the username on successful authentication,
and return None on failed authentication. and return None on failed authentication.
Checking the whitelist is handled separately by the caller. Checking allowed_users/blocked_users is handled separately by the caller.
.. versionchanged:: 0.8 .. versionchanged:: 0.8
Allow `authenticate` to return a dict containing auth_state. Allow `authenticate` to return a dict containing auth_state.
@@ -520,10 +594,10 @@ class Authenticator(LoggingConfigurable):
This method may be a coroutine. This method may be a coroutine.
By default, this just adds the user to the whitelist. By default, this just adds the user to the allowed_users set.
Subclasses may do more extensive things, such as adding actual unix users, Subclasses may do more extensive things, such as adding actual unix users,
but they should call super to ensure the whitelist is updated. but they should call super to ensure the allowed_users set is updated.
Note that this should be idempotent, since it is called whenever the hub restarts Note that this should be idempotent, since it is called whenever the hub restarts
for all users. for all users.
@@ -533,19 +607,19 @@ class Authenticator(LoggingConfigurable):
""" """
if not self.validate_username(user.name): if not self.validate_username(user.name):
raise ValueError("Invalid username: %s" % user.name) raise ValueError("Invalid username: %s" % user.name)
if self.whitelist: if self.allowed_users:
self.whitelist.add(user.name) self.allowed_users.add(user.name)
def delete_user(self, user): def delete_user(self, user):
"""Hook called when a user is deleted """Hook called when a user is deleted
Removes the user from the whitelist. Removes the user from the allowed_users set.
Subclasses should call super to ensure the whitelist is updated. Subclasses should call super to ensure the allowed_users set is updated.
Args: Args:
user (User): The User wrapper object user (User): The User wrapper object
""" """
self.whitelist.discard(user.name) self.allowed_users.discard(user.name)
auto_login = Bool( auto_login = Bool(
False, False,
@@ -610,6 +684,41 @@ class Authenticator(LoggingConfigurable):
return [('/login', LoginHandler)] return [('/login', LoginHandler)]
def _deprecated_method(old_name, new_name, version):
"""Create a deprecated method wrapper for a deprecated method name"""
def deprecated(self, *args, **kwargs):
warnings.warn(
(
"{cls}.{old_name} is deprecated in JupyterHub {version}."
" Please use {cls}.{new_name} instead."
).format(
cls=self.__class__.__name__,
old_name=old_name,
new_name=new_name,
version=version,
),
DeprecationWarning,
stacklevel=2,
)
old_method = getattr(self, new_name)
return old_method(*args, **kwargs)
return deprecated
import types
# deprecate white/blacklist method names
for _old_name, _new_name, _version in [
("check_whitelist", "check_allowed", "1.2"),
("check_blacklist", "check_blocked_users", "1.2"),
]:
setattr(
Authenticator, _old_name, _deprecated_method(_old_name, _new_name, _version),
)
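A hedged sketch of a custom authenticator written against the renamed hooks; overriding the old names still works through the shims above but emits a DeprecationWarning. The class name and rule below are hypothetical.

from jupyterhub.auth import Authenticator


class TeamAuthenticator(Authenticator):
    """Hypothetical subclass using the 1.2 method names and the 1.0 signatures."""

    def check_allowed(self, username, authentication=None):
        # replaces check_whitelist; falls back to the allowed_users set
        if username.endswith('.team'):  # illustrative extra rule
            return True
        return super().check_allowed(username, authentication)

    def check_blocked_users(self, username, authentication=None):
        # replaces check_blacklist
        return super().check_blocked_users(username, authentication)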
class LocalAuthenticator(Authenticator): class LocalAuthenticator(Authenticator):
"""Base class for Authenticators that work with local Linux/UNIX users """Base class for Authenticators that work with local Linux/UNIX users
@@ -669,37 +778,37 @@ class LocalAuthenticator(Authenticator):
""" """
).tag(config=True) ).tag(config=True)
group_whitelist = Set( group_whitelist = Set(help="""DEPRECATED: use allowed_groups""",).tag(config=True)
help="""
Whitelist all users from this UNIX group.
This makes the username whitelist ineffective. allowed_groups = Set(
help="""
Allow login from all users in these UNIX groups.
If set, allowed username set is ignored.
""" """
).tag(config=True) ).tag(config=True)
@observe('group_whitelist') @observe('allowed_groups')
def _group_whitelist_changed(self, change): def _allowed_groups_changed(self, change):
""" """Log a warning if mutually exclusive user and group allowed sets are specified."""
Log a warning if both group_whitelist and user whitelist are set. if self.allowed_users:
"""
if self.whitelist:
self.log.warning( self.log.warning(
"Ignoring username whitelist because group whitelist supplied!" "Ignoring Authenticator.allowed_users set because Authenticator.allowed_groups supplied!"
) )
def check_whitelist(self, username, authentication=None): def check_allowed(self, username, authentication=None):
if self.group_whitelist: if self.allowed_groups:
return self.check_group_whitelist(username, authentication) return self.check_allowed_groups(username, authentication)
else: else:
return super().check_whitelist(username, authentication) return super().check_allowed(username, authentication)
def check_group_whitelist(self, username, authentication=None): def check_allowed_groups(self, username, authentication=None):
""" """
If group_whitelist is configured, check if authenticating user is part of group. If allowed_groups is configured, check if authenticating user is part of group.
""" """
if not self.group_whitelist: if not self.allowed_groups:
return False return False
for grnam in self.group_whitelist: for grnam in self.allowed_groups:
try: try:
group = self._getgrnam(grnam) group = self._getgrnam(grnam)
except KeyError: except KeyError:
@@ -843,7 +952,7 @@ class PAMAuthenticator(LocalAuthenticator):
Authoritative list of user groups that determine admin access. Authoritative list of user groups that determine admin access.
Users not in these groups can still be granted admin status through admin_users. Users not in these groups can still be granted admin status through admin_users.
White/blacklisting rules still apply. allowed/blocked rules still apply.
""" """
).tag(config=True) ).tag(config=True)
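A hedged config sketch for the group-based settings above; group names are placeholders, and `admin_groups` is assumed to be the PAMAuthenticator trait this help text belongs to.

# jupyterhub_config.py
c.LocalAuthenticator.allowed_groups = {'jupyter-users'}  # was: group_whitelist; overrides allowed_users
c.PAMAuthenticator.admin_groups = {'jupyter-admins'}     # members become admins; allowed/blocked rules still apply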
@@ -986,6 +1095,16 @@ class PAMAuthenticator(LocalAuthenticator):
return super().normalize_username(username) return super().normalize_username(username)
for _old_name, _new_name, _version in [
("check_group_whitelist", "check_group_allowed", "1.2"),
]:
setattr(
LocalAuthenticator,
_old_name,
_deprecated_method(_old_name, _new_name, _version),
)
class DummyAuthenticator(Authenticator): class DummyAuthenticator(Authenticator):
"""Dummy Authenticator for testing """Dummy Authenticator for testing

View File

@@ -635,8 +635,15 @@ class BaseHandler(RequestHandler):
) )
if not next_url: if not next_url:
# custom default URL # custom default URL, usually passed because user landed on that page but was not logged in
next_url = default or self.default_url if default:
next_url = default
else:
# As set in jupyterhub_config.py
if callable(self.default_url):
next_url = self.default_url(self)
else:
next_url = self.default_url
if not next_url: if not next_url:
# default URL after login # default URL after login
@@ -651,8 +658,42 @@ class BaseHandler(RequestHandler):
next_url = url_path_join(self.hub.base_url, 'spawn') next_url = url_path_join(self.hub.base_url, 'spawn')
else: else:
next_url = url_path_join(self.hub.base_url, 'home') next_url = url_path_join(self.hub.base_url, 'home')
next_url = self.append_query_parameters(next_url, exclude=['next'])
return next_url return next_url
def append_query_parameters(self, url, exclude=None):
"""Append the current request's query parameters to the given URL.
The optional ``exclude`` parameter takes a list of parameter names to be
ignored, i.e. these parameters will not be added to the URL. This matters
for avoiding infinite redirect loops, e.g. with the next parameter being
appended over and over.
``exclude`` defaults to a list containing "next", since most (all?) use
cases in JupyterHub should not include the next parameter twice (it is
added to the query parameters elsewhere).
:param str url: a URL
:param list exclude: optional list of parameters to be ignored, defaults to
a list with "next" (to avoid redirect-loops)
:rtype: str
"""
if exclude is None:
exclude = ['next']
if self.request.query:
query_string = [
param
for param in parse_qsl(self.request.query)
if param[0] not in exclude
]
if query_string:
url = url_concat(url, query_string)
return url
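A standalone rehearsal of what the helper does, assuming a request such as /hub/spawn?profile=gpu&next=/hub/home (the URL and parameter names are hypothetical):

from urllib.parse import parse_qsl

from tornado.httputil import url_concat

query = 'profile=gpu&next=/hub/home'   # the handler reads this from self.request.query
params = [p for p in parse_qsl(query) if p[0] not in ('next',)]
print(url_concat('/hub/spawn-pending/alice', params))
# -> /hub/spawn-pending/alice?profile=gpu  ('next' is dropped to avoid redirect loops)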
async def auth_to_user(self, authenticated, user=None): async def auth_to_user(self, authenticated, user=None):
"""Persist data from .authenticate() or .refresh_user() to the User database """Persist data from .authenticate() or .refresh_user() to the User database
@@ -673,9 +714,10 @@ class BaseHandler(RequestHandler):
raise ValueError("Username doesn't match! %s != %s" % (username, user.name)) raise ValueError("Username doesn't match! %s != %s" % (username, user.name))
if user is None: if user is None:
new_user = username not in self.users user = self.find_user(username)
user = self.user_from_username(username) new_user = user is None
if new_user: if new_user:
user = self.user_from_username(username)
await maybe_future(self.authenticator.add_user(user)) await maybe_future(self.authenticator.add_user(user))
# Only set `admin` if the authenticator returned an explicit value. # Only set `admin` if the authenticator returned an explicit value.
if admin is not None and admin != user.admin: if admin is not None and admin != user.admin:
@@ -874,7 +916,7 @@ class BaseHandler(RequestHandler):
self.log.error( self.log.error(
"Stopping %s to avoid inconsistent state", user_server_name "Stopping %s to avoid inconsistent state", user_server_name
) )
await user.stop() await user.stop(server_name)
PROXY_ADD_DURATION_SECONDS.labels(status='failure').observe( PROXY_ADD_DURATION_SECONDS.labels(status='failure').observe(
time.perf_counter() - proxy_add_start_time time.perf_counter() - proxy_add_start_time
) )
@@ -907,6 +949,9 @@ class BaseHandler(RequestHandler):
self.settings['failure_count'] = 0 self.settings['failure_count'] = 0
return return
# spawn failed, increment count and abort if limit reached # spawn failed, increment count and abort if limit reached
SERVER_SPAWN_DURATION_SECONDS.labels(
status=ServerSpawnStatus.failure
).observe(time.perf_counter() - spawn_start_time)
self.settings.setdefault('failure_count', 0) self.settings.setdefault('failure_count', 0)
self.settings['failure_count'] += 1 self.settings['failure_count'] += 1
failure_count = self.settings['failure_count'] failure_count = self.settings['failure_count']
@@ -939,13 +984,16 @@ class BaseHandler(RequestHandler):
# waiting_for_response indicates server process has started, # waiting_for_response indicates server process has started,
# but is yet to become responsive. # but is yet to become responsive.
if spawner._spawn_pending and not spawner._waiting_for_response: if spawner._spawn_pending and not spawner._waiting_for_response:
# still in Spawner.start, which is taking a long time # If slow_spawn_timeout is intentionally disabled then we
# we shouldn't poll while spawn is incomplete. # don't need to log a warning, just return.
self.log.warning( if self.slow_spawn_timeout > 0:
"User %s is slow to start (timeout=%s)", # still in Spawner.start, which is taking a long time
user_server_name, # we shouldn't poll while spawn is incomplete.
self.slow_spawn_timeout, self.log.warning(
) "User %s is slow to start (timeout=%s)",
user_server_name,
self.slow_spawn_timeout,
)
return return
# start has finished, but the server hasn't come up # start has finished, but the server hasn't come up
@@ -1082,7 +1130,10 @@ class BaseHandler(RequestHandler):
except gen.TimeoutError: except gen.TimeoutError:
# hit timeout, but stop is still pending # hit timeout, but stop is still pending
self.log.warning( self.log.warning(
"User %s:%s server is slow to stop", user.name, server_name "User %s:%s server is slow to stop (timeout=%s)",
user.name,
server_name,
self.slow_stop_timeout,
) )
# return handle on the future for hooking up callbacks # return handle on the future for hooking up callbacks
@@ -1141,6 +1192,8 @@ class BaseHandler(RequestHandler):
for service in self.services.values(): for service in self.services.values():
if not service.url: if not service.url:
continue continue
if not service.display:
continue
accessible_services.append(service) accessible_services.append(service)
return accessible_services return accessible_services

View File

@@ -10,11 +10,13 @@ from http.client import responses
from jinja2 import TemplateNotFound from jinja2 import TemplateNotFound
from tornado import web from tornado import web
from tornado.httputil import url_concat from tornado.httputil import url_concat
from tornado.httputil import urlparse
from .. import __version__ from .. import __version__
from .. import orm from .. import orm
from ..metrics import SERVER_POLL_DURATION_SECONDS from ..metrics import SERVER_POLL_DURATION_SECONDS
from ..metrics import ServerPollStatus from ..metrics import ServerPollStatus
from ..pagination import Pagination
from ..utils import admin_only from ..utils import admin_only
from ..utils import maybe_future from ..utils import maybe_future
from ..utils import url_path_join from ..utils import url_path_join
@@ -170,11 +172,41 @@ class SpawnHandler(BaseHandler):
auth_state = await user.get_auth_state() auth_state = await user.get_auth_state()
await spawner.run_auth_state_hook(auth_state) await spawner.run_auth_state_hook(auth_state)
# Try to start server directly when query arguments are passed.
error_message = ''
query_options = {}
for key, byte_list in self.request.query_arguments.items():
query_options[key] = [bs.decode('utf8') for bs in byte_list]
# 'next' is reserved argument for redirect after spawn
query_options.pop('next', None)
if len(query_options) > 0:
try:
self.log.debug(
"Triggering spawn with supplied query arguments for %s",
spawner._log_name,
)
options = await maybe_future(spawner.options_from_query(query_options))
pending_url = self._get_pending_url(user, server_name)
return await self._wrap_spawn_single_user(
user, server_name, spawner, pending_url, options
)
except Exception as e:
self.log.error(
"Failed to spawn single-user server with query arguments",
exc_info=True,
)
error_message = str(e)
# fallback to behavior without failing query arguments
spawner_options_form = await spawner.get_options_form() spawner_options_form = await spawner.get_options_form()
if spawner_options_form: if spawner_options_form:
self.log.debug("Serving options form for %s", spawner._log_name) self.log.debug("Serving options form for %s", spawner._log_name)
form = await self._render_form( form = await self._render_form(
for_user=user, spawner_options_form=spawner_options_form for_user=user,
spawner_options_form=spawner_options_form,
message=error_message,
) )
self.finish(form) self.finish(form)
else: else:
@@ -250,6 +282,8 @@ class SpawnHandler(BaseHandler):
self.hub.base_url, "spawn-pending", user.escaped_name, server_name self.hub.base_url, "spawn-pending", user.escaped_name, server_name
) )
pending_url = self.append_query_parameters(pending_url, exclude=['next'])
if self.get_argument('next', None): if self.get_argument('next', None):
# preserve `?next=...` through spawn-pending # preserve `?next=...` through spawn-pending
pending_url = url_concat(pending_url, {'next': self.get_argument('next')}) pending_url = url_concat(pending_url, {'next': self.get_argument('next')})
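A hedged illustration of the new flow: hitting a spawn URL with extra query arguments now bypasses the options form and hands the decoded arguments to the spawner. The URL and option names are hypothetical, and parse_qs stands in for the handler's own decoding of query_arguments.

from urllib.parse import parse_qs

# e.g. GET /hub/spawn/alice?profile=gpu&cpus=2&next=/hub/home
query_options = parse_qs('profile=gpu&cpus=2&next=/hub/home')
query_options.pop('next', None)   # 'next' is reserved for the post-spawn redirect
print(query_options)              # {'profile': ['gpu'], 'cpus': ['2']}
# the result is handed to spawner.options_from_query(...) as shown above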
@@ -419,12 +453,15 @@ class AdminHandler(BaseHandler):
@web.authenticated @web.authenticated
@admin_only @admin_only
async def get(self): async def get(self):
page, per_page, offset = Pagination.get_page_args(self)
available = {'name', 'admin', 'running', 'last_activity'} available = {'name', 'admin', 'running', 'last_activity'}
default_sort = ['admin', 'name'] default_sort = ['admin', 'name']
mapping = {'running': orm.Spawner.server_id} mapping = {'running': orm.Spawner.server_id}
for name in available: for name in available:
if name not in mapping: if name not in mapping:
mapping[name] = getattr(orm.User, name) table = orm.User if name != "last_activity" else orm.Spawner
mapping[name] = getattr(table, name)
default_order = { default_order = {
'name': 'asc', 'name': 'asc',
@@ -459,13 +496,24 @@ class AdminHandler(BaseHandler):
# get User.col.desc() order objects # get User.col.desc() order objects
ordered = [getattr(c, o)() for c, o in zip(cols, orders)] ordered = [getattr(c, o)() for c, o in zip(cols, orders)]
users = self.db.query(orm.User).outerjoin(orm.Spawner).order_by(*ordered) users = (
self.db.query(orm.User)
.outerjoin(orm.Spawner)
.order_by(*ordered)
.limit(per_page)
.offset(offset)
)
users = [self._user_from_orm(u) for u in users] users = [self._user_from_orm(u) for u in users]
running = [] running = []
for u in users: for u in users:
running.extend(s for s in u.spawners.values() if s.active) running.extend(s for s in u.spawners.values() if s.active)
total = self.db.query(orm.User.id).count()
pagination = Pagination(
url=self.request.uri, total=total, page=page, per_page=per_page,
)
auth_state = await self.current_user.get_auth_state() auth_state = await self.current_user.get_auth_state()
html = self.render_template( html = self.render_template(
'admin.html', 'admin.html',
@@ -478,6 +526,7 @@ class AdminHandler(BaseHandler):
allow_named_servers=self.allow_named_servers, allow_named_servers=self.allow_named_servers,
named_server_limit_per_user=self.named_server_limit_per_user, named_server_limit_per_user=self.named_server_limit_per_user,
server_version='{} {}'.format(__version__, self.version_hash), server_version='{} {}'.format(__version__, self.version_hash),
pagination=pagination,
) )
self.finish(html) self.finish(html)
@@ -601,10 +650,14 @@ class ProxyErrorHandler(BaseHandler):
class HealthCheckHandler(BaseHandler): class HealthCheckHandler(BaseHandler):
"""Answer to health check""" """Serve health check probes as quickly as possible"""
def get(self, *args): # There is nothing for us to do other than return a positive
self.finish() # HTTP status code as quickly as possible for GET or HEAD requests
def get(self):
pass
head = get
default_handlers = [ default_handlers = [

View File

@@ -12,6 +12,7 @@ from tornado.log import LogFormatter
from tornado.web import HTTPError from tornado.web import HTTPError
from tornado.web import StaticFileHandler from tornado.web import StaticFileHandler
from .handlers.pages import HealthCheckHandler
from .metrics import prometheus_log_method from .metrics import prometheus_log_method
@@ -127,7 +128,9 @@ def log_request(handler):
""" """
status = handler.get_status() status = handler.get_status()
request = handler.request request = handler.request
if status == 304 or (status < 300 and isinstance(handler, StaticFileHandler)): if status == 304 or (
status < 300 and isinstance(handler, (StaticFileHandler, HealthCheckHandler))
):
# static-file success and 304 Found are debug-level # static-file success and 304 Found are debug-level
log_method = access_log.debug log_method = access_log.debug
elif status < 400: elif status < 400:

View File

@@ -2,8 +2,6 @@
implements https://oauthlib.readthedocs.io/en/latest/oauth2/server.html implements https://oauthlib.readthedocs.io/en/latest/oauth2/server.html
""" """
from datetime import datetime
from oauthlib import uri_validate from oauthlib import uri_validate
from oauthlib.oauth2 import RequestValidator from oauthlib.oauth2 import RequestValidator
from oauthlib.oauth2 import WebApplicationServer from oauthlib.oauth2 import WebApplicationServer
@@ -247,7 +245,7 @@ class JupyterHubRequestValidator(RequestValidator):
client=orm_client, client=orm_client,
code=code['code'], code=code['code'],
# oauth has 5 minutes to complete # oauth has 5 minutes to complete
expires_at=int(datetime.utcnow().timestamp() + 300), expires_at=int(orm.OAuthCode.now() + 300),
# TODO: persist oauth scopes # TODO: persist oauth scopes
# scopes=request.scopes, # scopes=request.scopes,
user=request.user.orm_user, user=request.user.orm_user,
@@ -344,7 +342,7 @@ class JupyterHubRequestValidator(RequestValidator):
orm_access_token = orm.OAuthAccessToken( orm_access_token = orm.OAuthAccessToken(
client=client, client=client,
grant_type=orm.GrantType.authorization_code, grant_type=orm.GrantType.authorization_code,
expires_at=datetime.utcnow().timestamp() + token['expires_in'], expires_at=orm.OAuthAccessToken.now() + token['expires_in'],
refresh_token=token['refresh_token'], refresh_token=token['refresh_token'],
# TODO: save scopes, # TODO: save scopes,
# scopes=scopes, # scopes=scopes,
@@ -438,7 +436,7 @@ class JupyterHubRequestValidator(RequestValidator):
Method is used by: Method is used by:
- Authorization Code Grant - Authorization Code Grant
""" """
orm_code = self.db.query(orm.OAuthCode).filter_by(code=code).first() orm_code = orm.OAuthCode.find(self.db, code=code)
if orm_code is None: if orm_code is None:
app_log.debug("No such code: %s", code) app_log.debug("No such code: %s", code)
return False return False

View File

@@ -53,7 +53,7 @@ class Server(HasTraits):
Never used in APIs, only logging, Never used in APIs, only logging,
since it can be non-connectable value, such as '', meaning all interfaces. since it can be non-connectable value, such as '', meaning all interfaces.
""" """
if self.ip in {'', '0.0.0.0'}: if self.ip in {'', '0.0.0.0', '::'}:
return self.url.replace(self._connect_ip, self.ip or '*', 1) return self.url.replace(self._connect_ip, self.ip or '*', 1)
return self.url return self.url
@@ -87,13 +87,13 @@ class Server(HasTraits):
"""The address to use when connecting to this server """The address to use when connecting to this server
When `ip` is set to a real ip address, the same value is used. When `ip` is set to a real ip address, the same value is used.
When `ip` refers to 'all interfaces' (e.g. '0.0.0.0'), When `ip` refers to 'all interfaces' (e.g. '0.0.0.0' or '::'),
clients connect via hostname by default. clients connect via hostname by default.
Setting `connect_ip` explicitly overrides any default behavior. Setting `connect_ip` explicitly overrides any default behavior.
""" """
if self.connect_ip: if self.connect_ip:
return self.connect_ip return self.connect_ip
elif self.ip in {'', '0.0.0.0'}: elif self.ip in {'', '0.0.0.0', '::'}:
# if listening on all interfaces, default to hostname for connect # if listening on all interfaces, default to hostname for connect
return socket.gethostname() return socket.gethostname()
else: else:
@@ -149,7 +149,12 @@ class Server(HasTraits):
if self.connect_url: if self.connect_url:
parsed = urlparse(self.connect_url) parsed = urlparse(self.connect_url)
return "{proto}://{host}".format(proto=parsed.scheme, host=parsed.netloc) return "{proto}://{host}".format(proto=parsed.scheme, host=parsed.netloc)
return "{proto}://{ip}:{port}".format(
if ':' in self._connect_ip:
fmt = "{proto}://[{ip}]:{port}"
else:
fmt = "{proto}://{ip}:{port}"
return fmt.format(
proto=self.proto, ip=self._connect_ip, port=self._connect_port proto=self.proto, ip=self._connect_ip, port=self._connect_port
) )

View File

@@ -311,7 +311,46 @@ class Service(Base):
return db.query(cls).filter(cls.name == name).first() return db.query(cls).filter(cls.name == name).first()
class Hashed(object): class Expiring:
"""Mixin for expiring entries
Subclasses must define at least an expires_at property,
which should be a unix timestamp or datetime object
"""
now = utcnow  # function, must return a float timestamp or datetime
expires_at = None # must be defined
@property
def expires_in(self):
"""Property returning expiration in seconds from now
or None
"""
if self.expires_at:
delta = self.expires_at - self.now()
if isinstance(delta, timedelta):
delta = delta.total_seconds()
return delta
else:
return None
@classmethod
def purge_expired(cls, db):
"""Purge expired API Tokens from the database"""
now = cls.now()
deleted = False
for obj in (
db.query(cls).filter(cls.expires_at != None).filter(cls.expires_at < now)
):
app_log.debug("Purging expired %s", obj)
deleted = True
db.delete(obj)
if deleted:
db.commit()
class Hashed(Expiring):
"""Mixin for tables with hashed tokens""" """Mixin for tables with hashed tokens"""
prefix_length = 4 prefix_length = 4
@@ -368,11 +407,21 @@ class Hashed(object):
"""Start the query for matching token. """Start the query for matching token.
Returns an SQLAlchemy query already filtered by prefix-matches. Returns an SQLAlchemy query already filtered by prefix-matches.
.. versionchanged:: 1.2
Excludes expired matches.
""" """
prefix = token[: cls.prefix_length] prefix = token[: cls.prefix_length]
# since we can't filter on hashed values, filter on prefix # since we can't filter on hashed values, filter on prefix
# so we aren't comparing with all tokens # so we aren't comparing with all tokens
return db.query(cls).filter(bindparam('prefix', prefix).startswith(cls.prefix)) prefix_match = db.query(cls).filter(
bindparam('prefix', prefix).startswith(cls.prefix)
)
prefix_match = prefix_match.filter(
or_(cls.expires_at == None, cls.expires_at >= cls.now())
)
return prefix_match
@classmethod @classmethod
def find(cls, db, token): def find(cls, db, token):
@@ -408,6 +457,7 @@ class APIToken(Hashed, Base):
return 'a%i' % self.id return 'a%i' % self.id
# token metadata for bookkeeping # token metadata for bookkeeping
now = datetime.utcnow # for expiry
created = Column(DateTime, default=datetime.utcnow) created = Column(DateTime, default=datetime.utcnow)
expires_at = Column(DateTime, default=None, nullable=True) expires_at = Column(DateTime, default=None, nullable=True)
last_activity = Column(DateTime) last_activity = Column(DateTime)
@@ -428,20 +478,6 @@ class APIToken(Hashed, Base):
cls=self.__class__.__name__, pre=self.prefix, kind=kind, name=name cls=self.__class__.__name__, pre=self.prefix, kind=kind, name=name
) )
@classmethod
def purge_expired(cls, db):
"""Purge expired API Tokens from the database"""
now = utcnow()
deleted = False
for token in (
db.query(cls).filter(cls.expires_at != None).filter(cls.expires_at < now)
):
app_log.debug("Purging expired %s", token)
deleted = True
db.delete(token)
if deleted:
db.commit()
@classmethod @classmethod
def find(cls, db, token, *, kind=None): def find(cls, db, token, *, kind=None):
"""Find a token object by value. """Find a token object by value.
@@ -452,9 +488,6 @@ class APIToken(Hashed, Base):
`kind='service'` only returns API tokens for services `kind='service'` only returns API tokens for services
""" """
prefix_match = cls.find_prefix(db, token) prefix_match = cls.find_prefix(db, token)
prefix_match = prefix_match.filter(
or_(cls.expires_at == None, cls.expires_at >= utcnow())
)
if kind == 'user': if kind == 'user':
prefix_match = prefix_match.filter(cls.user_id != None) prefix_match = prefix_match.filter(cls.user_id != None)
elif kind == 'service': elif kind == 'service':
@@ -497,7 +530,7 @@ class APIToken(Hashed, Base):
assert service.id is not None assert service.id is not None
orm_token.service = service orm_token.service = service
if expires_in is not None: if expires_in is not None:
orm_token.expires_at = utcnow() + timedelta(seconds=expires_in) orm_token.expires_at = cls.now() + timedelta(seconds=expires_in)
db.add(orm_token) db.add(orm_token)
db.commit() db.commit()
return token return token
@@ -521,6 +554,10 @@ class OAuthAccessToken(Hashed, Base):
__tablename__ = 'oauth_access_tokens' __tablename__ = 'oauth_access_tokens'
id = Column(Integer, primary_key=True, autoincrement=True) id = Column(Integer, primary_key=True, autoincrement=True)
@staticmethod
def now():
return datetime.utcnow().timestamp()
@property @property
def api_id(self): def api_id(self):
return 'o%i' % self.id return 'o%i' % self.id
@@ -547,11 +584,12 @@ class OAuthAccessToken(Hashed, Base):
last_activity = Column(DateTime, nullable=True) last_activity = Column(DateTime, nullable=True)
def __repr__(self): def __repr__(self):
return "<{cls}('{prefix}...', client_id={client_id!r}, user={user!r}>".format( return "<{cls}('{prefix}...', client_id={client_id!r}, user={user!r}, expires_in={expires_in}>".format(
cls=self.__class__.__name__, cls=self.__class__.__name__,
client_id=self.client_id, client_id=self.client_id,
user=self.user and self.user.name, user=self.user and self.user.name,
prefix=self.prefix, prefix=self.prefix,
expires_in=self.expires_in,
) )
@classmethod @classmethod
@@ -568,8 +606,9 @@ class OAuthAccessToken(Hashed, Base):
return orm_token return orm_token
class OAuthCode(Base): class OAuthCode(Expiring, Base):
__tablename__ = 'oauth_codes' __tablename__ = 'oauth_codes'
id = Column(Integer, primary_key=True, autoincrement=True) id = Column(Integer, primary_key=True, autoincrement=True)
client_id = Column( client_id = Column(
Unicode(255), ForeignKey('oauth_clients.identifier', ondelete='CASCADE') Unicode(255), ForeignKey('oauth_clients.identifier', ondelete='CASCADE')
@@ -581,6 +620,19 @@ class OAuthCode(Base):
# state = Column(Unicode(1023)) # state = Column(Unicode(1023))
user_id = Column(Integer, ForeignKey('users.id', ondelete='CASCADE')) user_id = Column(Integer, ForeignKey('users.id', ondelete='CASCADE'))
@staticmethod
def now():
return datetime.utcnow().timestamp()
@classmethod
def find(cls, db, code):
return (
db.query(cls)
.filter(cls.code == code)
.filter(or_(cls.expires_at == None, cls.expires_at >= cls.now()))
.first()
)
class OAuthClient(Base): class OAuthClient(Base):
__tablename__ = 'oauth_clients' __tablename__ = 'oauth_clients'
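The Expiring mixin added above centralizes the expiry bookkeeping previously duplicated on APIToken, and OAuthCode.find now skips expired codes. A minimal standalone sketch of the pattern (illustrative only, not JupyterHub code):

    from datetime import datetime, timedelta

    class Expiring:
        """Sketch of the expiry mixin pattern: subclasses provide expires_at and now()."""
        expires_at = None  # datetime (or float timestamp); None means "never expires"

        @staticmethod
        def now():
            return datetime.utcnow()

        @property
        def expires_in(self):
            """Seconds until expiry, or None if no expiry is set."""
            if self.expires_at is None:
                return None
            delta = self.expires_at - self.now()
            return delta.total_seconds() if isinstance(delta, timedelta) else delta

    class Token(Expiring):
        def __init__(self, lifetime_seconds):
            self.expires_at = self.now() + timedelta(seconds=lifetime_seconds)

    print(round(Token(30).expires_in))  # ~30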

180
jupyterhub/pagination.py Normal file
View File

@@ -0,0 +1,180 @@
"""Basic class to manage pagination utils."""
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
class Pagination:
_page_name = 'page'
_per_page_name = 'per_page'
_default_page = 1
_default_per_page = 100
_max_per_page = 250
def __init__(self, *args, **kwargs):
"""Potential parameters.
**url**: URL in request
**page**: current page in use
**per_page**: number of records to display in the page. By default 100
**total**: total records considered while paginating
"""
self.page = kwargs.get(self._page_name, 1)
if self.per_page > self._max_per_page:
self.per_page = self._max_per_page
self.total = int(kwargs.get('total', 0))
self.url = kwargs.get('url') or self.get_url()
self.init_values()
def init_values(self):
self._cached = {}
self.skip = (self.page - 1) * self.per_page
pages = divmod(self.total, self.per_page)
self.total_pages = pages[0] + 1 if pages[1] else pages[0]
self.has_prev = self.page > 1
self.has_next = self.page < self.total_pages
@classmethod
def get_page_args(self, handler):
"""
This method gets the arguments used in the webpage to configure the pagination
In case of no arguments, it uses the default values from this class
It returns:
- self.page: The page requested for paginating or the default value (1)
- self.per_page: The number of items to return in this page. By default 100 and no more than 250
- self.per_page * (self.page - 1): The offset to consider when managing pagination via the ORM
"""
self.page = handler.get_argument(self._page_name, self._default_page)
self.per_page = handler.get_argument(
self._per_page_name, self._default_per_page
)
try:
self.per_page = int(self.per_page)
if self.per_page > self._max_per_page:
self.per_page = self._max_per_page
except:
self.per_page = self._default_per_page
try:
self.page = int(self.page)
if self.page < 1:
self.page = self._default_page
except:
self.page = self._default_page
return self.page, self.per_page, self.per_page * (self.page - 1)
@property
def info(self):
"""Get the pagination information."""
start = 1 + (self.page - 1) * self.per_page
end = start + self.per_page - 1
if end > self.total:
end = self.total
if start > self.total:
start = self.total
return {'total': self.total, 'start': start, 'end': end}
def calculate_pages_window(self):
"""Calculates the set of pages to render later in links() method.
It returns the list of pages to render via links for the pagination
By default, only a finite, predefined number of pages is rendered, to avoid the visual fatigue
of a long list of pages: 7 pages plus some inactive links with the characters '...'
to indicate that there are further pages that aren't explicitly rendered.
The general approach is to show the current page and the 5 following pages, plus the last 2 pages
(when the current page + 5 does not overflow the total length of pages) and the first page for reference.
"""
self.separator_character = '...'
default_pages_to_render = 7
after_page = 5
before_end = 2
# Add 1 to self.total_pages since our default page is 1 and not 0
total_pages = self.total_pages + 1
pages = []
if total_pages > default_pages_to_render:
if self.page > 1:
pages.extend([1, '...'])
if total_pages < self.page + after_page:
pages.extend(list(range(self.page, total_pages)))
else:
if total_pages >= self.page + after_page + before_end:
pages.extend(list(range(self.page, self.page + after_page)))
pages.append('...')
pages.extend(list(range(total_pages - before_end, total_pages)))
else:
pages.extend(list(range(self.page, self.page + after_page)))
if self.page + after_page < total_pages:
# show only last page when the after_page window left space to show it
pages.append('...')
pages.extend(list(range(total_pages - 1, total_pages)))
return pages
else:
return list(range(1, total_pages))
@property
def links(self):
"""Get the links for the pagination.
Using the output of calculate_pages_window(), this generates the HTML code
for the pages to render, plus the arrows to go forward and backward (if needed).
"""
if self.total_pages == 1:
return []
pages_to_render = self.calculate_pages_window()
links = ['<nav>']
links.append('<ul class="pagination">')
if self.page > 1:
prev_page = self.page - 1
links.append(
'<li><a href="?page={prev_page}">«</a></li>'.format(prev_page=prev_page)
)
else:
links.append(
'<li class="disabled"><span><span aria-hidden="true">«</span></span></li>'
)
for page in list(pages_to_render):
if page == self.page:
links.append(
'<li class="active"><span>{page}<span class="sr-only">(current)</span></span></li>'.format(
page=page
)
)
elif page == self.separator_character:
links.append(
'<li class="disabled"><span> <span aria-hidden="true">...</span></span></li>'
)
else:
links.append(
'<li><a href="?page={page}">{page}</a></li>'.format(page=page)
)
if self.page >= 1 and self.page < self.total_pages:
next_page = self.page + 1
links.append(
'<li><a href="?page={next_page}">»</a></li>'.format(next_page=next_page)
)
else:
links.append(
'<li class="disabled"><span><span aria-hidden="true">»</span></span></li>'
)
links.append('</ul>')
links.append('</nav>')
return ''.join(links)
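The heart of Pagination is the offset and page-count arithmetic done in init_values(); a tiny self-contained illustration (hypothetical helper, not part of the module):

    def page_math(page, per_page, total):
        # offset to skip in the query, and total number of pages (ceiling division)
        skip = (page - 1) * per_page
        full, rem = divmod(total, per_page)
        total_pages = full + 1 if rem else full
        return skip, total_pages

    print(page_math(page=3, per_page=100, total=250))  # (200, 3)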

View File

@@ -371,9 +371,13 @@ class HubAuth(SingletonConfigurable):
) )
app_log.warning(r.text) app_log.warning(r.text)
msg = "Failed to check authorization" msg = "Failed to check authorization"
# pass on error_description from oauth failure # pass on error from oauth failure
try: try:
description = r.json().get("error_description") response = r.json()
# prefer more specific 'error_description', fallback to 'error'
description = response.get(
"error_description", response.get("error", "Unknown error")
)
except Exception: except Exception:
pass pass
else: else:
@@ -860,15 +864,15 @@ class HubAuthenticated(object):
if kind == 'service': if kind == 'service':
# it's a service, check hub_services # it's a service, check hub_services
if self.hub_services and name in self.hub_services: if self.hub_services and name in self.hub_services:
app_log.debug("Allowing whitelisted Hub service %s", name) app_log.debug("Allowing Hub service %s", name)
return model return model
else: else:
app_log.warning("Not allowing Hub service %s", name) app_log.warning("Not allowing Hub service %s", name)
raise UserNotAllowed(model) raise UserNotAllowed(model)
if self.hub_users and name in self.hub_users: if self.hub_users and name in self.hub_users:
# user in whitelist # user in allowed list
app_log.debug("Allowing whitelisted Hub user %s", name) app_log.debug("Allowing Hub user %s", name)
return model return model
elif self.hub_groups and set(model['groups']).intersection(self.hub_groups): elif self.hub_groups and set(model['groups']).intersection(self.hub_groups):
allowed_groups = set(model['groups']).intersection(self.hub_groups) allowed_groups = set(model['groups']).intersection(self.hub_groups)
@@ -877,7 +881,7 @@ class HubAuthenticated(object):
name, name,
','.join(sorted(allowed_groups)), ','.join(sorted(allowed_groups)),
) )
# group in whitelist # group in allowed list
return model return model
else: else:
app_log.warning("Not allowing Hub user %s", name) app_log.warning("Not allowing Hub user %s", name)

View File

@@ -201,6 +201,10 @@ class Service(LoggingConfigurable):
""" """
).tag(input=True) ).tag(input=True)
display = Bool(
True, help="""Whether to list the service on the JupyterHub UI"""
).tag(input=True)
oauth_no_confirm = Bool( oauth_no_confirm = Bool(
False, False,
help="""Skip OAuth confirmation when users access this service. help="""Skip OAuth confirmation when users access this service.
@@ -342,7 +346,7 @@ class Service(LoggingConfigurable):
env['JUPYTERHUB_SERVICE_PREFIX'] = self.server.base_url env['JUPYTERHUB_SERVICE_PREFIX'] = self.server.base_url
hub = self.hub hub = self.hub
if self.hub.ip in ('0.0.0.0', ''): if self.hub.ip in ('', '0.0.0.0', '::'):
# if the Hub is listening on all interfaces, # if the Hub is listening on all interfaces,
# tell services to connect via localhost # tell services to connect via localhost
# since they are always local subprocesses # since they are always local subprocesses
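With the new display trait above, a service can opt out of the hub's services listing. A hedged jupyterhub_config.py sketch (service name and command are placeholders):

    c.JupyterHub.services = [
        {
            'name': 'internal-monitor',
            'command': ['python3', '-m', 'internal_monitor'],
            'display': False,  # do not list this service in the JupyterHub UI
        }
    ]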

View File

@@ -0,0 +1,13 @@
"""JupyterHub single-user server entrypoints
Contains default notebook-app subclass and mixins
"""
from .app import main
from .app import SingleUserNotebookApp
from .mixins import HubAuthenticatedHandler
from .mixins import make_singleuser_app
# backward-compatibility
JupyterHubLoginHandler = SingleUserNotebookApp.login_handler_class
JupyterHubLogoutHandler = SingleUserNotebookApp.logout_handler_class
OAuthCallbackHandler = SingleUserNotebookApp.oauth_callback_handler_class

View File

@@ -0,0 +1,4 @@
from .app import main
if __name__ == '__main__':
main()

View File

@@ -0,0 +1,20 @@
"""Make a single-user app based on the environment:
- $JUPYTERHUB_SINGLEUSER_APP, the base Application class, to be wrapped in JupyterHub authentication.
default: notebook.notebookapp.NotebookApp
"""
import os
from traitlets import import_item
from .mixins import make_singleuser_app
JUPYTERHUB_SINGLEUSER_APP = (
os.environ.get("JUPYTERHUB_SINGLEUSER_APP") or "notebook.notebookapp.NotebookApp"
)
App = import_item(JUPYTERHUB_SINGLEUSER_APP)
SingleUserNotebookApp = make_singleuser_app(App)
main = SingleUserNotebookApp.launch_instance
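Because the base application is resolved at import time from $JUPYTERHUB_SINGLEUSER_APP, the variable must be set before the module is imported. A hedged sketch (assumes jupyter_server is installed):

    import os

    # choose jupyter_server instead of the classic notebook as the base app
    os.environ["JUPYTERHUB_SINGLEUSER_APP"] = "jupyter_server.serverapp.ServerApp"

    from jupyterhub.singleuser import main  # the base app is resolved during this import

    # main()  # normally invoked for you by a Spawner via `jupyterhub-singleuser`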

View File

@@ -1,8 +1,15 @@
#!/usr/bin/env python #!/usr/bin/env python
"""Extend regular notebook server to be aware of multiuser things.""" """Mixins to regular notebook server to add JupyterHub auth.
Meant to be compatible with jupyter_server and classic notebook
Use make_singleuser_app to create a compatible Application class
with JupyterHub authentication mixins enabled.
"""
# Copyright (c) Jupyter Development Team. # Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License. # Distributed under the terms of the Modified BSD License.
import asyncio import asyncio
import importlib
import json import json
import os import os
import random import random
@@ -19,38 +26,29 @@ from tornado.httpclient import AsyncHTTPClient
from tornado.httpclient import HTTPRequest from tornado.httpclient import HTTPRequest
from tornado.web import HTTPError from tornado.web import HTTPError
from tornado.web import RequestHandler from tornado.web import RequestHandler
from traitlets import Any
from traitlets import Bool
from traitlets import Bytes
from traitlets import CUnicode
from traitlets import default
from traitlets import import_item
from traitlets import Integer
from traitlets import observe
from traitlets import TraitError
from traitlets import Unicode
from traitlets import validate
from traitlets.config import Configurable
try: from .._version import __version__
import notebook from .._version import _check_version
except ImportError: from ..log import log_request
raise ImportError("JupyterHub single-user server requires notebook >= 4.0") from ..services.auth import HubOAuth
from ..services.auth import HubOAuthCallbackHandler
from traitlets import ( from ..services.auth import HubOAuthenticated
Any, from ..utils import exponential_backoff
Bool, from ..utils import isoformat
Bytes, from ..utils import make_ssl_context
Integer, from ..utils import url_path_join
Unicode,
CUnicode,
default,
observe,
validate,
TraitError,
)
from notebook.notebookapp import (
NotebookApp,
aliases as notebook_aliases,
flags as notebook_flags,
)
from notebook.auth.login import LoginHandler
from notebook.auth.logout import LogoutHandler
from notebook.base.handlers import IPythonHandler
from ._version import __version__, _check_version
from .log import log_request
from .services.auth import HubOAuth, HubOAuthenticated, HubOAuthCallbackHandler
from .utils import isoformat, url_path_join, make_ssl_context, exponential_backoff
# Authenticate requests with the Hub # Authenticate requests with the Hub
@@ -80,7 +78,7 @@ class HubAuthenticatedHandler(HubOAuthenticated):
return set() return set()
class JupyterHubLoginHandler(LoginHandler): class JupyterHubLoginHandlerMixin:
"""LoginHandler that hooks up Hub authentication""" """LoginHandler that hooks up Hub authentication"""
@staticmethod @staticmethod
@@ -113,7 +111,7 @@ class JupyterHubLoginHandler(LoginHandler):
return return
class JupyterHubLogoutHandler(LogoutHandler): class JupyterHubLogoutHandlerMixin:
def get(self): def get(self):
self.settings['hub_auth'].clear_cookie(self) self.settings['hub_auth'].clear_cookie(self)
self.redirect( self.redirect(
@@ -122,7 +120,7 @@ class JupyterHubLogoutHandler(LogoutHandler):
) )
class OAuthCallbackHandler(HubOAuthCallbackHandler, IPythonHandler): class OAuthCallbackHandlerMixin(HubOAuthCallbackHandler):
"""Mixin IPythonHandler to get the right error pages, etc.""" """Mixin IPythonHandler to get the right error pages, etc."""
@property @property
@@ -131,27 +129,22 @@ class OAuthCallbackHandler(HubOAuthCallbackHandler, IPythonHandler):
# register new hub related command-line aliases # register new hub related command-line aliases
aliases = dict(notebook_aliases) aliases = {
aliases.update( 'user': 'SingleUserNotebookApp.user',
{ 'group': 'SingleUserNotebookApp.group',
'user': 'SingleUserNotebookApp.user', 'cookie-name': 'HubAuth.cookie_name',
'group': 'SingleUserNotebookApp.group', 'hub-prefix': 'SingleUserNotebookApp.hub_prefix',
'cookie-name': 'HubAuth.cookie_name', 'hub-host': 'SingleUserNotebookApp.hub_host',
'hub-prefix': 'SingleUserNotebookApp.hub_prefix', 'hub-api-url': 'SingleUserNotebookApp.hub_api_url',
'hub-host': 'SingleUserNotebookApp.hub_host', 'base-url': 'SingleUserNotebookApp.base_url',
'hub-api-url': 'SingleUserNotebookApp.hub_api_url', }
'base-url': 'SingleUserNotebookApp.base_url', flags = {
} 'disable-user-config': (
) {'SingleUserNotebookApp': {'disable_user_config': True}},
flags = dict(notebook_flags) "Disable user-controlled configuration of the notebook server.",
flags.update( )
{ }
'disable-user-config': (
{'SingleUserNotebookApp': {'disable_user_config': True}},
"Disable user-controlled configuration of the notebook server.",
)
}
)
page_template = """ page_template = """
{% extends "templates/page.html" %} {% extends "templates/page.html" %}
@@ -216,21 +209,29 @@ def _exclude_home(path_list):
yield p yield p
class SingleUserNotebookApp(NotebookApp): class SingleUserNotebookAppMixin(Configurable):
"""A Subclass of the regular NotebookApp that is aware of the parent multiuser context.""" """A Subclass of the regular NotebookApp that is aware of the parent multiuser context."""
description = dedent( description = dedent(
""" """
Single-user server for JupyterHub. Extends the Jupyter Notebook server. Single-user server for JupyterHub. Extends the Jupyter Notebook server.
Meant to be invoked by JupyterHub Spawners, and not directly. Meant to be invoked by JupyterHub Spawners, not directly.
""" """
) )
examples = "" examples = ""
subcommands = {} subcommands = {}
version = __version__ version = __version__
classes = NotebookApp.classes + [HubOAuth]
# must be set in mixin subclass
# make_singleuser_app sets these
# aliases = aliases
# flags = flags
# login_handler_class = JupyterHubLoginHandler
# logout_handler_class = JupyterHubLogoutHandler
# oauth_callback_handler_class = OAuthCallbackHandler
# classes = NotebookApp.classes + [HubOAuth]
# disable single-user app's localhost checking # disable single-user app's localhost checking
allow_remote_access = True allow_remote_access = True
@@ -323,16 +324,12 @@ class SingleUserNotebookApp(NotebookApp):
return url.hostname return url.hostname
return '127.0.0.1' return '127.0.0.1'
aliases = aliases # disable some single-user configurables
flags = flags
# disble some single-user configurables
token = '' token = ''
open_browser = False open_browser = False
quit_button = False quit_button = False
trust_xheaders = True trust_xheaders = True
login_handler_class = JupyterHubLoginHandler
logout_handler_class = JupyterHubLogoutHandler
port_retries = ( port_retries = (
0 # disable port-retries, since the Spawner will tell us what port to use 0 # disable port-retries, since the Spawner will tell us what port to use
) )
@@ -381,11 +378,11 @@ class SingleUserNotebookApp(NotebookApp):
# disable config-migration when user config is disabled # disable config-migration when user config is disabled
return return
else: else:
super(SingleUserNotebookApp, self).migrate_config() super().migrate_config()
@property @property
def config_file_paths(self): def config_file_paths(self):
path = super(SingleUserNotebookApp, self).config_file_paths path = super().config_file_paths
if self.disable_user_config: if self.disable_user_config:
# filter out user-writable config dirs if user config is disabled # filter out user-writable config dirs if user config is disabled
@@ -394,7 +391,7 @@ class SingleUserNotebookApp(NotebookApp):
@property @property
def nbextensions_path(self): def nbextensions_path(self):
path = super(SingleUserNotebookApp, self).nbextensions_path path = super().nbextensions_path
if self.disable_user_config: if self.disable_user_config:
path = list(_exclude_home(path)) path = list(_exclude_home(path))
@@ -562,7 +559,7 @@ class SingleUserNotebookApp(NotebookApp):
# start by hitting Hub to check version # start by hitting Hub to check version
ioloop.IOLoop.current().run_sync(self.check_hub_version) ioloop.IOLoop.current().run_sync(self.check_hub_version)
ioloop.IOLoop.current().add_callback(self.keep_activity_updated) ioloop.IOLoop.current().add_callback(self.keep_activity_updated)
super(SingleUserNotebookApp, self).start() super().start()
def init_hub_auth(self): def init_hub_auth(self):
api_token = None api_token = None
@@ -610,12 +607,17 @@ class SingleUserNotebookApp(NotebookApp):
'Content-Security-Policy', 'Content-Security-Policy',
';'.join(["frame-ancestors 'self'", "report-uri " + csp_report_uri]), ';'.join(["frame-ancestors 'self'", "report-uri " + csp_report_uri]),
) )
super(SingleUserNotebookApp, self).init_webapp() super().init_webapp()
# add OAuth callback # add OAuth callback
self.web_app.add_handlers( self.web_app.add_handlers(
r".*$", r".*$",
[(urlparse(self.hub_auth.oauth_redirect_uri).path, OAuthCallbackHandler)], [
(
urlparse(self.hub_auth.oauth_redirect_uri).path,
self.oauth_callback_handler_class,
)
],
) )
# apply X-JupyterHub-Version to *all* request handlers (even redirects) # apply X-JupyterHub-Version to *all* request handlers (even redirects)
@@ -656,9 +658,82 @@ class SingleUserNotebookApp(NotebookApp):
env.loader = ChoiceLoader([FunctionLoader(get_page), orig_loader]) env.loader = ChoiceLoader([FunctionLoader(get_page), orig_loader])
def main(argv=None): def detect_base_package(App):
return SingleUserNotebookApp.launch_instance(argv) """Detect the base package for an App class
Will return 'notebook' or 'jupyter_server'
based on which package App subclasses from.
Will return None if neither is identified (e.g. fork package, or duck-typing).
"""
# guess notebook or jupyter_server based on App class inheritance
for cls in App.mro():
pkg = cls.__module__.split(".", 1)[0]
if pkg in {"notebook", "jupyter_server"}:
return pkg
return None
if __name__ == "__main__": def make_singleuser_app(App):
main() """Make and return a singleuser notebook app
given existing notebook or jupyter_server Application classes,
mix-in jupyterhub auth.
Instances of App must have the following attributes defining classes:
- .login_handler_class
- .logout_handler_class
- .base_handler_class (only required if not a subclass of the default app
in jupyter_server or notebook)
App should be a subclass of `notebook.notebookapp.NotebookApp`
or `jupyter_server.serverapp.ServerApp`.
"""
empty_parent_app = App()
# detect base classes
LoginHandler = empty_parent_app.login_handler_class
LogoutHandler = empty_parent_app.logout_handler_class
BaseHandler = getattr(empty_parent_app, "base_handler_class", None)
if BaseHandler is None:
pkg = detect_base_package(App)
if pkg == "jupyter_server":
BaseHandler = import_item("jupyter_server.base.handlers.JupyterHandler")
elif pkg == "notebook":
BaseHandler = import_item("notebook.base.handlers.IPythonHandler")
else:
raise ValueError(
"{}.base_handler_class must be defined".format(App.__name__)
)
# create Handler classes from mixins + bases
class JupyterHubLoginHandler(JupyterHubLoginHandlerMixin, LoginHandler):
pass
class JupyterHubLogoutHandler(JupyterHubLogoutHandlerMixin, LogoutHandler):
pass
class OAuthCallbackHandler(OAuthCallbackHandlerMixin, BaseHandler):
pass
# create merged aliases & flags
merged_aliases = {}
merged_aliases.update(empty_parent_app.aliases or {})
merged_aliases.update(aliases)
merged_flags = {}
merged_flags.update(empty_parent_app.flags or {})
merged_flags.update(flags)
# create mixed-in App class, bringing it all together
class SingleUserNotebookApp(SingleUserNotebookAppMixin, App):
aliases = merged_aliases
flags = merged_flags
classes = empty_parent_app.classes + [HubOAuth]
login_handler_class = JupyterHubLoginHandler
logout_handler_class = JupyterHubLogoutHandler
oauth_callback_handler_class = OAuthCallbackHandler
return SingleUserNotebookApp
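A hedged usage sketch of the factory above, wrapping the classic NotebookApp explicitly (assumes the notebook package is installed; normally jupyterhub/singleuser/app.py does this for you):

    from notebook.notebookapp import NotebookApp

    from jupyterhub.singleuser.mixins import make_singleuser_app

    # Build a NotebookApp subclass with JupyterHub login/logout/OAuth handlers mixed in.
    SingleUserApp = make_singleuser_app(NotebookApp)

    # SingleUserApp.launch_instance()  # would start it, expecting the usual JupyterHub env vars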

View File

@@ -382,6 +382,37 @@ class Spawner(LoggingConfigurable):
""" """
return form_data return form_data
def options_from_query(self, query_data):
"""Interpret query arguments passed to /spawn
Query arguments will always arrive as a dict of unicode strings.
Override this function to understand single-values, numbers, etc.
By default, options_from_form is called from this function. You can however override
this function if you need to process the query arguments differently.
This should coerce form data into the structure expected by self.user_options,
which must be a dict, and should be JSON-serializeable,
though it can contain bytes in addition to standard JSON data types.
This method should not have any side effects.
Any handling of `user_options` should be done in `.start()`
to ensure consistent behavior across servers
spawned via the API and form submission page.
Instances will receive this data on self.user_options, after passing through this function,
prior to `Spawner.start`.
.. versionadded:: 1.2
user_options are persisted in the JupyterHub database to be reused
on subsequent spawns if no options are given.
user_options is serialized to JSON as part of this persistence
(with additional support for bytes in case of uploaded file data),
and any non-bytes non-jsonable values will be replaced with None
if the user_options are re-used.
"""
return self.options_from_form(query_data)
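Since query arguments, like form values, arrive as strings, spawners that want typed user_options typically coerce them in options_from_form, which options_from_query delegates to by default. A hedged sketch (class and option names are illustrative):

    from jupyterhub.spawner import LocalProcessSpawner

    class QueryOptionsSpawner(LocalProcessSpawner):
        def options_from_form(self, form_data):
            # form_data / query_data is a dict of lists of strings, e.g. {'cpus': ['2']}
            options = {}
            if 'cpus' in form_data:
                options['cpus'] = int(form_data['cpus'][0])
            return options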
user_options = Dict( user_options = Dict(
help=""" help="""
Dict of user specified options for the user's spawned instance of a single-user server. Dict of user specified options for the user's spawned instance of a single-user server.
@@ -400,11 +431,12 @@ class Spawner(LoggingConfigurable):
'VIRTUAL_ENV', 'VIRTUAL_ENV',
'LANG', 'LANG',
'LC_ALL', 'LC_ALL',
'JUPYTERHUB_SINGLEUSER_APP',
], ],
help=""" help="""
Whitelist of environment variables for the single-user server to inherit from the JupyterHub process. List of environment variables for the single-user server to inherit from the JupyterHub process.
This whitelist is used to ensure that sensitive information in the JupyterHub process's environment This list is used to ensure that sensitive information in the JupyterHub process's environment
(such as `CONFIGPROXY_AUTH_TOKEN`) is not passed to the single-user server's process. (such as `CONFIGPROXY_AUTH_TOKEN`) is not passed to the single-user server's process.
""", """,
).tag(config=True) ).tag(config=True)
@@ -423,7 +455,7 @@ class Spawner(LoggingConfigurable):
Environment variables that end up in the single-user server's process come from 3 sources: Environment variables that end up in the single-user server's process come from 3 sources:
- This `environment` configurable - This `environment` configurable
- The JupyterHub process' environment variables that are whitelisted in `env_keep` - The JupyterHub process' environment variables that are listed in `env_keep`
- Variables to establish contact between the single-user notebook and the hub (such as JUPYTERHUB_API_TOKEN) - Variables to establish contact between the single-user notebook and the hub (such as JUPYTERHUB_API_TOKEN)
The `environment` configurable should be set by JupyterHub administrators to add The `environment` configurable should be set by JupyterHub administrators to add
@@ -434,6 +466,11 @@ class Spawner(LoggingConfigurable):
Note that the spawner class' interface is not guaranteed to be exactly same across upgrades, Note that the spawner class' interface is not guaranteed to be exactly same across upgrades,
so if you are using the callable take care to verify it continues to work after upgrades! so if you are using the callable take care to verify it continues to work after upgrades!
.. versionchanged:: 1.2
environment from this configuration has highest priority,
allowing override of 'default' env variables,
such as JUPYTERHUB_API_URL.
""" """
).tag(config=True) ).tag(config=True)
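As the versionchanged note above states, values from Spawner.environment are now applied last, so they can override hub-provided defaults, and callables are evaluated at spawn time. A hedged jupyterhub_config.py sketch (variable names are placeholders):

    c.Spawner.environment = {
        'GREETING': 'hello',                               # static value
        'SPAWNED_FOR': lambda spawner: spawner.user.name,  # callable, evaluated at spawn time
        # Hub-provided defaults such as JUPYTERHUB_API_URL can now be overridden here,
        # because this configuration is applied last (JupyterHub 1.2+).
    }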
@@ -707,16 +744,6 @@ class Spawner(LoggingConfigurable):
if key in os.environ: if key in os.environ:
env[key] = os.environ[key] env[key] = os.environ[key]
# config overrides. If the value is a callable, it will be called with
# one parameter - the current spawner instance - and the return value
# will be assigned to the environment variable. This will be called at
# spawn time.
for key, value in self.environment.items():
if callable(value):
env[key] = value(self)
else:
env[key] = value
env['JUPYTERHUB_API_TOKEN'] = self.api_token env['JUPYTERHUB_API_TOKEN'] = self.api_token
# deprecated (as of 0.7.2), for old versions of singleuser # deprecated (as of 0.7.2), for old versions of singleuser
env['JPY_API_TOKEN'] = self.api_token env['JPY_API_TOKEN'] = self.api_token
@@ -764,6 +791,18 @@ class Spawner(LoggingConfigurable):
env['JUPYTERHUB_SSL_CERTFILE'] = self.cert_paths['certfile'] env['JUPYTERHUB_SSL_CERTFILE'] = self.cert_paths['certfile']
env['JUPYTERHUB_SSL_CLIENT_CA'] = self.cert_paths['cafile'] env['JUPYTERHUB_SSL_CLIENT_CA'] = self.cert_paths['cafile']
# env overrides from config. If the value is a callable, it will be called with
# one parameter - the current spawner instance - and the return value
# will be assigned to the environment variable. This will be called at
# spawn time.
# Called last to ensure highest priority, in case of overriding other
# 'default' variables like the API url
for key, value in self.environment.items():
if callable(value):
env[key] = value(self)
else:
env[key] = value
return env return env
async def get_url(self): async def get_url(self):
@@ -904,14 +943,13 @@ class Spawner(LoggingConfigurable):
Arguments: Arguments:
paths (dict): a list of paths for key, cert, and CA. paths (dict): a list of paths for key, cert, and CA.
These paths will be resolvable and readable by the Hub process, These paths will be resolvable and readable by the Hub process,
but not necessarily by the notebook server. but not necessarily by the notebook server.
Returns: Returns:
dict: a list (potentially altered) of paths for key, cert, dict: a list (potentially altered) of paths for key, cert, and CA.
and CA. These paths should be resolvable and readable by the notebook
These paths should be resolvable and readable server to be launched.
by the notebook server to be launched.
`.move_certs` is called after certs for the singleuser notebook have `.move_certs` is called after certs for the singleuser notebook have
@@ -950,7 +988,9 @@ class Spawner(LoggingConfigurable):
args.append('--notebook-dir=%s' % _quote_safe(notebook_dir)) args.append('--notebook-dir=%s' % _quote_safe(notebook_dir))
if self.default_url: if self.default_url:
default_url = self.format_string(self.default_url) default_url = self.format_string(self.default_url)
args.append('--NotebookApp.default_url=%s' % _quote_safe(default_url)) args.append(
'--SingleUserNotebookApp.default_url=%s' % _quote_safe(default_url)
)
if self.debug: if self.debug:
args.append('--debug') args.append('--debug')
@@ -1578,5 +1618,5 @@ class SimpleLocalProcessSpawner(LocalProcessSpawner):
return env return env
def move_certs(self, paths): def move_certs(self, paths):
"""No-op for installing certs""" """No-op for installing certs."""
return paths return paths

View File

@@ -173,6 +173,9 @@ class FormSpawner(MockSpawner):
options['energy'] = form_data['energy'][0] options['energy'] = form_data['energy'][0]
if 'hello_file' in form_data: if 'hello_file' in form_data:
options['hello'] = form_data['hello_file'][0] options['hello'] = form_data['hello_file'][0]
if 'illegal_argument' in form_data:
raise ValueError("You are not allowed to specify 'illegal_argument'")
return options return options
@@ -391,6 +394,17 @@ class MockSingleUserServer(SingleUserNotebookApp):
class StubSingleUserSpawner(MockSpawner): class StubSingleUserSpawner(MockSpawner):
"""Spawner that starts a MockSingleUserServer in a thread.""" """Spawner that starts a MockSingleUserServer in a thread."""
@default("default_url")
def _default_url(self):
"""Use a default_url that any jupyter server will provide
Should be:
- authenticated, so we are testing auth
- always available (i.e. in base ServerApp and NotebookApp)
"""
return "/api/spec.yaml"
_thread = None _thread = None
@gen.coroutine @gen.coroutine

View File

@@ -0,0 +1,17 @@
"""Example JupyterServer app subclass"""
from jupyter_server.base.handlers import JupyterHandler
from jupyter_server.serverapp import ServerApp
from tornado import web
class TreeHandler(JupyterHandler):
@web.authenticated
def get(self):
self.write("OK!")
class MockServerApp(ServerApp):
def initialize(self, argv=None):
self.default_url = "/tree"
super().initialize(argv)
self.web_app.add_handlers(".*$", [(self.base_url + "tree/?", TreeHandler)])

View File

@@ -1513,6 +1513,7 @@ async def test_get_services(app, mockservice_url):
'prefix': mockservice.server.base_url, 'prefix': mockservice.server.base_url,
'url': mockservice.url, 'url': mockservice.url,
'info': {}, 'info': {},
'display': True,
} }
} }
@@ -1537,6 +1538,7 @@ async def test_get_service(app, mockservice_url):
'prefix': mockservice.server.base_url, 'prefix': mockservice.server.base_url,
'url': mockservice.url, 'url': mockservice.url,
'info': {}, 'info': {},
'display': True,
} }
r = await api_request( r = await api_request(

View File

@@ -91,7 +91,7 @@ def test_generate_config():
os.remove(cfg_file) os.remove(cfg_file)
assert cfg_file in out assert cfg_file in out
assert 'Spawner.cmd' in cfg_text assert 'Spawner.cmd' in cfg_text
assert 'Authenticator.whitelist' in cfg_text assert 'Authenticator.allowed_users' in cfg_text
async def test_init_tokens(request): async def test_init_tokens(request):

View File

@@ -1,10 +1,12 @@
"""Tests for PAM authentication""" """Tests for PAM authentication"""
# Copyright (c) Jupyter Development Team. # Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License. # Distributed under the terms of the Modified BSD License.
from unittest import mock import logging
import pytest import pytest
from requests import HTTPError from requests import HTTPError
from traitlets.config import Config
from unittest import mock
from .mocking import MockPAMAuthenticator from .mocking import MockPAMAuthenticator
from .mocking import MockStructGroup from .mocking import MockStructGroup
@@ -136,8 +138,8 @@ async def test_pam_auth_admin_groups():
assert authorized['admin'] is False assert authorized['admin'] is False
async def test_pam_auth_whitelist(): async def test_pam_auth_allowed():
authenticator = MockPAMAuthenticator(whitelist={'wash', 'kaylee'}) authenticator = MockPAMAuthenticator(allowed_users={'wash', 'kaylee'})
authorized = await authenticator.get_authenticated_user( authorized = await authenticator.get_authenticated_user(
None, {'username': 'kaylee', 'password': 'kaylee'} None, {'username': 'kaylee', 'password': 'kaylee'}
) )
@@ -154,11 +156,11 @@ async def test_pam_auth_whitelist():
assert authorized is None assert authorized is None
async def test_pam_auth_group_whitelist(): async def test_pam_auth_allowed_groups():
def getgrnam(name): def getgrnam(name):
return MockStructGroup('grp', ['kaylee']) return MockStructGroup('grp', ['kaylee'])
authenticator = MockPAMAuthenticator(group_whitelist={'group'}) authenticator = MockPAMAuthenticator(allowed_groups={'group'})
with mock.patch.object(authenticator, '_getgrnam', getgrnam): with mock.patch.object(authenticator, '_getgrnam', getgrnam):
authorized = await authenticator.get_authenticated_user( authorized = await authenticator.get_authenticated_user(
@@ -173,7 +175,7 @@ async def test_pam_auth_group_whitelist():
assert authorized is None assert authorized is None
async def test_pam_auth_blacklist(): async def test_pam_auth_blocked():
# Null case compared to next case # Null case compared to next case
authenticator = MockPAMAuthenticator() authenticator = MockPAMAuthenticator()
authorized = await authenticator.get_authenticated_user( authorized = await authenticator.get_authenticated_user(
@@ -182,33 +184,33 @@ async def test_pam_auth_blacklist():
assert authorized['name'] == 'wash' assert authorized['name'] == 'wash'
# Blacklist basics # Blacklist basics
authenticator = MockPAMAuthenticator(blacklist={'wash'}) authenticator = MockPAMAuthenticator(blocked_users={'wash'})
authorized = await authenticator.get_authenticated_user( authorized = await authenticator.get_authenticated_user(
None, {'username': 'wash', 'password': 'wash'} None, {'username': 'wash', 'password': 'wash'}
) )
assert authorized is None assert authorized is None
# User in both white and blacklists: default deny. Make error someday? # User in both allowed and blocked: default deny. Make error someday?
authenticator = MockPAMAuthenticator( authenticator = MockPAMAuthenticator(
blacklist={'wash'}, whitelist={'wash', 'kaylee'} blocked_users={'wash'}, allowed_users={'wash', 'kaylee'}
) )
authorized = await authenticator.get_authenticated_user( authorized = await authenticator.get_authenticated_user(
None, {'username': 'wash', 'password': 'wash'} None, {'username': 'wash', 'password': 'wash'}
) )
assert authorized is None assert authorized is None
# User not in blacklist can log in # User not in blocked set can log in
authenticator = MockPAMAuthenticator( authenticator = MockPAMAuthenticator(
blacklist={'wash'}, whitelist={'wash', 'kaylee'} blocked_users={'wash'}, allowed_users={'wash', 'kaylee'}
) )
authorized = await authenticator.get_authenticated_user( authorized = await authenticator.get_authenticated_user(
None, {'username': 'kaylee', 'password': 'kaylee'} None, {'username': 'kaylee', 'password': 'kaylee'}
) )
assert authorized['name'] == 'kaylee' assert authorized['name'] == 'kaylee'
# User in whitelist, blacklist irrelevant # User in allowed, blocked irrelevant
authenticator = MockPAMAuthenticator( authenticator = MockPAMAuthenticator(
blacklist={'mal'}, whitelist={'wash', 'kaylee'} blocked_users={'mal'}, allowed_users={'wash', 'kaylee'}
) )
authorized = await authenticator.get_authenticated_user( authorized = await authenticator.get_authenticated_user(
None, {'username': 'wash', 'password': 'wash'} None, {'username': 'wash', 'password': 'wash'}
@@ -217,15 +219,16 @@ async def test_pam_auth_blacklist():
# User in neither list # User in neither list
authenticator = MockPAMAuthenticator( authenticator = MockPAMAuthenticator(
blacklist={'mal'}, whitelist={'wash', 'kaylee'} blocked_users={'mal'}, allowed_users={'wash', 'kaylee'}
) )
authorized = await authenticator.get_authenticated_user( authorized = await authenticator.get_authenticated_user(
None, {'username': 'simon', 'password': 'simon'} None, {'username': 'simon', 'password': 'simon'}
) )
assert authorized is None assert authorized is None
# blacklist == {} authenticator = MockPAMAuthenticator(
authenticator = MockPAMAuthenticator(blacklist=set(), whitelist={'wash', 'kaylee'}) blocked_users=set(), allowed_users={'wash', 'kaylee'}
)
authorized = await authenticator.get_authenticated_user( authorized = await authenticator.get_authenticated_user(
None, {'username': 'kaylee', 'password': 'kaylee'} None, {'username': 'kaylee', 'password': 'kaylee'}
) )
@@ -252,7 +255,7 @@ async def test_deprecated_signatures():
async def test_pam_auth_no_such_group(): async def test_pam_auth_no_such_group():
authenticator = MockPAMAuthenticator(group_whitelist={'nosuchcrazygroup'}) authenticator = MockPAMAuthenticator(allowed_groups={'nosuchcrazygroup'})
authorized = await authenticator.get_authenticated_user( authorized = await authenticator.get_authenticated_user(
None, {'username': 'kaylee', 'password': 'kaylee'} None, {'username': 'kaylee', 'password': 'kaylee'}
) )
@@ -261,7 +264,7 @@ async def test_pam_auth_no_such_group():
async def test_wont_add_system_user(): async def test_wont_add_system_user():
user = orm.User(name='lioness4321') user = orm.User(name='lioness4321')
authenticator = auth.PAMAuthenticator(whitelist={'mal'}) authenticator = auth.PAMAuthenticator(allowed_users={'mal'})
authenticator.create_system_users = False authenticator.create_system_users = False
with pytest.raises(KeyError): with pytest.raises(KeyError):
await authenticator.add_user(user) await authenticator.add_user(user)
@@ -269,7 +272,7 @@ async def test_wont_add_system_user():
async def test_cant_add_system_user(): async def test_cant_add_system_user():
user = orm.User(name='lioness4321') user = orm.User(name='lioness4321')
authenticator = auth.PAMAuthenticator(whitelist={'mal'}) authenticator = auth.PAMAuthenticator(allowed_users={'mal'})
authenticator.add_user_cmd = ['jupyterhub-fake-command'] authenticator.add_user_cmd = ['jupyterhub-fake-command']
authenticator.create_system_users = True authenticator.create_system_users = True
@@ -295,7 +298,7 @@ async def test_cant_add_system_user():
async def test_add_system_user(): async def test_add_system_user():
user = orm.User(name='lioness4321') user = orm.User(name='lioness4321')
authenticator = auth.PAMAuthenticator(whitelist={'mal'}) authenticator = auth.PAMAuthenticator(allowed_users={'mal'})
authenticator.create_system_users = True authenticator.create_system_users = True
authenticator.add_user_cmd = ['echo', '/home/USERNAME'] authenticator.add_user_cmd = ['echo', '/home/USERNAME']
@@ -316,13 +319,13 @@ async def test_add_system_user():
async def test_delete_user(): async def test_delete_user():
user = orm.User(name='zoe') user = orm.User(name='zoe')
a = MockPAMAuthenticator(whitelist={'mal'}) a = MockPAMAuthenticator(allowed_users={'mal'})
assert 'zoe' not in a.whitelist assert 'zoe' not in a.allowed_users
await a.add_user(user) await a.add_user(user)
assert 'zoe' in a.whitelist assert 'zoe' in a.allowed_users
a.delete_user(user) a.delete_user(user)
assert 'zoe' not in a.whitelist assert 'zoe' not in a.allowed_users
def test_urls(): def test_urls():
@@ -460,3 +463,55 @@ async def test_post_auth_hook():
) )
assert authorized['testkey'] == 'testvalue' assert authorized['testkey'] == 'testvalue'
class MyAuthenticator(auth.Authenticator):
def check_whitelist(self, username, authentication=None):
return username == "subclass-allowed"
def test_deprecated_config(caplog):
cfg = Config()
cfg.Authenticator.whitelist = {'user'}
log = logging.getLogger("testlog")
authenticator = auth.Authenticator(config=cfg, log=log)
assert caplog.record_tuples == [
(
log.name,
logging.WARNING,
'Authenticator.whitelist is deprecated in JupyterHub 1.2, use '
'Authenticator.allowed_users instead',
)
]
assert authenticator.allowed_users == {'user'}
def test_deprecated_methods():
cfg = Config()
cfg.Authenticator.whitelist = {'user'}
authenticator = auth.Authenticator(config=cfg)
assert authenticator.check_allowed("user")
with pytest.deprecated_call():
assert authenticator.check_whitelist("user")
assert not authenticator.check_allowed("otheruser")
with pytest.deprecated_call():
assert not authenticator.check_whitelist("otheruser")
def test_deprecated_config_subclass():
cfg = Config()
cfg.MyAuthenticator.whitelist = {'user'}
with pytest.deprecated_call():
authenticator = MyAuthenticator(config=cfg)
assert authenticator.allowed_users == {'user'}
def test_deprecated_methods_subclass():
with pytest.deprecated_call():
authenticator = MyAuthenticator()
assert authenticator.check_allowed("subclass-allowed")
assert authenticator.check_whitelist("subclass-allowed")
assert not authenticator.check_allowed("otheruser")
assert not authenticator.check_whitelist("otheruser")

View File

@@ -368,3 +368,28 @@ async def test_user_redirect_hook_default_server_name(
assert redirected_url.path == url_path_join( assert redirected_url.path == url_path_join(
app.base_url, 'user', username, 'terminals/1' app.base_url, 'user', username, 'terminals/1'
) )
async def test_named_server_stop_server(app, username, named_servers):
server_name = "myserver"
await app.login_user(username)
user = app.users[username]
r = await api_request(app, 'users', username, 'server', method='post')
assert r.status_code == 201
assert r.text == ''
assert user.spawners[''].server
with mock.patch.object(
app.proxy, 'add_user', side_effect=Exception('mock exception')
):
r = await api_request(
app, 'users', username, 'servers', server_name, method='post'
)
r.raise_for_status()
assert r.status_code == 201
assert r.text == ''
assert user.spawners[server_name].server is None
assert user.spawners[''].server
assert user.running

View File

@@ -134,7 +134,7 @@ def test_token_expiry(db):
assert orm_token.expires_at > now + timedelta(seconds=50) assert orm_token.expires_at > now + timedelta(seconds=50)
assert orm_token.expires_at < now + timedelta(seconds=70) assert orm_token.expires_at < now + timedelta(seconds=70)
the_future = mock.patch( the_future = mock.patch(
'jupyterhub.orm.utcnow', lambda: now + timedelta(seconds=70) 'jupyterhub.orm.APIToken.now', lambda: now + timedelta(seconds=70)
) )
with the_future: with the_future:
found = orm.APIToken.find(db, token=token) found = orm.APIToken.find(db, token=token)
@@ -482,3 +482,78 @@ def test_group_delete_cascade(db):
db.delete(user1) db.delete(user1)
db.commit() db.commit()
assert user1 not in group1.users assert user1 not in group1.users
def test_expiring_api_token(app, user):
db = app.db
token = orm.APIToken.new(expires_in=30, user=user)
orm_token = orm.APIToken.find(db, token, kind='user')
assert orm_token
# purge_expired doesn't delete non-expired
orm.APIToken.purge_expired(db)
found = orm.APIToken.find(db, token)
assert found is orm_token
with mock.patch.object(
orm.APIToken, 'now', lambda: datetime.utcnow() + timedelta(seconds=60)
):
found = orm.APIToken.find(db, token)
assert found is None
assert orm_token in db.query(orm.APIToken)
orm.APIToken.purge_expired(db)
assert orm_token not in db.query(orm.APIToken)
def test_expiring_oauth_token(app, user):
db = app.db
token = "abc123"
now = orm.OAuthAccessToken.now
client = orm.OAuthClient(identifier="xxx", secret="yyy")
db.add(client)
orm_token = orm.OAuthAccessToken(
token=token,
grant_type=orm.GrantType.authorization_code,
client=client,
user=user,
expires_at=now() + 30,
)
db.add(orm_token)
db.commit()
found = orm.OAuthAccessToken.find(db, token)
assert found is orm_token
# purge_expired doesn't delete non-expired
orm.OAuthAccessToken.purge_expired(db)
found = orm.OAuthAccessToken.find(db, token)
assert found is orm_token
with mock.patch.object(orm.OAuthAccessToken, 'now', lambda: now() + 60):
found = orm.OAuthAccessToken.find(db, token)
assert found is None
assert orm_token in db.query(orm.OAuthAccessToken)
orm.OAuthAccessToken.purge_expired(db)
assert orm_token not in db.query(orm.OAuthAccessToken)
def test_expiring_oauth_code(app, user):
db = app.db
code = "abc123"
now = orm.OAuthCode.now
orm_code = orm.OAuthCode(code=code, expires_at=now() + 30)
db.add(orm_code)
db.commit()
found = orm.OAuthCode.find(db, code)
assert found is orm_code
# purge_expired doesn't delete non-expired
orm.OAuthCode.purge_expired(db)
found = orm.OAuthCode.find(db, code)
assert found is orm_code
with mock.patch.object(orm.OAuthCode, 'now', lambda: now() + 60):
found = orm.OAuthCode.find(db, code)
assert found is None
assert orm_code in db.query(orm.OAuthCode)
orm.OAuthCode.purge_expired(db)
assert orm_code not in db.query(orm.OAuthCode)

View File

@@ -9,6 +9,7 @@ from urllib.parse import urlparse
import pytest import pytest
from bs4 import BeautifulSoup from bs4 import BeautifulSoup
from tornado import gen from tornado import gen
from tornado.escape import url_escape
from tornado.httputil import url_concat from tornado.httputil import url_concat
from .. import orm from .. import orm
@@ -255,6 +256,47 @@ async def test_spawn_page_admin(app, admin_access):
assert "Spawning server for {}".format(u.name) in r.text assert "Spawning server for {}".format(u.name) in r.text
async def test_spawn_with_query_arguments(app):
with mock.patch.dict(app.users.settings, {'spawner_class': FormSpawner}):
base_url = ujoin(public_host(app), app.hub.base_url)
cookies = await app.login_user('jones')
orm_u = orm.User.find(app.db, 'jones')
u = app.users[orm_u]
await u.stop()
next_url = ujoin(app.base_url, 'user/jones/tree')
r = await async_requests.get(
url_concat(
ujoin(base_url, 'spawn'), {'next': next_url, 'energy': '510keV'},
),
cookies=cookies,
)
r.raise_for_status()
assert r.history
assert u.spawner.user_options == {
'energy': '510keV',
'notspecified': 5,
}
async def test_spawn_with_query_arguments_error(app):
with mock.patch.dict(app.users.settings, {'spawner_class': FormSpawner}):
base_url = ujoin(public_host(app), app.hub.base_url)
cookies = await app.login_user('jones')
orm_u = orm.User.find(app.db, 'jones')
u = app.users[orm_u]
await u.stop()
next_url = ujoin(app.base_url, 'user/jones/tree')
r = await async_requests.get(
url_concat(
ujoin(base_url, 'spawn'),
{'next': next_url, 'energy': '510keV', 'illegal_argument': '42'},
),
cookies=cookies,
)
r.raise_for_status()
assert "You are not allowed to specify " in r.text
async def test_spawn_form(app): async def test_spawn_form(app):
with mock.patch.dict(app.users.settings, {'spawner_class': FormSpawner}): with mock.patch.dict(app.users.settings, {'spawner_class': FormSpawner}):
base_url = ujoin(public_host(app), app.hub.base_url) base_url = ujoin(public_host(app), app.hub.base_url)
@@ -354,7 +396,7 @@ async def test_spawn_pending(app, username, slow_spawn):
assert page.find('div', {'class': 'progress'}) assert page.find('div', {'class': 'progress'})
# validate event source url by consuming it # validate event source url by consuming it
script = page.body.find('script').text script = page.body.find('script').string
assert 'EventSource' in script assert 'EventSource' in script
# find EventSource url in javascript # find EventSource url in javascript
# maybe not the most robust way to check this? # maybe not the most robust way to check this?
@@ -475,6 +517,58 @@ async def test_user_redirect_deprecated(app, username):
) )
@pytest.mark.parametrize(
'url, params, redirected_url, form_action',
[
(
# spawn?param=value
# will encode given parameters for an unauthenticated URL in the next url
# the next parameter will contain the app base URL (replaces BASE_URL in tests)
'spawn',
[('param', 'value')],
'/hub/login?next={{BASE_URL}}hub%2Fspawn%3Fparam%3Dvalue',
'/hub/login?next={{BASE_URL}}hub%2Fspawn%3Fparam%3Dvalue',
),
(
# login?param=fromlogin&next=encoded(/hub/spawn?param=value)
# will drop parameters given to the login page, passing only the next url
'login',
[('param', 'fromlogin'), ('next', '/hub/spawn?param=value')],
'/hub/login?param=fromlogin&next=%2Fhub%2Fspawn%3Fparam%3Dvalue',
'/hub/login?next=%2Fhub%2Fspawn%3Fparam%3Dvalue',
),
(
# login?param=value&anotherparam=anothervalue
# will drop parameters given to the login page, and use an empty next url
'login',
[('param', 'value'), ('anotherparam', 'anothervalue')],
'/hub/login?param=value&anotherparam=anothervalue',
'/hub/login?next=',
),
(
# login
# simplest case, accessing the login URL, gives an empty next url
'login',
[],
'/hub/login',
'/hub/login?next=',
),
],
)
async def test_login_page(app, url, params, redirected_url, form_action):
url = url_concat(url, params)
r = await get_page(url, app)
redirected_url = redirected_url.replace('{{BASE_URL}}', url_escape(app.base_url))
assert r.url.endswith(redirected_url)
# now the login.html rendered template must include the given parameters in the form
# action URL, including the next URL
page = BeautifulSoup(r.text, "html.parser")
form = page.find("form", method="post")
action = form.attrs['action']
form_action = form_action.replace('{{BASE_URL}}', url_escape(app.base_url))
assert action.endswith(form_action)
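The expected URLs in the parametrization above follow tornado's escaping: the next URL is percent-encoded as a single query value. A small illustration (tornado required):

    from tornado.httputil import url_concat

    print(url_concat('/hub/login', {'next': '/hub/spawn?param=value'}))
    # -> /hub/login?next=%2Fhub%2Fspawn%3Fparam%3Dvalue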
async def test_login_fail(app): async def test_login_fail(app):
name = 'wash' name = 'wash'
base_url = public_url(app) base_url = public_url(app)
@@ -505,26 +599,29 @@ async def test_login_strip(app):
@pytest.mark.parametrize( @pytest.mark.parametrize(
'running, next_url, location', 'running, next_url, location, params',
[ [
# default URL if next not specified, for both running and not # default URL if next not specified, for both running and not
(True, '', ''), (True, '', '', None),
(False, '', ''), (False, '', '', None),
# next_url is respected # next_url is respected
(False, '/hub/admin', '/hub/admin'), (False, '/hub/admin', '/hub/admin', None),
(False, '/user/other', '/hub/user/other'), (False, '/user/other', '/hub/user/other', None),
(False, '/absolute', '/absolute'), (False, '/absolute', '/absolute', None),
(False, '/has?query#andhash', '/has?query#andhash'), (False, '/has?query#andhash', '/has?query#andhash', None),
# next_url outside is not allowed # next_url outside is not allowed
(False, 'relative/path', ''), (False, 'relative/path', '', None),
(False, 'https://other.domain', ''), (False, 'https://other.domain', '', None),
(False, 'ftp://other.domain', ''), (False, 'ftp://other.domain', '', None),
(False, '//other.domain', ''), (False, '//other.domain', '', None),
(False, '///other.domain/triple', ''), (False, '///other.domain/triple', '', None),
(False, '\\\\other.domain/backslashes', ''), (False, '\\\\other.domain/backslashes', '', None),
# params are handled correctly
(True, '/hub/admin', 'hub/admin?left=1&right=2', [('left', 1), ('right', 2)]),
(False, '/hub/admin', 'hub/admin?left=1&right=2', [('left', 1), ('right', 2)]),
], ],
) )
async def test_login_redirect(app, running, next_url, location): async def test_login_redirect(app, running, next_url, location, params):
cookies = await app.login_user('river') cookies = await app.login_user('river')
user = app.users['river'] user = app.users['river']
if location: if location:
@@ -536,6 +633,8 @@ async def test_login_redirect(app, running, next_url, location):
location = ujoin(app.base_url, 'hub/spawn') location = ujoin(app.base_url, 'hub/spawn')
url = 'login' url = 'login'
if params:
url = url_concat(url, params)
if next_url: if next_url:
if '//' not in next_url and next_url.startswith('/'): if '//' not in next_url and next_url.startswith('/'):
next_url = ujoin(app.base_url, next_url, '') next_url = ujoin(app.base_url, next_url, '')
@@ -645,7 +744,7 @@ async def test_shutdown_on_logout(app, shutdown_on_logout):
assert spawner.ready == (not shutdown_on_logout) assert spawner.ready == (not shutdown_on_logout)
async def test_login_no_whitelist_adds_user(app): async def test_login_no_allowed_adds_user(app):
auth = app.authenticator auth = app.authenticator
mock_add_user = mock.Mock() mock_add_user = mock.Mock()
with mock.patch.object(auth, 'add_user', mock_add_user): with mock.patch.object(auth, 'add_user', mock_add_user):
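
For readers unfamiliar with url_concat (used throughout the login tests above), a minimal illustrative sketch of how a login URL with extra query parameters is built; the parameter names here are examples only, not part of JupyterHub's API:

    # illustrative only: tornado.httputil.url_concat appends query parameters to a URL
    from tornado.httputil import url_concat

    login_url = url_concat('/hub/login', [('param', 'value'), ('anotherparam', 'anothervalue')])
    print(login_url)  # -> /hub/login?param=value&anotherparam=anothervalue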

View File

@@ -184,7 +184,7 @@ def test_hub_authenticated(request):
         m.get(good_url, text=json.dumps(mock_model))

-        # no whitelist
+        # no specific allowed user
         r = requests.get(
             'http://127.0.0.1:%i' % port,
             cookies={'jubal': 'early'},
@@ -193,7 +193,7 @@ def test_hub_authenticated(request):
         r.raise_for_status()
         assert r.status_code == 200

-        # pass whitelist
+        # pass allowed user
         TestHandler.hub_users = {'jubalearly'}
         r = requests.get(
             'http://127.0.0.1:%i' % port,
@@ -203,7 +203,7 @@ def test_hub_authenticated(request):
         r.raise_for_status()
         assert r.status_code == 200

-        # no pass whitelist
+        # no pass allowed user
         TestHandler.hub_users = {'kaylee'}
         r = requests.get(
             'http://127.0.0.1:%i' % port,
@@ -212,7 +212,7 @@ def test_hub_authenticated(request):
         )
         assert r.status_code == 403

-        # pass group whitelist
+        # pass allowed group
         TestHandler.hub_groups = {'lions'}
         r = requests.get(
             'http://127.0.0.1:%i' % port,
@@ -222,7 +222,7 @@ def test_hub_authenticated(request):
         r.raise_for_status()
         assert r.status_code == 200

-        # no pass group whitelist
+        # no pass allowed group
         TestHandler.hub_groups = {'tigers'}
         r = requests.get(
             'http://127.0.0.1:%i' % port,

View File

@@ -31,7 +31,11 @@ async def test_singleuser_auth(app):
     # with cookies, login successful
     r = await async_requests.get(url, cookies=cookies)
     r.raise_for_status()
-    assert urlparse(r.url).path.rstrip('/').endswith('/user/nandy/tree')
+    assert (
+        urlparse(r.url)
+        .path.rstrip('/')
+        .endswith(url_path_join('/user/nandy', user.spawner.default_url or "/tree"))
+    )
     assert r.status_code == 200

     # logout
@@ -46,7 +50,11 @@
     assert urlparse(r.url).path.endswith('/oauth2/authorize')
     # submit the oauth form to complete authorization
     r = await s.post(r.url, data={'scopes': ['identify']}, headers={'Referer': r.url})
-    assert urlparse(r.url).path.rstrip('/').endswith('/user/nandy/tree')
+    assert (
+        urlparse(r.url)
+        .path.rstrip('/')
+        .endswith(url_path_join('/user/nandy', user.spawner.default_url or "/tree"))
+    )
     # user isn't authorized, should raise 403
     assert r.status_code == 403
     assert 'burgess' in r.text
@@ -74,7 +82,9 @@ async def test_disable_user_config(app):
     # with cookies, login successful
     r = await async_requests.get(url, cookies=cookies)
     r.raise_for_status()
-    assert r.url.rstrip('/').endswith('/user/nandy/tree')
+    assert r.url.rstrip('/').endswith(
+        url_path_join('/user/nandy', user.spawner.default_url or "/tree")
+    )
     assert r.status_code == 200

View File

@@ -404,3 +404,15 @@ async def test_spawner_routing(app, name):
     assert r.url == url
     assert r.text == urlparse(url).path
     await user.stop()
+
+
+async def test_spawner_env(db):
+    env_overrides = {
+        "JUPYTERHUB_API_URL": "https://test.horse/hub/api",
+        "TEST_KEY": "value",
+    }
+    spawner = new_spawner(db, environment=env_overrides)
+    env = spawner.get_env()
+    for key, value in env_overrides.items():
+        assert key in env
+        assert env[key] == value
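
The new test above passes environment overrides directly to the spawner; in a deployment the same effect is usually achieved through the Spawner.environment configurable. A minimal sketch, assuming a standard jupyterhub_config.py (the variable values are examples only):

    # jupyterhub_config.py -- illustrative sketch
    c.Spawner.environment = {
        "TEST_KEY": "value",
        # overrides also apply to JUPYTERHUB_* variables, as exercised by the test above
        "JUPYTERHUB_API_URL": "https://test.horse/hub/api",
    }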

View File

@@ -0,0 +1,22 @@
+import pytest
+
+from ..user import UserDict
+from .utils import add_user
+
+
+@pytest.mark.parametrize("attr", ["self", "id", "name"])
+async def test_userdict_get(db, attr):
+    u = add_user(db, name="rey", app=False)
+    userdict = UserDict(db_factory=lambda: db, settings={})
+
+    if attr == "self":
+        key = u
+    else:
+        key = getattr(u, attr)
+
+    # `in` checks cache only
+    assert key not in userdict
+    assert userdict.get(key)
+    assert userdict.get(key).id == u.id
+    # `in` should find it now
+    assert key in userdict

View File

@@ -4,6 +4,11 @@ import logging
 import pytest

 from .._version import _check_version
+from .._version import reset_globals
+
+
+def setup_function(function):
+    reset_globals()


 @pytest.mark.parametrize(
@@ -25,3 +30,27 @@ def test_check_version(hub_version, singleuser_version, log_level, msg, caplog):
     record = caplog.records[0]
     assert record.levelno == log_level
     assert msg in record.getMessage()
+
+
+def test_check_version_singleton(caplog):
+    """Tests that minor version difference logging is only logged once."""
+    # Run test_check_version twice which will assert that the warning is only logged
+    # once.
+    for x in range(2):
+        test_check_version(
+            '1.2.0',
+            '1.1.0',
+            logging.WARNING,
+            'This could cause failure to authenticate',
+            caplog,
+        )
+    # Run it again with a different singleuser_version to make sure that is logged as
+    # a warning.
+    caplog.clear()
+    test_check_version(
+        '1.2.0',
+        '1.1.1',
+        logging.WARNING,
+        'This could cause failure to authenticate',
+        caplog,
+    )

View File

@@ -34,7 +34,23 @@ from .utils import url_path_join
 class UserDict(dict):
     """Like defaultdict, but for users

-    Getting by a user id OR an orm.User instance returns a User wrapper around the orm user.
+    Users can be retrieved by:
+
+    - integer database id
+    - orm.User object
+    - username str
+
+    A User wrapper object is always returned.
+
+    This dict contains at least all active users,
+    but not necessarily all users in the database.
+
+    Checking `key in userdict` returns whether
+    an item is already in the cache,
+    *not* whether it is in the database.
+
+    .. versionchanged:: 1.2
+        ``'username' in userdict`` pattern is now supported
     """

     def __init__(self, db_factory, settings):
@@ -57,11 +73,28 @@ class UserDict(dict):
         return self[orm_user.id]

     def __contains__(self, key):
+        """key in userdict checks presence in the cache
+
+        it does not check if the user is in the database
+        """
         if isinstance(key, (User, orm.User)):
             key = key.id
+        elif isinstance(key, str):
+            # username lookup, O(N)
+            for user in self.values():
+                if user.name == key:
+                    key = user.id
+                    break
         return dict.__contains__(self, key)

     def __getitem__(self, key):
+        """UserDict allows retrieval of user by any of:
+
+        - User object
+        - orm.User object
+        - username (str)
+        - orm.User.id int (actual key used in underlying dict)
+        """
         if isinstance(key, User):
             key = key.id
         elif isinstance(key, str):
@@ -69,7 +102,7 @@ class UserDict(dict):
             if orm_user is None:
                 raise KeyError("No such user: %s" % key)
             else:
-                key = orm_user
+                key = orm_user.id
         if isinstance(key, orm.User):
             # users[orm_user] returns User(orm_user)
             orm_user = key
@@ -92,6 +125,20 @@ class UserDict(dict):
         else:
             raise KeyError(repr(key))

+    def get(self, key, default=None):
+        """Retrieve a User object if it can be found, else default
+
+        Lookup can be by User object, id, or name
+
+        .. versionchanged:: 1.2
+            ``get()`` accesses the database instead of just the cache by integer id,
+            so is equivalent to catching KeyErrors on attempted lookup.
+        """
+        try:
+            return self[key]
+        except KeyError:
+            return default
+
     def __delitem__(self, key):
         user = self[key]
         for orm_spawner in user.orm_user._orm_spawners:
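
A short usage sketch of the lookup patterns documented above, assuming `app.users` is the hub's UserDict instance; the username is illustrative:

    users = app.users
    user = users.get("rey")       # by name; falls back to the database if not cached
    same = users[user.id]         # by integer id (the underlying dict key)
    also = users[user.orm_user]   # by orm.User object
    assert "rey" in users         # `in` only consults the cache, not the database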
@@ -566,7 +613,12 @@ class User:
             else:
                 # >= 0.7 returns (ip, port)
                 proto = 'https' if self.settings['internal_ssl'] else 'http'
-                url = '%s://%s:%i' % ((proto,) + url)
+
+                # check if spawner returned an IPv6 address
+                if ':' in url[0]:
+                    url = '%s://[%s]:%i' % ((proto,) + url)
+                else:
+                    url = '%s://%s:%i' % ((proto,) + url)
             urlinfo = urlparse(url)
             server.proto = urlinfo.scheme
             server.ip = urlinfo.hostname
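
A minimal standalone sketch of the bracketing rule applied above (this helper is illustrative only, not part of JupyterHub's API):

    def format_server_url(proto, ip, port):
        # IPv6 literals contain ':' and must be wrapped in brackets inside a URL
        if ':' in ip:
            return '%s://[%s]:%i' % (proto, ip, port)
        return '%s://%s:%i' % (proto, ip, port)

    assert format_server_url('http', '127.0.0.1', 8081) == 'http://127.0.0.1:8081'
    assert format_server_url('http', '::1', 8081) == 'http://[::1]:8081'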
@@ -743,8 +795,6 @@ class User:
         status = await spawner.poll()
         if status is None:
             await spawner.stop()
-        spawner.clear_state()
-        spawner.orm_spawner.state = spawner.get_state()
         self.last_activity = spawner.orm_spawner.last_activity = datetime.utcnow()
         # remove server entry from db
         spawner.server = None
@@ -774,7 +824,17 @@ class User:
             spawner.orm_spawner.started = None
             self.db.commit()
             # trigger post-stop hook
-            await maybe_future(spawner.run_post_stop_hook())
+            try:
+                await maybe_future(spawner.run_post_stop_hook())
+            except:
+                spawner.clear_state()
+                spawner.orm_spawner.state = spawner.get_state()
+                self.db.commit()
+                raise
+
+            spawner.clear_state()
+            spawner.orm_spawner.state = spawner.get_state()
+            self.db.commit()
             # trigger post-spawner hook on authenticator
             auth = spawner.authenticator
             try:

View File

@@ -66,7 +66,7 @@ def can_connect(ip, port):
     Return True if we can connect, False otherwise.
     """
-    if ip in {'', '0.0.0.0'}:
+    if ip in {'', '0.0.0.0', '::'}:
         ip = '127.0.0.1'
     try:
         socket.create_connection((ip, port)).close()
@@ -85,6 +85,7 @@ def make_ssl_context(keyfile, certfile, cafile=None, verify=True, check_hostname
         return None
     purpose = ssl.Purpose.SERVER_AUTH if verify else ssl.Purpose.CLIENT_AUTH
     ssl_context = ssl.create_default_context(purpose, cafile=cafile)
+    ssl_context.load_default_certs()
     ssl_context.load_cert_chain(certfile, keyfile)
     ssl_context.check_hostname = check_hostname
     return ssl_context
@@ -179,7 +180,7 @@ async def exponential_backoff(
 async def wait_for_server(ip, port, timeout=10):
     """Wait for any server to show up at ip:port."""
-    if ip in {'', '0.0.0.0'}:
+    if ip in {'', '0.0.0.0', '::'}:
         ip = '127.0.0.1'
     await exponential_backoff(
         lambda: can_connect(ip, port),
@@ -579,7 +580,7 @@ def utcnow():
 def _parse_accept_header(accept):
     """
     Parse the Accept header *accept*

     Return a list with 3-tuples of
     [(str(media_type), dict(params), float(q_value)),] ordered by q values.

     If the accept header includes vendor-specific types like::
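
For context, a hedged sketch of how the private helper above might be called; `_parse_accept_header` is internal, and the exact output shown is an assumption based on its docstring:

    from jupyterhub.utils import _parse_accept_header

    parsed = _parse_accept_header("application/json; q=0.9, text/html")
    # expected shape per the docstring: [(media_type, params, q), ...] ordered by q,
    # e.g. [('text/html', {}, 1.0), ('application/json', {}, 0.9)]
    print(parsed)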

View File

@@ -9,7 +9,7 @@
     "url": "https://github.com/jupyter/jupyterhub.git"
   },
   "scripts": {
-    "postinstall": "python ./bower-lite",
+    "postinstall": "python3 ./bower-lite",
     "fmt": "prettier --write --trailing-comma es5 share/jupyterhub/static/js/*",
     "lessc": "lessc"
   },
@@ -21,7 +21,7 @@
   "dependencies": {
     "bootstrap": "^3.4.1",
     "font-awesome": "^4.7.0",
-    "jquery": "^3.3.1",
+    "jquery": "^3.5.1",
     "moment": "^2.24.0",
     "requirejs": "^2.3.6"
   }

View File

@@ -3,6 +3,9 @@
 # so we have to disable this until pytest 3.11
 # minversion = 3.3

+# jupyter_server plugin is incompatible with notebook imports
+addopts = -p no:jupyter_server
+
 python_files = test_*.py
 markers =
     gen_test: marks an async tornado test

View File

@@ -3,7 +3,7 @@ async_generator>=1.8
 certipy>=0.1.2
 entrypoints
 jinja2
-jupyter_telemetry
+jupyter_telemetry>=0.1.0
 oauthlib>=3.0
 pamela
 prometheus_client>=0.0.21

View File

@@ -1,9 +1,8 @@
 // Copyright (c) Jupyter Development Team.
 // Distributed under the terms of the Modified BSD License.

-require(["jquery", "bootstrap", "moment", "jhapi", "utils"], function(
+require(["jquery", "moment", "jhapi", "utils"], function(
   $,
-  bs,
   moment,
   JHAPI,
   utils

View File

@@ -104,6 +104,7 @@ require(["jquery", "moment", "jhapi"], function(
         .text("Start My Server")
         .attr("title", "Start your default server")
         .attr("disabled", false)
+        .attr("href", base_url + "spawn/" + user)
         .off("click");
     },
   });

View File

@@ -2,8 +2,11 @@ i.sort-icon {
   margin-left: 4px;
 }

+tr.pagination-row > td.pagination-page-info {
+  vertical-align: middle;
+}
+
 .version_footer {
-  position: fixed;
   bottom: 0;
   width: 100%;
 }

View File

@@ -6,7 +6,7 @@
   .bg-warning();
   padding:10px;
 }

 .service-login {
   text-align: center;
   display: table-cell;
@@ -27,9 +27,9 @@
 }

 input[type=submit] {
-  margin-top: 16px;
+  margin-top: 0px;
 }

 .form-control:focus, input[type=submit]:focus {
   box-shadow: inset 0 1px 1px rgba(0,0,0,.075), 0 0 8px @jupyter-orange;
   border-color: @jupyter-orange;

View File

@@ -1,8 +1,30 @@
+@import "../components/bootstrap/less/variables.less";
+
 @logo-height: 28px;

-.jpy-logo {
-  height: @logo-height;
-  margin-top: (@navbar-height - @logo-height) / 2;
+#jupyterhub-logo {
+  @media (max-width: @grid-float-breakpoint) {
+    // same length as the navbar-toggle element, displayed on responsive mode
+    margin-left: 15px;
+  }
+  .jpy-logo {
+    height: @logo-height;
+    margin-top: (@navbar-height - @logo-height) / 2;
+  }
+}
+
+.navbar-right {
+  li {
+    span {
+      // same as .nav > li > a from bootstrap, but applied to the span[id="login_widget"]
+      // or any other span that matches .nav > li > span, but only in responsive mode
+      @media (max-width: @grid-float-breakpoint) {
+        position: relative;
+        display: block;
+        padding: 10px 15px;
+      }
+    }
+  }
 }

 #header {
@@ -26,3 +48,19 @@
 // .progress-log-event:hover {
 //   background: rgba(66, 165, 245, 0.2);
 // }
+
+.feedback {
+  &-container {
+    margin-top: 16px;
+  }
+
+  &-widget {
+    padding: 5px 0px 0px 6px;
+
+    i {
+      font-size: 2em;
+      color: lightgrey;
+    }
+  }
+}

View File

@@ -22,7 +22,7 @@
       <thead>
         <tr>
         {% block thead %}
-          {{ th("User (%i)" % users|length, 'name') }}
+          {{ th("User", 'name') }}
          {{ th("Admin", 'admin') }}
          {{ th("Last Activity", 'last_activity') }}
          {{ th("Running (%i)" % running|length, 'running', colspan=2) }}
@@ -96,18 +96,25 @@
           <a role="button" class="delete-server btn btn-xs btn-warning">delete server</a>
           {%- endif -%}
         </td>
+      </tr>
       {% endblock user_row %}
-      </tr>
       {% endfor %}
       {% endfor %}
     </tbody>
+    <tfoot>
+      <tr class="pagination-row">
+        <td colspan="3">
+          {% if pagination.links %}
+          <div class="pagination menu">{{ pagination.links|safe }}</div>
+          {% endif %}
+        </td>
+        <td colspan="2" class="pagination-page-info">
+          Displaying users {{ pagination.info.start|safe }} - {{ pagination.info.end|safe }} of {{ pagination.info.total|safe }}
+        </td>
+      </tr>
+    </tfoot>
   </table>
 </div>
-<div class="container-fluid navbar-default small version_footer">
-  <div class="navbar-text">
-    JupyterHub {{ server_version }}
-  </div>
-</div>

 {% call modal('Delete User', btn_class='btn-danger delete-button') %}
 Are you sure you want to delete user <span class="delete-username">USER</span>?
@@ -163,6 +170,14 @@
 {% endblock %}

+{% block footer %}
+<div class="container-fluid navbar-default small version_footer">
+  <div class="navbar-text">
+    JupyterHub {{ server_version }}
+  </div>
+</div>
+{% endblock %}
+
 {% block script %}
 {{ super() }}
 <script type="text/javascript">

View File

@@ -56,13 +56,18 @@
             tabindex="2"
           />
-          <input
-            type="submit"
-            id="login_submit"
-            class='btn btn-jupyter'
-            value='Sign In'
-            tabindex="3"
-          />
+          <div class="feedback-container">
+            <input
+              id="login_submit"
+              type="submit"
+              class='btn btn-jupyter'
+              value='Sign In'
+              tabindex="3"
+            />
+            <div class="feedback-widget hidden">
+              <i class="fa fa-spinner"></i>
+            </div>
+          </div>
         </div>
       </form>
       {% endif %}
@@ -79,6 +84,12 @@ if (window.location.protocol === "http:") {
   var warning = document.getElementById('insecure-login-warning');
   warning.className = warning.className.replace(/\bhidden\b/, '');
 }
+
+// setup onSubmit feedback
+$('form').submit((e) => {
+  var form = $(e.target);
+  form.find('.feedback-container>input').attr('disabled', true);
+  form.find('.feedback-container>*').toggleClass('hidden');
+  form.find('.feedback-widget>*').toggleClass('fa-pulse');
+});
 </script>
 {% endblock %}

View File

@@ -106,12 +106,14 @@
       <a href="{{logo_url or base_url}}"><img src='{{base_url}}logo' alt='JupyterHub' class='jpy-logo' title='Home'/></a>
     </span>
     {% endblock %}
+    {% if user %}
     <button type="button" class="navbar-toggle collapsed" data-toggle="collapse" data-target="#thenavbar" aria-expanded="false">
       <span class="sr-only">Toggle navigation</span>
       <span class="icon-bar"></span>
       <span class="icon-bar"></span>
       <span class="icon-bar"></span>
     </button>
+    {% endif %}
   </div>

   <div class="collapse navbar-collapse" id="thenavbar">
@@ -142,7 +144,7 @@
     {% block login_widget %}
       <span id="login_widget">
         {% if user %}
           <p class="navbar-text">{{user.name}}</p>
           <a id="logout" role="button" class="navbar-btn btn-sm btn btn-default" href="{{logout_url}}"> <i aria-hidden="true" class="fa fa-sign-out"></i> Logout</a>
         {% else %}
           <a id="login" role="button" class="btn-sm btn navbar-btn btn-default" href="{{login_url}}">Login</a>
@@ -173,6 +175,9 @@
 {% block main %}
 {% endblock %}

+{% block footer %}
+{% endblock %}
+
 {% call modal('Error', btn_label='OK') %}
 <div class="ajax-error">
   The error

View File

@@ -23,9 +23,27 @@
     <form enctype="multipart/form-data" id="spawn_form" action="{{url}}" method="post" role="form">
       {{spawner_options_form | safe}}
       <br>
-      <input type="submit" value="Start" class="btn btn-jupyter form-control">
+      <div class="feedback-container">
+        <input type="submit" value="Start" class="btn btn-jupyter form-control">
+        <div class="feedback-widget hidden">
+          <i class="fa fa-spinner"></i>
+        </div>
+      </div>
     </form>
   </div>
 </div>
 {% endblock %}
+
+{% block script %}
+{{ super() }}
+<script>
+// setup onSubmit feedback
+$('form').submit((e) => {
+  var form = $(e.target);
+  form.find('.feedback-container>input').attr('disabled', true);
+  form.find('.feedback-container>*').toggleClass('hidden');
+  form.find('.feedback-widget>*').toggleClass('fa-pulse');
+});
+</script>
+{% endblock %}

View File

@@ -1,11 +1,4 @@
 #!/bin/bash
 set -ex

-stable=0.9
-
-for V in master $stable; do
-    docker build --build-arg JUPYTERHUB_VERSION=$V -t $DOCKER_REPO:$V .
-done
-
-echo "tagging $IMAGE_NAME"
-docker tag $DOCKER_REPO:$stable $IMAGE_NAME
+docker build --build-arg JUPYTERHUB_VERSION=$DOCKER_TAG -t $DOCKER_REPO:$DOCKER_TAG .

View File

@@ -1,15 +1,10 @@
 #!/bin/bash
 set -ex

-stable=0.9
-
-for V in master $stable; do
-    docker push $DOCKER_REPO:$V
-done
 function get_hub_version() {
     rm -f hub_version
     V=$1
-    docker run --rm -v $PWD:/version -u $(id -u) -i $DOCKER_REPO:$V sh -c 'jupyterhub --version > /version/hub_version'
+    docker run --rm -v $PWD:/version -u $(id -u) -i $DOCKER_REPO:$DOCKER_TAG sh -c 'jupyterhub --version > /version/hub_version'
     hub_xyz=$(cat hub_version)
     split=( ${hub_xyz//./ } )
     hub_xy="${split[0]}.${split[1]}"
@@ -18,14 +13,9 @@ function get_hub_version() {
         hub_xy="${hub_xy}.${split[3]}"
     fi
 }

-# tag e.g. 0.8.1 with 0.8
-get_hub_version $stable
-docker tag $DOCKER_REPO:$stable $DOCKER_REPO:$hub_xyz
-docker push $DOCKER_REPO:$hub_xyz
-
 # tag e.g. 0.9 with master
-get_hub_version master
+get_hub_version
-docker tag $DOCKER_REPO:master $DOCKER_REPO:$hub_xy
+docker tag $DOCKER_REPO:$DOCKER_TAG $DOCKER_REPO:$hub_xy
 docker push $DOCKER_REPO:$hub_xy
-docker tag $DOCKER_REPO:master $DOCKER_REPO:$hub_xyz
+docker tag $DOCKER_REPO:$DOCKER_TAG $DOCKER_REPO:$hub_xyz
 docker push $DOCKER_REPO:$hub_xyz