mirror of
https://github.com/jupyterhub/jupyterhub.git
synced 2025-10-07 10:04:07 +00:00
Compare commits
199 Commits
Author | SHA1 | Date | |
---|---|---|---|
![]() |
8b583cb445 | ||
![]() |
038a85af43 | ||
![]() |
9165beb41c | ||
![]() |
b285de4412 | ||
![]() |
5826035fe9 | ||
![]() |
b953ac295b | ||
![]() |
8a95066b2e | ||
![]() |
00a4aef607 | ||
![]() |
e01ce7b665 | ||
![]() |
a57df48f28 | ||
![]() |
5d7e008055 | ||
![]() |
ba31b3ecb7 | ||
![]() |
3c5eb934bf | ||
![]() |
82e15df6e9 | ||
![]() |
e3c83c0c29 | ||
![]() |
94542334c4 | ||
![]() |
95494b3ace | ||
![]() |
a131cfb79e | ||
![]() |
f002c67343 | ||
![]() |
b9caf95c72 | ||
![]() |
5356954240 | ||
![]() |
126c73002e | ||
![]() |
65b4502a78 | ||
![]() |
3406161d75 | ||
![]() |
e45f00f0f7 | ||
![]() |
71f4a30562 | ||
![]() |
20ba414b41 | ||
![]() |
f5250f04c5 | ||
![]() |
c2ea20a87a | ||
![]() |
b14989d4a5 | ||
![]() |
04578e329c | ||
![]() |
be05e438ca | ||
![]() |
24d9215029 | ||
![]() |
8892270c24 | ||
![]() |
b928df6cba | ||
![]() |
3fc74bd79e | ||
![]() |
b34be77fec | ||
![]() |
54dcca7ba9 | ||
![]() |
d991c06098 | ||
![]() |
01a67ba156 | ||
![]() |
8831573b6c | ||
![]() |
c5bc5411fb | ||
![]() |
a13ccd7530 | ||
![]() |
e9a744e8b7 | ||
![]() |
582d43c153 | ||
![]() |
7b5550928f | ||
![]() |
83920a3258 | ||
![]() |
d1670aa443 | ||
![]() |
c6f589124e | ||
![]() |
35991e5194 | ||
![]() |
b956190393 | ||
![]() |
122c989b7a | ||
![]() |
5602575099 | ||
![]() |
4534499aad | ||
![]() |
f733a91d7c | ||
![]() |
bf3fa30a01 | ||
![]() |
2625229847 | ||
![]() |
2c3eb6d0d6 | ||
![]() |
5ff98fd1a5 | ||
![]() |
056a7351a3 | ||
![]() |
f79b71727b | ||
![]() |
d3a3b8ca19 | ||
![]() |
df9e002b9a | ||
![]() |
a4a2c9d068 | ||
![]() |
c453e5ad20 | ||
![]() |
617b879c2a | ||
![]() |
a0042e9302 | ||
![]() |
6bbfcdfe4f | ||
![]() |
25662285af | ||
![]() |
84d12e8d72 | ||
![]() |
c317cbce36 | ||
![]() |
d279604fac | ||
![]() |
70fc4ef886 | ||
![]() |
24ff91eef5 | ||
![]() |
afc6789c74 | ||
![]() |
819e5e222a | ||
![]() |
e1a4f37bbc | ||
![]() |
a73477feed | ||
![]() |
89722ee2f3 | ||
![]() |
30d4b2cef4 | ||
![]() |
ca4fce7ffb | ||
![]() |
018b2daace | ||
![]() |
fd01165cf6 | ||
![]() |
34e4719893 | ||
![]() |
c6ac9e1d15 | ||
![]() |
70b8876239 | ||
![]() |
5e34f4481a | ||
![]() |
eae5594698 | ||
![]() |
f02022a00c | ||
![]() |
f964013516 | ||
![]() |
5f7ffaf1f6 | ||
![]() |
0e7ccb7520 | ||
![]() |
c9db504a49 | ||
![]() |
716677393e | ||
![]() |
ba8484f161 | ||
![]() |
ceec84dbb4 | ||
![]() |
f2a83ec846 | ||
![]() |
7deea6083a | ||
![]() |
a169ff3548 | ||
![]() |
f84a88da21 | ||
![]() |
eecec7183e | ||
![]() |
f11705ee26 | ||
![]() |
78ac5abf23 | ||
![]() |
2beeaa0932 | ||
![]() |
90cb8423bc | ||
![]() |
3b07bd286b | ||
![]() |
73564b97ea | ||
![]() |
65cad5efad | ||
![]() |
52eb627cd6 | ||
![]() |
506e568a9a | ||
![]() |
6c89de082f | ||
![]() |
6fb31cc613 | ||
![]() |
cfb22baf05 | ||
![]() |
2d0c1ff0a8 | ||
![]() |
7789e13879 | ||
![]() |
f7b90e2c09 | ||
![]() |
ccb29167dd | ||
![]() |
4ef1eca3c9 | ||
![]() |
c26ede30b9 | ||
![]() |
64c69a3164 | ||
![]() |
ad7867ff11 | ||
![]() |
14fc1588f8 | ||
![]() |
7e5a925f4f | ||
![]() |
3c61e422da | ||
![]() |
0e2cf37981 | ||
![]() |
503d5e389f | ||
![]() |
7b1e61ab2c | ||
![]() |
4692d6638d | ||
![]() |
7829070e1c | ||
![]() |
5e4b935322 | ||
![]() |
4c445c7a88 | ||
![]() |
8e2965df6a | ||
![]() |
7a41d24606 | ||
![]() |
5f84a006dc | ||
![]() |
e19296a230 | ||
![]() |
89ba97f413 | ||
![]() |
fe2157130b | ||
![]() |
e3b17e8176 | ||
![]() |
027f2f95c6 | ||
![]() |
210975324a | ||
![]() |
f9a90d2494 | ||
![]() |
932689f2f8 | ||
![]() |
f91e911d1a | ||
![]() |
b75cce857e | ||
![]() |
62f00690f7 | ||
![]() |
f700ba4154 | ||
![]() |
8b91842eae | ||
![]() |
80a9eb93f4 | ||
![]() |
e1deecbbfb | ||
![]() |
d3142704b7 | ||
![]() |
447edd081a | ||
![]() |
e1531ec277 | ||
![]() |
d12ac4b1f6 | ||
![]() |
17851b7586 | ||
![]() |
118e2fa610 | ||
![]() |
8e3553462c | ||
![]() |
37da47d811 | ||
![]() |
a640a468fb | ||
![]() |
92f034766e | ||
![]() |
f7ea451df8 | ||
![]() |
1b7f54b462 | ||
![]() |
b14b12231a | ||
![]() |
2866be9462 | ||
![]() |
f8648644bf | ||
![]() |
69d4d48db0 | ||
![]() |
df309749f2 | ||
![]() |
58751067db | ||
![]() |
4fd70cf79b | ||
![]() |
ff15bad375 | ||
![]() |
90ac4ab6fe | ||
![]() |
cba5bb1676 | ||
![]() |
4b5fa404fc | ||
![]() |
c4ac1240ac | ||
![]() |
d384ad2700 | ||
![]() |
c3da0b8073 | ||
![]() |
9919cba375 | ||
![]() |
1e6b94de92 | ||
![]() |
8451a4cd08 | ||
![]() |
48f1da1b8d | ||
![]() |
e20050b719 | ||
![]() |
a9c0a46a06 | ||
![]() |
03bb094b90 | ||
![]() |
5d0d552c26 | ||
![]() |
2d50cef098 | ||
![]() |
d6d0b83b4e | ||
![]() |
f1dbeda451 | ||
![]() |
512bbae5cb | ||
![]() |
8c575d40af | ||
![]() |
d6b9909bc6 | ||
![]() |
ef7d6dc091 | ||
![]() |
57f707bbfd | ||
![]() |
0ae7213366 | ||
![]() |
22ff7aa672 | ||
![]() |
ca579fbf4a | ||
![]() |
f2eb30d090 | ||
![]() |
63a4b4744b | ||
![]() |
e03b5b3992 | ||
![]() |
d3a6aa2471 | ||
![]() |
b254716cee |
@@ -4,3 +4,7 @@ jupyterhub_cookie_secret
|
||||
jupyterhub.sqlite
|
||||
jupyterhub_config.py
|
||||
node_modules
|
||||
docs
|
||||
.git
|
||||
dist
|
||||
build
|
||||
|
3
.gitignore
vendored
3
.gitignore
vendored
@@ -3,9 +3,10 @@ node_modules
|
||||
*~
|
||||
.cache
|
||||
.DS_Store
|
||||
build
|
||||
/build
|
||||
dist
|
||||
docs/_build
|
||||
docs/build
|
||||
docs/source/_static/rest-api
|
||||
.ipynb_checkpoints
|
||||
# ignore config file at the top-level of the repo
|
||||
|
36
.travis.yml
36
.travis.yml
@@ -1,5 +1,7 @@
|
||||
language: python
|
||||
sudo: false
|
||||
cache:
|
||||
- pip
|
||||
python:
|
||||
- nightly
|
||||
- 3.6
|
||||
@@ -9,8 +11,8 @@ env:
|
||||
global:
|
||||
- ASYNC_TEST_TIMEOUT=15
|
||||
services:
|
||||
- mysql
|
||||
- postgresql
|
||||
- postgres
|
||||
- docker
|
||||
|
||||
# installing dependencies
|
||||
before_install:
|
||||
@@ -19,10 +21,12 @@ before_install:
|
||||
- npm install -g configurable-http-proxy
|
||||
- |
|
||||
if [[ $JUPYTERHUB_TEST_DB_URL == mysql* ]]; then
|
||||
mysql -e 'CREATE DATABASE jupyterhub CHARACTER SET utf8 COLLATE utf8_general_ci;'
|
||||
unset MYSQL_UNIX_PORT
|
||||
DB=mysql bash ci/docker-db.sh
|
||||
DB=mysql bash ci/init-db.sh
|
||||
pip install 'mysql-connector<2.2'
|
||||
elif [[ $JUPYTERHUB_TEST_DB_URL == postgresql* ]]; then
|
||||
psql -c 'create database jupyterhub;' -U postgres
|
||||
DB=postgres bash ci/init-db.sh
|
||||
pip install psycopg2
|
||||
fi
|
||||
install:
|
||||
@@ -32,6 +36,20 @@ install:
|
||||
|
||||
# running tests
|
||||
script:
|
||||
- |
|
||||
if [[ ! -z "$JUPYTERHUB_TEST_DB_URL" ]]; then
|
||||
# if testing upgrade-db, run `jupyterhub token` with 0.7
|
||||
# to initialize an old db. Used in upgrade-tests
|
||||
export JUPYTERHUB_TEST_UPGRADE_DB_URL=${JUPYTERHUB_TEST_DB_URL}_upgrade
|
||||
# use virtualenv instead of venv because venv doesn't work here
|
||||
python -m pip install virtualenv
|
||||
python -m virtualenv old-hub-env
|
||||
./old-hub-env/bin/python -m pip install jupyterhub==0.7.2 psycopg2 'mysql-connector<2.2'
|
||||
./old-hub-env/bin/jupyterhub token kaylee \
|
||||
--JupyterHub.db_url=$JUPYTERHUB_TEST_UPGRADE_DB_URL \
|
||||
--Authenticator.whitelist="{'kaylee'}" \
|
||||
--JupyterHub.authenticator_class=jupyterhub.auth.Authenticator
|
||||
fi
|
||||
- pytest -v --maxfail=2 --cov=jupyterhub jupyterhub/tests
|
||||
after_success:
|
||||
- codecov
|
||||
@@ -42,6 +60,12 @@ matrix:
|
||||
- python: 3.6
|
||||
env: JUPYTERHUB_TEST_SUBDOMAIN_HOST=http://localhost.jovyan.org:8000
|
||||
- python: 3.6
|
||||
env: JUPYTERHUB_TEST_DB_URL=mysql+mysqlconnector://root@127.0.0.1/jupyterhub
|
||||
env:
|
||||
- MYSQL_HOST=127.0.0.1
|
||||
- MYSQL_TCP_PORT=13306
|
||||
- JUPYTERHUB_TEST_DB_URL=mysql+mysqlconnector://root@127.0.0.1:$MYSQL_TCP_PORT/jupyterhub
|
||||
- python: 3.6
|
||||
env: JUPYTERHUB_TEST_DB_URL=postgresql://postgres@127.0.0.1/jupyterhub
|
||||
env:
|
||||
- JUPYTERHUB_TEST_DB_URL=postgresql://postgres@127.0.0.1/jupyterhub
|
||||
allow_failures:
|
||||
- python: nightly
|
||||
|
@@ -52,7 +52,8 @@ ENV PATH=/opt/conda/bin:$PATH
|
||||
ADD . /src/jupyterhub
|
||||
WORKDIR /src/jupyterhub
|
||||
|
||||
RUN python setup.py js && pip install . && \
|
||||
RUN npm install --unsafe-perm && \
|
||||
pip install . && \
|
||||
rm -rf $PWD ~/.cache ~/.npm
|
||||
|
||||
RUN mkdir -p /srv/jupyterhub/
|
||||
|
15
MANIFEST.in
15
MANIFEST.in
@@ -1,7 +1,7 @@
|
||||
include README.md
|
||||
include COPYING.md
|
||||
include setupegg.py
|
||||
include bower.json
|
||||
include bower-lite
|
||||
include package.json
|
||||
include *requirements.txt
|
||||
include Dockerfile
|
||||
@@ -10,18 +10,21 @@ graft onbuild
|
||||
graft jupyterhub
|
||||
graft scripts
|
||||
graft share
|
||||
graft singleuser
|
||||
graft ci
|
||||
|
||||
# Documentation
|
||||
graft docs
|
||||
prune docs/node_modules
|
||||
|
||||
# prune some large unused files from components
|
||||
prune share/jupyter/hub/static/components/bootstrap/css
|
||||
exclude share/jupyter/hub/static/components/components/fonts/*.svg
|
||||
exclude share/jupyter/hub/static/components/bootstrap/less/*.js
|
||||
exclude share/jupyter/hub/static/components/font-awesome/css
|
||||
prune share/jupyter/hub/static/components/bootstrap/dist/css
|
||||
exclude share/jupyter/hub/static/components/bootstrap/dist/fonts/*.svg
|
||||
prune share/jupyter/hub/static/components/font-awesome/css
|
||||
prune share/jupyter/hub/static/components/font-awesome/scss
|
||||
exclude share/jupyter/hub/static/components/font-awesome/fonts/*.svg
|
||||
exclude share/jupyter/hub/static/components/jquery/*migrate*.js
|
||||
prune share/jupyter/hub/static/components/jquery/external
|
||||
prune share/jupyter/hub/static/components/jquery/src
|
||||
prune share/jupyter/hub/static/components/moment/lang
|
||||
prune share/jupyter/hub/static/components/moment/min
|
||||
|
||||
|
@@ -99,7 +99,7 @@ more configuration of the system.
|
||||
|
||||
## Configuration
|
||||
|
||||
The [Getting Started](http://jupyterhub.readthedocs.io/en/latest/getting-started.html) section of the
|
||||
The [Getting Started](http://jupyterhub.readthedocs.io/en/latest/getting-started/index.html) section of the
|
||||
documentation explains the common steps in setting up JupyterHub.
|
||||
|
||||
The [**JupyterHub tutorial**](https://github.com/jupyterhub/jupyterhub-tutorial)
|
||||
|
36
bower-lite
Executable file
36
bower-lite
Executable file
@@ -0,0 +1,36 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
# Copyright (c) Jupyter Development Team.
|
||||
# Distributed under the terms of the Modified BSD License.
|
||||
|
||||
"""
|
||||
bower-lite
|
||||
|
||||
Since Bower's on its way out,
|
||||
stage frontend dependencies from node_modules into components
|
||||
"""
|
||||
|
||||
import json
|
||||
import os
|
||||
from os.path import join
|
||||
import shutil
|
||||
|
||||
HERE = os.path.abspath(os.path.dirname(__file__))
|
||||
|
||||
|
||||
components = join(HERE, "share", "jupyter", "hub", "static", "components")
|
||||
node_modules = join(HERE, "node_modules")
|
||||
|
||||
if os.path.exists(components):
|
||||
shutil.rmtree(components)
|
||||
os.mkdir(components)
|
||||
|
||||
with open(join(HERE, 'package.json')) as f:
|
||||
package_json = json.load(f)
|
||||
|
||||
dependencies = package_json['dependencies']
|
||||
for dep in dependencies:
|
||||
src = join(node_modules, dep)
|
||||
dest = join(components, dep)
|
||||
print("%s -> %s" % (src, dest))
|
||||
shutil.copytree(src, dest)
|
11
bower.json
11
bower.json
@@ -1,11 +0,0 @@
|
||||
{
|
||||
"name": "jupyterhub-deps",
|
||||
"version": "0.0.0",
|
||||
"dependencies": {
|
||||
"bootstrap": "components/bootstrap#~3.3",
|
||||
"font-awesome": "components/font-awesome#~4.7",
|
||||
"jquery": "components/jquery#~3.2",
|
||||
"moment": "~2.18",
|
||||
"requirejs": "~2.3"
|
||||
}
|
||||
}
|
50
ci/docker-db.sh
Normal file
50
ci/docker-db.sh
Normal file
@@ -0,0 +1,50 @@
|
||||
#!/usr/bin/env bash
|
||||
# source this file to setup postgres and mysql
|
||||
# for local testing (as similar as possible to docker)
|
||||
|
||||
set -e
|
||||
|
||||
export MYSQL_HOST=127.0.0.1
|
||||
export MYSQL_TCP_PORT=${MYSQL_TCP_PORT:-13306}
|
||||
export PGHOST=127.0.0.1
|
||||
NAME="hub-test-$DB"
|
||||
DOCKER_RUN="docker run --rm -d --name $NAME"
|
||||
|
||||
docker rm -f "$NAME" 2>/dev/null || true
|
||||
|
||||
case "$DB" in
|
||||
"mysql")
|
||||
RUN_ARGS="-e MYSQL_ALLOW_EMPTY_PASSWORD=1 -p $MYSQL_TCP_PORT:3306 mysql:5.7"
|
||||
CHECK="mysql --host $MYSQL_HOST --port $MYSQL_TCP_PORT --user root -e \q"
|
||||
;;
|
||||
"postgres")
|
||||
RUN_ARGS="-p 5432:5432 postgres:9.5"
|
||||
CHECK="psql --user postgres -c \q"
|
||||
;;
|
||||
*)
|
||||
echo '$DB must be mysql or postgres'
|
||||
exit 1
|
||||
esac
|
||||
|
||||
$DOCKER_RUN $RUN_ARGS
|
||||
|
||||
echo -n "waiting for $DB "
|
||||
for i in {1..60}; do
|
||||
if $CHECK; then
|
||||
echo 'done'
|
||||
break
|
||||
else
|
||||
echo -n '.'
|
||||
sleep 1
|
||||
fi
|
||||
done
|
||||
$CHECK
|
||||
|
||||
|
||||
echo -e "
|
||||
Set these environment variables:
|
||||
|
||||
export MYSQL_HOST=127.0.0.1
|
||||
export MYSQL_TCP_PORT=$MYSQL_TCP_PORT
|
||||
export PGHOST=127.0.0.1
|
||||
"
|
27
ci/init-db.sh
Normal file
27
ci/init-db.sh
Normal file
@@ -0,0 +1,27 @@
|
||||
#!/usr/bin/env bash
|
||||
# initialize jupyterhub databases for testing
|
||||
|
||||
set -e
|
||||
|
||||
MYSQL="mysql --user root --host $MYSQL_HOST --port $MYSQL_TCP_PORT -e "
|
||||
PSQL="psql --user postgres -c "
|
||||
|
||||
case "$DB" in
|
||||
"mysql")
|
||||
EXTRA_CREATE='CHARACTER SET utf8 COLLATE utf8_general_ci'
|
||||
SQL="$MYSQL"
|
||||
;;
|
||||
"postgres")
|
||||
SQL="$PSQL"
|
||||
;;
|
||||
*)
|
||||
echo '$DB must be mysql or postgres'
|
||||
exit 1
|
||||
esac
|
||||
|
||||
set -x
|
||||
|
||||
$SQL 'DROP DATABASE jupyterhub;' 2>/dev/null || true
|
||||
$SQL "CREATE DATABASE jupyterhub ${EXTRA_CREATE};"
|
||||
$SQL 'DROP DATABASE jupyterhub_upgrade;' 2>/dev/null || true
|
||||
$SQL "CREATE DATABASE jupyterhub_upgrade ${EXTRA_CREATE};"
|
@@ -203,6 +203,43 @@ paths:
|
||||
description: The user's notebook server has stopped
|
||||
'202':
|
||||
description: The user's notebook server has not yet stopped as it is taking a while to stop
|
||||
/users/{name}/servers/{server_name}:
|
||||
post:
|
||||
summary: Start a user's single-user named-server notebook server
|
||||
parameters:
|
||||
- name: name
|
||||
description: username
|
||||
in: path
|
||||
required: true
|
||||
type: string
|
||||
- name: server_name
|
||||
description: name given to a named-server
|
||||
in: path
|
||||
required: true
|
||||
type: string
|
||||
responses:
|
||||
'201':
|
||||
description: The user's notebook named-server has started
|
||||
'202':
|
||||
description: The user's notebook named-server has not yet started, but has been requested
|
||||
delete:
|
||||
summary: Stop a user's named-server
|
||||
parameters:
|
||||
- name: name
|
||||
description: username
|
||||
in: path
|
||||
required: true
|
||||
type: string
|
||||
- name: server_name
|
||||
description: name given to a named-server
|
||||
in: path
|
||||
required: true
|
||||
type: string
|
||||
responses:
|
||||
'204':
|
||||
description: The user's notebook named-server has stopped
|
||||
'202':
|
||||
description: The user's notebook named-server has not yet stopped as it is taking a while to stop
|
||||
/users/{name}/admin-access:
|
||||
post:
|
||||
summary: Grant admin access to this user's notebook server
|
||||
|
@@ -16,6 +16,12 @@ Module: :mod:`jupyterhub.services.auth`
|
||||
.. autoconfigurable:: HubAuth
|
||||
:members:
|
||||
|
||||
:class:`HubOAuth`
|
||||
-----------------
|
||||
|
||||
.. autoconfigurable:: HubOAuth
|
||||
:members:
|
||||
|
||||
|
||||
:class:`HubAuthenticated`
|
||||
-------------------------
|
||||
@@ -23,3 +29,13 @@ Module: :mod:`jupyterhub.services.auth`
|
||||
.. autoclass:: HubAuthenticated
|
||||
:members:
|
||||
|
||||
:class:`HubOAuthenticated`
|
||||
--------------------------
|
||||
|
||||
.. autoclass:: HubOAuthenticated
|
||||
|
||||
:class:`HubOAuthCallbackHandler`
|
||||
--------------------------------
|
||||
|
||||
.. autoclass:: HubOAuthCallbackHandler
|
||||
|
||||
|
@@ -5,14 +5,119 @@ its link will bring up a GitHub listing of changes. Use `git log` on the
|
||||
command line for details.
|
||||
|
||||
|
||||
## [Unreleased] 0.8
|
||||
## [Unreleased]
|
||||
|
||||
## 0.8
|
||||
|
||||
### [0.8.1] 2017-11-07
|
||||
|
||||
JupyterHub 0.8.1 is a collection of bugfixes and small improvements on 0.8.
|
||||
|
||||
#### Added
|
||||
|
||||
#### Changed
|
||||
- Run tornado with AsyncIO by default
|
||||
- Add `jupyterhub --upgrade-db` flag for automatically upgrading the database as part of startup.
|
||||
This is useful for cases where manually running `jupyterhub upgrade-db`
|
||||
as a separate step is unwieldy.
|
||||
- Avoid creating backups of the database when no changes are to be made by
|
||||
`jupyterhub upgrade-db`.
|
||||
|
||||
#### Fixed
|
||||
|
||||
- Add some further validation to usernames - `/` is not allowed in usernames.
|
||||
- Fix empty logout page when using auto_login
|
||||
- Fix autofill of username field in default login form.
|
||||
- Fix listing of users on the admin page who have not yet started their server.
|
||||
- Fix ever-growing traceback when re-raising Exceptions from spawn failures.
|
||||
- Remove use of deprecated `bower` for javascript client dependencies.
|
||||
|
||||
|
||||
### [0.8.0] 2017-10-03
|
||||
|
||||
JupyterHub 0.8 is a big release!
|
||||
|
||||
Perhaps the biggest change is the use of OAuth to negotiate authentication
|
||||
between the Hub and single-user services.
|
||||
Due to this change, it is important that the single-user server
|
||||
and Hub are both running the same version of JupyterHub.
|
||||
If you are using containers (e.g. via DockerSpawner or KubeSpawner),
|
||||
this means upgrading jupyterhub in your user images at the same time as the Hub.
|
||||
In most cases, a
|
||||
|
||||
pip install jupyterhub==version
|
||||
|
||||
in your Dockerfile is sufficient.
|
||||
|
||||
#### Added
|
||||
|
||||
- JupyterHub now defined a `Proxy` API for custom
|
||||
proxy implementations other than the default.
|
||||
The defaults are unchanged,
|
||||
but configuration of the proxy is now done on the `ConfigurableHTTPProxy` class instead of the top-level JupyterHub.
|
||||
TODO: docs for writing a custom proxy.
|
||||
- Single-user servers and services
|
||||
(anything that uses HubAuth)
|
||||
can now accept token-authenticated requests via the Authentication header.
|
||||
- Authenticators can now store state in the Hub's database.
|
||||
To do so, the `authenticate` method should return a dict of the form
|
||||
|
||||
```python
|
||||
{
|
||||
'username': 'name'
|
||||
'state': {}
|
||||
}
|
||||
```
|
||||
|
||||
This data will be encrypted and requires `JUPYTERHUB_CRYPT_KEY` environment variable to be set
|
||||
and the `Authenticator.enable_auth_state` flag to be True.
|
||||
If these are not set, auth_state returned by the Authenticator will not be stored.
|
||||
- There is preliminary support for multiple (named) servers per user in the REST API.
|
||||
Named servers can be created via API requests, but there is currently no UI for managing them.
|
||||
- Add `LocalProcessSpawner.popen_kwargs` and `LocalProcessSpawner.shell_cmd`
|
||||
for customizing how user server processes are launched.
|
||||
- Add `Authenticator.auto_login` flag for skipping the "Login with..." page explicitly.
|
||||
- Add `JupyterHub.hub_connect_ip` configuration
|
||||
for the ip that should be used when connecting to the Hub.
|
||||
This is promoting (and deprecating) `DockerSpawner.hub_ip_connect`
|
||||
for use by all Spawners.
|
||||
- Add `Spawner.pre_spawn_hook(spawner)` hook for customizing
|
||||
pre-spawn events.
|
||||
- Add `JupyterHub.active_server_limit` and `JupyterHub.concurrent_spawn_limit`
|
||||
for limiting the total number of running user servers and the number of pending spawns, respectively.
|
||||
|
||||
|
||||
#### Changed
|
||||
|
||||
- more arguments to spawners are now passed via environment variables (`.get_env()`)
|
||||
rather than CLI arguments (`.get_args()`)
|
||||
- internally generated tokens no longer get extra hash rounds,
|
||||
significantly speeding up authentication.
|
||||
The hash rounds were deemed unnecessary because the tokens were already
|
||||
generated with high entropy.
|
||||
- `JUPYTERHUB_API_TOKEN` env is available at all times,
|
||||
rather than being removed during single-user start.
|
||||
The token is now accessible to kernel processes,
|
||||
enabling user kernels to make authenticated API requests to Hub-authenticated services.
|
||||
- Cookie secrets should be 32B hex instead of large base64 secrets.
|
||||
- pycurl is used by default, if available.
|
||||
|
||||
#### Fixed
|
||||
|
||||
So many things fixed!
|
||||
|
||||
- Collisions are checked when users are renamed
|
||||
- Fix bug where OAuth authenticators could not logout users
|
||||
due to being redirected right back through the login process.
|
||||
- If there are errors loading your config files,
|
||||
JupyterHub will refuse to start with an informative error.
|
||||
Previously, the bad config would be ignored and JupyterHub would launch with default configuration.
|
||||
- Raise 403 error on unauthorized user rather than redirect to login,
|
||||
which could cause redirect loop.
|
||||
- Set `httponly` on cookies because it's prudent.
|
||||
- Improve support for MySQL as the database backend
|
||||
- Many race conditions and performance problems under heavy load have been fixed.
|
||||
- Fix alembic tagging of database schema versions.
|
||||
|
||||
#### Removed
|
||||
|
||||
- End support for Python 3.3
|
||||
@@ -155,7 +260,9 @@ Fix removal of `/login` page in 0.4.0, breaking some OAuth providers.
|
||||
First preview release
|
||||
|
||||
|
||||
[Unreleased]: https://github.com/jupyterhub/jupyterhub/compare/0.7.2...HEAD
|
||||
[Unreleased]: https://github.com/jupyterhub/jupyterhub/compare/0.8.1...HEAD
|
||||
[0.8.1]: https://github.com/jupyterhub/jupyterhub/compare/0.8.0...0.8.1
|
||||
[0.8.0]: https://github.com/jupyterhub/jupyterhub/compare/0.7.2...0.8.0
|
||||
[0.7.2]: https://github.com/jupyterhub/jupyterhub/compare/0.7.1...0.7.2
|
||||
[0.7.1]: https://github.com/jupyterhub/jupyterhub/compare/0.7.0...0.7.1
|
||||
[0.7.0]: https://github.com/jupyterhub/jupyterhub/compare/0.6.1...0.7.0
|
||||
|
@@ -3,6 +3,7 @@
|
||||
Project Jupyter thanks the following people for their help and
|
||||
contribution on JupyterHub:
|
||||
|
||||
- Analect
|
||||
- anderbubble
|
||||
- apetresc
|
||||
- barrachri
|
||||
@@ -31,6 +32,7 @@ contribution on JupyterHub:
|
||||
- JamiesHQ
|
||||
- jbweston
|
||||
- jdavidheiser
|
||||
- jencabral
|
||||
- jhamrick
|
||||
- josephtate
|
||||
- kinuax
|
||||
|
@@ -67,6 +67,8 @@ Contents
|
||||
**Tutorials**
|
||||
|
||||
* :doc:`tutorials/index`
|
||||
* :doc:`tutorials/upgrade-dot-eight`
|
||||
* `Zero to JupyterHub with Kubernetes <https://zero-to-jupyterhub.readthedocs.io/en/latest/>`_
|
||||
|
||||
**Troubleshooting**
|
||||
|
||||
|
@@ -84,6 +84,7 @@ class DictionaryAuthenticator(Authenticator):
|
||||
return data['username']
|
||||
```
|
||||
|
||||
|
||||
#### Normalize usernames
|
||||
|
||||
Since the Authenticator and Spawner both use the same username,
|
||||
@@ -116,6 +117,7 @@ To only allow usernames that start with 'w':
|
||||
c.Authenticator.username_pattern = r'w.*'
|
||||
```
|
||||
|
||||
|
||||
### How to write a custom authenticator
|
||||
|
||||
You can use custom Authenticator subclasses to enable authentication
|
||||
@@ -123,6 +125,11 @@ via other mechanisms. One such example is using [GitHub OAuth][].
|
||||
|
||||
Because the username is passed from the Authenticator to the Spawner,
|
||||
a custom Authenticator and Spawner are often used together.
|
||||
For example, the Authenticator methods, [pre_spawn_start(user, spawner)][]
|
||||
and [post_spawn_stop(user, spawner)][], are hooks that can be used to do
|
||||
auth-related startup (e.g. opening PAM sessions) and cleanup
|
||||
(e.g. closing PAM sessions).
|
||||
|
||||
|
||||
See a list of custom Authenticators [on the wiki](https://github.com/jupyterhub/jupyterhub/wiki/Authenticators).
|
||||
|
||||
@@ -130,6 +137,77 @@ If you are interested in writing a custom authenticator, you can read
|
||||
[this tutorial](http://jupyterhub-tutorial.readthedocs.io/en/latest/authenticators.html).
|
||||
|
||||
|
||||
### Authentication state
|
||||
|
||||
JupyterHub 0.8 adds the ability to persist state related to authentication,
|
||||
such as auth-related tokens.
|
||||
If such state should be persisted, `.authenticate()` should return a dictionary of the form:
|
||||
|
||||
```python
|
||||
{
|
||||
'username': 'name',
|
||||
'auth_state': {
|
||||
'key': 'value',
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
where `username` is the username that has been authenticated,
|
||||
and `auth_state` is any JSON-serializable dictionary.
|
||||
|
||||
Because `auth_state` may contain sensitive information,
|
||||
it is encrypted before being stored in the database.
|
||||
To store auth_state, two conditions must be met:
|
||||
|
||||
1. persisting auth state must be enabled explicitly via configuration
|
||||
```python
|
||||
c.Authenticator.enable_auth_state = True
|
||||
```
|
||||
2. encryption must be enabled by the presence of `JUPYTERHUB_CRYPT_KEY` environment variable,
|
||||
which should be a hex-encoded 32-byte key.
|
||||
For example:
|
||||
```bash
|
||||
export JUPYTERHUB_CRYPT_KEY=$(openssl rand -hex 32)
|
||||
```
|
||||
|
||||
|
||||
JupyterHub uses [Fernet](https://cryptography.io/en/latest/fernet/) to encrypt auth_state.
|
||||
To facilitate key-rotation, `JUPYTERHUB_CRYPT_KEY` may be a semicolon-separated list of encryption keys.
|
||||
If there are multiple keys present, the **first** key is always used to persist any new auth_state.
|
||||
|
||||
|
||||
#### Using auth_state
|
||||
|
||||
Typically, if `auth_state` is persisted it is desirable to affect the Spawner environment in some way.
|
||||
This may mean defining environment variables, placing certificate in the user's home directory, etc.
|
||||
The `Authenticator.pre_spawn_start` method can be used to pass information from authenticator state
|
||||
to Spawner environment:
|
||||
|
||||
```python
|
||||
class MyAuthenticator(Authenticator):
|
||||
@gen.coroutine
|
||||
def authenticate(self, handler, data=None):
|
||||
username = yield identify_user(handler, data)
|
||||
upstream_token = yield token_for_user(username)
|
||||
return {
|
||||
'name': username,
|
||||
'auth_state': {
|
||||
'upstream_token': upstream_token,
|
||||
},
|
||||
}
|
||||
|
||||
@gen.coroutine
|
||||
def pre_spawn_start(self, user, spawner):
|
||||
"""Pass upstream_token to spawner via environment variable"""
|
||||
auth_state = yield user.get_auth_state()
|
||||
if not auth_state:
|
||||
# auth_state not enabled
|
||||
return
|
||||
spawner.environment['UPSTREAM_TOKEN'] = auth_state['upstream_token']
|
||||
```
|
||||
|
||||
|
||||
|
||||
## JupyterHub as an OAuth provider
|
||||
|
||||
Beginning with version 0.8, JupyterHub is an OAuth provider.
|
||||
@@ -140,3 +218,5 @@ Beginning with version 0.8, JupyterHub is an OAuth provider.
|
||||
[OAuth]: https://en.wikipedia.org/wiki/OAuth
|
||||
[GitHub OAuth]: https://developer.github.com/v3/oauth/
|
||||
[OAuthenticator]: https://github.com/jupyterhub/oauthenticator
|
||||
[pre_spawn_start(user, spawner)]: http://jupyterhub.readthedocs.io/en/latest/api/auth.html#jupyterhub.auth.Authenticator.pre_spawn_start
|
||||
[post_spawn_stop(user, spawner)]: http://jupyterhub.readthedocs.io/en/latest/api/auth.html#jupyterhub.auth.Authenticator.post_spawn_stop
|
||||
|
@@ -49,9 +49,6 @@ c.JupyterHub.cookie_secret_file = pjoin(runtime_dir, 'cookie_secret')
|
||||
c.JupyterHub.db_url = pjoin(runtime_dir, 'jupyterhub.sqlite')
|
||||
# or `--db=/path/to/jupyterhub.sqlite` on the command-line
|
||||
|
||||
# put the log file in /var/log
|
||||
c.JupyterHub.extra_log_file = '/var/log/jupyterhub.log'
|
||||
|
||||
# use GitHub OAuthenticator for local users
|
||||
c.JupyterHub.authenticator_class = 'oauthenticator.LocalGitHubOAuthenticator'
|
||||
c.GitHubOAuthenticator.oauth_callback_url = os.environ['OAUTH_CALLBACK_URL']
|
||||
@@ -79,10 +76,11 @@ export GITHUB_CLIENT_ID=github_id
|
||||
export GITHUB_CLIENT_SECRET=github_secret
|
||||
export OAUTH_CALLBACK_URL=https://example.com/hub/oauth_callback
|
||||
export CONFIGPROXY_AUTH_TOKEN=super-secret
|
||||
jupyterhub -f /etc/jupyterhub/jupyterhub_config.py
|
||||
# append log output to log file /var/log/jupyterhub.log
|
||||
jupyterhub -f /etc/jupyterhub/jupyterhub_config.py &>> /var/log/jupyterhub.log
|
||||
```
|
||||
|
||||
## Using nginx reverse proxy
|
||||
## Using a reverse proxy
|
||||
|
||||
In the following example, we show configuration files for a JupyterHub server
|
||||
running locally on port `8000` but accessible from the outside on the standard
|
||||
@@ -93,9 +91,9 @@ satisfy the following:
|
||||
* JupyterHub is running on a server, accessed *only* via `HUB.DOMAIN.TLD:443`
|
||||
* On the same machine, `NO_HUB.DOMAIN.TLD` strictly serves different content,
|
||||
also on port `443`
|
||||
* `nginx` is used to manage the web servers / reverse proxy (which means that
|
||||
only nginx will be able to bind two servers to `443`)
|
||||
* After testing, the server in question should be able to score an A+ on the
|
||||
* `nginx` or `apache` is used as the public access point (which means that
|
||||
only nginx/apache will bind to `443`)
|
||||
* After testing, the server in question should be able to score at least an A on the
|
||||
Qualys SSL Labs [SSL Server Test](https://www.ssllabs.com/ssltest/)
|
||||
|
||||
Let's start out with needed JupyterHub configuration in `jupyterhub_config.py`:
|
||||
@@ -105,30 +103,47 @@ Let's start out with needed JupyterHub configuration in `jupyterhub_config.py`:
|
||||
c.JupyterHub.ip = '127.0.0.1'
|
||||
```
|
||||
|
||||
For high-quality SSL configuration, we also generate Diffie-Helman parameters.
|
||||
This can take a few minutes:
|
||||
|
||||
```bash
|
||||
openssl dhparam -out /etc/ssl/certs/dhparam.pem 4096
|
||||
```
|
||||
|
||||
### nginx
|
||||
|
||||
The **`nginx` server config file** is fairly standard fare except for the two
|
||||
`location` blocks within the `HUB.DOMAIN.TLD` config file:
|
||||
|
||||
```bash
|
||||
# top-level http config for websocket headers
|
||||
# If Upgrade is defined, Connection = upgrade
|
||||
# If Upgrade is empty, Connection = close
|
||||
map $http_upgrade $connection_upgrade {
|
||||
default upgrade;
|
||||
'' close;
|
||||
}
|
||||
|
||||
# HTTP server to redirect all 80 traffic to SSL/HTTPS
|
||||
server {
|
||||
listen 80;
|
||||
server_name HUB.DOMAIN.TLD;
|
||||
listen 80;
|
||||
server_name HUB.DOMAIN.TLD;
|
||||
|
||||
# Tell all requests to port 80 to be 302 redirected to HTTPS
|
||||
return 302 https://$host$request_uri;
|
||||
# Tell all requests to port 80 to be 302 redirected to HTTPS
|
||||
return 302 https://$host$request_uri;
|
||||
}
|
||||
|
||||
# HTTPS server to handle JupyterHub
|
||||
server {
|
||||
listen 443;
|
||||
ssl on;
|
||||
listen 443;
|
||||
ssl on;
|
||||
|
||||
server_name HUB.DOMAIN.TLD;
|
||||
server_name HUB.DOMAIN.TLD;
|
||||
|
||||
ssl_certificate /etc/letsencrypt/live/HUB.DOMAIN.TLD/fullchain.pem;
|
||||
ssl_certificate_key /etc/letsencrypt/live/HUB.DOMAIN.TLD/privkey.pem;
|
||||
ssl_certificate /etc/letsencrypt/live/HUB.DOMAIN.TLD/fullchain.pem;
|
||||
ssl_certificate_key /etc/letsencrypt/live/HUB.DOMAIN.TLD/privkey.pem;
|
||||
|
||||
ssl_protocols TLSv1 TLSv1.1 TLSv1.2;
|
||||
ssl_protocols TLSv1 TLSv1.1 TLSv1.2;
|
||||
ssl_prefer_server_ciphers on;
|
||||
ssl_dhparam /etc/ssl/certs/dhparam.pem;
|
||||
ssl_ciphers 'ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-AES256-GCM-SHA384:DHE-RSA-AES128-GCM-SHA256:DHE-DSS-AES128-GCM-SHA256:kEDH+AESGCM:ECDHE-RSA-AES128-SHA256:ECDHE-ECDSA-AES128-SHA256:ECDHE-RSA-AES128-SHA:ECDHE-ECDSA-AES128-SHA:ECDHE-RSA-AES256-SHA384:ECDHE-ECDSA-AES256-SHA384:ECDHE-RSA-AES256-SHA:ECDHE-ECDSA-AES256-SHA:DHE-RSA-AES128-SHA256:DHE-RSA-AES128-SHA:DHE-DSS-AES128-SHA256:DHE-RSA-AES256-SHA256:DHE-DSS-AES256-SHA:DHE-RSA-AES256-SHA:AES128-GCM-SHA256:AES256-GCM-SHA384:AES128-SHA256:AES256-SHA256:AES128-SHA:AES256-SHA:AES:CAMELLIA:DES-CBC3-SHA:!aNULL:!eNULL:!EXPORT:!DES:!RC4:!MD5:!PSK:!aECDH:!EDH-DSS-DES-CBC3-SHA:!EDH-RSA-DES-CBC3-SHA:!KRB5-DES-CBC3-SHA';
|
||||
@@ -138,37 +153,28 @@ server {
|
||||
ssl_stapling_verify on;
|
||||
add_header Strict-Transport-Security max-age=15768000;
|
||||
|
||||
# Managing literal requests to the JupyterHub front end
|
||||
location / {
|
||||
proxy_pass https://127.0.0.1:8000;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
# Managing literal requests to the JupyterHub front end
|
||||
location / {
|
||||
proxy_pass http://127.0.0.1:8000;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
|
||||
# websocket headers
|
||||
proxy_set_header Upgrade $http_upgrade;
|
||||
proxy_set_header Connection $connection_upgrade;
|
||||
}
|
||||
|
||||
# Managing WebHook/Socket requests between hub user servers and external proxy
|
||||
location ~* /(api/kernels/[^/]+/(channels|iopub|shell|stdin)|terminals/websocket)/? {
|
||||
proxy_pass https://127.0.0.1:8000;
|
||||
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
# WebSocket support
|
||||
proxy_http_version 1.1;
|
||||
proxy_set_header Upgrade $http_upgrade;
|
||||
proxy_set_header Connection $connection_upgrade;
|
||||
|
||||
}
|
||||
|
||||
# Managing requests to verify letsencrypt host
|
||||
# Managing requests to verify letsencrypt host
|
||||
location ~ /.well-known {
|
||||
allow all;
|
||||
allow all;
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
```
|
||||
|
||||
If `nginx` is not running on port 443, substitute `$http_host` for `$host` on
|
||||
the lines setting the `Host` header.
|
||||
|
||||
`nginx` will now be the front facing element of JupyterHub on `443` which means
|
||||
it is also free to bind other servers, like `NO_HUB.DOMAIN.TLD` to the same port
|
||||
on the same machine and network interface. In fact, one can simply use the same
|
||||
@@ -177,35 +183,90 @@ of the site as well as the applicable location call:
|
||||
|
||||
```bash
|
||||
server {
|
||||
listen 80;
|
||||
server_name NO_HUB.DOMAIN.TLD;
|
||||
listen 80;
|
||||
server_name NO_HUB.DOMAIN.TLD;
|
||||
|
||||
# Tell all requests to port 80 to be 302 redirected to HTTPS
|
||||
return 302 https://$host$request_uri;
|
||||
# Tell all requests to port 80 to be 302 redirected to HTTPS
|
||||
return 302 https://$host$request_uri;
|
||||
}
|
||||
|
||||
server {
|
||||
listen 443;
|
||||
ssl on;
|
||||
listen 443;
|
||||
ssl on;
|
||||
|
||||
# INSERT OTHER SSL PARAMETERS HERE AS ABOVE
|
||||
# INSERT OTHER SSL PARAMETERS HERE AS ABOVE
|
||||
# SSL cert may differ
|
||||
|
||||
# Set the appropriate root directory
|
||||
root /var/www/html
|
||||
# Set the appropriate root directory
|
||||
root /var/www/html
|
||||
|
||||
# Set URI handling
|
||||
location / {
|
||||
try_files $uri $uri/ =404;
|
||||
}
|
||||
# Set URI handling
|
||||
location / {
|
||||
try_files $uri $uri/ =404;
|
||||
}
|
||||
|
||||
# Managing requests to verify letsencrypt host
|
||||
# Managing requests to verify letsencrypt host
|
||||
location ~ /.well-known {
|
||||
allow all;
|
||||
allow all;
|
||||
}
|
||||
|
||||
}
|
||||
```
|
||||
|
||||
Now just restart `nginx`, restart the JupyterHub, and enjoy accessing
|
||||
Now restart `nginx`, restart the JupyterHub, and enjoy accessing
|
||||
`https://HUB.DOMAIN.TLD` while serving other content securely on
|
||||
`https://NO_HUB.DOMAIN.TLD`.
|
||||
|
||||
|
||||
### Apache
|
||||
|
||||
As with nginx above, you can use [Apache](https://httpd.apache.org) as the reverse proxy.
|
||||
First, we will need to enable the apache modules that we are going to need:
|
||||
|
||||
```bash
|
||||
a2enmod ssl rewrite proxy proxy_http proxy_wstunnel
|
||||
```
|
||||
|
||||
Our Apache configuration is equivalent to the nginx configuration above:
|
||||
|
||||
- Redirect HTTP to HTTPS
|
||||
- Good SSL Configuration
|
||||
- Support for websockets on any proxied URL
|
||||
- JupyterHub is running locally at http://127.0.0.1:8000
|
||||
|
||||
```bash
|
||||
# redirect HTTP to HTTPS
|
||||
Listen 80
|
||||
<VirtualHost HUB.DOMAIN.TLD:80>
|
||||
ServerName HUB.DOMAIN.TLD
|
||||
Redirect / https://HUB.DOMAIN.TLD/
|
||||
</VirtualHost>
|
||||
|
||||
Listen 443
|
||||
<VirtualHost HUB.DOMAIN.TLD:443>
|
||||
|
||||
ServerName HUB.DOMAIN.TLD
|
||||
|
||||
# configure SSL
|
||||
SSLEngine on
|
||||
SSLCertificateFile /etc/letsencrypt/live/HUB.DOMAIN.TLD/fullchain.pem
|
||||
SSLCertificateKeyFile /etc/letsencrypt/live/HUB.DOMAIN.TLD/privkey.pem
|
||||
SSLProtocol All -SSLv2 -SSLv3
|
||||
SSLOpenSSLConfCmd DHParameters /etc/ssl/certs/dhparam.pem
|
||||
SSLCipherSuite EECDH+AESGCM:EDH+AESGCM:AES256+EECDH:AES256+EDH
|
||||
|
||||
# Use RewriteEngine to handle websocket connection upgrades
|
||||
RewriteEngine On
|
||||
RewriteCond %{HTTP:Connection} Upgrade [NC]
|
||||
RewriteCond %{HTTP:Upgrade} websocket [NC]
|
||||
RewriteRule /(.*) ws://127.0.0.1:8000/$1 [P,L]
|
||||
|
||||
<Location "/">
|
||||
# preserve Host header to avoid cross-origin problems
|
||||
ProxyPreserveHost on
|
||||
# proxy to JupyterHub
|
||||
ProxyPass http://127.0.0.1:8000/
|
||||
ProxyPassReverse http://127.0.0.1:8000/
|
||||
</Location>
|
||||
</VirtualHost>
|
||||
```
|
||||
|
@@ -9,6 +9,7 @@ Technical Reference
|
||||
authenticators
|
||||
spawners
|
||||
services
|
||||
proxy
|
||||
rest
|
||||
upgrading
|
||||
config-examples
|
||||
|
183
docs/source/reference/proxy.md
Normal file
183
docs/source/reference/proxy.md
Normal file
@@ -0,0 +1,183 @@
|
||||
# Writing a custom Proxy implementation
|
||||
|
||||
JupyterHub 0.8 introduced the ability to write a custom implementation of the proxy.
|
||||
This enables deployments with different needs than the default proxy,
|
||||
configurable-http-proxy (CHP).
|
||||
CHP is a single-process nodejs proxy that the Hub manages by default as a subprocess
|
||||
(it can be run externally, as well, and typically is in production deployments).
|
||||
|
||||
The upside to CHP, and why we use it by default, is that it's easy to install and run (if you have nodejs, you are set!).
|
||||
The downsides are that it's a single process and does not support any persistence of the routing table.
|
||||
So if the proxy process dies, your whole JupyterHub instance is inaccessible until the Hub notices, restarts the proxy, and restores the routing table.
|
||||
For deployments that want to avoid such a single point of failure,
|
||||
or leverage existing proxy infrastructure in their chosen deployment (such as Kubernetes ingress objects),
|
||||
the Proxy API provides a way to do that.
|
||||
|
||||
In general, for a proxy to be usable by JupyterHub, it must:
|
||||
|
||||
1. support websockets without prior knowledge of the URL where websockets may occur
|
||||
2. support trie-based routing (i.e. allow different routes on `/foo` and `/foo/bar` and route based on specificity)
|
||||
3. adding or removing a route should not cause existing connections to drop
|
||||
|
||||
Optionally, if the JupyterHub deployment is to use host-based routing,
|
||||
the Proxy must additionally support routing based on the Host of the request.
|
||||
|
||||
## Subclassing Proxy
|
||||
|
||||
To start, any Proxy implementation should subclass the base Proxy class,
|
||||
as is done with custom Spawners and Authenticators.
|
||||
|
||||
```python
|
||||
from jupyterhub.proxy import Proxy
|
||||
|
||||
class MyProxy(Proxy):
|
||||
"""My Proxy implementation"""
|
||||
...
|
||||
```
|
||||
|
||||
|
||||
## Starting and stopping the proxy
|
||||
|
||||
If your proxy should be launched when the Hub starts, you must define how to start and stop your proxy:
|
||||
|
||||
```python
|
||||
from tornado import gen
|
||||
class MyProxy(Proxy):
|
||||
...
|
||||
@gen.coroutine
|
||||
def start(self):
|
||||
"""Start the proxy"""
|
||||
|
||||
@gen.coroutine
|
||||
def stop(self):
|
||||
"""Stop the proxy"""
|
||||
```
|
||||
|
||||
These methods **may** be coroutines.
|
||||
|
||||
`c.Proxy.should_start` is a configurable flag that determines whether the Hub should call these methods when the Hub itself starts and stops.
|
||||
|
||||
|
||||
### Purely external proxies
|
||||
|
||||
Probably most custom proxies will be externally managed,
|
||||
such as Kubernetes ingress-based implementations.
|
||||
In this case, you do not need to define `start` and `stop`.
|
||||
To disable the methods, you can define `should_start = False` at the class level:
|
||||
|
||||
```python
|
||||
class MyProxy(Proxy):
|
||||
should_start = False
|
||||
```
|
||||
|
||||
|
||||
## Adding and removing routes
|
||||
|
||||
At its most basic, a Proxy implementation defines a mechanism to add, remove, and retrieve routes.
|
||||
A proxy that implements these three methods is complete.
|
||||
Each of these methods **may** be a coroutine.
|
||||
|
||||
**Definition:** routespec
|
||||
|
||||
A routespec, which will appear in these methods, is a string describing a route to be proxied,
|
||||
such as `/user/name/`. A routespec will:
|
||||
|
||||
1. always end with `/`
|
||||
2. always start with `/` if it is a path-based route `/proxy/path/`
|
||||
3. precede the leading `/` with a host for host-based routing, e.g. `host.tld/proxy/path/`
|
||||
|
||||
|
||||
### Adding a route
|
||||
|
||||
When adding a route, JupyterHub may pass a JSON-serializable dict as a `data` argument
|
||||
that should be attached to the proxy route.
|
||||
When that route is retrieved, the `data` argument should be returned as well.
|
||||
If your proxy implementation doesn't support storing data attached to routes,
|
||||
then your Python wrapper may have to handle storing the `data` piece itself,
|
||||
e.g in a simple file or database.
|
||||
|
||||
```python
|
||||
@gen.coroutine
|
||||
def add_route(self, routespec, target, data):
|
||||
"""Proxy `routespec` to `target`.
|
||||
|
||||
Store `data` associated with the routespec
|
||||
for retrieval later.
|
||||
"""
|
||||
```
|
||||
|
||||
Adding a route for a user looks like this:
|
||||
|
||||
```python
|
||||
proxy.add_route('/user/pgeorgiou/', 'http://127.0.0.1:1227',
|
||||
{'user': 'pgeorgiou'})
|
||||
```
|
||||
|
||||
|
||||
### Removing routes
|
||||
|
||||
`delete_route()` is given a routespec to delete.
|
||||
If there is no such route, `delete_route` should still succeed,
|
||||
but a warning may be issued.
|
||||
|
||||
```python
|
||||
@gen.coroutine
|
||||
def delete_route(self, routespec):
|
||||
"""Delete the route"""
|
||||
```
|
||||
|
||||
|
||||
### Retrieving routes
|
||||
|
||||
For retrieval, you only *need* to implement a single method that retrieves all routes.
|
||||
The return value for this function should be a dictionary, keyed by `routespec`,
|
||||
of dicts whose keys are the same three arguments passed to `add_route`
|
||||
(`routespec`, `target`, `data`)
|
||||
|
||||
```python
|
||||
@gen.coroutine
|
||||
def get_all_routes(self):
|
||||
"""Return all routes, keyed by routespec""""
|
||||
```
|
||||
|
||||
```python
|
||||
{
|
||||
'/proxy/path/': {
|
||||
'routespec': '/proxy/path/',
|
||||
'target': 'http://...',
|
||||
'data': {},
|
||||
},
|
||||
}
|
||||
```
|
||||
|
||||
|
||||
|
||||
#### Note on activity tracking
|
||||
|
||||
JupyterHub can track activity of users, for use in services such as culling idle servers.
|
||||
As of JupyterHub 0.8, this activity tracking is the responsibility of the proxy.
|
||||
If your proxy implementation can track activity to endpoints,
|
||||
it may add a `last_activity` key to the `data` of routes retrieved in `.get_all_routes()`.
|
||||
If present, the value of `last_activity` should be an [ISO8601](https://en.wikipedia.org/wiki/ISO_8601) UTC date string:
|
||||
|
||||
```python
|
||||
{
|
||||
'/user/pgeorgiou/': {
|
||||
'routespec': '/user/pgeorgiou/',
|
||||
'target': 'http://127.0.0.1:1227',
|
||||
'data': {
|
||||
      'user': 'pgeorgiou',
|
||||
'last_activity': '2017-10-03T10:33:49.570Z',
|
||||
},
|
||||
},
|
||||
}
|
||||
```
|
||||
|
||||
|
||||
If the proxy does not track activity, then only activity to the Hub itself is tracked,
|
||||
and services such as cull-idle will not work.
|
||||
|
||||
Now that `notebook-5.0` tracks activity internally,
|
||||
we can retrieve activity information from the single-user servers instead,
|
||||
removing the need to track activity in the proxy.
|
||||
But this is not yet implemented in JupyterHub 0.8.0.
|
@@ -114,10 +114,60 @@ r.raise_for_status()
|
||||
r.json()
|
||||
```
|
||||
|
||||
Note that the API token authorizes **JupyterHub** REST API requests. The same
|
||||
token does **not** authorize access to the [Jupyter Notebook REST API][]
|
||||
provided by notebook servers managed by JupyterHub. A different token is used
|
||||
to access the **Jupyter Notebook** API.
|
||||
The same API token can also authorize access to the [Jupyter Notebook REST API][]
|
||||
provided by notebook servers managed by JupyterHub if one of the following is true:
|
||||
|
||||
1. The token is for the same user as the owner of the notebook
|
||||
2. The token is tied to an admin user or service **and** `c.JupyterHub.admin_access` is set to `True`
|
||||
|
||||
## Enabling users to spawn multiple named-servers via the API
|
||||
|
||||
With JupyterHub version 0.8, support for multiple servers per user has landed.
|
||||
Prior to that, each user could only launch a single default server via the API
|
||||
like this:
|
||||
|
||||
```bash
|
||||
curl -X POST -H "Authorization: token <token>" "http://127.0.0.1:8081/hub/api/users/<user>/server"
|
||||
```
|
||||
|
||||
With the named-server functionality, it's now possible to launch more than one
|
||||
specifically named servers against a given user. This could be used, for instance,
|
||||
to launch each server based on a different image.
|
||||
|
||||
First you must enable named-servers by including the following setting in the `jupyterhub_config.py` file.
|
||||
|
||||
`c.JupyterHub.allow_named_servers = True`
|
||||
|
||||
If using the [zero-to-jupyterhub-k8s](https://github.com/jupyterhub/zero-to-jupyterhub-k8s) set-up to run JupyterHub,
|
||||
then instead of editing the `jupyterhub_config.py` file directly, you could pass
|
||||
the following as part of the `config.yaml` file, as per the [tutorial](https://zero-to-jupyterhub.readthedocs.io/en/latest/):
|
||||
|
||||
```bash
|
||||
hub:
|
||||
extraConfig: |
|
||||
c.JupyterHub.allow_named_servers = True
|
||||
```
|
||||
|
||||
With that setting in place, a new named-server is activated like this:
|
||||
```bash
|
||||
curl -X POST -H "Authorization: token <token>" "http://127.0.0.1:8081/hub/api/users/<user>/servers/<serverA>"
|
||||
curl -X POST -H "Authorization: token <token>" "http://127.0.0.1:8081/hub/api/users/<user>/servers/<serverB>"
|
||||
```
|
||||
|
||||
The same servers can be stopped by substituting `DELETE` for `POST` above.
|
||||
|
||||
### Some caveats for using named-servers
|
||||
|
||||
The named-server capabilities are not fully implemented for JupyterHub as yet.
|
||||
While it's possible to start/stop a server via the API, the UI on the
|
||||
JupyterHub control-panel has not been implemented, and so it may not be obvious
|
||||
to those viewing the panel that a named-server may be running for a given user.
|
||||
|
||||
For named-servers via the API to work, the spawner used to spawn these servers
|
||||
will need to be able to handle the case of multiple servers per user and ensure
|
||||
uniqueness of names, particularly if servers are spawned via docker containers
|
||||
or kubernetes pods.
|
||||
|
||||
|
||||
## Learn more about the API
|
||||
|
||||
|
@@ -178,7 +178,13 @@ When you run a service that has a url, it will be accessible under a
|
||||
your service to route proxied requests properly, it must take
|
||||
`JUPYTERHUB_SERVICE_PREFIX` into account when routing requests. For example, a
|
||||
web service would normally service its root handler at `'/'`, but the proxied
|
||||
service would need to serve `JUPYTERHUB_SERVICE_PREFIX + '/'`.
|
||||
service would need to serve `JUPYTERHUB_SERVICE_PREFIX`.
|
||||
|
||||
Note that `JUPYTERHUB_SERVICE_PREFIX` will contain a trailing slash. This must
|
||||
be taken into consideration when creating the service routes. If you include an
|
||||
extra slash you might get unexpected behavior. For example if your service has a
|
||||
`/foo` endpoint, the route would be `JUPYTERHUB_SERVICE_PREFIX + foo`, and
|
||||
`/foo/bar` would be `JUPYTERHUB_SERVICE_PREFIX + foo/bar`.
|
||||
|
||||
## Hub Authentication and Services
|
||||
|
||||
@@ -200,7 +206,9 @@ or via the `JUPYTERHUB_API_TOKEN` environment variable.
|
||||
|
||||
Most of the logic for authentication implementation is found in the
|
||||
[`HubAuth.user_for_cookie`](services.auth.html#jupyterhub.services.auth.HubAuth.user_for_cookie)
|
||||
method, which makes a request of the Hub, and returns:
|
||||
and in the
|
||||
[`HubAuth.user_for_token`](services.auth.html#jupyterhub.services.auth.HubAuth.user_for_token)
|
||||
methods, which make a request of the Hub, and return:
|
||||
|
||||
- None, if no user could be identified, or
|
||||
- a dict of the following form:
|
||||
@@ -252,8 +260,11 @@ def authenticated(f):
|
||||
@wraps(f)
|
||||
def decorated(*args, **kwargs):
|
||||
cookie = request.cookies.get(auth.cookie_name)
|
||||
token = request.headers.get(auth.auth_header_name)
|
||||
if cookie:
|
||||
user = auth.user_for_cookie(cookie)
|
||||
elif token:
|
||||
user = auth.user_for_token(token)
|
||||
else:
|
||||
user = None
|
||||
if user:
|
||||
@@ -264,7 +275,7 @@ def authenticated(f):
|
||||
return decorated
|
||||
|
||||
|
||||
@app.route(prefix + '/')
|
||||
@app.route(prefix)
|
||||
@authenticated
|
||||
def whoami(user):
|
||||
return Response(
|
||||
|
@@ -4,4 +4,11 @@ Tutorials
|
||||
This section provides links to documentation that helps a user do a specific
|
||||
task.
|
||||
|
||||
- `Zero to JupyterHub with Kubernetes <https://zero-to-jupyterhub.readthedocs.io/en/latest/>`_
|
||||
* :doc:`upgrade-dot-eight`
|
||||
* `Zero to JupyterHub with Kubernetes <https://zero-to-jupyterhub.readthedocs.io/en/latest/>`_
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 1
|
||||
:hidden:
|
||||
|
||||
upgrade-dot-eight
|
||||
|
93
docs/source/tutorials/upgrade-dot-eight.rst
Normal file
93
docs/source/tutorials/upgrade-dot-eight.rst
Normal file
@@ -0,0 +1,93 @@
|
||||
.. upgrade-dot-eight:
|
||||
|
||||
Upgrading to JupyterHub version 0.8
|
||||
===================================
|
||||
|
||||
This document will assist you in upgrading an existing JupyterHub deployment
|
||||
from version 0.7 to version 0.8.
|
||||
|
||||
Upgrade checklist
|
||||
-----------------
|
||||
|
||||
0. Review the release notes. Review any deprecated features and pay attention
|
||||
to any backwards incompatible changes
|
||||
1. Backup JupyterHub database:
|
||||
- ``jupyterhub.sqlite`` when using the default sqlite database
|
||||
- Your JupyterHub database when using an RDBMS
|
||||
2. Backup the existing JupyterHub configuration file: ``jupyterhub_config.py``
|
||||
3. Shutdown the Hub
|
||||
4. Upgrade JupyterHub
|
||||
- ``pip install -U jupyterhub`` when using ``pip``
|
||||
- ``conda upgrade jupyterhub`` when using ``conda``
|
||||
5. Upgrade the database using ``jupyterhub upgrade-db``
|
||||
6. Update the JupyterHub configuration file ``jupyterhub_config.py``
|
||||
|
||||
Backup JupyterHub database
|
||||
--------------------------
|
||||
|
||||
To prevent unintended loss of data or configuration information, you should
|
||||
back up the JupyterHub database (the default SQLite database or a RDBMS
|
||||
database using PostgreSQL, MySQL, or others supported by SQLAlchemy):
|
||||
|
||||
- If using the default SQLite database, back up the ``jupyterhub.sqlite``
|
||||
database.
|
||||
- If using an RDBMS database such as PostgreSQL, MySQL, or other supported by
|
||||
SQLAlchemy, back up the JupyterHub database.
|
||||
|
||||
.. note::
|
||||
|
||||
Losing the Hub database is often not a big deal. Information that resides only
|
||||
in the Hub database includes:
|
||||
|
||||
- active login tokens (user cookies, service tokens)
|
||||
- users added via GitHub UI, instead of config files
|
||||
- info about running servers
|
||||
|
||||
If the following conditions are true, you should be fine clearing the Hub
|
||||
database and starting over:
|
||||
|
||||
- users specified in config file
|
||||
- user servers are stopped during upgrade
|
||||
- don't mind causing users to login again after upgrade
|
||||
|
||||
Backup JupyterHub configuration file
|
||||
------------------------------------
|
||||
|
||||
Back up your configuration file, ``jupyterhub_config.py``, to a secure
|
||||
location.
|
||||
|
||||
Shutdown JupyterHub
|
||||
-------------------
|
||||
|
||||
- Prior to shutting down JupyterHub, you should notify the Hub users of the
|
||||
scheduled downtime.
|
||||
- Shutdown the JupyterHub service.
|
||||
|
||||
Upgrade JupyterHub
|
||||
------------------
|
||||
|
||||
Follow directions that correspond to your package manager, ``pip`` or ``conda``,
|
||||
for the new JupyterHub release:
|
||||
|
||||
- ``pip install -U jupyterhub`` for ``pip``
|
||||
- ``conda upgrade jupyterhub`` for ``conda``
|
||||
|
||||
Upgrade the proxy, authenticator, or spawner if needed.
|
||||
|
||||
Upgrade JupyterHub database
|
||||
---------------------------
|
||||
|
||||
To run the upgrade process for JupyterHub databases, enter::
|
||||
|
||||
jupyterhub upgrade-db
|
||||
|
||||
Update the JupyterHub configuration file
|
||||
----------------------------------------
|
||||
|
||||
Create a new JupyterHub configuration file or edit a copy of the existing
|
||||
file ``jupyterhub_config.py``.
|
||||
|
||||
Start JupyterHub
|
||||
----------------
|
||||
|
||||
Start JupyterHub with the same command that you used before the upgrade.
|
@@ -40,8 +40,11 @@ from tornado.options import define, options, parse_command_line
|
||||
|
||||
|
||||
@coroutine
|
||||
def cull_idle(url, api_token, timeout):
|
||||
"""cull idle single-user servers"""
|
||||
def cull_idle(url, api_token, timeout, cull_users=False):
|
||||
"""Shutdown idle single-user servers
|
||||
|
||||
If cull_users, inactive *users* will be deleted as well.
|
||||
"""
|
||||
auth_header = {
|
||||
'Authorization': 'token %s' % api_token
|
||||
}
|
||||
@@ -54,26 +57,50 @@ def cull_idle(url, api_token, timeout):
|
||||
resp = yield client.fetch(req)
|
||||
users = json.loads(resp.body.decode('utf8', 'replace'))
|
||||
futures = []
|
||||
for user in users:
|
||||
last_activity = parse_date(user['last_activity'])
|
||||
if user['server'] and last_activity < cull_limit:
|
||||
app_log.info("Culling %s (inactive since %s)", user['name'], last_activity)
|
||||
|
||||
@coroutine
|
||||
def cull_one(user, last_activity):
|
||||
"""cull one user"""
|
||||
|
||||
# shutdown server first. Hub doesn't allow deleting users with running servers.
|
||||
if user['server']:
|
||||
app_log.info("Culling server for %s (inactive since %s)", user['name'], last_activity)
|
||||
req = HTTPRequest(url=url + '/users/%s/server' % user['name'],
|
||||
method='DELETE',
|
||||
headers=auth_header,
|
||||
)
|
||||
futures.append((user['name'], client.fetch(req)))
|
||||
elif user['server'] and last_activity > cull_limit:
|
||||
yield client.fetch(req)
|
||||
if cull_users:
|
||||
app_log.info("Culling user %s (inactive since %s)", user['name'], last_activity)
|
||||
req = HTTPRequest(url=url + '/users/%s' % user['name'],
|
||||
method='DELETE',
|
||||
headers=auth_header,
|
||||
)
|
||||
yield client.fetch(req)
|
||||
|
||||
for user in users:
|
||||
if not user['server'] and not cull_users:
|
||||
# server not running and not culling users, nothing to do
|
||||
continue
|
||||
last_activity = parse_date(user['last_activity'])
|
||||
if last_activity < cull_limit:
|
||||
futures.append((user['name'], cull_one(user, last_activity)))
|
||||
else:
|
||||
app_log.debug("Not culling %s (active since %s)", user['name'], last_activity)
|
||||
|
||||
for (name, f) in futures:
|
||||
yield f
|
||||
app_log.debug("Finished culling %s", name)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
define('url', default=os.environ.get('JUPYTERHUB_API_URL'), help="The JupyterHub API URL")
|
||||
define('timeout', default=600, help="The idle timeout (in seconds)")
|
||||
define('cull_every', default=0, help="The interval (in seconds) for checking for idle servers to cull")
|
||||
define('cull_users', default=False,
|
||||
help="""Cull users in addition to servers.
|
||||
This is for use in temporary-user cases such as tmpnb.""",
|
||||
)
|
||||
|
||||
parse_command_line()
|
||||
if not options.cull_every:
|
||||
@@ -82,7 +109,7 @@ if __name__ == '__main__':
|
||||
api_token = os.environ['JUPYTERHUB_API_TOKEN']
|
||||
|
||||
loop = IOLoop.current()
|
||||
cull = lambda : cull_idle(options.url, api_token, options.timeout)
|
||||
cull = lambda : cull_idle(options.url, api_token, options.timeout, options.cull_users)
|
||||
# run once before scheduling periodic call
|
||||
loop.run_sync(cull)
|
||||
# schedule periodic cull
|
||||
|
@@ -8,7 +8,7 @@ Uses `jupyterhub.services.HubAuth` to authenticate requests with the Hub in a [f
|
||||
|
||||
jupyterhub --ip=127.0.0.1
|
||||
|
||||
2. Visit http://127.0.0.1:8000/services/whoami
|
||||
2. Visit http://127.0.0.1:8000/services/whoami/ or http://127.0.0.1:8000/services/whoami-oauth/
|
||||
|
||||
After logging in with your local-system credentials, you should see a JSON dump of your user info:
|
||||
|
||||
|
@@ -9,5 +9,13 @@ c.JupyterHub.services = [
|
||||
'environment': {
|
||||
'FLASK_APP': 'whoami-flask.py',
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
'name': 'whoami-oauth',
|
||||
'url': 'http://127.0.0.1:10201',
|
||||
'command': ['flask', 'run', '--port=10201'],
|
||||
'environment': {
|
||||
'FLASK_APP': 'whoami-oauth.py',
|
||||
}
|
||||
},
|
||||
]
|
||||
|
@@ -17,7 +17,7 @@ prefix = os.environ.get('JUPYTERHUB_SERVICE_PREFIX', '/')
|
||||
|
||||
auth = HubAuth(
|
||||
api_token=os.environ['JUPYTERHUB_API_TOKEN'],
|
||||
cookie_cache_max_age=60,
|
||||
cache_max_age=60,
|
||||
)
|
||||
|
||||
app = Flask(__name__)
|
||||
@@ -28,8 +28,11 @@ def authenticated(f):
|
||||
@wraps(f)
|
||||
def decorated(*args, **kwargs):
|
||||
cookie = request.cookies.get(auth.cookie_name)
|
||||
token = request.headers.get(auth.auth_header_name)
|
||||
if cookie:
|
||||
user = auth.user_for_cookie(cookie)
|
||||
elif token:
|
||||
user = auth.user_for_token(token)
|
||||
else:
|
||||
user = None
|
||||
if user:
|
||||
@@ -40,7 +43,7 @@ def authenticated(f):
|
||||
return decorated
|
||||
|
||||
|
||||
@app.route(prefix + '/')
|
||||
@app.route(prefix)
|
||||
@authenticated
|
||||
def whoami(user):
|
||||
return Response(
|
||||
|
70
examples/service-whoami-flask/whoami-oauth.py
Normal file
70
examples/service-whoami-flask/whoami-oauth.py
Normal file
@@ -0,0 +1,70 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
whoami service authentication with the Hub
|
||||
"""
|
||||
|
||||
from functools import wraps
|
||||
import json
|
||||
import os
|
||||
|
||||
from flask import Flask, redirect, request, Response, make_response
|
||||
|
||||
from jupyterhub.services.auth import HubOAuth
|
||||
|
||||
|
||||
prefix = os.environ.get('JUPYTERHUB_SERVICE_PREFIX', '/')
|
||||
|
||||
auth = HubOAuth(
|
||||
api_token=os.environ['JUPYTERHUB_API_TOKEN'],
|
||||
cache_max_age=60,
|
||||
)
|
||||
|
||||
app = Flask(__name__)
|
||||
|
||||
|
||||
def authenticated(f):
|
||||
"""Decorator for authenticating with the Hub via OAuth"""
|
||||
@wraps(f)
|
||||
def decorated(*args, **kwargs):
|
||||
token = request.cookies.get(auth.cookie_name)
|
||||
if token:
|
||||
user = auth.user_for_token(token)
|
||||
else:
|
||||
user = None
|
||||
if user:
|
||||
return f(user, *args, **kwargs)
|
||||
else:
|
||||
# redirect to login url on failed auth
|
||||
state = auth.generate_state(next_url=request.path)
|
||||
response = make_response(redirect(auth.login_url + '&state=%s' % state))
|
||||
response.set_cookie(auth.state_cookie_name, state)
|
||||
return response
|
||||
return decorated
|
||||
|
||||
|
||||
@app.route(prefix)
|
||||
@authenticated
|
||||
def whoami(user):
|
||||
return Response(
|
||||
json.dumps(user, indent=1, sort_keys=True),
|
||||
mimetype='application/json',
|
||||
)
|
||||
|
||||
@app.route(prefix + 'oauth_callback')
|
||||
def oauth_callback():
|
||||
code = request.args.get('code', None)
|
||||
if code is None:
|
||||
return 403
|
||||
|
||||
# validate state field
|
||||
arg_state = request.args.get('state', None)
|
||||
cookie_state = request.cookies.get(auth.state_cookie_name)
|
||||
if arg_state is None or arg_state != cookie_state:
|
||||
# state doesn't match
|
||||
return 403
|
||||
|
||||
token = auth.token_for_code(code)
|
||||
next_url = auth.get_next_url(cookie_state) or prefix
|
||||
response = make_response(redirect(next_url))
|
||||
response.set_cookie(auth.cookie_name, token)
|
||||
return response
|
@@ -2,13 +2,15 @@
|
||||
|
||||
Uses `jupyterhub.services.HubAuthenticated` to authenticate requests with the Hub.
|
||||
|
||||
There is an implementation each of cookie-based `HubAuthenticated` and OAuth-based `HubOAuthenticated`.
|
||||
|
||||
## Run
|
||||
|
||||
1. Launch JupyterHub and the `whoami service` with
|
||||
|
||||
jupyterhub --ip=127.0.0.1
|
||||
|
||||
2. Visit http://127.0.0.1:8000/services/whoami
|
||||
2. Visit http://127.0.0.1:8000/services/whoami or http://127.0.0.1:8000/services/whoami-oauth
|
||||
|
||||
After logging in with your local-system credentials, you should see a JSON dump of your user info:
|
||||
|
||||
|
@@ -6,5 +6,10 @@ c.JupyterHub.services = [
|
||||
'name': 'whoami',
|
||||
'url': 'http://127.0.0.1:10101',
|
||||
'command': [sys.executable, './whoami.py'],
|
||||
}
|
||||
},
|
||||
{
|
||||
'name': 'whoami-oauth',
|
||||
'url': 'http://127.0.0.1:10102',
|
||||
'command': [sys.executable, './whoami-oauth.py'],
|
||||
},
|
||||
]
|
||||
|
@@ -13,10 +13,10 @@ from tornado.ioloop import IOLoop
|
||||
from tornado.httpserver import HTTPServer
|
||||
from tornado.web import RequestHandler, Application, authenticated
|
||||
|
||||
from jupyterhub.services.auth import HubAuthenticated
|
||||
from jupyterhub.services.auth import HubOAuthenticated, HubOAuthCallbackHandler
|
||||
from jupyterhub.utils import url_path_join
|
||||
|
||||
|
||||
class WhoAmIHandler(HubAuthenticated, RequestHandler):
|
||||
class WhoAmIHandler(HubOAuthenticated, RequestHandler):
|
||||
hub_users = {getuser()} # the users allowed to access this service
|
||||
|
||||
@authenticated
|
||||
@@ -27,9 +27,10 @@ class WhoAmIHandler(HubAuthenticated, RequestHandler):
|
||||
|
||||
def main():
|
||||
app = Application([
|
||||
(os.environ['JUPYTERHUB_SERVICE_PREFIX'] + '/?', WhoAmIHandler),
|
||||
(os.environ['JUPYTERHUB_SERVICE_PREFIX'], WhoAmIHandler),
|
||||
(url_path_join(os.environ['JUPYTERHUB_SERVICE_PREFIX'], 'oauth_callback'), HubOAuthCallbackHandler),
|
||||
(r'.*', WhoAmIHandler),
|
||||
])
|
||||
], cookie_secret=os.urandom(32))
|
||||
|
||||
http_server = HTTPServer(app)
|
||||
url = urlparse(os.environ['JUPYTERHUB_SERVICE_URL'])
|
@@ -27,7 +27,7 @@ def main():
|
||||
app = Application([
|
||||
(os.environ['JUPYTERHUB_SERVICE_PREFIX'] + '/?', WhoAmIHandler),
|
||||
(r'.*', WhoAmIHandler),
|
||||
], login_url='/hub/login')
|
||||
])
|
||||
|
||||
http_server = HTTPServer(app)
|
||||
url = urlparse(os.environ['JUPYTERHUB_SERVICE_URL'])
|
||||
|
@@ -13,7 +13,8 @@ def get_data_files():
|
||||
# walk up, looking for prefix/share/jupyter
|
||||
while path != '/':
|
||||
share_jupyter = join(path, 'share', 'jupyter', 'hub')
|
||||
if exists(join(share_jupyter, 'static', 'components')):
|
||||
static = join(share_jupyter, 'static')
|
||||
if all(exists(join(static, f)) for f in ['components', 'css']):
|
||||
return share_jupyter
|
||||
path, _ = split(path)
|
||||
# didn't find it, give up
|
||||
|
@@ -6,8 +6,8 @@
|
||||
version_info = (
|
||||
0,
|
||||
8,
|
||||
0,
|
||||
'b1',
|
||||
1,
|
||||
# 'dev',
|
||||
)
|
||||
|
||||
__version__ = '.'.join(map(str, version_info))
|
||||
@@ -28,6 +28,7 @@ def _check_version(hub_version, singleuser_version, log):
|
||||
from distutils.version import LooseVersion as V
|
||||
hub_major_minor = V(hub_version).version[:2]
|
||||
singleuser_major_minor = V(singleuser_version).version[:2]
|
||||
extra = ""
|
||||
if singleuser_major_minor == hub_major_minor:
|
||||
# patch-level mismatch or lower, log difference at debug-level
|
||||
# because this should be fine
|
||||
@@ -35,8 +36,11 @@ def _check_version(hub_version, singleuser_version, log):
|
||||
else:
|
||||
# log warning-level for more significant mismatch, such as 0.8 vs 0.9, etc.
|
||||
log_method = log.warning
|
||||
log_method("jupyterhub version %s != jupyterhub-singleuser version %s",
|
||||
hub_version, singleuser_version,
|
||||
extra = " This could cause failure to authenticate and result in redirect loops!"
|
||||
log_method(
|
||||
"jupyterhub version %s != jupyterhub-singleuser version %s." + extra,
|
||||
hub_version,
|
||||
singleuser_version,
|
||||
)
|
||||
else:
|
||||
log.debug("jupyterhub and jupyterhub-singleuser both on version %s" % hub_version)
|
||||
|
@@ -12,9 +12,16 @@ config = context.config
|
||||
# Interpret the config file for Python logging.
|
||||
# This line sets up loggers basically.
|
||||
if 'jupyterhub' in sys.modules:
|
||||
from traitlets.config import MultipleInstanceError
|
||||
from jupyterhub.app import JupyterHub
|
||||
app = None
|
||||
if JupyterHub.initialized():
|
||||
app = JupyterHub.instance()
|
||||
try:
|
||||
app = JupyterHub.instance()
|
||||
except MultipleInstanceError:
|
||||
# could have been another Application
|
||||
pass
|
||||
if app is not None:
|
||||
alembic_logger = logging.getLogger('alembic')
|
||||
alembic_logger.propagate = True
|
||||
alembic_logger.parent = app.log
|
||||
|
@@ -36,6 +36,10 @@ def upgrade():
|
||||
# drop some columns no longer in use
|
||||
try:
|
||||
op.drop_column('users', 'auth_state')
|
||||
# mysql cannot drop _server_id without also dropping
|
||||
# implicitly created foreign key
|
||||
if op.get_context().dialect.name == 'mysql':
|
||||
op.drop_constraint('users_ibfk_1', 'users', type_='foreignkey')
|
||||
op.drop_column('users', '_server_id')
|
||||
except sa.exc.OperationalError:
|
||||
# this won't be a problem moving forward, but downgrade will fail
|
||||
|
@@ -41,15 +41,27 @@ class TokenAPIHandler(APIHandler):
|
||||
# for authenticators where that's possible
|
||||
data = self.get_json_body()
|
||||
try:
|
||||
authenticated = yield self.authenticate(self, data)
|
||||
user = yield self.login_user(data)
|
||||
except Exception as e:
|
||||
self.log.error("Failure trying to authenticate with form data: %s" % e)
|
||||
authenticated = None
|
||||
if authenticated is None:
|
||||
user = None
|
||||
if user is None:
|
||||
raise web.HTTPError(403)
|
||||
user = self.find_user(authenticated['name'])
|
||||
else:
|
||||
data = self.get_json_body()
|
||||
# admin users can request
|
||||
if data and data.get('username') != user.name:
|
||||
if user.admin:
|
||||
user = self.find_user(data['username'])
|
||||
if user is None:
|
||||
raise web.HTTPError(400, "No such user '%s'" % data['username'])
|
||||
else:
|
||||
raise web.HTTPError(403, "Only admins can request tokens for other users.")
|
||||
api_token = user.new_api_token()
|
||||
self.write(json.dumps({'token': api_token}))
|
||||
self.write(json.dumps({
|
||||
'token': api_token,
|
||||
'user': self.user_model(user),
|
||||
}))
|
||||
|
||||
|
||||
class CookieAPIHandler(APIHandler):
|
||||
|
@@ -104,22 +104,17 @@ class APIHandler(BaseHandler):
|
||||
'pending': None,
|
||||
'last_activity': user.last_activity.isoformat(),
|
||||
}
|
||||
if user.spawners['']._spawn_pending:
|
||||
model['pending'] = 'spawn'
|
||||
elif user.spawners['']._stop_pending:
|
||||
model['pending'] = 'stop'
|
||||
model['pending'] = user.spawners[''].pending or None
|
||||
|
||||
if self.allow_named_servers:
|
||||
servers = model['servers'] = {}
|
||||
for name, spawner in user.spawners.items():
|
||||
if spawner.ready:
|
||||
servers[name] = s = {'name': name}
|
||||
if spawner._spawn_pending:
|
||||
s['pending'] = 'spawn'
|
||||
elif spawner._stop_pending:
|
||||
s['pending'] = 'stop'
|
||||
if spawner.pending:
|
||||
s['pending'] = spawner.pending
|
||||
if spawner.server:
|
||||
s['url'] = user.url + name
|
||||
s['url'] = url_path_join(user.url, name, '/')
|
||||
return model
|
||||
|
||||
def group_model(self, group):
|
||||
|
@@ -178,19 +178,32 @@ class UserAPIHandler(APIHandler):
|
||||
|
||||
class UserServerAPIHandler(APIHandler):
|
||||
"""Start and stop single-user servers"""
|
||||
|
||||
@gen.coroutine
|
||||
@admin_or_self
|
||||
def post(self, name, server_name=''):
|
||||
user = self.find_user(name)
|
||||
if server_name:
|
||||
if not self.allow_named_servers:
|
||||
raise web.HTTPError(400, "Named servers are not enabled.")
|
||||
if server_name and not self.allow_named_servers:
|
||||
raise web.HTTPError(400, "Named servers are not enabled.")
|
||||
spawner = user.spawners[server_name]
|
||||
pending = spawner.pending
|
||||
if pending == 'spawn':
|
||||
self.set_header('Content-Type', 'text/plain')
|
||||
self.set_status(202)
|
||||
return
|
||||
elif pending:
|
||||
raise web.HTTPError(400, "%s is pending %s" % (spawner._log_name, pending))
|
||||
|
||||
if spawner.ready:
|
||||
# include notify, so that a server that died is noticed immediately
|
||||
state = yield spawner.poll_and_notify()
|
||||
# set _spawn_pending flag to prevent races while we wait
|
||||
spawner._spawn_pending = True
|
||||
try:
|
||||
state = yield spawner.poll_and_notify()
|
||||
finally:
|
||||
spawner._spawn_pending = False
|
||||
if state is None:
|
||||
raise web.HTTPError(400, "%s's server %s is already running" % (name, server_name))
|
||||
raise web.HTTPError(400, "%s is already running" % spawner._log_name)
|
||||
|
||||
options = self.get_json_body()
|
||||
yield self.spawn_single_user(user, server_name, options=options)
|
||||
@@ -209,17 +222,21 @@ class UserServerAPIHandler(APIHandler):
|
||||
raise web.HTTPError(404, "%s has no server named '%s'" % (name, server_name))
|
||||
|
||||
spawner = user.spawners[server_name]
|
||||
|
||||
if spawner._stop_pending:
|
||||
if spawner.pending == 'stop':
|
||||
self.log.debug("%s already stopping", spawner._log_name)
|
||||
self.set_header('Content-Type', 'text/plain')
|
||||
self.set_status(202)
|
||||
return
|
||||
|
||||
if not spawner.ready:
|
||||
raise web.HTTPError(400, "%s's server %s is not running" % (name, server_name))
|
||||
raise web.HTTPError(
|
||||
400, "%s is not running %s" %
|
||||
(spawner._log_name, '(pending: %s)' % spawner.pending if spawner.pending else '')
|
||||
)
|
||||
# include notify, so that a server that died is noticed immediately
|
||||
status = yield spawner.poll_and_notify()
|
||||
if status is not None:
|
||||
raise web.HTTPError(400, "%s's server %s is not running" % (name, server_name))
|
||||
raise web.HTTPError(400, "%s is not running" % spawner._log_name)
|
||||
yield self.stop_single_user(user, server_name)
|
||||
status = 202 if spawner._stop_pending else 204
|
||||
self.set_header('Content-Type', 'text/plain')
|
||||
|
@@ -12,7 +12,6 @@ import logging
|
||||
from operator import itemgetter
|
||||
import os
|
||||
import re
|
||||
import shutil
|
||||
import signal
|
||||
import sys
|
||||
from textwrap import dedent
|
||||
@@ -31,6 +30,8 @@ from tornado.ioloop import IOLoop, PeriodicCallback
|
||||
from tornado.log import app_log, access_log, gen_log
|
||||
import tornado.options
|
||||
from tornado import gen, web
|
||||
from tornado.platform.asyncio import AsyncIOMainLoop
|
||||
AsyncIOMainLoop().install()
|
||||
|
||||
from traitlets import (
|
||||
Unicode, Integer, Dict, TraitError, List, Bool, Any,
|
||||
@@ -62,7 +63,7 @@ from .utils import (
|
||||
from .auth import Authenticator, PAMAuthenticator
|
||||
from .crypto import CryptKeeper
|
||||
from .spawner import Spawner, LocalProcessSpawner
|
||||
from .objects import Hub
|
||||
from .objects import Hub, Server
|
||||
|
||||
# For faking stats
|
||||
from .emptyclass import EmptyClass
|
||||
@@ -98,6 +99,13 @@ flags = {
|
||||
'no-db': ({'JupyterHub': {'db_url': 'sqlite:///:memory:'}},
|
||||
"disable persisting state database to disk"
|
||||
),
|
||||
'upgrade-db': ({'JupyterHub': {'upgrade_db': True}},
|
||||
"""Automatically upgrade the database if needed on startup.
|
||||
|
||||
Only safe if the database has been backed up.
|
||||
Only SQLite database files will be backed up automatically.
|
||||
"""
|
||||
),
|
||||
'no-ssl': ({'JupyterHub': {'confirm_no_ssl': True}},
|
||||
"[DEPRECATED in 0.7: does nothing]"
|
||||
),
|
||||
@@ -164,32 +172,11 @@ class UpgradeDB(Application):
|
||||
aliases = common_aliases
|
||||
classes = []
|
||||
|
||||
def _backup_db_file(self, db_file):
|
||||
"""Backup a database file"""
|
||||
if not os.path.exists(db_file):
|
||||
return
|
||||
|
||||
timestamp = datetime.now().strftime('.%Y-%m-%d-%H%M%S')
|
||||
backup_db_file = db_file + timestamp
|
||||
for i in range(1, 10):
|
||||
if not os.path.exists(backup_db_file):
|
||||
break
|
||||
backup_db_file = '{}.{}.{}'.format(db_file, timestamp, i)
|
||||
if os.path.exists(backup_db_file):
|
||||
self.exit("backup db file already exists: %s" % backup_db_file)
|
||||
|
||||
self.log.info("Backing up %s => %s", db_file, backup_db_file)
|
||||
shutil.copy(db_file, backup_db_file)
|
||||
|
||||
def start(self):
|
||||
hub = JupyterHub(parent=self)
|
||||
hub.load_config_file(hub.config_file)
|
||||
self.log = hub.log
|
||||
if (hub.db_url.startswith('sqlite:///')):
|
||||
db_file = hub.db_url.split(':///', 1)[1]
|
||||
self._backup_db_file(db_file)
|
||||
self.log.info("Upgrading %s", hub.db_url)
|
||||
dbutil.upgrade(hub.db_url)
|
||||
dbutil.upgrade_if_needed(hub.db_url, log=self.log)
|
||||
|
||||
|
||||
class JupyterHub(Application):
|
||||
@@ -291,13 +278,13 @@ class JupyterHub(Application):
|
||||
ssl_key = Unicode('',
|
||||
help="""Path to SSL key file for the public facing interface of the proxy
|
||||
|
||||
Use with ssl_cert
|
||||
When setting this, you should also set ssl_cert
|
||||
"""
|
||||
).tag(config=True)
|
||||
ssl_cert = Unicode('',
|
||||
help="""Path to SSL certificate file for the public facing interface of the proxy
|
||||
|
||||
Use with ssl_key
|
||||
When setting this, you should also set ssl_key
|
||||
"""
|
||||
).tag(config=True)
|
||||
ip = Unicode('',
|
||||
@@ -360,7 +347,7 @@ class JupyterHub(Application):
|
||||
proxy_cmd = Command([], config=True,
|
||||
help="DEPRECATED since version 0.8. Use ConfigurableHTTPProxy.command",
|
||||
).tag(config=True)
|
||||
|
||||
|
||||
debug_proxy = Bool(False,
|
||||
help="DEPRECATED since version 0.8: Use ConfigurableHTTPProxy.debug",
|
||||
).tag(config=True)
|
||||
@@ -465,7 +452,7 @@ class JupyterHub(Application):
|
||||
help="""The cookie secret to use to encrypt cookies.
|
||||
|
||||
Loaded from the JPY_COOKIE_SECRET env variable by default.
|
||||
|
||||
|
||||
Should be exactly 256 bits (32 bytes).
|
||||
"""
|
||||
).tag(
|
||||
@@ -626,6 +613,12 @@ class JupyterHub(Application):
|
||||
"""
|
||||
).tag(config=True)
|
||||
|
||||
upgrade_db = Bool(False,
|
||||
help="""Upgrade the database automatically on start.
|
||||
|
||||
Only safe if database is regularly backed up.
|
||||
Only SQLite databases will be backed up to a local file automatically.
|
||||
""").tag(config=True)
|
||||
reset_db = Bool(False,
|
||||
help="Purge and reset the database."
|
||||
).tag(config=True)
|
||||
@@ -801,12 +794,10 @@ class JupyterHub(Application):
|
||||
self.handlers = self.add_url_prefix(self.hub_prefix, h)
|
||||
# some extra handlers, outside hub_prefix
|
||||
self.handlers.extend([
|
||||
(r"%s" % self.hub_prefix.rstrip('/'), web.RedirectHandler,
|
||||
{
|
||||
"url": self.hub_prefix,
|
||||
"permanent": False,
|
||||
}
|
||||
),
|
||||
# add trailing / to `/hub`
|
||||
(self.hub_prefix.rstrip('/'), handlers.AddSlashHandler),
|
||||
# add trailing / to ``/user|services/:name`
|
||||
(r"%s(user|services)/([^/]+)" % self.base_url, handlers.AddSlashHandler),
|
||||
(r"(?!%s).*" % self.hub_prefix, handlers.PrefixRedirectHandler),
|
||||
(r'(.*)', handlers.Template404),
|
||||
])
|
||||
@@ -891,7 +882,11 @@ class JupyterHub(Application):
|
||||
|
||||
def init_db(self):
|
||||
"""Create the database connection"""
|
||||
|
||||
self.log.debug("Connecting to db: %s", self.db_url)
|
||||
if self.upgrade_db:
|
||||
dbutil.upgrade_if_needed(self.db_url, log=self.log)
|
||||
|
||||
try:
|
||||
self.session_factory = orm.new_session_factory(
|
||||
self.db_url,
|
||||
@@ -1180,7 +1175,7 @@ class JupyterHub(Application):
|
||||
if not service.url:
|
||||
continue
|
||||
try:
|
||||
yield service.orm.server.wait_up(timeout=1)
|
||||
yield Server.from_orm(service.orm.server).wait_up(timeout=1)
|
||||
except TimeoutError:
|
||||
self.log.warning("Cannot connect to %s service %s at %s", service.kind, name, service.url)
|
||||
else:
|
||||
@@ -1221,7 +1216,7 @@ class JupyterHub(Application):
|
||||
status = yield spawner.poll()
|
||||
except Exception:
|
||||
self.log.exception("Failed to poll spawner for %s, assuming the spawner is not running.",
|
||||
user.name if name else '%s|%s' % (user.name, name))
|
||||
spawner._log_name)
|
||||
status = -1
|
||||
|
||||
if status is None:
|
||||
@@ -1232,11 +1227,13 @@ class JupyterHub(Application):
|
||||
# user not running. This is expected if server is None,
|
||||
# but indicates the user's server died while the Hub wasn't running
|
||||
# if spawner.server is defined.
|
||||
log = self.log.warning if spawner.server else self.log.debug
|
||||
log("%s not running.", user.name)
|
||||
# remove all server or servers entry from db related to the user
|
||||
if spawner.server:
|
||||
self.log.warning("%s appears to have stopped while the Hub was down", spawner._log_name)
|
||||
# remove server entry from db
|
||||
db.delete(spawner.orm_spawner.server)
|
||||
spawner.server = None
|
||||
else:
|
||||
self.log.debug("%s not running", spawner._log_name)
|
||||
db.commit()
|
||||
|
||||
user_summaries.append(_user_summary(user))
|
||||
@@ -1557,7 +1554,7 @@ class JupyterHub(Application):
|
||||
tries = 10 if service.managed else 1
|
||||
for i in range(tries):
|
||||
try:
|
||||
yield service.orm.server.wait_up(http=True, timeout=1)
|
||||
yield Server.from_orm(service.orm.server).wait_up(http=True, timeout=1)
|
||||
except TimeoutError:
|
||||
if service.managed:
|
||||
status = yield service.spawner.poll()
|
||||
@@ -1568,7 +1565,7 @@ class JupyterHub(Application):
|
||||
break
|
||||
else:
|
||||
self.log.error("Cannot connect to %s service %s at %s. Is it running?", service.kind, service_name, service.url)
|
||||
|
||||
|
||||
yield self.proxy.check_routes(self.users, self._service_map)
|
||||
|
||||
|
||||
|
@@ -144,6 +144,12 @@ class Authenticator(LoggingConfigurable):
|
||||
|
||||
Return True if username is valid, False otherwise.
|
||||
"""
|
||||
if '/' in username:
|
||||
# / is not allowed in usernames
|
||||
return False
|
||||
if not username:
|
||||
# empty usernames are not allowed
|
||||
return False
|
||||
if not self.username_regex:
|
||||
return True
|
||||
return bool(self.username_regex.match(username))
|
||||
|
@@ -5,11 +5,17 @@
|
||||
# Based on pgcontents.utils.migrate, used under the Apache license.
|
||||
|
||||
from contextlib import contextmanager
|
||||
from datetime import datetime
|
||||
import os
|
||||
import shutil
|
||||
from subprocess import check_call
|
||||
import sys
|
||||
from tempfile import TemporaryDirectory
|
||||
|
||||
from sqlalchemy import create_engine
|
||||
|
||||
from . import orm
|
||||
|
||||
_here = os.path.abspath(os.path.dirname(__file__))
|
||||
|
||||
ALEMBIC_INI_TEMPLATE_PATH = os.path.join(_here, 'alembic.ini')
|
||||
@@ -84,6 +90,46 @@ def upgrade(db_url, revision='head'):
|
||||
)
|
||||
|
||||
|
||||
def backup_db_file(db_file, log=None):
|
||||
"""Backup a database file if it exists"""
|
||||
timestamp = datetime.now().strftime('.%Y-%m-%d-%H%M%S')
|
||||
backup_db_file = db_file + timestamp
|
||||
for i in range(1, 10):
|
||||
if not os.path.exists(backup_db_file):
|
||||
break
|
||||
backup_db_file = '{}.{}.{}'.format(db_file, timestamp, i)
|
||||
#
|
||||
if os.path.exists(backup_db_file):
|
||||
raise OSError("backup db file already exists: %s" % backup_db_file)
|
||||
if log:
|
||||
log.info("Backing up %s => %s", db_file, backup_db_file)
|
||||
shutil.copy(db_file, backup_db_file)
|
||||
|
||||
|
||||
def upgrade_if_needed(db_url, backup=True, log=None):
|
||||
"""Upgrade a database if needed
|
||||
|
||||
If the database is sqlite, a backup file will be created with a timestamp.
|
||||
Other database systems should perform their own backups prior to calling this.
|
||||
"""
|
||||
# run check-db-revision first
|
||||
engine = create_engine(db_url)
|
||||
try:
|
||||
orm.check_db_revision(engine)
|
||||
except orm.DatabaseSchemaMismatch:
|
||||
# ignore mismatch error because that's what we are here for!
|
||||
pass
|
||||
else:
|
||||
# nothing to do
|
||||
return
|
||||
log.info("Upgrading %s", db_url)
|
||||
# we need to upgrade, backup the database
|
||||
if backup and db_url.startswith('sqlite:///'):
|
||||
db_file = db_url.split(':///', 1)[1]
|
||||
backup_db_file(db_file, log=log)
|
||||
upgrade(db_url)
|
||||
|
||||
|
||||
def _alembic(*args):
|
||||
"""Run an alembic command with a temporary alembic.ini"""
|
||||
with _temp_alembic_ini('sqlite:///jupyterhub.sqlite') as alembic_ini:
|
||||
|
@@ -3,6 +3,7 @@
|
||||
# Copyright (c) Jupyter Development Team.
|
||||
# Distributed under the terms of the Modified BSD License.
|
||||
|
||||
import copy
|
||||
import re
|
||||
from datetime import timedelta
|
||||
from http.client import responses
|
||||
@@ -20,7 +21,7 @@ from .. import __version__
|
||||
from .. import orm
|
||||
from ..objects import Server
|
||||
from ..spawner import LocalProcessSpawner
|
||||
from ..utils import url_path_join, exponential_backoff
|
||||
from ..utils import url_path_join
|
||||
|
||||
# pattern for the authentication token header
|
||||
auth_header_pat = re.compile(r'^(?:token|bearer)\s+([^\s]+)$', flags=re.IGNORECASE)
|
||||
@@ -347,7 +348,7 @@ class BaseHandler(RequestHandler):
|
||||
else:
|
||||
self.statsd.incr('login.failure')
|
||||
self.statsd.timing('login.authenticate.failure', auth_timer.ms)
|
||||
self.log.warning("Failed login for %s", data.get('username', 'unknown user'))
|
||||
self.log.warning("Failed login for %s", (data or {}).get('username', 'unknown user'))
|
||||
|
||||
|
||||
#---------------------------------------------------------------
|
||||
@@ -376,8 +377,17 @@ class BaseHandler(RequestHandler):
|
||||
|
||||
@gen.coroutine
|
||||
def spawn_single_user(self, user, server_name='', options=None):
|
||||
if server_name in user.spawners and user.spawners[server_name].pending == 'spawn':
|
||||
raise RuntimeError("Spawn already pending for: %s" % user.name)
|
||||
# in case of error, include 'try again from /hub/home' message
|
||||
self.extra_error_html = self.spawn_home_error
|
||||
|
||||
user_server_name = user.name
|
||||
|
||||
if server_name:
|
||||
user_server_name = '%s:%s' % (user.name, server_name)
|
||||
|
||||
if server_name in user.spawners and user.spawners[server_name].pending:
|
||||
pending = user.spawners[server_name].pending
|
||||
raise RuntimeError("%s pending %s" % (user_server_name, pending))
|
||||
|
||||
# count active servers and pending spawns
|
||||
# we could do careful bookkeeping to avoid
|
||||
@@ -391,32 +401,26 @@ class BaseHandler(RequestHandler):
|
||||
active_server_limit = self.active_server_limit
|
||||
|
||||
if concurrent_spawn_limit and spawn_pending_count >= concurrent_spawn_limit:
|
||||
self.log.info(
|
||||
'%s pending spawns, throttling',
|
||||
spawn_pending_count,
|
||||
)
|
||||
raise web.HTTPError(
|
||||
429,
|
||||
"User startup rate limit exceeded. Try again in a few minutes.")
|
||||
self.log.info(
|
||||
'%s pending spawns, throttling',
|
||||
spawn_pending_count,
|
||||
)
|
||||
raise web.HTTPError(
|
||||
429,
|
||||
"User startup rate limit exceeded. Try again in a few minutes.",
|
||||
)
|
||||
if active_server_limit and active_count >= active_server_limit:
|
||||
self.log.info(
|
||||
'%s servers active, no space available',
|
||||
active_count,
|
||||
)
|
||||
raise web.HTTPError(
|
||||
429,
|
||||
"Active user limit exceeded. Try again in a few minutes.")
|
||||
self.log.info(
|
||||
'%s servers active, no space available',
|
||||
active_count,
|
||||
)
|
||||
raise web.HTTPError(429, "Active user limit exceeded. Try again in a few minutes.")
|
||||
|
||||
tic = IOLoop.current().time()
|
||||
user_server_name = user.name
|
||||
if server_name:
|
||||
user_server_name = '%s:%s' % (user.name, server_name)
|
||||
else:
|
||||
user_server_name = user.name
|
||||
|
||||
self.log.debug("Initiating spawn for %s", user_server_name)
|
||||
|
||||
f = user.spawn(server_name, options)
|
||||
spawn_future = user.spawn(server_name, options)
|
||||
|
||||
self.log.debug("%i%s concurrent spawns",
|
||||
spawn_pending_count,
|
||||
@@ -426,22 +430,24 @@ class BaseHandler(RequestHandler):
|
||||
'/%i' % active_server_limit if active_server_limit else '')
|
||||
|
||||
spawner = user.spawners[server_name]
|
||||
# set spawn_pending now, so there's no gap where _spawn_pending is False
|
||||
# while we are waiting for _proxy_pending to be set
|
||||
spawner._spawn_pending = True
|
||||
|
||||
@gen.coroutine
|
||||
def finish_user_spawn(f=None):
|
||||
def finish_user_spawn():
|
||||
"""Finish the user spawn by registering listeners and notifying the proxy.
|
||||
|
||||
If the spawner is slow to start, this is passed as an async callback,
|
||||
otherwise it is called immediately.
|
||||
"""
|
||||
if f and f.exception() is not None:
|
||||
# failed, don't add to the proxy
|
||||
return
|
||||
# wait for spawn Future
|
||||
yield spawn_future
|
||||
toc = IOLoop.current().time()
|
||||
self.log.info("User %s took %.3f seconds to start", user_server_name, toc-tic)
|
||||
self.statsd.timing('spawner.success', (toc - tic) * 1000)
|
||||
spawner._proxy_pending = True
|
||||
try:
|
||||
spawner._proxy_pending = True
|
||||
yield self.proxy.add_user(user, server_name)
|
||||
except Exception:
|
||||
self.log.exception("Failed to add %s to proxy!", user_server_name)
|
||||
@@ -452,36 +458,53 @@ class BaseHandler(RequestHandler):
|
||||
finally:
|
||||
spawner._proxy_pending = False
|
||||
|
||||
# hook up spawner._spawn_future so that other requests can await
|
||||
# this result
|
||||
finish_spawn_future = spawner._spawn_future = finish_user_spawn()
|
||||
def _clear_spawn_future(f):
|
||||
# clear spawner._spawn_future when it's done
|
||||
# keep an exception around, though, to prevent repeated implicit spawns
|
||||
# if spawn is failing
|
||||
if f.exception() is None:
|
||||
spawner._spawn_future = None
|
||||
# Now we're all done. clear _spawn_pending flag
|
||||
spawner._spawn_pending = False
|
||||
finish_spawn_future.add_done_callback(_clear_spawn_future)
|
||||
|
||||
try:
|
||||
yield gen.with_timeout(timedelta(seconds=self.slow_spawn_timeout), f)
|
||||
yield gen.with_timeout(timedelta(seconds=self.slow_spawn_timeout), finish_spawn_future)
|
||||
except gen.TimeoutError:
|
||||
# waiting_for_response indicates server process has started,
|
||||
# but is yet to become responsive.
|
||||
if not spawner._waiting_for_response:
|
||||
if spawner._spawn_pending and not spawner._waiting_for_response:
|
||||
# still in Spawner.start, which is taking a long time
|
||||
# we shouldn't poll while spawn is incomplete.
|
||||
self.log.warning("User %s is slow to start (timeout=%s)",
|
||||
user_server_name, self.slow_spawn_timeout)
|
||||
# schedule finish for when the user finishes spawning
|
||||
IOLoop.current().add_future(f, finish_user_spawn)
|
||||
else:
|
||||
# start has finished, but the server hasn't come up
|
||||
# check if the server died while we were waiting
|
||||
status = yield user.spawner.poll()
|
||||
if status is None:
|
||||
# hit timeout, but server's running. Hope that it'll show up soon enough,
|
||||
# though it's possible that it started at the wrong URL
|
||||
self.log.warning("User %s is slow to become responsive (timeout=%s)",
|
||||
user_server_name, self.slow_spawn_timeout)
|
||||
self.log.debug("Expecting server for %s at: %s", user_server_name, spawner.server.url)
|
||||
# schedule finish for when the user finishes spawning
|
||||
IOLoop.current().add_future(f, finish_user_spawn)
|
||||
else:
|
||||
toc = IOLoop.current().time()
|
||||
self.statsd.timing('spawner.failure', (toc - tic) * 1000)
|
||||
raise web.HTTPError(500, "Spawner failed to start [status=%s]" % status)
|
||||
else:
|
||||
yield finish_user_spawn()
|
||||
user_server_name, self.slow_spawn_timeout)
|
||||
return
|
||||
|
||||
# start has finished, but the server hasn't come up
|
||||
# check if the server died while we were waiting
|
||||
status = yield spawner.poll()
|
||||
if status is not None:
|
||||
toc = IOLoop.current().time()
|
||||
self.statsd.timing('spawner.failure', (toc - tic) * 1000)
|
||||
raise web.HTTPError(500, "Spawner failed to start [status=%s]. The logs for %s may contain details." % (
|
||||
status, spawner._log_name))
|
||||
|
||||
if spawner._waiting_for_response:
|
||||
# hit timeout waiting for response, but server's running.
|
||||
# Hope that it'll show up soon enough,
|
||||
# though it's possible that it started at the wrong URL
|
||||
self.log.warning("User %s is slow to become responsive (timeout=%s)",
|
||||
user_server_name, self.slow_spawn_timeout)
|
||||
self.log.debug("Expecting server for %s at: %s",
|
||||
user_server_name, spawner.server.url)
|
||||
if spawner._proxy_pending:
|
||||
# User.spawn finished, but it hasn't been added to the proxy
|
||||
# Could be due to load or a slow proxy
|
||||
self.log.warning("User %s is slow to be added to the proxy (timeout=%s)",
|
||||
user_server_name, self.slow_spawn_timeout)
|
||||
|
||||
@gen.coroutine
|
||||
def user_stopped(self, user, server_name):
|
||||
@@ -501,41 +524,56 @@ class BaseHandler(RequestHandler):
|
||||
if name not in user.spawners:
|
||||
raise KeyError("User %s has no such spawner %r", user.name, name)
|
||||
spawner = user.spawners[name]
|
||||
if spawner._stop_pending:
|
||||
raise RuntimeError("Stop already pending for: %s:%s" % (user.name, name))
|
||||
tic = IOLoop.current().time()
|
||||
yield self.proxy.delete_user(user, name)
|
||||
f = user.stop()
|
||||
@gen.coroutine
|
||||
def finish_stop(f=None):
|
||||
"""Finish the stop action by noticing that the user is stopped.
|
||||
if spawner.pending:
|
||||
raise RuntimeError("%s pending %s" % (spawner._log_name, spawner.pending))
|
||||
# set user._stop_pending before doing anything async
|
||||
# to avoid races
|
||||
spawner._stop_pending = True
|
||||
|
||||
If the spawner is slow to stop, this is passed as an async callback,
|
||||
otherwise it is called immediately.
|
||||
@gen.coroutine
|
||||
def stop():
|
||||
"""Stop the server
|
||||
|
||||
1. remove it from the proxy
|
||||
2. stop the server
|
||||
3. notice that it stopped
|
||||
"""
|
||||
if f and f.exception() is not None:
|
||||
# failed, don't do anything
|
||||
return
|
||||
tic = IOLoop.current().time()
|
||||
try:
|
||||
yield self.proxy.delete_user(user, name)
|
||||
yield user.stop(name)
|
||||
finally:
|
||||
spawner._stop_pending = False
|
||||
toc = IOLoop.current().time()
|
||||
self.log.info("User %s server took %.3f seconds to stop", user.name, toc-tic)
|
||||
self.log.info("User %s server took %.3f seconds to stop", user.name, toc - tic)
|
||||
self.statsd.timing('spawner.stop', (toc - tic) * 1000)
|
||||
|
||||
try:
|
||||
yield gen.with_timeout(timedelta(seconds=self.slow_stop_timeout), f)
|
||||
yield gen.with_timeout(timedelta(seconds=self.slow_stop_timeout), stop())
|
||||
except gen.TimeoutError:
|
||||
if spawner._stop_pending:
|
||||
# hit timeout, but stop is still pending
|
||||
self.log.warning("User %s:%s server is slow to stop", user.name, name)
|
||||
# schedule finish for when the server finishes stopping
|
||||
IOLoop.current().add_future(f, finish_stop)
|
||||
else:
|
||||
raise
|
||||
else:
|
||||
yield finish_stop()
|
||||
|
||||
#---------------------------------------------------------------
|
||||
# template rendering
|
||||
#---------------------------------------------------------------
|
||||
|
||||
@property
|
||||
def spawn_home_error(self):
|
||||
"""Extra message pointing users to try spawning again from /hub/home.
|
||||
|
||||
Should be added to `self.extra_error_html` for any handler
|
||||
that could serve a failed spawn message.
|
||||
"""
|
||||
home = url_path_join(self.hub.base_url, 'home')
|
||||
return (
|
||||
"You can try restarting your server from the "
|
||||
"<a href='{home}'>home page</a>.".format(home=home)
|
||||
)
|
||||
|
||||
def get_template(self, name):
|
||||
"""Return the jinja template object for a given name"""
|
||||
return self.settings['jinja2_env'].get_template(name)
|
||||
@@ -583,6 +621,7 @@ class BaseHandler(RequestHandler):
|
||||
status_code=status_code,
|
||||
status_message=status_message,
|
||||
message=message,
|
||||
extra_error_html=getattr(self, 'extra_error_html', ''),
|
||||
exception=exception,
|
||||
)
|
||||
|
||||
@@ -636,10 +675,13 @@ class UserSpawnHandler(BaseHandler):
|
||||
current_user = self.get_current_user()
|
||||
|
||||
if current_user and current_user.name == name:
|
||||
# if spawning fails for any reason, point users to /hub/home to retry
|
||||
self.extra_error_html = self.spawn_home_error
|
||||
|
||||
# If people visit /user/:name directly on the Hub,
|
||||
# the redirects will just loop, because the proxy is bypassed.
|
||||
# Try to check for that and warn,
|
||||
# though the user-facing behavior is unchainged
|
||||
# though the user-facing behavior is unchanged
|
||||
host_info = urlparse(self.request.full_url())
|
||||
port = host_info.port
|
||||
if not port:
|
||||
@@ -651,9 +693,38 @@ class UserSpawnHandler(BaseHandler):
|
||||
Make sure to connect to the proxied public URL %s
|
||||
""", self.request.full_url(), self.proxy.public_url)
|
||||
|
||||
# logged in as correct user, spawn the server
|
||||
# logged in as correct user, check for pending spawn
|
||||
spawner = current_user.spawner
|
||||
if spawner._spawn_pending or spawner._proxy_pending:
|
||||
|
||||
# First, check for previous failure.
|
||||
if (
|
||||
not spawner.active
|
||||
and spawner._spawn_future
|
||||
and spawner._spawn_future.done()
|
||||
and spawner._spawn_future.exception()
|
||||
):
|
||||
# Condition: spawner not active and _spawn_future exists and contains an Exception
|
||||
# Implicit spawn on /user/:name is not allowed if the user's last spawn failed.
|
||||
# We should point the user to Home if the most recent spawn failed.
|
||||
exc = spawner._spawn_future.exception()
|
||||
self.log.error("Preventing implicit spawn for %s because last spawn failed: %s",
|
||||
spawner._log_name, exc)
|
||||
# raise a copy because each time an Exception object is re-raised, its traceback grows
|
||||
raise copy.copy(exc).with_traceback(exc.__traceback__)
|
||||
|
||||
# check for pending spawn
|
||||
if spawner.pending and spawner._spawn_future:
|
||||
# wait on the pending spawn
|
||||
self.log.debug("Waiting for %s pending %s", spawner._log_name, spawner.pending)
|
||||
try:
|
||||
yield gen.with_timeout(timedelta(seconds=self.slow_spawn_timeout), spawner._spawn_future)
|
||||
except gen.TimeoutError:
|
||||
self.log.info("Pending spawn for %s didn't finish in %.1f seconds", spawner._log_name, self.slow_spawn_timeout)
|
||||
pass
|
||||
|
||||
# we may have waited above, check pending again:
|
||||
if spawner.pending:
|
||||
self.log.info("%s is pending %s", spawner._log_name, spawner.pending)
|
||||
# spawn has started, but not finished
|
||||
self.statsd.incr('redirects.user_spawn_pending', 1)
|
||||
html = self.render_template("spawn_pending.html", user=current_user)
|
||||
@@ -661,7 +732,12 @@ class UserSpawnHandler(BaseHandler):
|
||||
return
|
||||
|
||||
# spawn has supposedly finished, check on the status
|
||||
status = yield spawner.poll()
|
||||
if spawner.ready:
|
||||
status = yield spawner.poll()
|
||||
else:
|
||||
status = 0
|
||||
|
||||
# server is not running, trigger spawn
|
||||
if status is not None:
|
||||
if spawner.options_form:
|
||||
self.redirect(url_concat(url_path_join(self.hub.base_url, 'spawn'),
|
||||
@@ -670,6 +746,15 @@ class UserSpawnHandler(BaseHandler):
|
||||
else:
|
||||
yield self.spawn_single_user(current_user)
|
||||
|
||||
# spawn didn't finish, show pending page
|
||||
if spawner.pending:
|
||||
self.log.info("%s is pending %s", spawner._log_name, spawner.pending)
|
||||
# spawn has started, but not finished
|
||||
self.statsd.incr('redirects.user_spawn_pending', 1)
|
||||
html = self.render_template("spawn_pending.html", user=current_user)
|
||||
self.finish(html)
|
||||
return
|
||||
|
||||
# We do exponential backoff here - since otherwise we can get stuck in a redirect loop!
|
||||
# This is important in many distributed proxy implementations - those are often eventually
|
||||
# consistent and can take upto a couple of seconds to actually apply throughout the cluster.
|
||||
@@ -679,9 +764,23 @@ class UserSpawnHandler(BaseHandler):
|
||||
self.log.warning("Invalid redirects argument %r", self.get_argument('redirects'))
|
||||
redirects = 0
|
||||
|
||||
if redirects >= self.settings.get('user_redirect_limit', 5):
|
||||
# check redirect limit to prevent browser-enforced limits.
|
||||
# In case of version mismatch, raise on only two redirects.
|
||||
if redirects >= self.settings.get(
|
||||
'user_redirect_limit', 4
|
||||
) or (redirects >= 2 and spawner._jupyterhub_version != __version__):
|
||||
# We stop if we've been redirected too many times.
|
||||
raise web.HTTPError(500, "Redirect loop detected.")
|
||||
msg = "Redirect loop detected."
|
||||
if spawner._jupyterhub_version != __version__:
|
||||
msg += (
|
||||
" Notebook has jupyterhub version {singleuser}, but the Hub expects {hub}."
|
||||
" Try installing jupyterhub=={hub} in the user environment"
|
||||
" if you continue to have problems."
|
||||
).format(
|
||||
singleuser=spawner._jupyterhub_version or 'unknown (likely < 0.8)',
|
||||
hub=__version__,
|
||||
)
|
||||
raise web.HTTPError(500, msg)
|
||||
|
||||
# set login cookie anew
|
||||
self.set_login_cookie(current_user)
|
||||
@@ -755,6 +854,13 @@ class CSPReportHandler(BaseHandler):
|
||||
self.statsd.incr('csp_report')
|
||||
|
||||
|
||||
class AddSlashHandler(BaseHandler):
|
||||
"""Handler for adding trailing slash to URLs that need them"""
|
||||
def get(self, *args):
|
||||
src = urlparse(self.request.uri)
|
||||
dest = src._replace(path=src.path + '/')
|
||||
self.redirect(urlunparse(dest))
|
||||
|
||||
default_handlers = [
|
||||
(r'/user/([^/]+)(/.*)?', UserSpawnHandler),
|
||||
(r'/user-redirect/(.*)?', UserRedirectHandler),
|
||||
|
@@ -20,7 +20,8 @@ class LogoutHandler(BaseHandler):
|
||||
self.clear_login_cookie()
|
||||
self.statsd.incr('logout')
|
||||
if self.authenticator.auto_login:
|
||||
self.render('logout.html')
|
||||
html = self.render_template('logout.html')
|
||||
self.finish(html)
|
||||
else:
|
||||
self.redirect(self.settings['login_url'], permanent=False)
|
||||
|
||||
@@ -84,10 +85,11 @@ class LoginHandler(BaseHandler):
|
||||
|
||||
if user:
|
||||
already_running = False
|
||||
if user.spawner:
|
||||
if user.spawner.ready:
|
||||
status = yield user.spawner.poll()
|
||||
already_running = (status is None)
|
||||
if not already_running and not user.spawner.options_form:
|
||||
if not already_running and not user.spawner.options_form \
|
||||
and not user.spawner.pending:
|
||||
# logging in triggers spawn
|
||||
yield self.spawn_single_user(user)
|
||||
self.redirect(self.get_next_url())
|
||||
|
@@ -67,9 +67,13 @@ class HomeHandler(BaseHandler):
|
||||
if user.running:
|
||||
# trigger poll_and_notify event in case of a server that died
|
||||
yield user.spawner.poll_and_notify()
|
||||
# send the user to /spawn if they aren't running,
|
||||
# to establish that this is an explicit spawn request rather
|
||||
# than an implicit one, which can be caused by any link to `/user/:name`
|
||||
url = user.url if user.running else url_path_join(self.hub.base_url, 'spawn')
|
||||
html = self.render_template('home.html',
|
||||
user=user,
|
||||
url=user.url,
|
||||
url=url,
|
||||
)
|
||||
self.finish(html)
|
||||
|
||||
@@ -92,7 +96,10 @@ class SpawnHandler(BaseHandler):
|
||||
|
||||
@web.authenticated
|
||||
def get(self):
|
||||
"""GET renders form for spawning with user-specified options"""
|
||||
"""GET renders form for spawning with user-specified options
|
||||
|
||||
or triggers spawn via redirect if there is no form.
|
||||
"""
|
||||
user = self.get_current_user()
|
||||
if not self.allow_named_servers and user.running:
|
||||
url = user.url
|
||||
@@ -102,7 +109,12 @@ class SpawnHandler(BaseHandler):
|
||||
if user.spawner.options_form:
|
||||
self.finish(self._render_form())
|
||||
else:
|
||||
# not running, no form. Trigger spawn.
|
||||
# Explicit spawn request: clear _spawn_future
|
||||
# which may have been saved to prevent implicit spawns
|
||||
# after a failure.
|
||||
if user.spawner._spawn_future and user.spawner._spawn_future.done():
|
||||
user.spawner._spawn_future = None
|
||||
# not running, no form. Trigger spawn by redirecting to /user/:name
|
||||
self.redirect(user.url)
|
||||
|
||||
@web.authenticated
|
||||
@@ -115,6 +127,10 @@ class SpawnHandler(BaseHandler):
|
||||
self.log.warning("User is already running: %s", url)
|
||||
self.redirect(url)
|
||||
return
|
||||
if user.spawner.pending:
|
||||
raise web.HTTPError(
|
||||
400, "%s is pending %s" % (user.spawner._log_name, user.spawner.pending)
|
||||
)
|
||||
form_options = {}
|
||||
for key, byte_list in self.request.body_arguments.items():
|
||||
form_options[key] = [ bs.decode('utf8') for bs in byte_list ]
|
||||
@@ -146,14 +162,19 @@ class AdminHandler(BaseHandler):
|
||||
available = {'name', 'admin', 'running', 'last_activity'}
|
||||
default_sort = ['admin', 'name']
|
||||
mapping = {
|
||||
'running': '_server_id'
|
||||
'running': orm.Spawner.server_id,
|
||||
}
|
||||
for name in available:
|
||||
if name not in mapping:
|
||||
mapping[name] = getattr(orm.User, name)
|
||||
|
||||
default_order = {
|
||||
'name': 'asc',
|
||||
'last_activity': 'desc',
|
||||
'admin': 'desc',
|
||||
'running': 'desc',
|
||||
}
|
||||
|
||||
sorts = self.get_arguments('sort') or default_sort
|
||||
orders = self.get_arguments('order')
|
||||
|
||||
@@ -176,11 +197,11 @@ class AdminHandler(BaseHandler):
|
||||
|
||||
# this could be one incomprehensible nested list comprehension
|
||||
# get User columns
|
||||
cols = [ getattr(orm.User, mapping.get(c, c)) for c in sorts ]
|
||||
cols = [ mapping[c] for c in sorts ]
|
||||
# get User.col.desc() order objects
|
||||
ordered = [ getattr(c, o)() for c, o in zip(cols, orders) ]
|
||||
|
||||
users = self.db.query(orm.User).order_by(*ordered)
|
||||
users = self.db.query(orm.User).outerjoin(orm.Spawner).order_by(*ordered)
|
||||
users = [ self._user_from_orm(u) for u in users ]
|
||||
running = [ u for u in users if u.running ]
|
||||
|
||||
|
@@ -24,7 +24,6 @@ from sqlalchemy.pool import StaticPool
|
||||
from sqlalchemy.sql.expression import bindparam
|
||||
from sqlalchemy import create_engine, Table
|
||||
|
||||
from .dbutil import _temp_alembic_ini
|
||||
from .utils import (
|
||||
random_port,
|
||||
new_token, hash_token, compare_token,
|
||||
@@ -177,7 +176,7 @@ class Spawner(Base):
|
||||
id = Column(Integer, primary_key=True, autoincrement=True)
|
||||
user_id = Column(Integer, ForeignKey('users.id', ondelete='CASCADE'))
|
||||
|
||||
server_id = Column(Integer, ForeignKey('servers.id'))
|
||||
server_id = Column(Integer, ForeignKey('servers.id', ondelete='SET NULL'))
|
||||
server = relationship(Server)
|
||||
|
||||
state = Column(JSONDict)
|
||||
@@ -213,7 +212,7 @@ class Service(Base):
|
||||
api_tokens = relationship("APIToken", backref="service")
|
||||
|
||||
# service-specific interface
|
||||
_server_id = Column(Integer, ForeignKey('servers.id'))
|
||||
_server_id = Column(Integer, ForeignKey('servers.id', ondelete='SET NULL'))
|
||||
server = relationship(Server, primaryjoin=_server_id == Server.id)
|
||||
pid = Column(Integer)
|
||||
|
||||
@@ -463,6 +462,8 @@ def check_db_revision(engine):
|
||||
current_table_names = set(engine.table_names())
|
||||
my_table_names = set(Base.metadata.tables.keys())
|
||||
|
||||
from .dbutil import _temp_alembic_ini
|
||||
|
||||
with _temp_alembic_ini(engine.url) as ini:
|
||||
cfg = alembic.config.Config(ini)
|
||||
scripts = ScriptDirectory.from_config(cfg)
|
||||
|
@@ -231,9 +231,10 @@ class Proxy(LoggingConfigurable):
|
||||
user.name, spawner.proxy_spec, spawner.server.host,
|
||||
)
|
||||
|
||||
if spawner._spawn_pending:
|
||||
if spawner.pending and spawner.pending != 'spawn':
|
||||
raise RuntimeError(
|
||||
"User %s's spawn is pending, shouldn't be added to the proxy yet!", user.name)
|
||||
"%s is pending %s, shouldn't be added to the proxy yet!" % (spawner._log_name, spawner.pending)
|
||||
)
|
||||
|
||||
yield self.add_route(
|
||||
spawner.proxy_spec,
|
||||
@@ -326,7 +327,7 @@ class Proxy(LoggingConfigurable):
|
||||
spec, route['target'], spawner.server,
|
||||
)
|
||||
futures.append(self.add_user(user, name))
|
||||
elif spawner._proxy_pending:
|
||||
elif spawner._spawn_pending:
|
||||
good_routes.add(spawner.proxy_spec)
|
||||
|
||||
# check service routes
|
||||
@@ -374,7 +375,7 @@ class Proxy(LoggingConfigurable):
|
||||
self.log.info("Setting up routes on new proxy")
|
||||
yield self.add_hub_route(self.app.hub)
|
||||
yield self.add_all_users(self.app.users)
|
||||
yield self.add_all_services(self.app.services)
|
||||
yield self.add_all_services(self.app._service_map)
|
||||
self.log.info("New proxy back up and good to go")
|
||||
|
||||
|
||||
|
@@ -9,11 +9,17 @@ model describing the authenticated user.
|
||||
authenticate with the Hub.
|
||||
|
||||
"""
|
||||
|
||||
import base64
|
||||
import json
|
||||
import os
|
||||
import random
|
||||
import re
|
||||
import socket
|
||||
import string
|
||||
import time
|
||||
from urllib.parse import quote, urlencode
|
||||
import uuid
|
||||
import warnings
|
||||
|
||||
import requests
|
||||
@@ -239,7 +245,8 @@ class HubAuth(Configurable):
|
||||
headers.setdefault('Authorization', 'token %s' % self.api_token)
|
||||
try:
|
||||
r = requests.request(method, url, **kwargs)
|
||||
except requests.ConnectionError:
|
||||
except requests.ConnectionError as e:
|
||||
app_log.error("Error connecting to %s: %s", self.api_url, e)
|
||||
msg = "Failed to connect to Hub API at %r." % self.api_url
|
||||
msg += " Is the Hub accessible at this URL (from host: %s)?" % socket.gethostname()
|
||||
if '127.0.0.1' in self.api_url:
|
||||
@@ -397,6 +404,14 @@ class HubOAuth(HubAuth):
|
||||
"""
|
||||
return self.oauth_client_id
|
||||
|
||||
@property
|
||||
def state_cookie_name(self):
|
||||
"""The cookie name for storing OAuth state
|
||||
|
||||
This cookie is only live for the duration of the OAuth handshake.
|
||||
"""
|
||||
return self.cookie_name + '-oauth-state'
|
||||
|
||||
def _get_user_cookie(self, handler):
|
||||
token = handler.get_secure_cookie(self.cookie_name)
|
||||
if token:
|
||||
@@ -476,6 +491,110 @@ class HubOAuth(HubAuth):
|
||||
|
||||
return token_reply['access_token']
|
||||
|
||||
def _encode_state(self, state):
|
||||
"""Encode a state dict as url-safe base64"""
|
||||
# trim trailing `=` because
|
||||
json_state = json.dumps(state)
|
||||
return base64.urlsafe_b64encode(
|
||||
json_state.encode('utf8')
|
||||
).decode('ascii').rstrip('=')
|
||||
|
||||
def _decode_state(self, b64_state):
|
||||
"""Decode a base64 state
|
||||
|
||||
Always returns a dict.
|
||||
The dict will be empty if the state is invalid.
|
||||
"""
|
||||
if isinstance(b64_state, str):
|
||||
b64_state = b64_state.encode('ascii')
|
||||
if len(b64_state) != 4:
|
||||
# restore padding
|
||||
b64_state = b64_state + (b'=' * (4 - len(b64_state) % 4))
|
||||
try:
|
||||
json_state = base64.urlsafe_b64decode(b64_state).decode('utf8')
|
||||
except ValueError:
|
||||
app_log.error("Failed to b64-decode state: %r", b64_state)
|
||||
return {}
|
||||
try:
|
||||
return json.loads(json_state)
|
||||
except ValueError:
|
||||
app_log.error("Failed to json-decode state: %r", json_state)
|
||||
return {}
|
||||
|
||||
def set_state_cookie(self, handler, next_url=None):
|
||||
"""Generate an OAuth state and store it in a cookie
|
||||
|
||||
Parameters
|
||||
----------
|
||||
handler (RequestHandler): A tornado RequestHandler
|
||||
next_url (str): The page to redirect to on successful login
|
||||
|
||||
Returns
|
||||
-------
|
||||
state (str): The OAuth state that has been stored in the cookie (url safe, base64-encoded)
|
||||
"""
|
||||
extra_state = {}
|
||||
if handler.get_cookie(self.state_cookie_name):
|
||||
# oauth state cookie is already set
|
||||
# use a randomized cookie suffix to avoid collisions
|
||||
# in case of concurrent logins
|
||||
app_log.warning("Detected unused OAuth state cookies")
|
||||
cookie_suffix = ''.join(random.choice(string.ascii_letters) for i in range(8))
|
||||
cookie_name = '{}-{}'.format(self.state_cookie_name, cookie_suffix)
|
||||
extra_state['cookie_name'] = cookie_name
|
||||
else:
|
||||
cookie_name = self.state_cookie_name
|
||||
b64_state = self.generate_state(next_url, **extra_state)
|
||||
kwargs = {
|
||||
'path': self.base_url,
|
||||
'httponly': True,
|
||||
# Expire oauth state cookie in ten minutes.
|
||||
# Usually this will be cleared by completed login
|
||||
# in less than a few seconds.
|
||||
# OAuth that doesn't complete shouldn't linger too long.
|
||||
'max_age': 600,
|
||||
}
|
||||
if handler.request.protocol == 'https':
|
||||
kwargs['secure'] = True
|
||||
handler.set_secure_cookie(
|
||||
cookie_name,
|
||||
b64_state,
|
||||
**kwargs
|
||||
)
|
||||
return b64_state
|
||||
|
||||
def generate_state(self, next_url=None, **extra_state):
|
||||
"""Generate a state string, given a next_url redirect target
|
||||
|
||||
Parameters
|
||||
----------
|
||||
next_url (str): The URL of the page to redirect to on successful login.
|
||||
|
||||
Returns
|
||||
-------
|
||||
state (str): The base64-encoded state string.
|
||||
"""
|
||||
state = {
|
||||
'uuid': uuid.uuid4().hex,
|
||||
'next_url': next_url,
|
||||
}
|
||||
state.update(extra_state)
|
||||
return self._encode_state(state)
|
||||
|
||||
def get_next_url(self, b64_state=''):
|
||||
"""Get the next_url for redirection, given an encoded OAuth state"""
|
||||
state = self._decode_state(b64_state)
|
||||
return state.get('next_url') or self.base_url
|
||||
|
||||
def get_state_cookie_name(self, b64_state=''):
|
||||
"""Get the cookie name for oauth state, given an encoded OAuth state
|
||||
|
||||
Cookie name is stored in the state itself because the cookie name
|
||||
is randomized to deal with races between concurrent oauth sequences.
|
||||
"""
|
||||
state = self._decode_state(b64_state)
|
||||
return state.get('cookie_name') or self.state_cookie_name
|
||||
|
||||
def set_cookie(self, handler, access_token):
|
||||
"""Set a cookie recording OAuth result"""
|
||||
kwargs = {
|
||||
@@ -565,8 +684,13 @@ class HubAuthenticated(object):
|
||||
|
||||
def get_login_url(self):
|
||||
"""Return the Hub's login URL"""
|
||||
app_log.debug("Redirecting to login url: %s" % self.hub_auth.login_url)
|
||||
return self.hub_auth.login_url
|
||||
login_url = self.hub_auth.login_url
|
||||
if isinstance(self.hub_auth, HubOAuth):
|
||||
# add state argument to OAuth url
|
||||
state = self.hub_auth.set_state_cookie(self, next_url=self.request.uri)
|
||||
login_url = url_concat(login_url, {'state': state})
|
||||
app_log.debug("Redirecting to login url: %s", login_url)
|
||||
return login_url
|
||||
|
||||
def check_hub_user(self, model):
|
||||
"""Check whether Hub-authenticated user or service should be allowed.
|
||||
@@ -634,6 +758,19 @@ class HubAuthenticated(object):
|
||||
except Exception:
|
||||
self._hub_auth_user_cache = None
|
||||
raise
|
||||
|
||||
# store ?token=... tokens passed via url in a cookie for future requests
|
||||
url_token = self.get_argument('token', '')
|
||||
if (
|
||||
user_model
|
||||
and url_token
|
||||
and getattr(self, '_token_authenticated', False)
|
||||
and hasattr(self.hub_auth, 'set_cookie')
|
||||
):
|
||||
# authenticated via `?token=`
|
||||
# set a cookie for future requests
|
||||
# hub_auth.set_cookie is only available on HubOAuth
|
||||
self.hub_auth.set_cookie(self, url_token)
|
||||
return self._hub_auth_user_cache
|
||||
|
||||
|
||||
@@ -657,6 +794,22 @@ class HubOAuthCallbackHandler(HubOAuthenticated, RequestHandler):
|
||||
code = self.get_argument("code", False)
|
||||
if not code:
|
||||
raise HTTPError(400, "oauth callback made without a token")
|
||||
|
||||
# validate OAuth state
|
||||
arg_state = self.get_argument("state", None)
|
||||
if arg_state is None:
|
||||
raise HTTPError("oauth state is missing. Try logging in again.")
|
||||
cookie_name = self.hub_auth.get_state_cookie_name(arg_state)
|
||||
cookie_state = self.get_secure_cookie(cookie_name)
|
||||
# clear cookie state now that we've consumed it
|
||||
self.clear_cookie(cookie_name, path=self.hub_auth.base_url)
|
||||
if isinstance(cookie_state, bytes):
|
||||
cookie_state = cookie_state.decode('ascii', 'replace')
|
||||
# check that state matches
|
||||
if arg_state != cookie_state:
|
||||
app_log.warning("oauth state %r != %r", arg_state, cookie_state)
|
||||
raise HTTPError(403, "oauth state does not match. Try logging in again.")
|
||||
next_url = self.hub_auth.get_next_url(cookie_state)
|
||||
# TODO: make async (in a Thread?)
|
||||
token = self.hub_auth.token_for_code(code)
|
||||
user_model = self.hub_auth.user_for_token(token)
|
||||
@@ -664,7 +817,6 @@ class HubOAuthCallbackHandler(HubOAuthenticated, RequestHandler):
|
||||
raise HTTPError(500, "oauth callback failed to identify a user")
|
||||
app_log.info("Logged-in user %s", user_model)
|
||||
self.hub_auth.set_cookie(self, token)
|
||||
next_url = self.get_argument('next', '') or self.hub_auth.base_url
|
||||
self.redirect(next_url)
|
||||
self.redirect(next_url or self.hub_auth.base_url)
|
||||
|
||||
|
||||
|
@@ -301,5 +301,8 @@ class Service(LoggingConfigurable):
|
||||
if not self.managed:
|
||||
raise RuntimeError("Cannot stop unmanaged service %s" % self)
|
||||
if self.spawner:
|
||||
if self.orm.server:
|
||||
self.db.delete(self.orm.server)
|
||||
self.db.commit()
|
||||
self.spawner.stop_polling()
|
||||
return self.spawner.stop()
|
||||
|
@@ -22,6 +22,7 @@ except ImportError:
|
||||
|
||||
from traitlets import (
|
||||
Bool,
|
||||
Bytes,
|
||||
Unicode,
|
||||
CUnicode,
|
||||
default,
|
||||
@@ -115,20 +116,6 @@ class OAuthCallbackHandler(HubOAuthCallbackHandler, IPythonHandler):
|
||||
@property
|
||||
def hub_auth(self):
|
||||
return self.settings['hub_auth']
|
||||
|
||||
def get(self):
|
||||
code = self.get_argument("code", False)
|
||||
if not code:
|
||||
raise HTTPError(400, "oauth callback made without a token")
|
||||
# TODO: make async (in a Thread?)
|
||||
token = self.hub_auth.token_for_code(code)
|
||||
user_model = self.hub_auth.user_for_token(token)
|
||||
if user_model is None:
|
||||
raise HTTPError(500, "oauth callback failed to identify a user")
|
||||
self.log.info("Logged-in user %s", user_model)
|
||||
self.hub_auth.set_cookie(self, token)
|
||||
next_url = self.get_argument('next', '') or self.base_url
|
||||
self.redirect(next_url)
|
||||
|
||||
|
||||
# register new hub related command-line aliases
|
||||
@@ -157,11 +144,13 @@ page_template = """
|
||||
{% block header_buttons %}
|
||||
{{super()}}
|
||||
|
||||
<a href='{{hub_control_panel_url}}'
|
||||
class='btn btn-default btn-sm navbar-btn pull-right'
|
||||
style='margin-right: 4px; margin-left: 2px;'
|
||||
>
|
||||
Control Panel</a>
|
||||
<span>
|
||||
<a href='{{hub_control_panel_url}}'
|
||||
class='btn btn-default btn-sm navbar-btn pull-right'
|
||||
style='margin-right: 4px; margin-left: 2px;'>
|
||||
Control Panel
|
||||
</a>
|
||||
</span>
|
||||
{% endblock %}
|
||||
{% block logo %}
|
||||
<img src='{{logo_url}}' alt='Jupyter Notebook'/>
|
||||
@@ -192,6 +181,15 @@ class SingleUserNotebookApp(NotebookApp):
|
||||
subcommands = {}
|
||||
version = __version__
|
||||
classes = NotebookApp.classes + [HubOAuth]
|
||||
|
||||
# don't store cookie secrets
|
||||
cookie_secret_file = ''
|
||||
# always generate a new cookie secret on launch
|
||||
# ensures that each spawn clears any cookies from previous session,
|
||||
# triggering OAuth again
|
||||
cookie_secret = Bytes()
|
||||
def _cookie_secret_default(self):
|
||||
return os.urandom(32)
|
||||
|
||||
user = CUnicode().tag(config=True)
|
||||
group = CUnicode().tag(config=True)
|
||||
|
@@ -18,7 +18,7 @@ from tempfile import mkdtemp
|
||||
from sqlalchemy import inspect
|
||||
|
||||
from tornado import gen
|
||||
from tornado.ioloop import PeriodicCallback, IOLoop
|
||||
from tornado.ioloop import PeriodicCallback
|
||||
|
||||
from traitlets.config import LoggingConfigurable
|
||||
from traitlets import (
|
||||
@@ -49,9 +49,23 @@ class Spawner(LoggingConfigurable):
|
||||
|
||||
# private attributes for tracking status
|
||||
_spawn_pending = False
|
||||
_start_pending = False
|
||||
_stop_pending = False
|
||||
_proxy_pending = False
|
||||
_waiting_for_response = False
|
||||
_jupyterhub_version = None
|
||||
_spawn_future = None
|
||||
|
||||
@property
|
||||
def _log_name(self):
|
||||
"""Return username:servername or username
|
||||
|
||||
Used in logging for consistency with named servers.
|
||||
"""
|
||||
if self.name:
|
||||
return '%s:%s' % (self.user.name, self.name)
|
||||
else:
|
||||
return self.user.name
|
||||
|
||||
@property
|
||||
def pending(self):
|
||||
@@ -59,7 +73,7 @@ class Spawner(LoggingConfigurable):
|
||||
|
||||
Return False if nothing is pending.
|
||||
"""
|
||||
if self._spawn_pending or self._proxy_pending:
|
||||
if self._spawn_pending:
|
||||
return 'spawn'
|
||||
elif self._stop_pending:
|
||||
return 'stop'
|
||||
@@ -89,6 +103,7 @@ class Spawner(LoggingConfigurable):
|
||||
authenticator = Any()
|
||||
hub = Any()
|
||||
orm_spawner = Any()
|
||||
db = Any()
|
||||
|
||||
@observe('orm_spawner')
|
||||
def _orm_spawner_changed(self, change):
|
||||
@@ -824,7 +839,7 @@ class LocalProcessSpawner(Spawner):
|
||||
This is the default spawner for JupyterHub.
|
||||
"""
|
||||
|
||||
INTERRUPT_TIMEOUT = Integer(10,
|
||||
interrupt_timeout = Integer(10,
|
||||
help="""
|
||||
Seconds to wait for single-user server process to halt after SIGINT.
|
||||
|
||||
@@ -832,7 +847,7 @@ class LocalProcessSpawner(Spawner):
|
||||
"""
|
||||
).tag(config=True)
|
||||
|
||||
TERM_TIMEOUT = Integer(5,
|
||||
term_timeout = Integer(5,
|
||||
help="""
|
||||
Seconds to wait for single-user server process to halt after SIGTERM.
|
||||
|
||||
@@ -840,7 +855,7 @@ class LocalProcessSpawner(Spawner):
|
||||
"""
|
||||
).tag(config=True)
|
||||
|
||||
KILL_TIMEOUT = Integer(5,
|
||||
kill_timeout = Integer(5,
|
||||
help="""
|
||||
Seconds to wait for process to halt after SIGKILL before giving up.
|
||||
|
||||
@@ -1056,7 +1071,7 @@ class LocalProcessSpawner(Spawner):
|
||||
return
|
||||
self.log.debug("Interrupting %i", self.pid)
|
||||
yield self._signal(signal.SIGINT)
|
||||
yield self.wait_for_death(self.INTERRUPT_TIMEOUT)
|
||||
yield self.wait_for_death(self.interrupt_timeout)
|
||||
|
||||
# clean shutdown failed, use TERM
|
||||
status = yield self.poll()
|
||||
@@ -1064,7 +1079,7 @@ class LocalProcessSpawner(Spawner):
|
||||
return
|
||||
self.log.debug("Terminating %i", self.pid)
|
||||
yield self._signal(signal.SIGTERM)
|
||||
yield self.wait_for_death(self.TERM_TIMEOUT)
|
||||
yield self.wait_for_death(self.term_timeout)
|
||||
|
||||
# TERM failed, use KILL
|
||||
status = yield self.poll()
|
||||
@@ -1072,7 +1087,7 @@ class LocalProcessSpawner(Spawner):
|
||||
return
|
||||
self.log.debug("Killing %i", self.pid)
|
||||
yield self._signal(signal.SIGKILL)
|
||||
yield self.wait_for_death(self.KILL_TIMEOUT)
|
||||
yield self.wait_for_death(self.kill_timeout)
|
||||
|
||||
status = yield self.poll()
|
||||
if status is None:
|
||||
|
@@ -7,8 +7,6 @@ import threading
|
||||
from unittest import mock
|
||||
from urllib.parse import urlparse
|
||||
|
||||
import requests
|
||||
|
||||
from tornado import gen
|
||||
from tornado.concurrent import Future
|
||||
from tornado.ioloop import IOLoop
|
||||
@@ -58,6 +56,13 @@ class MockSpawner(LocalProcessSpawner):
|
||||
def _cmd_default(self):
|
||||
return [sys.executable, '-m', 'jupyterhub.tests.mocksu']
|
||||
|
||||
use_this_api_token = None
|
||||
def start(self):
|
||||
if self.use_this_api_token:
|
||||
self.api_token = self.use_this_api_token
|
||||
elif self.will_resume:
|
||||
self.use_this_api_token = self.api_token
|
||||
return super().start()
|
||||
|
||||
class SlowSpawner(MockSpawner):
|
||||
"""A spawner that takes a few seconds to start"""
|
||||
|
@@ -89,7 +89,7 @@ def api_request(app, *api_path, **kwargs):
|
||||
base_url = app.hub.url
|
||||
headers = kwargs.setdefault('headers', {})
|
||||
|
||||
if 'Authorization' not in headers:
|
||||
if 'Authorization' not in headers and not kwargs.pop('noauth', False):
|
||||
headers.update(auth_header(app.db, 'admin'))
|
||||
|
||||
url = ujoin(base_url, 'api', *api_path)
|
||||
@@ -654,6 +654,50 @@ def test_active_server_limit(app, request):
|
||||
assert counts['pending'] == 0
|
||||
|
||||
|
||||
@mark.gen_test
|
||||
def test_start_stop_race(app, no_patience, slow_spawn):
|
||||
user = add_user(app.db, app, name='panda')
|
||||
spawner = user.spawner
|
||||
# start the server
|
||||
r = yield api_request(app, 'users', user.name, 'server', method='post')
|
||||
assert r.status_code == 202
|
||||
assert spawner.pending == 'spawn'
|
||||
# additional spawns while spawning shouldn't trigger a new spawn
|
||||
with mock.patch.object(spawner, 'start') as m:
|
||||
r = yield api_request(app, 'users', user.name, 'server', method='post')
|
||||
assert r.status_code == 202
|
||||
assert m.call_count == 0
|
||||
|
||||
# stop while spawning is not okay
|
||||
r = yield api_request(app, 'users', user.name, 'server', method='delete')
|
||||
assert r.status_code == 400
|
||||
while not spawner.ready:
|
||||
yield gen.sleep(0.1)
|
||||
|
||||
spawner.delay = 3
|
||||
# stop the spawner
|
||||
r = yield api_request(app, 'users', user.name, 'server', method='delete')
|
||||
assert r.status_code == 202
|
||||
assert spawner.pending == 'stop'
|
||||
# make sure we get past deleting from the proxy
|
||||
yield gen.sleep(1)
|
||||
# additional stops while stopping shouldn't trigger a new stop
|
||||
with mock.patch.object(spawner, 'stop') as m:
|
||||
r = yield api_request(app, 'users', user.name, 'server', method='delete')
|
||||
assert r.status_code == 202
|
||||
assert m.call_count == 0
|
||||
# start while stopping is not allowed
|
||||
with mock.patch.object(spawner, 'start') as m:
|
||||
r = yield api_request(app, 'users', user.name, 'server', method='post')
|
||||
assert r.status_code == 400
|
||||
|
||||
while spawner.active:
|
||||
yield gen.sleep(0.1)
|
||||
# start after stop is okay
|
||||
r = yield api_request(app, 'users', user.name, 'server', method='post')
|
||||
assert r.status_code == 202
|
||||
|
||||
|
||||
@mark.gen_test
|
||||
def test_get_proxy(app):
|
||||
r = yield api_request(app, 'proxy')
|
||||
@@ -711,16 +755,16 @@ def test_token(app):
|
||||
|
||||
|
||||
@mark.gen_test
|
||||
@mark.parametrize("headers, data, status", [
|
||||
({}, None, 200),
|
||||
({'Authorization': ''}, None, 403),
|
||||
({}, {'username': 'fake', 'password': 'fake'}, 200),
|
||||
@mark.parametrize("headers, status", [
|
||||
({}, 200),
|
||||
({'Authorization': 'token bad'}, 403),
|
||||
])
|
||||
def test_get_new_token(app, headers, data, status):
|
||||
if data:
|
||||
data = json.dumps(data)
|
||||
def test_get_new_token(app, headers, status):
|
||||
# request a new token
|
||||
r = yield api_request(app, 'authorizations', 'token', method='post', data=data, headers=headers)
|
||||
r = yield api_request(app, 'authorizations', 'token',
|
||||
method='post',
|
||||
headers=headers,
|
||||
)
|
||||
assert r.status_code == status
|
||||
if status != 200:
|
||||
return
|
||||
@@ -728,7 +772,61 @@ def test_get_new_token(app, headers, data, status):
|
||||
assert 'token' in reply
|
||||
r = yield api_request(app, 'authorizations', 'token', reply['token'])
|
||||
r.raise_for_status()
|
||||
assert 'name' in r.json()
|
||||
reply = r.json()
|
||||
assert reply['name'] == 'admin'
|
||||
|
||||
|
||||
@mark.gen_test
|
||||
def test_token_formdata(app):
|
||||
"""Create a token for a user with formdata and no auth header"""
|
||||
data = {
|
||||
'username': 'fake',
|
||||
'password': 'fake',
|
||||
}
|
||||
r = yield api_request(app, 'authorizations', 'token',
|
||||
method='post',
|
||||
data=json.dumps(data) if data else None,
|
||||
noauth=True,
|
||||
)
|
||||
assert r.status_code == 200
|
||||
reply = r.json()
|
||||
assert 'token' in reply
|
||||
r = yield api_request(app, 'authorizations', 'token', reply['token'])
|
||||
r.raise_for_status()
|
||||
reply = r.json()
|
||||
assert reply['name'] == data['username']
|
||||
|
||||
|
||||
@mark.gen_test
|
||||
@mark.parametrize("as_user, for_user, status", [
|
||||
('admin', 'other', 200),
|
||||
('admin', 'missing', 400),
|
||||
('user', 'other', 403),
|
||||
('user', 'user', 200),
|
||||
])
|
||||
def test_token_as_user(app, as_user, for_user, status):
|
||||
# ensure both users exist
|
||||
u = add_user(app.db, app, name=as_user)
|
||||
if for_user != 'missing':
|
||||
add_user(app.db, app, name=for_user)
|
||||
data = {'username': for_user}
|
||||
headers = {
|
||||
'Authorization': 'token %s' % u.new_api_token(),
|
||||
}
|
||||
r = yield api_request(app, 'authorizations', 'token',
|
||||
method='post',
|
||||
data=json.dumps(data),
|
||||
headers=headers,
|
||||
)
|
||||
assert r.status_code == status
|
||||
reply = r.json()
|
||||
if status != 200:
|
||||
return
|
||||
assert 'token' in reply
|
||||
r = yield api_request(app, 'authorizations', 'token', reply['token'])
|
||||
r.raise_for_status()
|
||||
reply = r.json()
|
||||
assert reply['name'] == data['username']
|
||||
|
||||
|
||||
# ---------------
|
||||
|
@@ -8,9 +8,11 @@ from subprocess import check_output, Popen, PIPE
|
||||
from tempfile import NamedTemporaryFile, TemporaryDirectory
|
||||
from unittest.mock import patch
|
||||
|
||||
from tornado import gen
|
||||
import pytest
|
||||
|
||||
from .mocking import MockHub
|
||||
from .test_api import add_user
|
||||
from .. import orm
|
||||
from ..app import COOKIE_SECRET_BYTES
|
||||
|
||||
@@ -161,3 +163,57 @@ def test_load_groups():
|
||||
assert gold is not None
|
||||
assert sorted([ u.name for u in gold.users ]) == sorted(to_load['gold'])
|
||||
|
||||
|
||||
@pytest.mark.gen_test
|
||||
def test_resume_spawners(tmpdir, request):
|
||||
if not os.getenv('JUPYTERHUB_TEST_DB_URL'):
|
||||
p = patch.dict(os.environ, {
|
||||
'JUPYTERHUB_TEST_DB_URL': 'sqlite:///%s' % tmpdir.join('jupyterhub.sqlite'),
|
||||
})
|
||||
p.start()
|
||||
request.addfinalizer(p.stop)
|
||||
@gen.coroutine
|
||||
def new_hub():
|
||||
app = MockHub()
|
||||
app.config.ConfigurableHTTPProxy.should_start = False
|
||||
yield app.initialize([])
|
||||
return app
|
||||
app = yield new_hub()
|
||||
db = app.db
|
||||
# spawn a user's server
|
||||
name = 'kurt'
|
||||
user = add_user(db, app, name=name)
|
||||
yield user.spawn()
|
||||
proc = user.spawner.proc
|
||||
assert proc is not None
|
||||
|
||||
# stop the Hub without cleaning up servers
|
||||
app.cleanup_servers = False
|
||||
yield app.stop()
|
||||
|
||||
# proc is still running
|
||||
assert proc.poll() is None
|
||||
|
||||
# resume Hub, should still be running
|
||||
app = yield new_hub()
|
||||
db = app.db
|
||||
user = app.users[name]
|
||||
assert user.running
|
||||
assert user.spawner.server is not None
|
||||
|
||||
# stop the Hub without cleaning up servers
|
||||
app.cleanup_servers = False
|
||||
yield app.stop()
|
||||
|
||||
# stop the server while the Hub is down. BAMF!
|
||||
proc.terminate()
|
||||
proc.wait(timeout=10)
|
||||
assert proc.poll() is not None
|
||||
|
||||
# resume Hub, should be stopped
|
||||
app = yield new_hub()
|
||||
db = app.db
|
||||
user = app.users[name]
|
||||
assert not user.running
|
||||
assert user.spawner.server is None
|
||||
assert list(db.query(orm.Server)) == []
|
||||
|
@@ -4,6 +4,7 @@ import shutil
|
||||
|
||||
import pytest
|
||||
from pytest import raises
|
||||
from traitlets.config import Config
|
||||
|
||||
from ..dbutil import upgrade
|
||||
from ..app import NewToken, UpgradeDB, JupyterHub
|
||||
@@ -21,29 +22,35 @@ def generate_old_db(path):
|
||||
def test_upgrade(tmpdir):
|
||||
print(tmpdir)
|
||||
db_url = generate_old_db(str(tmpdir))
|
||||
print(db_url)
|
||||
upgrade(db_url)
|
||||
|
||||
@pytest.mark.gen_test
|
||||
def test_upgrade_entrypoint(tmpdir):
|
||||
generate_old_db(str(tmpdir))
|
||||
db_url = os.getenv('JUPYTERHUB_TEST_UPGRADE_DB_URL')
|
||||
if not db_url:
|
||||
# default: sqlite
|
||||
db_url = generate_old_db(str(tmpdir))
|
||||
cfg = Config()
|
||||
cfg.JupyterHub.db_url = db_url
|
||||
|
||||
tmpdir.chdir()
|
||||
tokenapp = NewToken()
|
||||
tokenapp = NewToken(config=cfg)
|
||||
tokenapp.initialize(['kaylee'])
|
||||
with raises(SystemExit):
|
||||
tokenapp.start()
|
||||
|
||||
sqlite_files = glob(os.path.join(str(tmpdir), 'jupyterhub.sqlite*'))
|
||||
assert len(sqlite_files) == 1
|
||||
if 'sqlite' in db_url:
|
||||
sqlite_files = glob(os.path.join(str(tmpdir), 'jupyterhub.sqlite*'))
|
||||
assert len(sqlite_files) == 1
|
||||
|
||||
upgradeapp = UpgradeDB()
|
||||
upgradeapp = UpgradeDB(config=cfg)
|
||||
yield upgradeapp.initialize([])
|
||||
upgradeapp.start()
|
||||
|
||||
# check that backup was created:
|
||||
sqlite_files = glob(os.path.join(str(tmpdir), 'jupyterhub.sqlite*'))
|
||||
assert len(sqlite_files) == 2
|
||||
if 'sqlite' in db_url:
|
||||
sqlite_files = glob(os.path.join(str(tmpdir), 'jupyterhub.sqlite*'))
|
||||
assert len(sqlite_files) == 2
|
||||
|
||||
# run tokenapp again, it should work
|
||||
tokenapp.start()
|
||||
|
@@ -17,6 +17,57 @@ def named_servers(app):
|
||||
app.tornado_application.settings[key] = app.tornado_settings[key] = False
|
||||
|
||||
|
||||
@pytest.mark.gen_test
|
||||
def test_default_server(app, named_servers):
|
||||
"""Test the default /users/:user/server handler when named servers are enabled"""
|
||||
username = 'rosie'
|
||||
user = add_user(app.db, app, name=username)
|
||||
r = yield api_request(app, 'users', username, 'server', method='post')
|
||||
assert r.status_code == 201
|
||||
assert r.text == ''
|
||||
|
||||
r = yield api_request(app, 'users', username)
|
||||
r.raise_for_status()
|
||||
|
||||
user_model = r.json()
|
||||
user_model.pop('last_activity')
|
||||
assert user_model == {
|
||||
'name': username,
|
||||
'groups': [],
|
||||
'kind': 'user',
|
||||
'admin': False,
|
||||
'pending': None,
|
||||
'server': user.url,
|
||||
'servers': {
|
||||
'': {
|
||||
'name': '',
|
||||
'url': user.url,
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
# now stop the server
|
||||
r = yield api_request(app, 'users', username, 'server', method='delete')
|
||||
assert r.status_code == 204
|
||||
assert r.text == ''
|
||||
|
||||
r = yield api_request(app, 'users', username)
|
||||
r.raise_for_status()
|
||||
|
||||
user_model = r.json()
|
||||
user_model.pop('last_activity')
|
||||
assert user_model == {
|
||||
'name': username,
|
||||
'groups': [],
|
||||
'kind': 'user',
|
||||
'admin': False,
|
||||
'pending': None,
|
||||
'server': None,
|
||||
'servers': {},
|
||||
}
|
||||
|
||||
|
||||
|
||||
@pytest.mark.gen_test
|
||||
def test_create_named_server(app, named_servers):
|
||||
username = 'walnut'
|
||||
@@ -38,6 +89,27 @@ def test_create_named_server(app, named_servers):
|
||||
assert prefix == user.spawners[servername].server.base_url
|
||||
assert prefix.endswith('/user/%s/%s/' % (username, servername))
|
||||
|
||||
r = yield api_request(app, 'users', username)
|
||||
r.raise_for_status()
|
||||
|
||||
user_model = r.json()
|
||||
user_model.pop('last_activity')
|
||||
assert user_model == {
|
||||
'name': username,
|
||||
'groups': [],
|
||||
'kind': 'user',
|
||||
'admin': False,
|
||||
'pending': None,
|
||||
'server': user.url,
|
||||
'servers': {
|
||||
name: {
|
||||
'name': name,
|
||||
'url': url_path_join(user.url, name, '/'),
|
||||
}
|
||||
for name in ['', servername]
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
@pytest.mark.gen_test
|
||||
def test_delete_named_server(app, named_servers):
|
||||
@@ -65,13 +137,13 @@ def test_delete_named_server(app, named_servers):
|
||||
'kind': 'user',
|
||||
'admin': False,
|
||||
'pending': None,
|
||||
'server': None,
|
||||
'server': user.url,
|
||||
'servers': {
|
||||
name: {
|
||||
'name': name,
|
||||
'url': url_path_join(user.url, name),
|
||||
'url': url_path_join(user.url, name, '/'),
|
||||
}
|
||||
for name in ['1', servername]
|
||||
for name in ['']
|
||||
},
|
||||
}
|
||||
|
||||
|
@@ -14,6 +14,7 @@ from .. import objects
|
||||
from .. import crypto
|
||||
from ..user import User
|
||||
from .mocking import MockSpawner
|
||||
from ..emptyclass import EmptyClass
|
||||
|
||||
|
||||
def test_server(db):
|
||||
@@ -167,6 +168,7 @@ def test_spawn_fails(db):
|
||||
user = User(orm_user, {
|
||||
'spawner_class': BadSpawner,
|
||||
'config': None,
|
||||
'statsd': EmptyClass(),
|
||||
})
|
||||
|
||||
with pytest.raises(RuntimeError) as exc:
|
||||
|
@@ -85,11 +85,25 @@ def test_admin_not_admin(app):
|
||||
@pytest.mark.gen_test
|
||||
def test_admin(app):
|
||||
cookies = yield app.login_user('admin')
|
||||
r = yield get_page('admin', app, cookies=cookies)
|
||||
r = yield get_page('admin', app, cookies=cookies, allow_redirects=False)
|
||||
r.raise_for_status()
|
||||
assert r.url.endswith('/admin')
|
||||
|
||||
|
||||
@pytest.mark.parametrize('sort', [
|
||||
'running',
|
||||
'last_activity',
|
||||
'admin',
|
||||
'name',
|
||||
])
|
||||
@pytest.mark.gen_test
|
||||
def test_admin_sort(app, sort):
|
||||
cookies = yield app.login_user('admin')
|
||||
r = yield get_page('admin?sort=%s' % sort, app, cookies=cookies)
|
||||
r.raise_for_status()
|
||||
assert r.status_code == 200
|
||||
|
||||
|
||||
@pytest.mark.gen_test
|
||||
def test_spawn_redirect(app):
|
||||
name = 'wash'
|
||||
@@ -112,7 +126,7 @@ def test_spawn_redirect(app):
|
||||
# should have started server
|
||||
status = yield u.spawner.poll()
|
||||
assert status is None
|
||||
|
||||
|
||||
# test spawn page when server is already running (just redirect)
|
||||
r = yield get_page('spawn', app, cookies=cookies)
|
||||
r.raise_for_status()
|
||||
@@ -120,6 +134,16 @@ def test_spawn_redirect(app):
|
||||
path = urlparse(r.url).path
|
||||
assert path == ujoin(app.base_url, '/user/%s/' % name)
|
||||
|
||||
# stop server to ensure /user/name is handled by the Hub
|
||||
r = yield api_request(app, 'users', name, 'server', method='delete', cookies=cookies)
|
||||
r.raise_for_status()
|
||||
|
||||
# test handing of trailing slash on `/user/name`
|
||||
r = yield get_page('user/' + name, app, hub=False, cookies=cookies)
|
||||
r.raise_for_status()
|
||||
path = urlparse(r.url).path
|
||||
assert path == ujoin(app.base_url, '/user/%s/' % name)
|
||||
|
||||
|
||||
@pytest.mark.gen_test
|
||||
def test_spawn_page(app):
|
||||
@@ -320,6 +344,19 @@ def test_auto_login(app, request):
|
||||
r = yield async_requests.get(base_url)
|
||||
assert r.url == public_url(app, path='hub/dummy')
|
||||
|
||||
@pytest.mark.gen_test
|
||||
def test_auto_login_logout(app):
|
||||
name = 'burnham'
|
||||
cookies = yield app.login_user(name)
|
||||
|
||||
with mock.patch.dict(app.tornado_application.settings, {
|
||||
'authenticator': Authenticator(auto_login=True),
|
||||
}):
|
||||
r = yield async_requests.get(public_host(app) + app.tornado_settings['logout_url'], cookies=cookies)
|
||||
r.raise_for_status()
|
||||
logout_url = public_host(app) + app.tornado_settings['logout_url']
|
||||
assert r.url == logout_url
|
||||
assert r.cookies == {}
|
||||
|
||||
@pytest.mark.gen_test
|
||||
def test_logout(app):
|
||||
|
@@ -16,6 +16,7 @@ import requests_mock
|
||||
from tornado.ioloop import IOLoop
|
||||
from tornado.httpserver import HTTPServer
|
||||
from tornado.web import RequestHandler, Application, authenticated, HTTPError
|
||||
from tornado.httputil import url_concat
|
||||
|
||||
from ..services.auth import _ExpiringDict, HubAuth, HubAuthenticated
|
||||
from ..utils import url_path_join
|
||||
@@ -279,7 +280,7 @@ def test_hubauth_service_token(app, mockservice_url):
|
||||
name = 'test-api-service'
|
||||
app.service_tokens[token] = name
|
||||
yield app.init_api_tokens()
|
||||
|
||||
|
||||
# token in Authorization header
|
||||
r = yield async_requests.get(public_url(app, mockservice_url) + '/whoami/',
|
||||
headers={
|
||||
@@ -292,6 +293,7 @@ def test_hubauth_service_token(app, mockservice_url):
|
||||
'name': name,
|
||||
'admin': False,
|
||||
}
|
||||
assert not r.cookies
|
||||
|
||||
# token in ?token parameter
|
||||
r = yield async_requests.get(public_url(app, mockservice_url) + '/whoami/?token=%s' % token)
|
||||
@@ -315,15 +317,25 @@ def test_hubauth_service_token(app, mockservice_url):
|
||||
|
||||
@pytest.mark.gen_test
|
||||
def test_oauth_service(app, mockservice_url):
|
||||
url = url_path_join(public_url(app, mockservice_url) + 'owhoami/')
|
||||
service = mockservice_url
|
||||
url = url_path_join(public_url(app, mockservice_url) + 'owhoami/?arg=x')
|
||||
# first request is only going to set login cookie
|
||||
# FIXME: redirect to originating URL (OAuth loses this info)
|
||||
s = requests.Session()
|
||||
s.cookies = yield app.login_user('link')
|
||||
name = 'link'
|
||||
s.cookies = yield app.login_user(name)
|
||||
# run session.get in async_requests thread
|
||||
s_get = lambda *args, **kwargs: async_requests.executor.submit(s.get, *args, **kwargs)
|
||||
r = yield s_get(url)
|
||||
r.raise_for_status()
|
||||
assert r.url == url
|
||||
# verify oauth cookie is set
|
||||
assert 'service-%s' % service.name in set(s.cookies.keys())
|
||||
# verify oauth state cookie has been consumed
|
||||
assert 'service-%s-oauth-state' % service.name not in set(s.cookies.keys())
|
||||
# verify oauth state cookie was set at some point
|
||||
assert set(r.history[0].cookies.keys()) == {'service-%s-oauth-state' % service.name}
|
||||
|
||||
# second request should be authenticated
|
||||
r = yield s_get(url, allow_redirects=False)
|
||||
r.raise_for_status()
|
||||
@@ -335,3 +347,82 @@ def test_oauth_service(app, mockservice_url):
|
||||
'kind': 'user',
|
||||
}
|
||||
|
||||
# token-authenticated request to HubOAuth
|
||||
token = app.users[name].new_api_token()
|
||||
# token in ?token parameter
|
||||
r = yield async_requests.get(url_concat(url, {'token': token}))
|
||||
r.raise_for_status()
|
||||
reply = r.json()
|
||||
assert reply['name'] == name
|
||||
|
||||
# verify that ?token= requests set a cookie
|
||||
assert len(r.cookies) != 0
|
||||
# ensure cookie works in future requests
|
||||
r = yield async_requests.get(
|
||||
url,
|
||||
cookies=r.cookies,
|
||||
allow_redirects=False,
|
||||
)
|
||||
r.raise_for_status()
|
||||
assert r.url == url
|
||||
reply = r.json()
|
||||
assert reply['name'] == name
|
||||
|
||||
|
||||
@pytest.mark.gen_test
|
||||
def test_oauth_cookie_collision(app, mockservice_url):
|
||||
service = mockservice_url
|
||||
url = url_path_join(public_url(app, mockservice_url) + 'owhoami/')
|
||||
print(url)
|
||||
s = requests.Session()
|
||||
name = 'mypha'
|
||||
s.cookies = yield app.login_user(name)
|
||||
# run session.get in async_requests thread
|
||||
s_get = lambda *args, **kwargs: async_requests.executor.submit(s.get, *args, **kwargs)
|
||||
state_cookie_name = 'service-%s-oauth-state' % service.name
|
||||
service_cookie_name = 'service-%s' % service.name
|
||||
oauth_1 = yield s_get(url, allow_redirects=False)
|
||||
print(oauth_1.headers)
|
||||
print(oauth_1.cookies, oauth_1.url, url)
|
||||
assert state_cookie_name in s.cookies
|
||||
state_cookies = [ s for s in s.cookies.keys() if s.startswith(state_cookie_name) ]
|
||||
# only one state cookie
|
||||
assert state_cookies == [state_cookie_name]
|
||||
state_1 = s.cookies[state_cookie_name]
|
||||
|
||||
# start second oauth login before finishing the first
|
||||
oauth_2 = yield s_get(url, allow_redirects=False)
|
||||
state_cookies = [ s for s in s.cookies.keys() if s.startswith(state_cookie_name) ]
|
||||
assert len(state_cookies) == 2
|
||||
# get the random-suffix cookie name
|
||||
state_cookie_2 = sorted(state_cookies)[-1]
|
||||
# we didn't clobber the default cookie
|
||||
assert s.cookies[state_cookie_name] == state_1
|
||||
|
||||
# finish oauth 2
|
||||
url = oauth_2.headers['Location']
|
||||
if not urlparse(url).netloc:
|
||||
url = public_host(app) + url
|
||||
r = yield s_get(url)
|
||||
r.raise_for_status()
|
||||
# after finishing, state cookie is cleared
|
||||
assert state_cookie_2 not in s.cookies
|
||||
# service login cookie is set
|
||||
assert service_cookie_name in s.cookies
|
||||
service_cookie_2 = s.cookies[service_cookie_name]
|
||||
|
||||
# finish oauth 1
|
||||
url = oauth_1.headers['Location']
|
||||
if not urlparse(url).netloc:
|
||||
url = public_host(app) + url
|
||||
r = yield s_get(url)
|
||||
r.raise_for_status()
|
||||
# after finishing, state cookie is cleared (again)
|
||||
assert state_cookie_name not in s.cookies
|
||||
# service login cookie is set (again, to a different value)
|
||||
assert service_cookie_name in s.cookies
|
||||
assert s.cookies[service_cookie_name] != service_cookie_2
|
||||
|
||||
# after completing both OAuth logins, no OAuth state cookies remain
|
||||
state_cookies = [ s for s in s.cookies.keys() if s.startswith(state_cookie_name) ]
|
||||
assert state_cookies == []
|
||||
|
@@ -15,11 +15,13 @@ from unittest import mock
|
||||
import pytest
|
||||
from tornado import gen
|
||||
|
||||
from ..user import User
|
||||
from ..objects import Hub, Server
|
||||
from .. import orm
|
||||
from .. import spawner as spawnermod
|
||||
from ..spawner import LocalProcessSpawner, Spawner
|
||||
from .. import orm
|
||||
from ..user import User
|
||||
from ..utils import new_token
|
||||
from .test_api import add_user
|
||||
from .utils import async_requests
|
||||
|
||||
_echo_sleep = """
|
||||
@@ -49,9 +51,9 @@ def new_spawner(db, **kwargs):
|
||||
kwargs.setdefault('notebook_dir', os.getcwd())
|
||||
kwargs.setdefault('default_url', '/user/{username}/lab')
|
||||
kwargs.setdefault('oauth_client_id', 'mock-client-id')
|
||||
kwargs.setdefault('INTERRUPT_TIMEOUT', 1)
|
||||
kwargs.setdefault('TERM_TIMEOUT', 1)
|
||||
kwargs.setdefault('KILL_TIMEOUT', 1)
|
||||
kwargs.setdefault('interrupt_timeout', 1)
|
||||
kwargs.setdefault('term_timeout', 1)
|
||||
kwargs.setdefault('kill_timeout', 1)
|
||||
kwargs.setdefault('poll_interval', 1)
|
||||
return user._new_spawner('', spawner_class=LocalProcessSpawner, **kwargs)
|
||||
|
||||
@@ -270,3 +272,111 @@ def test_inherit_ok():
|
||||
|
||||
def poll():
|
||||
pass
|
||||
|
||||
|
||||
@pytest.mark.gen_test
|
||||
def test_spawner_reuse_api_token(db, app):
|
||||
# setup: user with no tokens, whose spawner has set the .will_resume flag
|
||||
user = add_user(app.db, app, name='snoopy')
|
||||
spawner = user.spawner
|
||||
assert user.api_tokens == []
|
||||
# will_resume triggers reuse of tokens
|
||||
spawner.will_resume = True
|
||||
# first start: gets a new API token
|
||||
yield user.spawn()
|
||||
api_token = spawner.api_token
|
||||
found = orm.APIToken.find(app.db, api_token)
|
||||
assert found
|
||||
assert found.user.name == user.name
|
||||
assert user.api_tokens == [found]
|
||||
yield user.stop()
|
||||
# second start: should reuse the token
|
||||
yield user.spawn()
|
||||
# verify re-use of API token
|
||||
assert spawner.api_token == api_token
|
||||
# verify that a new token was not created
|
||||
assert user.api_tokens == [found]
|
||||
|
||||
|
||||
@pytest.mark.gen_test
|
||||
def test_spawner_insert_api_token(app):
|
||||
"""Token provided by spawner is not in the db
|
||||
|
||||
Insert token into db as a user-provided token.
|
||||
"""
|
||||
# setup: new user, double check that they don't have any tokens registered
|
||||
user = add_user(app.db, app, name='tonkee')
|
||||
spawner = user.spawner
|
||||
assert user.api_tokens == []
|
||||
|
||||
# setup: spawner's going to use a token that's not in the db
|
||||
api_token = new_token()
|
||||
assert not orm.APIToken.find(app.db, api_token)
|
||||
user.spawner.use_this_api_token = api_token
|
||||
# The spawner's provided API token would already be in the db
|
||||
# unless there is a bug somewhere else (in the Spawner),
|
||||
# but handle it anyway.
|
||||
yield user.spawn()
|
||||
assert spawner.api_token == api_token
|
||||
found = orm.APIToken.find(app.db, api_token)
|
||||
assert found
|
||||
assert found.user.name == user.name
|
||||
assert user.api_tokens == [found]
|
||||
yield user.stop()
|
||||
|
||||
|
||||
@pytest.mark.gen_test
|
||||
def test_spawner_bad_api_token(app):
|
||||
"""Tokens are revoked when a Spawner gets another user's token"""
|
||||
# we need two users for this one
|
||||
user = add_user(app.db, app, name='antimone')
|
||||
spawner = user.spawner
|
||||
other_user = add_user(app.db, app, name='alabaster')
|
||||
assert user.api_tokens == []
|
||||
assert other_user.api_tokens == []
|
||||
|
||||
# create a token owned by alabaster that antimone's going to try to use
|
||||
other_token = other_user.new_api_token()
|
||||
spawner.use_this_api_token = other_token
|
||||
assert len(other_user.api_tokens) == 1
|
||||
|
||||
# starting a user's server with another user's token
|
||||
# should revoke it
|
||||
with pytest.raises(ValueError):
|
||||
yield user.spawn()
|
||||
assert orm.APIToken.find(app.db, other_token) is None
|
||||
assert other_user.api_tokens == []
|
||||
|
||||
|
||||
@pytest.mark.gen_test
|
||||
def test_spawner_delete_server(app):
|
||||
"""Test deleting spawner.server
|
||||
|
||||
This can occur during app startup if their server has been deleted.
|
||||
"""
|
||||
db = app.db
|
||||
user = add_user(app.db, app, name='gaston')
|
||||
spawner = user.spawner
|
||||
orm_server = orm.Server()
|
||||
db.add(orm_server)
|
||||
db.commit()
|
||||
server_id = orm_server.id
|
||||
spawner.server = Server.from_orm(orm_server)
|
||||
db.commit()
|
||||
|
||||
assert spawner.server is not None
|
||||
assert spawner.orm_spawner.server is not None
|
||||
|
||||
# trigger delete via db
|
||||
db.delete(spawner.orm_spawner.server)
|
||||
db.commit()
|
||||
assert spawner.orm_spawner.server is None
|
||||
|
||||
# setting server = None also triggers delete
|
||||
spawner.server = None
|
||||
db.commit()
|
||||
# verify that the server was actually deleted from the db
|
||||
assert db.query(orm.Server).filter(orm.Server.id == server_id).first() is None
|
||||
# verify that both ORM and top-level references are None
|
||||
assert spawner.orm_spawner.server is None
|
||||
assert spawner.server is None
|
||||
|
@@ -34,18 +34,27 @@ def test_memoryspec():
|
||||
c = C()
|
||||
|
||||
c.mem = 1024
|
||||
assert isinstance(c.mem, int)
|
||||
assert c.mem == 1024
|
||||
|
||||
c.mem = '1024K'
|
||||
assert isinstance(c.mem, int)
|
||||
assert c.mem == 1024 * 1024
|
||||
|
||||
c.mem = '1024M'
|
||||
assert isinstance(c.mem, int)
|
||||
assert c.mem == 1024 * 1024 * 1024
|
||||
|
||||
c.mem = '1.5M'
|
||||
assert isinstance(c.mem, int)
|
||||
assert c.mem == 1.5 * 1024 * 1024
|
||||
|
||||
c.mem = '1024G'
|
||||
assert isinstance(c.mem, int)
|
||||
assert c.mem == 1024 * 1024 * 1024 * 1024
|
||||
|
||||
c.mem = '1024T'
|
||||
assert isinstance(c.mem, int)
|
||||
assert c.mem == 1024 * 1024 * 1024 * 1024 * 1024
|
||||
|
||||
with pytest.raises(TraitError):
|
||||
|
@@ -48,7 +48,7 @@ class ByteSpecification(Integer):
|
||||
'K': 1024,
|
||||
'M': 1024 * 1024,
|
||||
'G': 1024 * 1024 * 1024,
|
||||
'T': 1024 * 1024 * 1024 * 1024
|
||||
'T': 1024 * 1024 * 1024 * 1024,
|
||||
}
|
||||
|
||||
# Default to allowing None as a value
|
||||
@@ -62,11 +62,15 @@ class ByteSpecification(Integer):
|
||||
If it has one of the suffixes, it is converted into the appropriate
|
||||
pure byte value.
|
||||
"""
|
||||
if isinstance(value, int):
|
||||
return value
|
||||
num = value[:-1]
|
||||
if isinstance(value, (int, float)):
|
||||
return int(value)
|
||||
|
||||
try:
|
||||
num = float(value[:-1])
|
||||
except ValueError:
|
||||
raise TraitError('{val} is not a valid memory specification. Must be an int or a string with suffix K, M, G, T'.format(val=value))
|
||||
suffix = value[-1]
|
||||
if not num.isdigit() and suffix not in ByteSpecification.UNIT_SUFFIXES:
|
||||
if suffix not in self.UNIT_SUFFIXES:
|
||||
raise TraitError('{val} is not a valid memory specification. Must be an int or a string with suffix K, M, G, T'.format(val=value))
|
||||
else:
|
||||
return int(num) * ByteSpecification.UNIT_SUFFIXES[suffix]
|
||||
return int(float(num) * self.UNIT_SUFFIXES[suffix])
|
||||
|
@@ -12,7 +12,7 @@ from tornado import gen
|
||||
from tornado.log import app_log
|
||||
from traitlets import HasTraits, Any, Dict, default
|
||||
|
||||
from .utils import url_path_join, default_server_name
|
||||
from .utils import url_path_join
|
||||
|
||||
from . import orm
|
||||
from ._version import _check_version, __version__
|
||||
@@ -201,6 +201,7 @@ class User(HasTraits):
|
||||
authenticator=self.authenticator,
|
||||
config=self.settings.get('config'),
|
||||
proxy_spec=url_path_join(self.proxy_spec, name, '/'),
|
||||
db=self.db,
|
||||
)
|
||||
# update with kwargs. Mainly for testing.
|
||||
spawn_kwargs.update(kwargs)
|
||||
@@ -237,7 +238,7 @@ class User(HasTraits):
|
||||
def running(self):
|
||||
"""property for whether the user's default server is running"""
|
||||
return self.spawner.ready
|
||||
|
||||
|
||||
@property
|
||||
def active(self):
|
||||
"""True if any server is active"""
|
||||
@@ -317,8 +318,6 @@ class User(HasTraits):
|
||||
url of the server will be /user/:name/:server_name
|
||||
"""
|
||||
db = self.db
|
||||
if self.allow_named_servers and not server_name:
|
||||
server_name = default_server_name(self)
|
||||
|
||||
base_url = url_path_join(self.base_url, server_name) + '/'
|
||||
|
||||
@@ -356,12 +355,11 @@ class User(HasTraits):
|
||||
oauth_client = client_store.fetch_by_client_id(client_id)
|
||||
except ClientNotFoundError:
|
||||
oauth_client = None
|
||||
# create a new OAuth client + secret on every launch,
|
||||
# except for resuming containers.
|
||||
if oauth_client is None or not spawner.will_resume:
|
||||
client_store.add_client(client_id, api_token,
|
||||
url_path_join(self.url, 'oauth_callback'),
|
||||
)
|
||||
# create a new OAuth client + secret on every launch
|
||||
# containers that resume will be updated below
|
||||
client_store.add_client(client_id, api_token,
|
||||
url_path_join(self.url, server_name, 'oauth_callback'),
|
||||
)
|
||||
db.commit()
|
||||
|
||||
# trigger pre-spawn hook on authenticator
|
||||
@@ -369,7 +367,7 @@ class User(HasTraits):
|
||||
if (authenticator):
|
||||
yield gen.maybe_future(authenticator.pre_spawn_start(self, spawner))
|
||||
|
||||
spawner._spawn_pending = True
|
||||
spawner._start_pending = True
|
||||
# wait for spawner.start to return
|
||||
try:
|
||||
# run optional preparation work to bootstrap the notebook
|
||||
@@ -385,22 +383,50 @@ class User(HasTraits):
|
||||
# prior to 0.7, spawners had to store this info in user.server themselves.
|
||||
# Handle < 0.7 behavior with a warning, assuming info was stored in db by the Spawner.
|
||||
self.log.warning("DEPRECATION: Spawner.start should return (ip, port) in JupyterHub >= 0.7")
|
||||
if spawner.api_token != api_token:
|
||||
if spawner.api_token and spawner.api_token != api_token:
|
||||
# Spawner re-used an API token, discard the unused api_token
|
||||
orm_token = orm.APIToken.find(self.db, api_token)
|
||||
if orm_token is not None:
|
||||
self.db.delete(orm_token)
|
||||
self.db.commit()
|
||||
# check if the re-used API token is valid
|
||||
found = orm.APIToken.find(self.db, spawner.api_token)
|
||||
if found:
|
||||
if found.user is not self.orm_user:
|
||||
self.log.error("%s's server is using %s's token! Revoking this token.",
|
||||
self.name, (found.user or found.service).name)
|
||||
self.db.delete(found)
|
||||
self.db.commit()
|
||||
raise ValueError("Invalid token for %s!" % self.name)
|
||||
else:
|
||||
# Spawner.api_token has changed, but isn't in the db.
|
||||
# What happened? Maybe something unclean in a resumed container.
|
||||
self.log.warning("%s's server specified its own API token that's not in the database",
|
||||
self.name
|
||||
)
|
||||
# use generated=False because we don't trust this token
|
||||
# to have been generated properly
|
||||
self.new_api_token(spawner.api_token, generated=False)
|
||||
# update OAuth client secret with updated API token
|
||||
if oauth_provider:
|
||||
client_store = oauth_provider.client_authenticator.client_store
|
||||
client_store.add_client(client_id, spawner.api_token,
|
||||
url_path_join(self.url, server_name, 'oauth_callback'),
|
||||
)
|
||||
db.commit()
|
||||
|
||||
except Exception as e:
|
||||
if isinstance(e, gen.TimeoutError):
|
||||
self.log.warning("{user}'s server failed to start in {s} seconds, giving up".format(
|
||||
user=self.name, s=spawner.start_timeout,
|
||||
))
|
||||
e.reason = 'timeout'
|
||||
self.settings['statsd'].incr('spawner.failure.timeout')
|
||||
else:
|
||||
self.log.error("Unhandled error starting {user}'s server: {error}".format(
|
||||
user=self.name, error=e,
|
||||
))
|
||||
self.settings['statsd'].incr('spawner.failure.error')
|
||||
e.reason = 'error'
|
||||
try:
|
||||
yield self.stop()
|
||||
@@ -409,6 +435,7 @@ class User(HasTraits):
|
||||
user=self.name,
|
||||
), exc_info=True)
|
||||
# raise original exception
|
||||
spawner._start_pending = False
|
||||
raise e
|
||||
spawner.start_polling()
|
||||
|
||||
@@ -432,11 +459,13 @@ class User(HasTraits):
|
||||
)
|
||||
)
|
||||
e.reason = 'timeout'
|
||||
self.settings['statsd'].incr('spawner.failure.http_timeout')
|
||||
else:
|
||||
e.reason = 'error'
|
||||
self.log.error("Unhandled error waiting for {user}'s server to show up at {url}: {error}".format(
|
||||
user=self.name, url=server.url, error=e,
|
||||
))
|
||||
self.settings['statsd'].incr('spawner.failure.http_error')
|
||||
try:
|
||||
yield self.stop()
|
||||
except Exception:
|
||||
@@ -448,9 +477,12 @@ class User(HasTraits):
|
||||
else:
|
||||
server_version = resp.headers.get('X-JupyterHub-Version')
|
||||
_check_version(__version__, server_version, self.log)
|
||||
# record the Spawner version for better error messages
|
||||
# if it doesn't work
|
||||
spawner._jupyterhub_version = server_version
|
||||
finally:
|
||||
spawner._waiting_for_response = False
|
||||
spawner._spawn_pending = False
|
||||
spawner._start_pending = False
|
||||
return self
|
||||
|
||||
@gen.coroutine
|
||||
@@ -461,6 +493,7 @@ class User(HasTraits):
|
||||
"""
|
||||
spawner = self.spawners[server_name]
|
||||
spawner._spawn_pending = False
|
||||
spawner._start_pending = False
|
||||
spawner.stop_polling()
|
||||
spawner._stop_pending = True
|
||||
try:
|
||||
|
@@ -142,7 +142,8 @@ def wait_for_server(ip, port, timeout=10):
|
||||
ip = '127.0.0.1'
|
||||
yield exponential_backoff(
|
||||
lambda: can_connect(ip, port),
|
||||
"Server at {ip}:{port} didn't respond in {timeout} seconds".format(ip=ip, port=port, timeout=timeout)
|
||||
"Server at {ip}:{port} didn't respond in {timeout} seconds".format(ip=ip, port=port, timeout=timeout),
|
||||
timeout=timeout
|
||||
)
|
||||
|
||||
|
||||
@@ -175,7 +176,8 @@ def wait_for_http_server(url, timeout=10):
|
||||
return False
|
||||
re = yield exponential_backoff(
|
||||
is_reachable,
|
||||
"Server at {url} didn't respond in {timeout} seconds".format(url=url, timeout=timeout)
|
||||
"Server at {url} didn't respond in {timeout} seconds".format(url=url, timeout=timeout),
|
||||
timeout=timeout
|
||||
)
|
||||
return re
|
||||
|
||||
@@ -296,17 +298,3 @@ def url_path_join(*pieces):
|
||||
|
||||
return result
|
||||
|
||||
|
||||
def default_server_name(user):
|
||||
"""Return the default name for a new server for a given user.
|
||||
|
||||
Will be the first available integer string, e.g. '1' or '2'.
|
||||
"""
|
||||
existing_names = set(user.spawners)
|
||||
# if there are 5 servers, count from 1 to 6
|
||||
for n in range(1, len(existing_names) + 2):
|
||||
name = str(n)
|
||||
if name not in existing_names:
|
||||
return name
|
||||
raise RuntimeError("It should be impossible to get here")
|
||||
|
||||
|
40
package.json
40
package.json
@@ -1,17 +1,27 @@
|
||||
{
|
||||
"name": "jupyterhub-deps",
|
||||
"version": "0.0.0",
|
||||
"description": "JupyterHub nodejs dependencies",
|
||||
"author": "Jupyter Developers",
|
||||
"license": "BSD",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/jupyter/jupyterhub.git"
|
||||
},
|
||||
"devDependencies": {
|
||||
"bower": "*",
|
||||
"less": "^2.7.1",
|
||||
"less-plugin-clean-css": "^1.5.1",
|
||||
"clean-css": "^3.4.13"
|
||||
}
|
||||
"name": "jupyterhub-deps",
|
||||
"version": "0.0.0",
|
||||
"description": "JupyterHub nodejs dependencies",
|
||||
"author": "Jupyter Developers",
|
||||
"license": "BSD",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/jupyter/jupyterhub.git"
|
||||
},
|
||||
"scripts": {
|
||||
"postinstall": "./bower-lite",
|
||||
"lessc": "lessc"
|
||||
},
|
||||
"devDependencies": {
|
||||
"less": "^2.7.1",
|
||||
"less-plugin-clean-css": "^1.5.1",
|
||||
"clean-css": "^3.4.13"
|
||||
},
|
||||
"dependencies": {
|
||||
"bootstrap": "^3.3.7",
|
||||
"font-awesome": "^4.7.0",
|
||||
"jquery": "^3.2.1",
|
||||
"moment": "^2.18.1",
|
||||
"requirejs": "^2.3.4"
|
||||
}
|
||||
}
|
||||
|
@@ -4,5 +4,5 @@ tornado>=4.1
|
||||
jinja2
|
||||
pamela
|
||||
python-oauth2>=1.0
|
||||
SQLAlchemy>=1.0
|
||||
SQLAlchemy>=1.1
|
||||
requests
|
||||
|
40
setup.py
40
setup.py
@@ -149,45 +149,34 @@ class BaseCommand(Command):
|
||||
return []
|
||||
|
||||
|
||||
class Bower(BaseCommand):
|
||||
class NPM(BaseCommand):
|
||||
description = "fetch static client-side components with bower"
|
||||
|
||||
user_options = []
|
||||
bower_dir = pjoin(static, 'components')
|
||||
node_modules = pjoin(here, 'node_modules')
|
||||
bower_dir = pjoin(static, 'components')
|
||||
|
||||
def should_run(self):
|
||||
if not os.path.exists(self.bower_dir):
|
||||
return True
|
||||
return mtime(self.bower_dir) < mtime(pjoin(here, 'bower.json'))
|
||||
|
||||
def should_run_npm(self):
|
||||
if not shutil.which('npm'):
|
||||
print("npm unavailable", file=sys.stderr)
|
||||
return False
|
||||
if not os.path.exists(self.bower_dir):
|
||||
return True
|
||||
if not os.path.exists(self.node_modules):
|
||||
return True
|
||||
if mtime(self.bower_dir) < mtime(self.node_modules):
|
||||
return True
|
||||
return mtime(self.node_modules) < mtime(pjoin(here, 'package.json'))
|
||||
|
||||
def run(self):
|
||||
if not self.should_run():
|
||||
print("bower dependencies up to date")
|
||||
print("npm dependencies up to date")
|
||||
return
|
||||
|
||||
if self.should_run_npm():
|
||||
print("installing build dependencies with npm")
|
||||
check_call(['npm', 'install', '--progress=false'], cwd=here, shell=shell)
|
||||
os.utime(self.node_modules)
|
||||
print("installing js dependencies with npm")
|
||||
check_call(['npm', 'install', '--progress=false'], cwd=here, shell=shell)
|
||||
os.utime(self.node_modules)
|
||||
|
||||
env = os.environ.copy()
|
||||
env['PATH'] = npm_path
|
||||
args = ['bower', 'install', '--allow-root', '--config.interactive=false']
|
||||
try:
|
||||
check_call(args, cwd=here, env=env, shell=shell)
|
||||
except OSError as e:
|
||||
print("Failed to run bower: %s" % e, file=sys.stderr)
|
||||
print("You can install js dependencies with `npm install`", file=sys.stderr)
|
||||
raise
|
||||
os.utime(self.bower_dir)
|
||||
# update data-files in case this created new files
|
||||
self.distribution.data_files = get_data_files()
|
||||
@@ -225,22 +214,21 @@ class CSS(BaseCommand):
|
||||
return
|
||||
|
||||
self.run_command('js')
|
||||
print("Building css with less")
|
||||
|
||||
style_less = pjoin(static, 'less', 'style.less')
|
||||
style_css = pjoin(static, 'css', 'style.min.css')
|
||||
sourcemap = style_css + '.map'
|
||||
|
||||
env = os.environ.copy()
|
||||
env['PATH'] = npm_path
|
||||
args = [
|
||||
'lessc', '--clean-css',
|
||||
'npm', 'run', 'lessc', '--', '--clean-css',
|
||||
'--source-map-basepath={}'.format(static),
|
||||
'--source-map={}'.format(sourcemap),
|
||||
'--source-map-rootpath=../',
|
||||
style_less, style_css,
|
||||
]
|
||||
try:
|
||||
check_call(args, cwd=here, env=env, shell=shell)
|
||||
check_call(args, cwd=here, shell=shell)
|
||||
except OSError as e:
|
||||
print("Failed to run lessc: %s" % e, file=sys.stderr)
|
||||
print("You can install js dependencies with `npm install`", file=sys.stderr)
|
||||
@@ -275,7 +263,7 @@ class bdist_egg_disabled(bdist_egg):
|
||||
|
||||
|
||||
setup_args['cmdclass'] = {
|
||||
'js': Bower,
|
||||
'js': NPM,
|
||||
'css': CSS,
|
||||
'build_py': js_css_first(build_py, strict=is_repo),
|
||||
'sdist': js_css_first(sdist, strict=True),
|
||||
|
@@ -32,9 +32,9 @@
|
||||
<tbody>
|
||||
<tr class="user-row add-user-row">
|
||||
<td colspan="12">
|
||||
<a id="add-users" class="col-xs-2 btn btn-default">Add Users</a>
|
||||
<a id="stop-all-servers" class="col-xs-2 col-xs-offset-5 btn btn-danger">Stop All</a>
|
||||
<a id="shutdown-hub" class="col-xs-2 col-xs-offset-1 btn btn-danger">Shutdown Hub</a>
|
||||
<a id="add-users" role="button" class="col-xs-2 btn btn-default">Add Users</a>
|
||||
<a id="stop-all-servers" role="button" class="col-xs-2 col-xs-offset-5 btn btn-danger">Stop All</a>
|
||||
<a id="shutdown-hub" role="button" class="col-xs-2 col-xs-offset-1 btn btn-danger">Shutdown Hub</a>
|
||||
</td>
|
||||
</tr>
|
||||
{% for u in users %}
|
||||
@@ -44,20 +44,20 @@
|
||||
<td class="admin-col col-sm-2">{% if u.admin %}admin{% endif %}</td>
|
||||
<td class="time-col col-sm-3">{{u.last_activity.isoformat() + 'Z'}}</td>
|
||||
<td class="server-col col-sm-2 text-center">
|
||||
<span class="stop-server btn btn-xs btn-danger {% if not u.running %}hidden{% endif %}">stop server</span>
|
||||
<span class="start-server btn btn-xs btn-success {% if u.running %}hidden{% endif %}">start server</span>
|
||||
<span role="button" class="stop-server btn btn-xs btn-danger {% if not u.running %}hidden{% endif %}">stop server</span>
|
||||
<span role="button" class="start-server btn btn-xs btn-success {% if u.running %}hidden{% endif %}">start server</span>
|
||||
</td>
|
||||
<td class="server-col col-sm-1 text-center">
|
||||
{% if admin_access %}
|
||||
<span class="access-server btn btn-xs btn-success {% if not u.running %}hidden{% endif %}">access server</span>
|
||||
<span role="button" class="access-server btn btn-xs btn-success {% if not u.running %}hidden{% endif %}">access server</span>
|
||||
{% endif %}
|
||||
</td>
|
||||
<td class="edit-col col-sm-1 text-center">
|
||||
<span class="edit-user btn btn-xs btn-primary">edit</span>
|
||||
<span role="button" class="edit-user btn btn-xs btn-primary">edit</span>
|
||||
</td>
|
||||
<td class="edit-col col-sm-1 text-center">
|
||||
{% if u.name != user.name %}
|
||||
<span class="delete-user btn btn-xs btn-danger">delete</span>
|
||||
<span role="button" class="delete-user btn btn-xs btn-danger">delete</span>
|
||||
{% endif %}
|
||||
</td>
|
||||
{% endblock user_row %}
|
||||
|
@@ -22,6 +22,11 @@
|
||||
{{message_html | safe}}
|
||||
</p>
|
||||
{% endif %}
|
||||
{% if extra_error_html %}
|
||||
<p>
|
||||
{{extra_error_html | safe}}
|
||||
</p>
|
||||
{% endif %}
|
||||
{% endblock error_detail %}
|
||||
</div>
|
||||
|
||||
|
@@ -6,9 +6,9 @@
|
||||
<div class="row">
|
||||
<div class="text-center">
|
||||
{% if user.running %}
|
||||
<a id="stop" class="btn btn-lg btn-danger">Stop My Server</a>
|
||||
<a id="stop" role="button" class="btn btn-lg btn-danger">Stop My Server</a>
|
||||
{% endif %}
|
||||
<a id="start" class="btn btn-lg btn-success" href="{{ url }}">
|
||||
<a id="start"role="button" class="btn btn-lg btn-success" href="{{ url }}">
|
||||
{% if not user.running %}
|
||||
Start
|
||||
{% endif %}
|
||||
@@ -24,4 +24,4 @@
|
||||
<script type="text/javascript">
|
||||
require(["home"]);
|
||||
</script>
|
||||
{% endblock %}
|
||||
{% endblock %}
|
||||
|
@@ -8,10 +8,10 @@
|
||||
{% block login %}
|
||||
<div id="login-main" class="container">
|
||||
{% if custom_html %}
|
||||
{{ custom_html }}
|
||||
{{ custom_html | safe }}
|
||||
{% elif login_service %}
|
||||
<div class="service-login">
|
||||
<a class='btn btn-jupyter btn-lg' href='{{authenticator_login_url}}'>
|
||||
<a role="button" class='btn btn-jupyter btn-lg' href='{{authenticator_login_url}}'>
|
||||
Sign in with {{login_service}}
|
||||
</a>
|
||||
</div>
|
||||
@@ -35,7 +35,7 @@
|
||||
<label for="username_input">Username:</label>
|
||||
<input
|
||||
id="username_input"
|
||||
type="username"
|
||||
type="text"
|
||||
autocapitalize="off"
|
||||
autocorrect="off"
|
||||
class="form-control"
|
||||
|
@@ -35,8 +35,8 @@
|
||||
<link rel="stylesheet" href="{{ static_url("css/style.min.css") }}" type="text/css"/>
|
||||
{% endblock %}
|
||||
<script src="{{static_url("components/requirejs/require.js") }}" type="text/javascript" charset="utf-8"></script>
|
||||
<script src="{{static_url("components/jquery/jquery.min.js") }}" type="text/javascript" charset="utf-8"></script>
|
||||
<script src="{{static_url("components/bootstrap/js/bootstrap.min.js") }}" type="text/javascript" charset="utf-8"></script>
|
||||
<script src="{{static_url("components/jquery/dist/jquery.min.js") }}" type="text/javascript" charset="utf-8"></script>
|
||||
<script src="{{static_url("components/bootstrap/dist/js/bootstrap.min.js") }}" type="text/javascript" charset="utf-8"></script>
|
||||
<script>
|
||||
require.config({
|
||||
{% if version_hash %}
|
||||
@@ -45,8 +45,8 @@
|
||||
baseUrl: '{{static_url("js", include_version=False)}}',
|
||||
paths: {
|
||||
components: '../components',
|
||||
jquery: '../components/jquery/jquery.min',
|
||||
bootstrap: '../components/bootstrap/js/bootstrap.min',
|
||||
jquery: '../components/jquery/dist/jquery.min',
|
||||
bootstrap: '../components/bootstrap/dist/js/bootstrap.min',
|
||||
moment: "../components/moment/moment",
|
||||
},
|
||||
shim: {
|
||||
@@ -99,9 +99,9 @@
|
||||
<ul class="nav navbar-nav">
|
||||
<li><a href="{{base_url}}home">Home</a></li>
|
||||
<li><a href="{{base_url}}token">Token</a></li>
|
||||
{% endif %}
|
||||
{% if user.admin %}
|
||||
<li><a href="{{base_url}}admin">Admin</a></li>
|
||||
{% endif %}
|
||||
</ul>
|
||||
{% endif %}
|
||||
<ul class="nav navbar-nav navbar-right">
|
||||
@@ -109,9 +109,9 @@
|
||||
{% block login_widget %}
|
||||
<span id="login_widget">
|
||||
{% if user %}
|
||||
<a id="logout" class="navbar-btn btn-sm btn btn-default" href="{{logout_url}}"> <i aria-hidden="true" class="fa fa-sign-out"></i> Logout</a>
|
||||
<a id="logout" role="button" class="navbar-btn btn-sm btn btn-default" href="{{logout_url}}"> <i aria-hidden="true" class="fa fa-sign-out"></i> Logout</a>
|
||||
{% else %}
|
||||
<a id="login" class="btn-sm btn navbar-btn btn-default" href="{{login_url}}">Login</a>
|
||||
<a id="login" role="button" class="btn-sm btn navbar-btn btn-default" href="{{login_url}}">Login</a>
|
||||
{% endif %}
|
||||
</span>
|
||||
{% endblock %}
|
||||
|
@@ -8,7 +8,7 @@
|
||||
<p>Your server is starting up.</p>
|
||||
<p>You will be redirected automatically when it's ready for you.</p>
|
||||
<p><i class="fa fa-spinner fa-pulse fa-fw fa-3x" aria-hidden="true"></i></p>
|
||||
<a id="refresh" class="btn btn-lg btn-primary" href="#">refresh</a>
|
||||
<a role="button" id="refresh" class="btn btn-lg btn-primary" href="#">refresh</a>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
@@ -5,7 +5,7 @@
|
||||
<div class="container">
|
||||
<div class="row">
|
||||
<div class="text-center">
|
||||
<a id="request-token" class="btn btn-lg btn-jupyter" href="#">
|
||||
<a id="request-token" role="button" class="btn btn-lg btn-jupyter" href="#">
|
||||
Request new API token
|
||||
</a>
|
||||
</div>
|
||||
|
12
singleuser/Dockerfile
Normal file
12
singleuser/Dockerfile
Normal file
@@ -0,0 +1,12 @@
|
||||
# Build as jupyterhub/singleuser
|
||||
# Run with the DockerSpawner in JupyterHub
|
||||
|
||||
ARG BASE_IMAGE=jupyter/base-notebook
|
||||
FROM $BASE_IMAGE
|
||||
MAINTAINER Project Jupyter <jupyter@googlegroups.com>
|
||||
|
||||
ADD install_jupyterhub /tmp/install_jupyterhub
|
||||
ARG JUPYTERHUB_VERSION=master
|
||||
# install pinned jupyterhub and ensure notebook is installed
|
||||
RUN python3 /tmp/install_jupyterhub && \
|
||||
python3 -m pip install notebook
|
36
singleuser/README.md
Normal file
36
singleuser/README.md
Normal file
@@ -0,0 +1,36 @@
|
||||
# jupyterhub/singleuser
|
||||
|
||||
Built from the `jupyter/base-notebook` base image.
|
||||
|
||||
This image contains a single user notebook server for use with
|
||||
[JupyterHub](https://github.com/jupyterhub/jupyterhub). In particular, it is meant
|
||||
to be used with the
|
||||
[DockerSpawner](https://github.com/jupyterhub/dockerspawner/blob/master/dockerspawner/dockerspawner.py)
|
||||
class to launch user notebook servers within docker containers.
|
||||
|
||||
The only thing this image accomplishes is pinning the jupyterhub version on top of base-notebook.
|
||||
In most cases, one of the Jupyter [docker-stacks](https://github.com/jupyter/docker-stacks) is a better choice.
|
||||
You will just have to make sure that you have the right version of JupyterHub installed in your image,
|
||||
which can usually be accomplished with one line:
|
||||
|
||||
```Dockerfile
|
||||
FROM jupyter/base-notebook:5ded1de07260
|
||||
RUN pip3 install jupyterhub==0.7.2
|
||||
```
|
||||
|
||||
The dockerfile that builds this image exposes `BASE_IMAGE` and `JUPYTERHUB_VERSION` as build args, so you can do:
|
||||
|
||||
docker build -t singleuser \
|
||||
--build-arg BASE_IMAGE=jupyter/scipy-notebook \
|
||||
--build-arg JUPYTERHUB_VERSION=0.8.0 \
|
||||
.
|
||||
|
||||
in this directory to get a new image `singleuser` that is based on `jupyter/scipy-notebook` with JupyterHub 0.8, for example.
|
||||
|
||||
This particular image runs as the `jovyan` user, with home directory at `/home/jovyan`.
|
||||
|
||||
## Note on persistence
|
||||
|
||||
This home directory, `/home/jovyan`, is *not* persistent by default,
|
||||
so some configuration is required unless the directory is to be used
|
||||
with temporary or demonstration JupyterHub deployments.
|
11
singleuser/hooks/build
Normal file
11
singleuser/hooks/build
Normal file
@@ -0,0 +1,11 @@
|
||||
#!/bin/bash
|
||||
set -ex
|
||||
|
||||
stable=0.7
|
||||
|
||||
for V in master 0.7; do
|
||||
docker build --build-arg JUPYTERHUB_VERSION=$V -t $DOCKER_REPO:$V .
|
||||
done
|
||||
|
||||
echo "tagging $IMAGE_NAME"
|
||||
docker tag $DOCKER_REPO:$stable $IMAGE_NAME
|
23
singleuser/hooks/post_push
Normal file
23
singleuser/hooks/post_push
Normal file
@@ -0,0 +1,23 @@
|
||||
#!/bin/bash
|
||||
|
||||
for V in master 0.7; do
|
||||
docker push $DOCKER_REPO:$V
|
||||
done
|
||||
|
||||
function get_hub_version() {
|
||||
rm -f hub_version
|
||||
V=$1
|
||||
docker run --rm -v $PWD:/version -u $(id -u) -i $DOCKER_REPO:$V sh -c 'jupyterhub --version > /version/hub_version'
|
||||
hub_xyz=$(cat hub_version)
|
||||
split=( ${hub_xyz//./ } )
|
||||
hub_xy="${split[0]}.${split[1]}"
|
||||
}
|
||||
# tag e.g. 0.7.2 with 0.7
|
||||
get_hub_version 0.7
|
||||
docker tag $DOCKER_REPO:0.7 $DOCKER_REPO:$hub_xyz
|
||||
docker push $DOCKER_REPO:$hub_xyz
|
||||
|
||||
# tag e.g. 0.8 with master
|
||||
get_hub_version master
|
||||
docker tag $DOCKER_REPO:master $DOCKER_REPO:$hub_xy
|
||||
docker push $DOCKER_REPO:$hub_xy
|
21
singleuser/install_jupyterhub
Normal file
21
singleuser/install_jupyterhub
Normal file
@@ -0,0 +1,21 @@
|
||||
#!/usr/bin/env python
|
||||
import os
|
||||
from subprocess import check_call
|
||||
import sys
|
||||
|
||||
V = os.environ['JUPYTERHUB_VERSION']
|
||||
|
||||
pip_install = [
|
||||
sys.executable, '-m', 'pip', 'install', '--no-cache', '--upgrade',
|
||||
'--upgrade-strategy', 'only-if-needed',
|
||||
]
|
||||
if V == 'master':
|
||||
req = 'https://github.com/jupyterhub/jupyterhub/archive/master.tar.gz'
|
||||
else:
|
||||
version_info = [ int(part) for part in V.split('.') ]
|
||||
version_info[-1] += 1
|
||||
upper_bound = '.'.join(map(str, version_info))
|
||||
vs = '>=%s,<%s' % (V, upper_bound)
|
||||
req = 'jupyterhub%s' % vs
|
||||
|
||||
check_call(pip_install + [req])
|
Reference in New Issue
Block a user