Compare commits

...

32 Commits

Author SHA1 Message Date
Min RK
8892270c24 0.8.0 2017-10-03 21:35:24 +02:00
Min RK
b928df6cba update changelog links for 0.8.0 release 2017-10-03 21:35:24 +02:00
Carol Willing
3fc74bd79e Merge pull request #1462 from minrk/proxy-docs
Document custom proxy implementations
2017-10-03 08:36:02 -07:00
Carol Willing
b34be77fec Merge pull request #1463 from minrk/auth-docs
Document auth_state
2017-10-03 08:29:45 -07:00
Min RK
d991c06098 document auth_state 2017-10-03 13:08:10 +02:00
Min RK
01a67ba156 document custom proxies 2017-10-03 12:42:52 +02:00
Min RK
8831573b6c typos in services.auth headings 2017-10-03 12:42:52 +02:00
Min RK
c5bc5411fb ignore docs/build 2017-10-03 12:42:52 +02:00
Carol Willing
a13ccd7530 Merge pull request #1461 from minrk/apache-docs
Update reverse proxy config examples
2017-10-03 02:46:27 -07:00
Min RK
e9a744e8b7 further clarify config-examples comments
per review
2017-10-03 10:19:43 +02:00
Min RK
582d43c153 add apache reverse proxy to config-examples 2017-10-02 18:18:03 +02:00
Min RK
7b5550928f mention how to generate dhparams
since we use them
2017-10-02 18:17:39 +02:00
Min RK
83920a3258 remove websocket-path-awareness from nginx config
using map, knowledge of the path is no longer necessary
2017-10-02 17:20:09 +02:00
Min RK
d1670aa443 fix mixed tabs and spaces 2017-10-02 16:19:21 +02:00
Min RK
c6f589124e Merge pull request #1458 from ryanlovett/master
Conditionally substitute $http_host for $host.
2017-09-29 16:06:56 +02:00
Carol Willing
35991e5194 Merge pull request #1455 from minrk/db-upgrade-test
Add db-upgrade test
2017-09-28 10:08:27 -07:00
Ryan Lovett
b956190393 Conditionally substitute $http_host for $host.
Necessary when using non-standard port. Closes #1457.
2017-09-28 09:40:51 -07:00
Min RK
122c989b7a specify mysql host and port explicitly
seems to be preferring MYSQL_UNIX_PORT
2017-09-28 18:20:57 +02:00
Min RK
5602575099 move db scripts to general ci directory
- remove shell test-db-upgrade test
- run mysql with docker on Travis because the version there is too old (< 5.7)
2017-09-28 16:20:15 +02:00
Min RK
4534499aad make db scripts accept one db at a time 2017-09-28 16:20:15 +02:00
Min RK
f733a91d7c avoid key length errors with old mysql + jupyterhub 0.7 2017-09-28 16:20:15 +02:00
Min RK
bf3fa30a01 load upgrade_db_url in test 2017-09-28 16:20:15 +02:00
Min RK
2625229847 note about venv 2017-09-28 16:20:15 +02:00
Min RK
2c3eb6d0d6 only count sqlite files when using sqlite 2017-09-28 15:09:17 +02:00
Min RK
5ff98fd1a5 run upgrade-tests on travis via pytest 2017-09-28 15:09:17 +02:00
Carol Willing
f79b71727b Merge pull request #1454 from minrk/auto-login-logout
typo rendering logout page when auto_login=True
2017-09-27 10:33:42 -07:00
Min RK
d3a3b8ca19 test db-upgrade on travis 2017-09-27 19:06:54 +02:00
Min RK
df9e002b9a separate docker-db from init-db
so we don't need docker on Travis
2017-09-27 19:05:55 +02:00
Min RK
a4a2c9d068 add tests for db upgrade with mysql, postgres 2017-09-27 18:41:08 +02:00
Min RK
c453e5ad20 mysql needs an extra step to drop _server_id 2017-09-27 18:34:54 +02:00
Min RK
617b879c2a stamp version before performing upgrade-db 2017-09-27 18:34:54 +02:00
Min RK
a0042e9302 typo rendering logout page when auto_login=True
and include it in test coverage
2017-09-27 14:29:56 +02:00
17 changed files with 530 additions and 76 deletions

1
.gitignore vendored
View File

@@ -6,6 +6,7 @@ node_modules
/build
dist
docs/_build
docs/build
docs/source/_static/rest-api
.ipynb_checkpoints
# ignore config file at the top-level of the repo

View File

@@ -11,8 +11,8 @@ env:
global:
- ASYNC_TEST_TIMEOUT=15
services:
- mysql
- postgresql
- postgres
- docker
# installing dependencies
before_install:
@@ -21,10 +21,12 @@ before_install:
- npm install -g configurable-http-proxy
- |
if [[ $JUPYTERHUB_TEST_DB_URL == mysql* ]]; then
mysql -e 'CREATE DATABASE jupyterhub CHARACTER SET utf8 COLLATE utf8_general_ci;'
unset MYSQL_UNIX_PORT
DB=mysql bash ci/docker-db.sh
DB=mysql bash ci/init-db.sh
pip install 'mysql-connector<2.2'
elif [[ $JUPYTERHUB_TEST_DB_URL == postgresql* ]]; then
psql -c 'create database jupyterhub;' -U postgres
DB=postgres bash ci/init-db.sh
pip install psycopg2
fi
install:
@@ -34,6 +36,20 @@ install:
# running tests
script:
- |
if [[ ! -z "$JUPYTERHUB_TEST_DB_URL" ]]; then
# if testing upgrade-db, run `jupyterhub token` with 0.7
# to initialize an old db. Used in upgrade-tests
export JUPYTERHUB_TEST_UPGRADE_DB_URL=${JUPYTERHUB_TEST_DB_URL}_upgrade
# use virtualenv instead of venv because venv doesn't work here
python -m pip install virtualenv
python -m virtualenv old-hub-env
./old-hub-env/bin/python -m pip install jupyterhub==0.7.2 psycopg2 'mysql-connector<2.2'
./old-hub-env/bin/jupyterhub token kaylee \
--JupyterHub.db_url=$JUPYTERHUB_TEST_UPGRADE_DB_URL \
--Authenticator.whitelist="{'kaylee'}" \
--JupyterHub.authenticator_class=jupyterhub.auth.Authenticator
fi
- pytest -v --maxfail=2 --cov=jupyterhub jupyterhub/tests
after_success:
- codecov
@@ -44,8 +60,12 @@ matrix:
- python: 3.6
env: JUPYTERHUB_TEST_SUBDOMAIN_HOST=http://localhost.jovyan.org:8000
- python: 3.6
env: JUPYTERHUB_TEST_DB_URL=mysql+mysqlconnector://root@127.0.0.1/jupyterhub
env:
- MYSQL_HOST=127.0.0.1
- MYSQL_TCP_PORT=13306
- JUPYTERHUB_TEST_DB_URL=mysql+mysqlconnector://root@127.0.0.1:$MYSQL_TCP_PORT/jupyterhub
- python: 3.6
env: JUPYTERHUB_TEST_DB_URL=postgresql://postgres@127.0.0.1/jupyterhub
env:
- JUPYTERHUB_TEST_DB_URL=postgresql://postgres@127.0.0.1/jupyterhub
allow_failures:
- python: nightly

View File

@@ -11,6 +11,7 @@ graft jupyterhub
graft scripts
graft share
graft singleuser
graft ci
# Documentation
graft docs

50
ci/docker-db.sh Normal file
View File

@@ -0,0 +1,50 @@
#!/usr/bin/env bash
# source this file to setup postgres and mysql
# for local testing (as similar as possible to docker)
set -e
export MYSQL_HOST=127.0.0.1
export MYSQL_TCP_PORT=${MYSQL_TCP_PORT:-13306}
export PGHOST=127.0.0.1
NAME="hub-test-$DB"
DOCKER_RUN="docker run --rm -d --name $NAME"
docker rm -f "$NAME" 2>/dev/null || true
case "$DB" in
"mysql")
RUN_ARGS="-e MYSQL_ALLOW_EMPTY_PASSWORD=1 -p $MYSQL_TCP_PORT:3306 mysql:5.7"
CHECK="mysql --host $MYSQL_HOST --port $MYSQL_TCP_PORT --user root -e \q"
;;
"postgres")
RUN_ARGS="-p 5432:5432 postgres:9.5"
CHECK="psql --user postgres -c \q"
;;
*)
echo '$DB must be mysql or postgres'
exit 1
esac
$DOCKER_RUN $RUN_ARGS
echo -n "waiting for $DB "
for i in {1..60}; do
if $CHECK; then
echo 'done'
break
else
echo -n '.'
sleep 1
fi
done
$CHECK
echo -e "
Set these environment variables:
export MYSQL_HOST=127.0.0.1
export MYSQL_TCP_PORT=$MYSQL_TCP_PORT
export PGHOST=127.0.0.1
"

27
ci/init-db.sh Normal file
View File

@@ -0,0 +1,27 @@
#!/usr/bin/env bash
# initialize jupyterhub databases for testing
set -e
MYSQL="mysql --user root --host $MYSQL_HOST --port $MYSQL_TCP_PORT -e "
PSQL="psql --user postgres -c "
case "$DB" in
"mysql")
EXTRA_CREATE='CHARACTER SET utf8 COLLATE utf8_general_ci'
SQL="$MYSQL"
;;
"postgres")
SQL="$PSQL"
;;
*)
echo '$DB must be mysql or postgres'
exit 1
esac
set -x
$SQL 'DROP DATABASE jupyterhub;' 2>/dev/null || true
$SQL "CREATE DATABASE jupyterhub ${EXTRA_CREATE};"
$SQL 'DROP DATABASE jupyterhub_upgrade;' 2>/dev/null || true
$SQL "CREATE DATABASE jupyterhub_upgrade ${EXTRA_CREATE};"

View File

@@ -17,7 +17,7 @@ Module: :mod:`jupyterhub.services.auth`
:members:
:class:`HubOAuth`
----------------
-----------------
.. autoconfigurable:: HubOAuth
:members:
@@ -30,7 +30,7 @@ Module: :mod:`jupyterhub.services.auth`
:members:
:class:`HubOAuthenticated`
-------------------------
--------------------------
.. autoclass:: HubOAuthenticated

View File

@@ -5,7 +5,9 @@ its link will bring up a GitHub listing of changes. Use `git log` on the
command line for details.
## [Unreleased] 0.8
## [Unreleased]
## [0.8.0] 2017-10-03
JupyterHub 0.8 is a big release!
@@ -23,7 +25,7 @@ in your Dockerfile is sufficient.
#### Added
- JupyterHub now defined a `.Proxy` API for custom
- JupyterHub now defines a `Proxy` API for custom
proxy implementations other than the default.
The defaults are unchanged,
but configuration of the proxy is now done on the `ConfigurableHTTPProxy` class instead of the top-level JupyterHub.
@@ -32,7 +34,7 @@ in your Dockerfile is sufficient.
(anything that uses HubAuth)
can now accept token-authenticated requests via the Authentication header.
- Authenticators can now store state in the Hub's database.
To do so, the `.authenticate` method should return a dict of the form
To do so, the `authenticate` method should return a dict of the form
```python
{
@@ -233,7 +235,8 @@ Fix removal of `/login` page in 0.4.0, breaking some OAuth providers.
First preview release
[Unreleased]: https://github.com/jupyterhub/jupyterhub/compare/0.7.2...HEAD
[Unreleased]: https://github.com/jupyterhub/jupyterhub/compare/0.8.0...HEAD
[0.8.0]: https://github.com/jupyterhub/jupyterhub/compare/0.7.2...0.8.0
[0.7.2]: https://github.com/jupyterhub/jupyterhub/compare/0.7.1...0.7.2
[0.7.1]: https://github.com/jupyterhub/jupyterhub/compare/0.7.0...0.7.1
[0.7.0]: https://github.com/jupyterhub/jupyterhub/compare/0.6.1...0.7.0

View File

@@ -84,6 +84,7 @@ class DictionaryAuthenticator(Authenticator):
return data['username']
```
#### Normalize usernames
Since the Authenticator and Spawner both use the same username,
@@ -116,6 +117,7 @@ To only allow usernames that start with 'w':
c.Authenticator.username_pattern = r'w.*'
```
### How to write a custom authenticator
You can use custom Authenticator subclasses to enable authentication
@@ -135,6 +137,77 @@ If you are interested in writing a custom authenticator, you can read
[this tutorial](http://jupyterhub-tutorial.readthedocs.io/en/latest/authenticators.html).
### Authentication state
JupyterHub 0.8 adds the ability to persist state related to authentication,
such as auth-related tokens.
If such state should be persisted, `.authenticate()` should return a dictionary of the form:
```python
{
    'username': 'name',
    'auth_state': {
        'key': 'value',
    }
}
```
where `username` is the username that has been authenticated,
and `auth_state` is any JSON-serializable dictionary.
Because `auth_state` may contain sensitive information,
it is encrypted before being stored in the database.
To store auth_state, two conditions must be met:
1. persisting auth state must be enabled explicitly via configuration
```python
c.Authenticator.enable_auth_state = True
```
2. encryption must be enabled by the presence of the `JUPYTERHUB_CRYPT_KEY` environment variable,
which should be a hex-encoded 32-byte key.
For example:
```bash
export JUPYTERHUB_CRYPT_KEY=$(openssl rand -hex 32)
```
JupyterHub uses [Fernet](https://cryptography.io/en/latest/fernet/) to encrypt auth_state.
To facilitate key-rotation, `JUPYTERHUB_CRYPT_KEY` may be a semicolon-separated list of encryption keys.
If there are multiple keys present, the **first** key is always used to persist any new auth_state.
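For example, a sketch of rotating in a new key (the variable names are illustrative): the new key is listed first so it encrypts newly persisted auth_state, while the old key remains in the list so existing auth_state can still be decrypted:

```bash
# new key first: used to encrypt any newly persisted auth_state;
# keep the old key in the list so existing auth_state can still be decrypted
export JUPYTERHUB_CRYPT_KEY="$(openssl rand -hex 32);$OLD_JUPYTERHUB_CRYPT_KEY"
```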
#### Using auth_state
Typically, if `auth_state` is persisted, it is desirable to affect the Spawner environment in some way.
This may mean defining environment variables, placing certificates in the user's home directory, etc.
The `Authenticator.pre_spawn_start` method can be used to pass information from authenticator state
to the Spawner environment:
```python
class MyAuthenticator(Authenticator):
    @gen.coroutine
    def authenticate(self, handler, data=None):
        username = yield identify_user(handler, data)
        upstream_token = yield token_for_user(username)
        return {
            'username': username,
            'auth_state': {
                'upstream_token': upstream_token,
            },
        }

    @gen.coroutine
    def pre_spawn_start(self, user, spawner):
        """Pass upstream_token to spawner via environment variable"""
        auth_state = yield user.get_auth_state()
        if not auth_state:
            # auth_state not enabled
            return
        spawner.environment['UPSTREAM_TOKEN'] = auth_state['upstream_token']
```
## JupyterHub as an OAuth provider
Beginning with version 0.8, JupyterHub is an OAuth provider.

View File

@@ -80,7 +80,7 @@ export CONFIGPROXY_AUTH_TOKEN=super-secret
jupyterhub -f /etc/jupyterhub/jupyterhub_config.py &>> /var/log/jupyterhub.log
```
## Using nginx reverse proxy
## Using a reverse proxy
In the following example, we show configuration files for a JupyterHub server
running locally on port `8000` but accessible from the outside on the standard
@@ -91,9 +91,9 @@ satisfy the following:
* JupyterHub is running on a server, accessed *only* via `HUB.DOMAIN.TLD:443`
* On the same machine, `NO_HUB.DOMAIN.TLD` strictly serves different content,
also on port `443`
* `nginx` is used to manage the web servers / reverse proxy (which means that
only nginx will be able to bind two servers to `443`)
* After testing, the server in question should be able to score an A+ on the
* `nginx` or `apache` is used as the public access point (which means that
only nginx/apache will bind to `443`)
* After testing, the server in question should be able to score at least an A on the
Qualys SSL Labs [SSL Server Test](https://www.ssllabs.com/ssltest/)
Let's start out with needed JupyterHub configuration in `jupyterhub_config.py`:
@@ -103,30 +103,47 @@ Let's start out with needed JupyterHub configuration in `jupyterhub_config.py`:
c.JupyterHub.ip = '127.0.0.1'
```
For high-quality SSL configuration, we also generate Diffie-Hellman parameters.
This can take a few minutes:
```bash
openssl dhparam -out /etc/ssl/certs/dhparam.pem 4096
```
### nginx
The **`nginx` server config file** is fairly standard fare except for the two
`location` blocks within the `HUB.DOMAIN.TLD` config file:
```bash
# top-level http config for websocket headers
# If Upgrade is defined, Connection = upgrade
# If Upgrade is empty, Connection = close
map $http_upgrade $connection_upgrade {
default upgrade;
'' close;
}
# HTTP server to redirect all 80 traffic to SSL/HTTPS
server {
listen 80;
server_name HUB.DOMAIN.TLD;
listen 80;
server_name HUB.DOMAIN.TLD;
# Tell all requests to port 80 to be 302 redirected to HTTPS
return 302 https://$host$request_uri;
# Tell all requests to port 80 to be 302 redirected to HTTPS
return 302 https://$host$request_uri;
}
# HTTPS server to handle JupyterHub
server {
listen 443;
ssl on;
listen 443;
ssl on;
server_name HUB.DOMAIN.TLD;
server_name HUB.DOMAIN.TLD;
ssl_certificate /etc/letsencrypt/live/HUB.DOMAIN.TLD/fullchain.pem;
ssl_certificate_key /etc/letsencrypt/live/HUB.DOMAIN.TLD/privkey.pem;
ssl_certificate /etc/letsencrypt/live/HUB.DOMAIN.TLD/fullchain.pem;
ssl_certificate_key /etc/letsencrypt/live/HUB.DOMAIN.TLD/privkey.pem;
ssl_protocols TLSv1 TLSv1.1 TLSv1.2;
ssl_protocols TLSv1 TLSv1.1 TLSv1.2;
ssl_prefer_server_ciphers on;
ssl_dhparam /etc/ssl/certs/dhparam.pem;
ssl_ciphers 'ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-AES256-GCM-SHA384:DHE-RSA-AES128-GCM-SHA256:DHE-DSS-AES128-GCM-SHA256:kEDH+AESGCM:ECDHE-RSA-AES128-SHA256:ECDHE-ECDSA-AES128-SHA256:ECDHE-RSA-AES128-SHA:ECDHE-ECDSA-AES128-SHA:ECDHE-RSA-AES256-SHA384:ECDHE-ECDSA-AES256-SHA384:ECDHE-RSA-AES256-SHA:ECDHE-ECDSA-AES256-SHA:DHE-RSA-AES128-SHA256:DHE-RSA-AES128-SHA:DHE-DSS-AES128-SHA256:DHE-RSA-AES256-SHA256:DHE-DSS-AES256-SHA:DHE-RSA-AES256-SHA:AES128-GCM-SHA256:AES256-GCM-SHA384:AES128-SHA256:AES256-SHA256:AES128-SHA:AES256-SHA:AES:CAMELLIA:DES-CBC3-SHA:!aNULL:!eNULL:!EXPORT:!DES:!RC4:!MD5:!PSK:!aECDH:!EDH-DSS-DES-CBC3-SHA:!EDH-RSA-DES-CBC3-SHA:!KRB5-DES-CBC3-SHA';
@@ -136,37 +153,28 @@ server {
ssl_stapling_verify on;
add_header Strict-Transport-Security max-age=15768000;
# Managing literal requests to the JupyterHub front end
location / {
proxy_pass https://127.0.0.1:8000;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header Host $host;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
# Managing literal requests to the JupyterHub front end
location / {
proxy_pass http://127.0.0.1:8000;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header Host $host;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
# websocket headers
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection $connection_upgrade;
}
# Managing WebHook/Socket requests between hub user servers and external proxy
location ~* /(api/kernels/[^/]+/(channels|iopub|shell|stdin)|terminals/websocket)/? {
proxy_pass https://127.0.0.1:8000;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header Host $host;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
# WebSocket support
proxy_http_version 1.1;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection $connection_upgrade;
}
# Managing requests to verify letsencrypt host
# Managing requests to verify letsencrypt host
location ~ /.well-known {
allow all;
allow all;
}
}
```
If `nginx` is not running on port 443, substitute `$http_host` for `$host` on
the lines setting the `Host` header.
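In both `location` blocks, that line would then read as follows (a sketch; `$http_host` includes the port, which `$host` does not):

```bash
    proxy_set_header Host $http_host;
```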
`nginx` will now be the front-facing element of JupyterHub on `443`, which means
it is also free to bind other servers, like `NO_HUB.DOMAIN.TLD`, to the same port
on the same machine and network interface. In fact, one can simply use the same
@@ -175,35 +183,90 @@ of the site as well as the applicable location call:
```bash
server {
listen 80;
server_name NO_HUB.DOMAIN.TLD;
listen 80;
server_name NO_HUB.DOMAIN.TLD;
# Tell all requests to port 80 to be 302 redirected to HTTPS
return 302 https://$host$request_uri;
# Tell all requests to port 80 to be 302 redirected to HTTPS
return 302 https://$host$request_uri;
}
server {
listen 443;
ssl on;
listen 443;
ssl on;
# INSERT OTHER SSL PARAMETERS HERE AS ABOVE
# INSERT OTHER SSL PARAMETERS HERE AS ABOVE
# SSL cert may differ
# Set the appropriate root directory
root /var/www/html
# Set the appropriate root directory
root /var/www/html
# Set URI handling
location / {
try_files $uri $uri/ =404;
}
# Set URI handling
location / {
try_files $uri $uri/ =404;
}
# Managing requests to verify letsencrypt host
# Managing requests to verify letsencrypt host
location ~ /.well-known {
allow all;
allow all;
}
}
```
Now just restart `nginx`, restart the JupyterHub, and enjoy accessing
Now restart `nginx`, restart the JupyterHub, and enjoy accessing
`https://HUB.DOMAIN.TLD` while serving other content securely on
`https://NO_HUB.DOMAIN.TLD`.
### Apache
As with nginx above, you can use [Apache](https://httpd.apache.org) as the reverse proxy.
First, enable the Apache modules we will need:
```bash
a2enmod ssl rewrite proxy proxy_http proxy_wstunnel
```
Our Apache configuration is equivalent to the nginx configuration above:
- Redirect HTTP to HTTPS
- Good SSL Configuration
- Support for websockets on any proxied URL
- JupyterHub is running locally at http://127.0.0.1:8000
```bash
# redirect HTTP to HTTPS
Listen 80
<VirtualHost HUB.DOMAIN.TLD:80>
ServerName HUB.DOMAIN.TLD
Redirect / https://HUB.DOMAIN.TLD/
</VirtualHost>
Listen 443
<VirtualHost HUB.DOMAIN.TLD:443>
ServerName HUB.DOMAIN.TLD
# configure SSL
SSLEngine on
SSLCertificateFile /etc/letsencrypt/live/HUB.DOMAIN.TLD/fullchain.pem
SSLCertificateKeyFile /etc/letsencrypt/live/HUB.DOMAIN.TLD/privkey.pem
SSLProtocol All -SSLv2 -SSLv3
SSLOpenSSLConfCmd DHParameters /etc/ssl/certs/dhparam.pem
SSLCipherSuite EECDH+AESGCM:EDH+AESGCM:AES256+EECDH:AES256+EDH
# Use RewriteEngine to handle websocket connection upgrades
RewriteEngine On
RewriteCond %{HTTP:Connection} Upgrade [NC]
RewriteCond %{HTTP:Upgrade} websocket [NC]
RewriteRule /(.*) ws://127.0.0.1:8000/$1 [P,L]
<Location "/">
# preserve Host header to avoid cross-origin problems
ProxyPreserveHost on
# proxy to JupyterHub
ProxyPass http://127.0.0.1:8000/
ProxyPassReverse http://127.0.0.1:8000/
</Location>
</VirtualHost>
```

View File

@@ -9,6 +9,7 @@ Technical Reference
authenticators
spawners
services
proxy
rest
upgrading
config-examples

View File

@@ -0,0 +1,183 @@
# Writing a custom Proxy implementation
JupyterHub 0.8 introduced the ability to write a custom implementation of the proxy.
This enables deployments with different needs than the default proxy,
configurable-http-proxy (CHP).
CHP is a single-process nodejs proxy that the Hub manages by default as a subprocess
(it can also be run externally, and typically is in production deployments).
The upside to CHP, and why we use it by default, is that it's easy to install and run (if you have nodejs, you are set!).
The downsides are that it's a single process and does not support any persistence of the routing table.
So if the proxy process dies, your whole JupyterHub instance is inaccessible until the Hub notices, restarts the proxy, and restores the routing table.
For deployments that want to avoid such a single point of failure,
or leverage existing proxy infrastructure in their chosen deployment (such as Kubernetes ingress objects),
the Proxy API provides a way to do that.
In general, for a proxy to be usable by JupyterHub, it must:
1. support websockets without prior knowledge of the URL where websockets may occur
2. support trie-based routing (i.e. allow different routes on `/foo` and `/foo/bar` and route based on specificity)
3. not drop existing connections when routes are added or removed
Optionally, if the JupyterHub deployment is to use host-based routing,
the Proxy must additionally support routing based on the Host of the request.
## Subclassing Proxy
To start, any Proxy implementation should subclass the base Proxy class,
as is done with custom Spawners and Authenticators.
```python
from jupyterhub.proxy import Proxy

class MyProxy(Proxy):
    """My Proxy implementation"""
    ...
```
## Starting and stopping the proxy
If your proxy should be launched when the Hub starts, you must define how to start and stop your proxy:
```python
from tornado import gen

class MyProxy(Proxy):
    ...
    @gen.coroutine
    def start(self):
        """Start the proxy"""

    @gen.coroutine
    def stop(self):
        """Stop the proxy"""
```
These methods **may** be coroutines.
`c.Proxy.should_start` is a configurable flag that determines whether the Hub should call these methods when the Hub itself starts and stops.
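For example, the flag can also be set from `jupyterhub_config.py` (a sketch; the import path `mymodule.MyProxy` is hypothetical):

```python
# jupyterhub_config.py (sketch)
c.JupyterHub.proxy_class = 'mymodule.MyProxy'  # hypothetical import path

# tell the Hub not to start/stop the proxy itself,
# e.g. when the proxy is managed outside of JupyterHub
c.Proxy.should_start = False
```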
### Purely external proxies
Most custom proxies will probably be externally managed,
such as Kubernetes ingress-based implementations.
In this case, you do not need to define `start` and `stop`.
To disable the methods, you can define `should_start = False` at the class level:
```python
class MyProxy(Proxy):
    should_start = False
```
## Adding and removing routes
At its most basic, a Proxy implementation defines a mechanism to add, remove, and retrieve routes.
A proxy that implements these three methods is complete.
Each of these methods **may** be a coroutine.
**Definition:** routespec
A routespec, which will appear in these methods, is a string describing a route to be proxied,
such as `/user/name/`. A routespec will:
1. always end with `/`
2. always start with `/` if it is a path-based route, e.g. `/proxy/path/`
3. precede the leading `/` with a host for host-based routing, e.g. `host.tld/proxy/path/`
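As an illustration only (this helper is not part of JupyterHub), a string can be checked against these three rules like so:

```python
def looks_like_routespec(routespec):
    """Return True if `routespec` follows the three rules above."""
    if not routespec.endswith('/'):
        return False  # rule 1: must end with '/'
    if routespec.startswith('/'):
        return True   # rule 2: path-based route, e.g. '/proxy/path/'
    # rule 3: host-based route, e.g. 'host.tld/proxy/path/':
    # a host precedes the leading '/'
    host, slash, _ = routespec.partition('/')
    return bool(host) and bool(slash)

assert looks_like_routespec('/user/pgeorgiou/')
assert looks_like_routespec('host.tld/proxy/path/')
assert not looks_like_routespec('/user/pgeorgiou')  # missing trailing '/'
```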
### Adding a route
When adding a route, JupyterHub may pass a JSON-serializable dict as a `data` argument
that should be attached to the proxy route.
When that route is retrieved, the `data` argument should be returned as well.
If your proxy implementation doesn't support storing data attached to routes,
then your Python wrapper may have to handle storing the `data` piece itself,
e.g. in a simple file or database.
```python
@gen.coroutine
def add_route(self, routespec, target, data):
"""Proxy `routespec` to `target`.
Store `data` associated with the routespec
for retrieval later.
"""
```
Adding a route for a user looks like this:
```python
proxy.add_route('/user/pgeorgiou/', 'http://127.0.0.1:1227',
                {'user': 'pgeorgiou'})
```
### Removing routes
`delete_route()` is given a routespec to delete.
If there is no such route, `delete_route` should still succeed,
but a warning may be issued.
```python
@gen.coroutine
def delete_route(self, routespec):
"""Delete the route"""
```
### Retrieving routes
For retrieval, you only *need* to implement a single method that retrieves all routes.
The return value for this function should be a dictionary, keyed by `routespec`,
of dicts whose keys are the same three arguments passed to `add_route`
(`routespec`, `target`, `data`).
```python
@gen.coroutine
def get_all_routes(self):
"""Return all routes, keyed by routespec""""
```
```python
{
    '/proxy/path/': {
        'routespec': '/proxy/path/',
        'target': 'http://...',
        'data': {},
    },
}
```
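Putting these three methods together, here is a minimal sketch only (the class name `DictRouteProxy` and its in-memory dict are illustrative, not part of JupyterHub) of a wrapper that records routes in memory and leaves the calls that would configure the actual external proxy as comments:

```python
from tornado import gen
from jupyterhub.proxy import Proxy


class DictRouteProxy(Proxy):
    """Toy sketch: keeps the routing table in memory.

    A real implementation would also tell the external proxy to forward
    each routespec to its target (e.g. via the proxy's own API), and might
    persist this table to a file or database if the proxy cannot store
    the `data` dicts itself.
    """
    should_start = False  # the proxy process is managed outside JupyterHub

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self._routes = {}  # routespec -> {'routespec', 'target', 'data'}

    @gen.coroutine
    def add_route(self, routespec, target, data):
        # a real proxy would be configured to forward routespec -> target here
        self._routes[routespec] = {
            'routespec': routespec,
            'target': target,
            'data': data,
        }

    @gen.coroutine
    def delete_route(self, routespec):
        # succeed even if the route is already gone
        self._routes.pop(routespec, None)

    @gen.coroutine
    def get_all_routes(self):
        return dict(self._routes)
```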
#### Note on activity tracking
JupyterHub can track activity of users, for use in services such as culling idle servers.
As of JupyterHub 0.8, this activity tracking is the responsibility of the proxy.
If your proxy implementation can track activity to endpoints,
it may add a `last_activity` key to the `data` of routes retrieved in `.get_all_routes()`.
If present, the value of `last_activity` should be an [ISO8601](https://en.wikipedia.org/wiki/ISO_8601) UTC date string:
```python
{
    '/user/pgeorgiou/': {
        'routespec': '/user/pgeorgiou/',
        'target': 'http://127.0.0.1:1227',
        'data': {
            'user': 'pgeorgiou',
            'last_activity': '2017-10-03T10:33:49.570Z',
        },
    },
}
```
If the proxy does not track activity, then only activity to the Hub itself is tracked,
and services such as cull-idle will not work.
Now that `notebook-5.0` tracks activity internally,
we can retrieve activity information from the single-user servers instead,
removing the need to track activity in the proxy.
But this is not yet implemented in JupyterHub 0.8.0.

View File

@@ -7,7 +7,6 @@ version_info = (
0,
8,
0,
'rc2',
)
__version__ = '.'.join(map(str, version_info))

View File

@@ -36,6 +36,10 @@ def upgrade():
# drop some columns no longer in use
try:
op.drop_column('users', 'auth_state')
# mysql cannot drop _server_id without also dropping
# implicitly created foreign key
if op.get_context().dialect.name == 'mysql':
op.drop_constraint('users_ibfk_1', 'users', type_='foreignkey')
op.drop_column('users', '_server_id')
except sa.exc.OperationalError:
# this won't be a problem moving forward, but downgrade will fail

View File

@@ -23,6 +23,7 @@ if sys.version_info[:2] < (3, 3):
from jinja2 import Environment, FileSystemLoader
from sqlalchemy import create_engine
from sqlalchemy.exc import OperationalError
from tornado.httpclient import AsyncHTTPClient
@@ -189,6 +190,13 @@ class UpgradeDB(Application):
db_file = hub.db_url.split(':///', 1)[1]
self._backup_db_file(db_file)
self.log.info("Upgrading %s", hub.db_url)
# run check-db-revision first
engine = create_engine(hub.db_url)
try:
orm.check_db_revision(engine)
except orm.DatabaseSchemaMismatch:
# ignore mismatch error because that's what we are here for!
pass
dbutil.upgrade(hub.db_url)

View File

@@ -20,7 +20,7 @@ class LogoutHandler(BaseHandler):
self.clear_login_cookie()
self.statsd.incr('logout')
if self.authenticator.auto_login:
self.render('logout.html')
self.render_template('logout.html')
else:
self.redirect(self.settings['login_url'], permanent=False)

View File

@@ -4,6 +4,7 @@ import shutil
import pytest
from pytest import raises
from traitlets.config import Config
from ..dbutil import upgrade
from ..app import NewToken, UpgradeDB, JupyterHub
@@ -21,28 +22,35 @@ def generate_old_db(path):
def test_upgrade(tmpdir):
print(tmpdir)
db_url = generate_old_db(str(tmpdir))
print(db_url)
upgrade(db_url)
@pytest.mark.gen_test
def test_upgrade_entrypoint(tmpdir):
generate_old_db(str(tmpdir))
db_url = os.getenv('JUPYTERHUB_TEST_UPGRADE_DB_URL')
if not db_url:
# default: sqlite
db_url = generate_old_db(str(tmpdir))
cfg = Config()
cfg.JupyterHub.db_url = db_url
tmpdir.chdir()
tokenapp = NewToken()
tokenapp = NewToken(config=cfg)
tokenapp.initialize(['kaylee'])
with raises(SystemExit):
tokenapp.start()
sqlite_files = glob(os.path.join(str(tmpdir), 'jupyterhub.sqlite*'))
assert len(sqlite_files) == 1
if 'sqlite' in db_url:
sqlite_files = glob(os.path.join(str(tmpdir), 'jupyterhub.sqlite*'))
assert len(sqlite_files) == 1
upgradeapp = UpgradeDB()
upgradeapp = UpgradeDB(config=cfg)
yield upgradeapp.initialize([])
upgradeapp.start()
# check that backup was created:
sqlite_files = glob(os.path.join(str(tmpdir), 'jupyterhub.sqlite*'))
assert len(sqlite_files) == 2
if 'sqlite' in db_url:
sqlite_files = glob(os.path.join(str(tmpdir), 'jupyterhub.sqlite*'))
assert len(sqlite_files) == 2
# run tokenapp again, it should work
tokenapp.start()

View File

@@ -344,6 +344,19 @@ def test_auto_login(app, request):
r = yield async_requests.get(base_url)
assert r.url == public_url(app, path='hub/dummy')
@pytest.mark.gen_test
def test_auto_login_logout(app):
name = 'burnham'
cookies = yield app.login_user(name)
with mock.patch.dict(app.tornado_application.settings, {
'authenticator': Authenticator(auto_login=True),
}):
r = yield async_requests.get(public_host(app) + app.tornado_settings['logout_url'], cookies=cookies)
r.raise_for_status()
logout_url = public_host(app) + app.tornado_settings['logout_url']
assert r.url == logout_url
assert r.cookies == {}
@pytest.mark.gen_test
def test_logout(app):