Mirror of https://github.com/jupyterhub/jupyterhub.git (synced 2025-10-08 10:34:10 +00:00)

Compare commits: 64 commits
Commits included in this comparison, by short SHA1:

e4d4e059bd, 2967383654, 85f5ae1a37, ecafe4add9, 9462511aa5, 31736eea9a, f97ef7eaac, 2065099338,
d4df579fa6, 4378603e83, 40db4edc6d, ccf13979e9, 76f134c393, 77d4c1f23d, 5856f46e1d, edfd1eb6cf,
1ae6678360, 7794eea3fb, f51e6a1ca0, ab00a19be1, 7742bfdda5, f3878d8216, d17cb637fe, 5b63efe63c,
54816b0a7c, 41fc73db42, 984d6be542, d7d8459edb, 39a7116d16, d27c970cc4, cf56dbb97b, a4ccfe4e11,
f1871bbe24, 1cc9153a91, 4258254c39, f3aee9bd16, 5cb8ccf8b2, 1d63e417ca, ee0020e8fa, 2d83575a24,
33c168530e, 5d4d34b24d, 49cc794937, 7f9e77ce5b, 6fa3b429db, e89836c035, 784b5cb6f0, daaa763c3b,
2b18c64081, 785addc245, b4758db017, 10fbfee157, c58a251dbd, 27be5e4847, be97a0c95b, 689a312756,
1484869ee3, 74a457f6b5, 137a044f96, a090632a48, 451a16c57e, 6e14e86a1a, a142f543ba, 0bb3996c30
.travis.yml

@@ -27,8 +27,11 @@ before_install:
     unset MYSQL_UNIX_PORT
     DB=mysql bash ci/docker-db.sh
     DB=mysql bash ci/init-db.sh
-    pip install 'mysql-connector<2.2'
+    # FIXME: mysql-connector-python 8.0.16 incorrectly decodes bytes to str
+    # ref: https://bugs.mysql.com/bug.php?id=94944
+    pip install 'mysql-connector-python==8.0.15'
   elif [[ $JUPYTERHUB_TEST_DB_URL == postgresql* ]]; then
+    psql -c "CREATE USER $PGUSER WITH PASSWORD '$PGPASSWORD';" -U postgres
     DB=postgres bash ci/init-db.sh
     pip install psycopg2-binary
   fi

@@ -87,7 +90,10 @@ matrix:
         - JUPYTERHUB_TEST_DB_URL=mysql+mysqlconnector://root@127.0.0.1:$MYSQL_TCP_PORT/jupyterhub
     - python: 3.6
       env:
-        - JUPYTERHUB_TEST_DB_URL=postgresql://postgres@127.0.0.1/jupyterhub
+        - PGUSER=jupyterhub
+        - PGPASSWORD=hub[test/:?
+        # password in url is url-encoded (urllib.parse.quote($PGPASSWORD, safe=''))
+        - JUPYTERHUB_TEST_DB_URL=postgresql://jupyterhub:hub%5Btest%2F%3A%3F@127.0.0.1/jupyterhub
     - python: 3.7
       dist: xenial
   allow_failures:
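The url-encoded password in the new `JUPYTERHUB_TEST_DB_URL` can be reproduced with the standard-library call named in the comment above; a quick check, using the password value from the config:

```python
from urllib.parse import quote

# encode the PGPASSWORD used above so it is safe inside a database URL
print(quote('hub[test/:?', safe=''))
# -> hub%5Btest%2F%3A%3F  (matches the password embedded in JUPYTERHUB_TEST_DB_URL)
```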
@@ -150,7 +150,7 @@ To start the Hub on a specific url and port ``10.0.1.2:443`` with **https**:
 | PAMAuthenticator | Default, built-in authenticator |
 | [OAuthenticator](https://github.com/jupyterhub/oauthenticator) | OAuth + JupyterHub Authenticator = OAuthenticator |
 | [ldapauthenticator](https://github.com/jupyterhub/ldapauthenticator) | Simple LDAP Authenticator Plugin for JupyterHub |
-| [kdcAuthenticator](https://github.com/bloomberg/jupyterhub-kdcauthenticator)| Kerberos Authenticator Plugin for JupyterHub |
+| [kerberosauthenticator](https://github.com/jcrist/kerberosauthenticator) | Kerberos Authenticator Plugin for JupyterHub |
 
 ### Spawners
 

@@ -162,6 +162,7 @@ To start the Hub on a specific url and port ``10.0.1.2:443`` with **https**:
 | [sudospawner](https://github.com/jupyterhub/sudospawner) | Spawn single-user servers without being root |
 | [systemdspawner](https://github.com/jupyterhub/systemdspawner) | Spawn single-user notebook servers using systemd |
 | [batchspawner](https://github.com/jupyterhub/batchspawner) | Designed for clusters using batch scheduling software |
+| [yarnspawner](https://github.com/jcrist/yarnspawner) | Spawn single-user notebook servers distributed on a Hadoop cluster |
 | [wrapspawner](https://github.com/jupyterhub/wrapspawner) | WrapSpawner and ProfilesSpawner enabling runtime configuration of spawners |
 
 ## Docker
@@ -2,7 +2,7 @@
 # source this file to setup postgres and mysql
 # for local testing (as similar as possible to docker)
 
-set -e
+set -eu
 
 export MYSQL_HOST=127.0.0.1
 export MYSQL_TCP_PORT=${MYSQL_TCP_PORT:-13306}

@@ -40,6 +40,15 @@ for i in {1..60}; do
 done
 $CHECK
 
+case "$DB" in
+"mysql")
+  ;;
+"postgres")
+  # create the user
+  psql --user postgres -c "CREATE USER $PGUSER WITH PASSWORD '$PGPASSWORD';"
+  ;;
+*)
+esac
 
 echo -e "
 Set these environment variables:
@@ -1,7 +1,7 @@
 #!/usr/bin/env bash
 # initialize jupyterhub databases for testing
 
-set -e
+set -eu
 
 MYSQL="mysql --user root --host $MYSQL_HOST --port $MYSQL_TCP_PORT -e "
 PSQL="psql --user postgres -c "

@@ -23,5 +23,5 @@ set -x
 
 for SUFFIX in '' _upgrade_072 _upgrade_081 _upgrade_094; do
   $SQL "DROP DATABASE jupyterhub${SUFFIX};" 2>/dev/null || true
-  $SQL "CREATE DATABASE jupyterhub${SUFFIX} ${EXTRA_CREATE};"
+  $SQL "CREATE DATABASE jupyterhub${SUFFIX} ${EXTRA_CREATE:-};"
 done
@@ -9,7 +9,7 @@ command line for details.
 
 ## 1.0
 
-### [1.0.0] 2018-03-XX
+### [1.0.0] 2019-05-03
 
 JupyterHub 1.0 is a major milestone for JupyterHub.
 Huge thanks to the many people who have contributed to this release,

@@ -94,6 +94,8 @@ whether it was through discussion, testing, documentation, or development.
 - `Spawner.options_from_form` may now be async
 - Added `JupyterHub.shutdown_on_logout` option to trigger shutdown of a user's
   servers when they log out.
+- When `Spawner.start` raises an Exception,
+  a message can be passed on to the user if the exception has a `.jupyterhub_message` attribute.
 
 
 #### Changes
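A minimal sketch of the `.jupyterhub_message` hook described in the changelog entry above; the spawner class and the `_launch` helper are hypothetical, only the attribute name comes from the release notes:

```python
from jupyterhub.spawner import Spawner

class MySpawner(Spawner):  # sketch only; a real spawner also implements stop/poll
    async def start(self):
        try:
            return await self._launch()  # hypothetical helper that starts the server
        except Exception as e:
            # surfaced to the user on the "Spawn failed" page (see not_running.html below)
            e.jupyterhub_message = "Could not start your server: disk quota exceeded."
            raise
```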
@@ -131,6 +133,7 @@ whether it was through discussion, testing, documentation, or development.
 - Fewer redirects following a visit to the default `/` url
 - Error when progress is requested before progress is ready
 - Error when API requests are made to a not-running server without authentication
+- Avoid logging database password on connect if password is specified in `JupyterHub.db_url`.
 
 #### Development changes
 

@@ -148,6 +151,14 @@ In general, see `CONTRIBUTING.md` for contribution info or ask if you have quest
 
 ## 0.9
 
+### [0.9.6] 2019-04-01
+
+JupyterHub 0.9.6 is a security release.
+
+- Fixes an Open Redirect vulnerability (CVE-2019-10255).
+
+JupyterHub 0.9.5 included a partial fix for this issue.
+
 ### [0.9.4] 2018-09-24
 
 JupyterHub 0.9.4 is a small bugfix release.

@@ -566,7 +577,8 @@ First preview release
 
 
 [Unreleased]: https://github.com/jupyterhub/jupyterhub/compare/1.0.0...HEAD
-[1.0.0]: https://github.com/jupyterhub/jupyterhub/compare/0.9.4...HEAD
+[1.0.0]: https://github.com/jupyterhub/jupyterhub/compare/0.9.6...1.0.0
+[0.9.6]: https://github.com/jupyterhub/jupyterhub/compare/0.9.4...0.9.6
 [0.9.4]: https://github.com/jupyterhub/jupyterhub/compare/0.9.3...0.9.4
 [0.9.3]: https://github.com/jupyterhub/jupyterhub/compare/0.9.2...0.9.3
 [0.9.2]: https://github.com/jupyterhub/jupyterhub/compare/0.9.1...0.9.2
@@ -4,8 +4,13 @@
 Community communication channels
 ================================
 
-We use `Gitter <https://gitter.im>`_ for online, real-time text chat. The
-primary channel for JupyterHub is `jupyterhub/jupyterhub <https://gitter.im/jupyterhub/jupyterhub>`_.
+We use `Discourse <https://discourse.jupyter.org>` for online discussion.
+Everyone in the Jupyter community is welcome to bring ideas and questions there.
+In addition, we use `Gitter <https://gitter.im>`_ for online, real-time text chat,
+a place for more ephemeral discussions.
+The primary Gitter channel for JupyterHub is `jupyterhub/jupyterhub <https://gitter.im/jupyterhub/jupyterhub>`_.
+Gitter isn't archived or searchable, so we recommend going to discourse first
+to make sure that discussions are most useful and accessible to the community.
 Remember that our community is distributed across the world in various
 timezones, so be patient if you do not get an answer immediately!
 
docs/source/contributing/security.rst (new file)

@@ -0,0 +1,10 @@
+Reporting security issues in Jupyter or JupyterHub
+==================================================
+
+If you find a security vulnerability in Jupyter or JupyterHub,
+whether it is a failure of the security model described in :doc:`../reference/websecurity`
+or a failure in implementation,
+please report it to security@ipython.org.
+
+If you prefer to encrypt your security reports,
+you can use :download:`this PGP public key </ipython_security.asc>`.
@@ -66,7 +66,7 @@ easy to do with RStudio too.
 
 ### University of Colorado Boulder
 
 - (CU Research Computing) CURC
 - [JupyterHub User Guide](https://www.rc.colorado.edu/support/user-guide/jupyterhub.html)
 - Slurm job dispatched on Crestone compute cluster
 - log troubleshooting

@@ -77,13 +77,17 @@ easy to do with RStudio too.
 - Earth Lab at CU
 - [Tutorial on Parallel R on JupyterHub](https://earthdatascience.org/tutorials/parallel-r-on-jupyterhub/)
 
+### George Washington University
+
+- [Jupyter Hub](http://go.gwu.edu/jupyter) with university single-sign-on. Deployed early 2017.
+
 ### HTCondor
 
 - [HTCondor Python Bindings Tutorial from HTCondor Week 2017 includes information on their JupyterHub tutorials](https://research.cs.wisc.edu/htcondor/HTCondorWeek2017/presentations/TueBockelman_Python.pdf)
 
 ### University of Illinois
 
-- https://datascience.business.illinois.edu
+- https://datascience.business.illinois.edu (currently down; checked 04/26/19)
 
 ### IllustrisTNG Simulation Project
 

@@ -110,7 +114,11 @@ easy to do with RStudio too.
 - [Data Science (DICE) group](https://dice.cs.uni-paderborn.de/)
 - [nbgraderutils](https://github.com/dice-group/nbgraderutils): Use JupyterHub + nbgrader + iJava kernel for online Java exercises. Used in lecture Statistical Natural Language Processing.
 
-### University of Rochester CIRC
+### Penn State University
+
+- [Press release](https://news.psu.edu/story/523093/2018/05/24/new-open-source-web-apps-available-students-and-faculty): "New open-source web apps available for students and faculty" (but Hub is currently down; checked 04/26/19)
+
+### University of Rochester CIRC
 
 - [JupyterHub Userguide](https://info.circ.rochester.edu/Web_Applications/JupyterHub.html) - Slurm, beehive
 

@@ -126,7 +134,7 @@ easy to do with RStudio too.
 
 - Educational Technology Services - Paul Jamason
 - [jupyterhub.ucsd.edu](https://jupyterhub.ucsd.edu)
 
 ### TACC University of Texas
 
 ### Texas A&M

@@ -160,6 +168,10 @@ easy to do with RStudio too.
 - https://getcarina.com/blog/learning-how-to-whale/
 - http://carolynvanslyck.com/talk/carina/jupyterhub/#/
 
+### Hadoop
+
+- [Deploying JupyterHub on Hadoop](https://jcrist.github.io/jupyterhub-on-hadoop/)
+
 
 ## Miscellaneous
 
docs/source/images/jhub-fluxogram.jpeg (new binary file, 158 KiB; not shown)
@@ -2,21 +2,37 @@
 JupyterHub
 ==========
 
-`JupyterHub`_, a multi-user **Hub**, spawns, manages, and proxies multiple
+`JupyterHub`_ is the best way to serve `Jupyter notebook`_ for multiple users.
+It can be used in a classes of students, a corporate data science group or scientific
+research group. It is a multi-user **Hub** that spawns, manages, and proxies multiple
 instances of the single-user `Jupyter notebook`_ server.
-JupyterHub can be used to serve notebooks to a class of students, a corporate
-data science group, or a scientific research group.
 
-.. image:: images/jhub-parts.png
+To make life easier, JupyterHub have distributions. Be sure to
+take a look at them before continuing with the configuration of the broad
+original system of `JupyterHub`_. Today, you can find two main cases:
+
+1. If you need a simple case for a small amount of users (0-100) and single server
+   take a look at
+   `The Littlest JupyterHub <https://github.com/jupyterhub/the-littlest-jupyterhub>`__ distribution.
+2. If you need to allow for even more users, a dynamic amount of servers can be used on a cloud,
+   take a look at the `Zero to JupyterHub with Kubernetes <https://github.com/jupyterhub/zero-to-jupyterhub-k8s>`__ .
+
+
+Four subsystems make up JupyterHub:
+
+* a **Hub** (tornado process) that is the heart of JupyterHub
+* a **configurable http proxy** (node-http-proxy) that receives the requests from the client's browser
+* multiple **single-user Jupyter notebook servers** (Python/IPython/tornado) that are monitored by Spawners
+* an **authentication class** that manages how users can access the system
+
+
+Besides these central pieces, you can add optional configurations through a `config.py` file and manage users kernels on an admin panel. A simplification of the whole system can be seen in the figure below:
+
+.. image:: images/jhub-fluxogram.jpeg
    :alt: JupyterHub subsystems
-   :width: 40%
-   :align: right
+   :width: 80%
+   :align: center
 
-Three subsystems make up JupyterHub:
-
-* a multi-user **Hub** (tornado process)
-* a **configurable http proxy** (node-http-proxy)
-* multiple **single-user Jupyter notebook servers** (Python/IPython/tornado)
-
 JupyterHub performs the following functions:
 

@@ -116,6 +132,7 @@ helps keep our community welcoming to as many people as possible.
    contributing/docs
    contributing/tests
    contributing/roadmap
+   contributing/security
 
 Upgrading JupyterHub
 --------------------
docs/source/ipython_security.asc (new file)

@@ -0,0 +1,52 @@
+-----BEGIN PGP PUBLIC KEY BLOCK-----
+Version: GnuPG v2.0.22 (GNU/Linux)
+
+mQINBFMx2LoBEAC9xU8JiKI1VlCJ4PT9zqhU5nChQZ06/bj1BBftiMJG07fdGVO0
+ibOn4TrCoRYaeRlet0UpHzxT4zDa5h3/usJaJNTSRwtWePw2o7Lik8J+F3LionRf
+8Jz81WpJ+81Klg4UWKErXjBHsu/50aoQm6ZNYG4S2nwOmMVEC4nc44IAA0bb+6kW
+saFKKzEDsASGyuvyutdyUHiCfvvh5GOC2h9mXYvl4FaMW7K+d2UgCYERcXDNy7C1
+Bw+uepQ9ELKdG4ZpvonO6BNr1BWLln3wk93AQfD5qhfsYRJIyj0hJlaRLtBU3i6c
+xs+gQNF4mPmybpPSGuOyUr4FYC7NfoG7IUMLj+DYa6d8LcMJO+9px4IbdhQvzGtC
+qz5av1TX7/+gnS4L8C9i1g8xgI+MtvogngPmPY4repOlK6y3l/WtxUPkGkyYkn3s
+RzYyE/GJgTwuxFXzMQs91s+/iELFQq/QwmEJf+g/QYfSAuM+lVGajEDNBYVAQkxf
+gau4s8Gm0GzTZmINilk+7TxpXtKbFc/Yr4A/fMIHmaQ7KmJB84zKwONsQdVv7Jjj
+0dpwu8EIQdHxX3k7/Q+KKubEivgoSkVwuoQTG15X9xrOsDZNwfOVQh+JKazPvJtd
+SNfep96r9t/8gnXv9JI95CGCQ8lNhXBUSBM3BDPTbudc4b6lFUyMXN0mKQARAQAB
+tCxJUHl0aG9uIFNlY3VyaXR5IFRlYW0gPHNlY3VyaXR5QGlweXRob24ub3JnPokC
+OAQTAQIAIgUCUzHYugIbAwYLCQgHAwIGFQgCCQoLBBYCAwECHgECF4AACgkQEwJc
+LcmZYkjuXg//R/t6nMNQmf9W1h52IVfUbRAVmvZ5d063hQHKV2dssxtnA2dRm/x5
+JZu8Wz7ZrEZpyqwRJO14sxN1/lC3v+zs9XzYXr2lBTZuKCPIBypYVGIynCuWJBQJ
+rWnfG4+u1RHahnjqlTWTY1C/le6v7SjAvCb6GbdA6k4ZL2EJjQlRaHDmzw3rV/+l
+LLx6/tYzIsotuflm/bFumyOMmpQQpJjnCkWIVjnRICZvuAn97jLgtTI0+0Rzf4Zb
+k2BwmHwDRqWCTTcRI9QvTl8AzjW+dNImN22TpGOBPfYj8BCZ9twrpKUbf+jNqJ1K
+THQzFtpdJ6SzqiFVm74xW4TKqCLkbCQ/HtVjTGMGGz/y7KTtaLpGutQ6XE8SSy6P
+EffSb5u+kKlQOWaH7Mc3B0yAojz6T3j5RSI8ts6pFi6pZhDg9hBfPK2dT0v/7Mkv
+E1Z7q2IdjZnhhtGWjDAMtDDn2NbY2wuGoa5jAWAR0WvIbEZ3kOxuLE5/ZOG1FyYm
+noJRliBz7038nT92EoD5g1pdzuxgXtGCpYyyjRZwaLmmi4CvA+oThKmnqWNY5lyY
+ricdNHDiyEXK0YafJL1oZgM86MSb0jKJMp5U11nUkUGzkroFfpGDmzBwAzEPgeiF
+40+qgsKB9lqwb3G7PxvfSi3XwxfXgpm1cTyEaPSzsVzve3d1xeqb7Yq5Ag0EUzHY
+ugEQALQ5FtLdNoxTxMsgvrRr1ejLiUeRNUfXtN1TYttOfvAhfBVnszjtkpIW8DCB
+JF/bA7ETiH8OYYn/Fm6MPI5H64IHEncpzxjf57jgpXd9CA9U2OMk/P1nve5zYchP
+QmP2fJxeAWr0aRH0Mse5JS5nCkh8Xv4nAjsBYeLTJEVOb1gPQFXOiFcVp3gaKAzX
+GWOZ/mtG/uaNsabH/3TkcQQEgJefd11DWgMB7575GU+eME7c6hn3FPITA5TC5HUX
+azvjv/PsWGTTVAJluJ3fUDvhpbGwYOh1uV0rB68lPpqVIro18IIJhNDnccM/xqko
+4fpJdokdg4L1wih+B04OEXnwgjWG8OIphR/oL/+M37VV2U7Om/GE6LGefaYccC9c
+tIaacRQJmZpG/8RsimFIY2wJ07z8xYBITmhMmOt0bLBv0mU0ym5KH9Dnru1m9QDO
+AHwcKrDgL85f9MCn+YYw0d1lYxjOXjf+moaeW3izXCJ5brM+MqVtixY6aos3YO29
+J7SzQ4aEDv3h/oKdDfZny21jcVPQxGDui8sqaZCi8usCcyqWsKvFHcr6vkwaufcm
+3Knr2HKVotOUF5CDZybopIz1sJvY/5Dx9yfRmtivJtglrxoDKsLi1rQTlEQcFhCS
+ACjf7txLtv03vWHxmp4YKQFkkOlbyhIcvfPVLTvqGerdT2FHABEBAAGJAh8EGAEC
+AAkFAlMx2LoCGwwACgkQEwJcLcmZYkgK0BAAny0YUugpZldiHzYNf8I6p2OpiDWv
+ZHaguTTPg2LJSKaTd+5UHZwRFIWjcSiFu+qTGLNtZAdcr0D5f991CPvyDSLYgOwb
+Jm2p3GM2KxfECWzFbB/n/PjbZ5iky3+5sPlOdBR4TkfG4fcu5GwUgCkVe5u3USAk
+C6W5lpeaspDz39HAPRSIOFEX70+xV+6FZ17B7nixFGN+giTpGYOEdGFxtUNmHmf+
+waJoPECyImDwJvmlMTeP9jfahlB6Pzaxt6TBZYHetI/JR9FU69EmA+XfCSGt5S+0
+Eoc330gpsSzo2VlxwRCVNrcuKmG7PsFFANok05ssFq1/Djv5rJ++3lYb88b8HSP2
+3pQJPrM7cQNU8iPku9yLXkY5qsoZOH+3yAia554Dgc8WBhp6fWh58R0dIONQxbbo
+apNdwvlI8hKFB7TiUL6PNShE1yL+XD201iNkGAJXbLMIC1ImGLirUfU267A3Cop5
+hoGs179HGBcyj/sKA3uUIFdNtP+NndaP3v4iYhCitdVCvBJMm6K3tW88qkyRGzOk
+4PW422oyWKwbAPeMk5PubvEFuFAIoBAFn1zecrcOg85RzRnEeXaiemmmH8GOe1Xu
+Kh+7h8XXyG6RPFy8tCcLOTk+miTqX+4VWy+kVqoS2cQ5IV8WsJ3S7aeIy0H89Z8n
+5vmLc+Ibz+eT+rM=
+=XVDe
+-----END PGP PUBLIC KEY BLOCK-----
@@ -25,6 +25,8 @@ Some examples include:
   run without being root, by spawning an intermediate process via `sudo`
 - [BatchSpawner](https://github.com/jupyterhub/batchspawner) for spawning remote
   servers using batch systems
+- [YarnSpawner](https://github.com/jcrist/yarnspawner) for spawning notebook
+  servers in YARN containers on a Hadoop cluster
 - [RemoteSpawner](https://github.com/zonca/remotespawner) to spawn notebooks
   and a remote server and tunnel the port via SSH
 
@@ -127,3 +127,11 @@ A handy website for testing your deployment is
 
 
 [configurable-http-proxy]: https://github.com/jupyterhub/configurable-http-proxy
+
+## Vulnerability reporting
+
+If you believe you’ve found a security vulnerability in JupyterHub, or any
+Jupyter project, please report it to
+[security@ipython.org](mailto:security@iypthon.org). If you prefer to encrypt
+your security reports, you can use [this PGP public
+key](https://jupyter-notebook.readthedocs.io/en/stable/_downloads/ipython_security.asc).
@@ -38,17 +38,18 @@ class TraitDocumenter(AttributeDocumenter):
     def can_document_member(cls, member, membername, isattr, parent):
         return isinstance(member, TraitType)
 
-    def format_name(self):
-        return 'config c.' + super().format_name()
-
     def add_directive_header(self, sig):
         default = self.object.get_default_value()
         if default is Undefined:
             default_s = ''
         else:
             default_s = repr(default)
-        sig = ' = {}({})'.format(self.object.__class__.__name__, default_s)
-        return super().add_directive_header(sig)
+        self.options.annotation = 'c.{name} = {trait}({default})'.format(
+            name=self.format_name(),
+            trait=self.object.__class__.__name__,
+            default=default_s,
+        )
+        super().add_directive_header(sig)
 
 
 def setup(app):
@@ -12,8 +12,11 @@ function get_hub_version() {
     split=( ${hub_xyz//./ } )
     hub_xy="${split[0]}.${split[1]}"
     # add .dev on hub_xy so it's 1.0.dev
-    if [[ ! -z "${split[3]}" ]]; then
+    if [[ ! -z "${split[3]:-}" ]]; then
         hub_xy="${hub_xy}.${split[3]}"
+        latest=0
+    else
+        latest=1
     fi
 }
 

@@ -31,3 +34,11 @@ docker tag $DOCKER_REPO:$DOCKER_TAG $DOCKER_REPO:$hub_xy
 docker push $DOCKER_REPO:$hub_xy
 docker tag $ONBUILD:$DOCKER_TAG $ONBUILD:$hub_xy
 docker push $ONBUILD:$hub_xyz
+
+# if building a stable release, tag latest as well
+if [[ "$latest" == "1" ]]; then
+    docker tag $DOCKER_REPO:$DOCKER_TAG $DOCKER_REPO:latest
+    docker push $DOCKER_REPO:latest
+    docker tag $ONBUILD:$DOCKER_TAG $ONBUILD:latest
+    docker push $ONBUILD:latest
+fi
@@ -6,7 +6,7 @@ version_info = (
     1,
     0,
     0,
-    "b1", # release (b1, rc1, or "" for final or dev)
+    # "b2", # release (b1, rc1, or "" for final or dev)
     # "dev", # dev or nothing
 )
 
@@ -427,6 +427,7 @@ class UserServerAPIHandler(APIHandler):
             return
         self.log.info("Deleting spawner %s", spawner._log_name)
         self.db.delete(spawner.orm_spawner)
+        user.spawners.pop(server_name, None)
         self.db.commit()
 
         if server_name:
@@ -277,7 +277,7 @@ class JupyterHub(Application):
             try:
                 cls = entry_point.load()
             except Exception as e:
-                self.log.warning(
+                self.log.debug(
                     "Failed to load %s entrypoint %r: %r",
                     trait.entry_point_group,
                     key,

@@ -1410,7 +1410,18 @@ class JupyterHub(Application):
     def init_db(self):
         """Create the database connection"""
 
-        self.log.debug("Connecting to db: %s", self.db_url)
+        urlinfo = urlparse(self.db_url)
+        if urlinfo.password:
+            # avoid logging the database password
+            urlinfo = urlinfo._replace(
+                netloc='{}:[redacted]@{}:{}'.format(
+                    urlinfo.username, urlinfo.hostname, urlinfo.port
+                )
+            )
+            db_log_url = urlinfo.geturl()
+        else:
+            db_log_url = self.db_url
+        self.log.debug("Connecting to db: %s", db_log_url)
         if self.upgrade_db:
             dbutil.upgrade_if_needed(self.db_url, log=self.log)
 

@@ -1420,7 +1431,7 @@ class JupyterHub(Application):
             )
             self.db = self.session_factory()
         except OperationalError as e:
-            self.log.error("Failed to connect to db: %s", self.db_url)
+            self.log.error("Failed to connect to db: %s", db_log_url)
             self.log.debug("Database error was:", exc_info=True)
             if self.db_url.startswith('sqlite:///'):
                 self._check_db_path(self.db_url.split(':///', 1)[1])

@@ -2173,7 +2184,6 @@ class JupyterHub(Application):
             self.log.info("Cleaning up PID file %s", self.pid_file)
             os.remove(self.pid_file)
 
-        # finally stop the loop once we are all cleaned up
         self.log.info("...done")
 
     def write_config_file(self):

@@ -2411,37 +2421,51 @@ class JupyterHub(Application):
 
         self.log.info("JupyterHub is now running at %s", self.proxy.public_url)
         # register cleanup on both TERM and INT
-        atexit.register(self.atexit)
         self.init_signal()
 
     def init_signal(self):
-        signal.signal(signal.SIGTERM, self.sigterm)
+        loop = asyncio.get_event_loop()
+        for s in (signal.SIGTERM, signal.SIGINT):
+            loop.add_signal_handler(
+                s, lambda s=s: asyncio.ensure_future(self.shutdown_cancel_tasks(s))
+            )
+        infosignals = [signal.SIGUSR1]
         if hasattr(signal, 'SIGINFO'):
-            signal.signal(signal.SIGINFO, self.log_status)
+            infosignals.append(signal.SIGINFO)
+        for s in infosignals:
+            loop.add_signal_handler(
+                s, lambda s=s: asyncio.ensure_future(self.log_status(s))
+            )
 
-    def log_status(self, signum, frame):
+    async def log_status(self, sig):
         """Log current status, triggered by SIGINFO (^T in many terminals)"""
-        self.log.debug("Received signal %s[%s]", signum, signal.getsignal(signum))
+        self.log.critical("Received signal %s...", sig.name)
         print_ps_info()
         print_stacks()
 
-    def sigterm(self, signum, frame):
-        self.log.critical("Received SIGTERM, shutting down")
-        raise SystemExit(128 + signum)
+    async def shutdown_cancel_tasks(self, sig):
+        """Cancel all other tasks of the event loop and initiate cleanup"""
+        self.log.critical("Received signal %s, initiating shutdown...", sig.name)
+        tasks = [
+            t for t in asyncio.Task.all_tasks() if t is not asyncio.Task.current_task()
+        ]
 
-    _atexit_ran = False
+        if tasks:
+            self.log.debug("Cancelling pending tasks")
+            [t.cancel() for t in tasks]
 
-    def atexit(self):
-        """atexit callback"""
-        if self._atexit_ran:
-            return
-        self._atexit_ran = True
-        # run the cleanup step (in a new loop, because the interrupted one is unclean)
-        asyncio.set_event_loop(asyncio.new_event_loop())
-        IOLoop.clear_current()
-        loop = IOLoop()
-        loop.make_current()
-        loop.run_sync(self.cleanup)
+            try:
+                await asyncio.wait(tasks)
+            except asyncio.CancelledError as e:
+                self.log.debug("Caught Task CancelledError. Ignoring")
+            except StopAsyncIteration as e:
+                self.log.error("Caught StopAsyncIteration Exception", exc_info=True)
+
+        tasks = [t for t in asyncio.Task.all_tasks()]
+        for t in tasks:
+            self.log.debug("Task status: %s", t)
+        await self.cleanup()
+        asyncio.get_event_loop().stop()
 
     def stop(self):
         if not self.io_loop:

@@ -2468,6 +2492,9 @@ class JupyterHub(Application):
             loop.start()
         except KeyboardInterrupt:
             print("\nInterrupted")
+        finally:
+            loop.stop()
+            loop.close()
 
 
 NewToken.classes.append(JupyterHub)
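A standalone sketch of the signal-handling pattern the Hub switches to above: handlers are registered on the asyncio loop and schedule coroutines, with each signal bound through a lambda default argument. Everything here is illustrative; only the pattern mirrors the diff.

```python
import asyncio
import signal

async def shutdown_cancel_tasks(sig):
    # in the Hub this cancels pending tasks and runs cleanup; here we just stop
    print("received %s, shutting down" % sig.name)
    asyncio.get_event_loop().stop()

loop = asyncio.get_event_loop()
for s in (signal.SIGTERM, signal.SIGINT):
    # bind s via a default argument so each handler keeps its own signal
    loop.add_signal_handler(s, lambda s=s: asyncio.ensure_future(shutdown_cancel_tasks(s)))
# loop.run_forever()  # the Hub's tornado IOLoop drives this same asyncio loop
```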
@@ -9,6 +9,7 @@ from contextlib import contextmanager
 from datetime import datetime
 from subprocess import check_call
 from tempfile import TemporaryDirectory
+from urllib.parse import urlparse
 
 from sqlalchemy import create_engine
 

@@ -118,7 +119,18 @@ def upgrade_if_needed(db_url, backup=True, log=None):
     else:
         # nothing to do
         return
-    log.info("Upgrading %s", db_url)
+    urlinfo = urlparse(db_url)
+    if urlinfo.password:
+        # avoid logging the database password
+        urlinfo = urlinfo._replace(
+            netloc='{}:[redacted]@{}:{}'.format(
+                urlinfo.username, urlinfo.hostname, urlinfo.port
+            )
+        )
+        db_log_url = urlinfo.geturl()
+    else:
+        db_log_url = db_url
+    log.info("Upgrading %s", db_log_url)
     # we need to upgrade, backup the database
     if backup and db_url.startswith('sqlite:///'):
         db_file = db_url.split(':///', 1)[1]
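The same redaction logic, pulled out into a small standalone function for illustration (the sample URL is made up):

```python
from urllib.parse import urlparse

def redact_db_url(db_url):
    """Replace the password component of a SQLAlchemy URL before logging it."""
    urlinfo = urlparse(db_url)
    if not urlinfo.password:
        return db_url
    netloc = '{}:[redacted]@{}:{}'.format(
        urlinfo.username, urlinfo.hostname, urlinfo.port
    )
    return urlinfo._replace(netloc=netloc).geturl()

print(redact_db_url('postgresql://jupyterhub:sekrit@127.0.0.1:5432/jupyterhub'))
# -> postgresql://jupyterhub:[redacted]@127.0.0.1:5432/jupyterhub
```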
@@ -549,6 +549,8 @@ class BaseHandler(RequestHandler):
         - else: /hub/home
         """
         next_url = self.get_argument('next', default='')
+        # protect against some browsers' buggy handling of backslash as slash
+        next_url = next_url.replace('\\', '%5C')
         if (next_url + '/').startswith(
             (
                 '%s://%s/' % (self.request.protocol, self.request.host),
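Why the explicit backslash escape matters: `urlparse` does not treat backslashes as path separators, but (as the added comment notes) some browsers do, so a backslash form of `//host` would otherwise carry no visible host info. A small illustration:

```python
from urllib.parse import urlparse

next_url = '\\\\other.domain/backslashes'   # two literal backslashes
print(urlparse(next_url).netloc)            # -> '' (urlparse sees no host info)
print(next_url.replace('\\', '%5C'))        # -> %5C%5Cother.domain/backslashes
# the escaped value no longer starts with '/', so the redirect check below rejects it
```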
@@ -562,15 +564,23 @@
             )
         ):
             # treat absolute URLs for our host as absolute paths:
+            # below, redirects that aren't strictly paths
             parsed = urlparse(next_url)
             next_url = parsed.path
             if parsed.query:
                 next_url = next_url + '?' + parsed.query
             if parsed.fragment:
                 next_url = next_url + '#' + parsed.fragment
-        if next_url and (urlparse(next_url).netloc or not next_url.startswith('/')):
+
+        # if it still has host info, it didn't match our above check for *this* host
+        if next_url and (
+            '://' in next_url
+            or next_url.startswith('//')
+            or not next_url.startswith('/')
+        ):
             self.log.warning("Disallowing redirect outside JupyterHub: %r", next_url)
             next_url = ''
 
         if next_url and next_url.startswith(url_path_join(self.base_url, 'user/')):
             # add /hub/ prefix, to ensure we redirect to the right user's server.
             # The next request will be handled by SpawnHandler,
@@ -18,33 +18,73 @@ class LogoutHandler(BaseHandler):
     def shutdown_on_logout(self):
         return self.settings.get('shutdown_on_logout', False)
 
-    async def get(self):
+    async def _shutdown_servers(self, user):
+        """Shutdown servers for logout
+
+        Get all active servers for the provided user, stop them.
+        """
+        active_servers = [
+            name
+            for (name, spawner) in user.spawners.items()
+            if spawner.active and not spawner.pending
+        ]
+        if active_servers:
+            self.log.info("Shutting down %s's servers", user.name)
+            futures = []
+            for server_name in active_servers:
+                futures.append(maybe_future(self.stop_single_user(user, server_name)))
+            await asyncio.gather(*futures)
+
+    def _backend_logout_cleanup(self, name):
+        """Default backend logout actions
+
+        Send a log message, clear some cookies, increment the logout counter.
+        """
+        self.log.info("User logged out: %s", name)
+        self.clear_login_cookie()
+        self.statsd.incr('logout')
+
+    async def default_handle_logout(self):
+        """The default logout action
+
+        Optionally cleans up servers, clears cookies, increments logout counter
+        Cleaning up servers can be prevented by setting shutdown_on_logout to
+        False.
+        """
         user = self.current_user
         if user:
             if self.shutdown_on_logout:
-                active_servers = [
-                    name
-                    for (name, spawner) in user.spawners.items()
-                    if spawner.active and not spawner.pending
-                ]
-                if active_servers:
-                    self.log.info("Shutting down %s's servers", user.name)
-                    futures = []
-                    for server_name in active_servers:
-                        futures.append(
-                            maybe_future(self.stop_single_user(user, server_name))
-                        )
-                    await asyncio.gather(*futures)
+                await self._shutdown_servers(user)
 
-            self.log.info("User logged out: %s", user.name)
-            self.clear_login_cookie()
-            self.statsd.incr('logout')
+            self._backend_logout_cleanup(user.name)
+
+    async def handle_logout(self):
+        """Custom user action during logout
+
+        By default a no-op, this function should be overridden in subclasses
+        to have JupyterHub take a custom action on logout.
+        """
+        return
+
+    async def render_logout_page(self):
+        """Render the logout page, if any
+
+        Override this function to set a custom logout page.
+        """
         if self.authenticator.auto_login:
             html = self.render_template('logout.html')
             self.finish(html)
         else:
             self.redirect(self.settings['login_url'], permanent=False)
 
+    async def get(self):
+        """Log the user out, call the custom action, forward the user
+        to the logout page
+        """
+        await self.default_handle_logout()
+        await self.handle_logout()
+        await self.render_logout_page()
+
 
 class LoginHandler(BaseHandler):
     """Render the login page."""
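A hedged sketch of how the new hooks are meant to be used; the subclass name and log message are invented, only the `handle_logout` override point comes from the diff:

```python
from jupyterhub.handlers.login import LogoutHandler

class AuditedLogoutHandler(LogoutHandler):
    async def handle_logout(self):
        # custom per-deployment action; the base implementation is a no-op
        self.log.info("audit hook: logout handled")
```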
@@ -281,13 +281,20 @@ class SpawnPendingHandler(BaseHandler):
             # Implicit spawn on /user/:name is not allowed if the user's last spawn failed.
             # We should point the user to Home if the most recent spawn failed.
             exc = spawner._spawn_future.exception()
-            self.log.error(
-                "Preventing implicit spawn for %s because last spawn failed: %s",
-                spawner._log_name,
-                exc,
+            self.log.error("Previous spawn for %s failed: %s", spawner._log_name, exc)
+            spawn_url = url_path_join(self.hub.base_url, "spawn", user.escaped_name)
+            self.set_status(500)
+            html = self.render_template(
+                "not_running.html",
+                user=user,
+                server_name=server_name,
+                spawn_url=spawn_url,
+                failed=True,
+                failed_message=getattr(exc, 'jupyterhub_message', ''),
+                exception=exc,
             )
-            # raise a copy because each time an Exception object is re-raised, its traceback grows
-            raise copy.copy(exc).with_traceback(exc.__traceback__)
+            self.finish(html)
+            return
 
         # Check for pending events. This should usually be the case
         # when we are on this page.
@@ -5,9 +5,11 @@ implements https://oauthlib.readthedocs.io/en/latest/oauth2/server.html
 from datetime import datetime
 from urllib.parse import urlparse
 
+from oauthlib import uri_validate
 from oauthlib.oauth2 import RequestValidator
 from oauthlib.oauth2 import WebApplicationServer
 from oauthlib.oauth2.rfc6749.grant_types import authorization_code
+from oauthlib.oauth2.rfc6749.grant_types import base
 from sqlalchemy.orm import scoped_session
 from tornado import web
 from tornado.escape import url_escape

@@ -21,7 +23,16 @@ from ..utils import url_path_join
 # patch absolute-uri check
 # because we want to allow relative uri oauth
 # for internal services
-authorization_code.is_absolute_uri = lambda uri: True
+
+
+def is_absolute_uri(uri):
+    if uri.startswith('/'):
+        return True
+    return uri_validate.is_absolute_uri(uri)
+
+
+authorization_code.is_absolute_uri = is_absolute_uri
+base.is_absolute_uri = is_absolute_uri
 
 
 class JupyterHubRequestValidator(RequestValidator):
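The effect of the patched check, shown on its own (the example URIs are illustrative): path-only redirect URIs used by internal services are accepted, while everything else still goes through oauthlib's own validator.

```python
from oauthlib import uri_validate

def is_absolute_uri(uri):
    if uri.startswith('/'):
        return True
    return uri_validate.is_absolute_uri(uri)

assert is_absolute_uri('/services/my-service/oauth_callback')   # relative path now allowed
assert is_absolute_uri('https://example.org/oauth_callback')    # absolute URI still allowed
```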
@@ -960,7 +960,7 @@ class HubOAuthCallbackHandler(HubOAuthenticated, RequestHandler):
         # validate OAuth state
         arg_state = self.get_argument("state", None)
         if arg_state is None:
-            raise HTTPError("oauth state is missing. Try logging in again.")
+            raise HTTPError(500, "oauth state is missing. Try logging in again.")
         cookie_name = self.hub_auth.get_state_cookie_name(arg_state)
         cookie_state = self.get_secure_cookie(cookie_name)
         # clear cookie state now that we've consumed it
@@ -323,6 +323,8 @@ class MockHub(JupyterHub):
         self.pid_file = NamedTemporaryFile(delete=False).name
         self.db_file = NamedTemporaryFile()
         self.db_url = os.getenv('JUPYTERHUB_TEST_DB_URL') or self.db_file.name
+        if 'mysql' in self.db_url:
+            self.db_kwargs['connect_args'] = {'auth_plugin': 'mysql_native_password'}
         yield super().initialize([])
 
         # add an initial user
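For context, `connect_args` is forwarded by SQLAlchemy to the DBAPI `connect()` call, so the test Hub asks mysql-connector-python to authenticate with the older native password plugin. A minimal sketch with an example URL:

```python
from sqlalchemy import create_engine

engine = create_engine(
    "mysql+mysqlconnector://root@127.0.0.1:13306/jupyterhub",  # example test URL
    connect_args={"auth_plugin": "mysql_native_password"},
)
```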
@@ -13,7 +13,10 @@ from jupyterhub import orm
 
 def populate_db(url):
     """Populate a jupyterhub database"""
-    db = orm.new_session_factory(url)()
+    connect_args = {}
+    if 'mysql' in url:
+        connect_args['auth_plugin'] = 'mysql_native_password'
+    db = orm.new_session_factory(url, connect_args=connect_args)()
     # create some users
     admin = orm.User(name='admin', admin=True)
     db.add(admin)
@@ -28,9 +28,9 @@ def generate_old_db(env_dir, hub_version, db_url):
     check_call([sys.executable, '-m', 'virtualenv', env_dir])
     pkgs = ['jupyterhub==' + hub_version]
     if 'mysql' in db_url:
-        pkgs.append('mysql-connector<2.2')
+        pkgs.append('mysql-connector-python')
     elif 'postgres' in db_url:
-        pkgs.append('psycopg2')
+        pkgs.append('psycopg2-binary')
     check_call([env_pip, 'install'] + pkgs)
     check_call([env_py, populate_db, db_url])
 
@@ -162,8 +162,10 @@ async def test_delete_named_server(app, named_servers):
     )
     r.raise_for_status()
     assert r.status_code == 204
-    # low-level record is now removes
+    # low-level record is now removed
     assert servername not in user.orm_spawners
+    # and it's still not in the high-level wrapper dict
+    assert servername not in user.spawners
 
 
 async def test_named_server_disabled(app):
@@ -393,7 +393,7 @@ async def test_user_redirect(app, username):
     path = urlparse(r.url).path
     while '/spawn-pending/' in path:
         await asyncio.sleep(0.1)
-        r = await get_page(r.url, app, cookies=cookies)
+        r = await async_requests.get(r.url, cookies=cookies)
         path = urlparse(r.url).path
     assert path == ujoin(app.base_url, '/user/%s/notebooks/test.ipynb' % name)
 

@@ -467,9 +467,12 @@ async def test_login_strip(app):
         (False, '/absolute', '/absolute'),
         (False, '/has?query#andhash', '/has?query#andhash'),
         # next_url outside is not allowed
+        (False, 'relative/path', ''),
         (False, 'https://other.domain', ''),
         (False, 'ftp://other.domain', ''),
         (False, '//other.domain', ''),
+        (False, '///other.domain/triple', ''),
+        (False, '\\\\other.domain/backslashes', ''),
     ],
 )
 async def test_login_redirect(app, running, next_url, location):

@@ -485,7 +488,7 @@ async def test_login_redirect(app, running, next_url, location):
 
     url = 'login'
     if next_url:
-        if '//' not in next_url:
+        if '//' not in next_url and next_url.startswith('/'):
             next_url = ujoin(app.base_url, next_url, '')
         url = url_concat(url, dict(next=next_url))
 
@@ -159,6 +159,10 @@ async def api_request(
 
 
 def get_page(path, app, hub=True, **kw):
+    if "://" in path:
+        raise ValueError(
+            "Not a hub page path: %r. Did you mean async_requests.get?" % path
+        )
     if hub:
         prefix = app.hub.base_url
     else:
@@ -163,6 +163,10 @@ class User:
 
         self.spawners = _SpawnerDict(self._new_spawner)
 
+        # ensure default spawner exists in the database
+        if '' not in self.orm_user.orm_spawners:
+            self._new_orm_spawner('')
+
     @property
     def authenticator(self):
         return self.settings.get('authenticator', None)

@@ -221,6 +225,14 @@ class User:
             # otherwise, yield low-level ORM object (server is not active)
             yield orm_spawner
 
+    def _new_orm_spawner(self, server_name):
+        """Creat the low-level orm Spawner object"""
+        orm_spawner = orm.Spawner(user=self.orm_user, name=server_name)
+        self.db.add(orm_spawner)
+        self.db.commit()
+        assert server_name in self.orm_spawners
+        return orm_spawner
+
     def _new_spawner(self, server_name, spawner_class=None, **kwargs):
         """Create a new spawner"""
         if spawner_class is None:

@@ -229,10 +241,7 @@ class User:
 
         orm_spawner = self.orm_spawners.get(server_name)
         if orm_spawner is None:
-            orm_spawner = orm.Spawner(user=self.orm_user, name=server_name)
-            self.db.add(orm_spawner)
-            self.db.commit()
-            assert server_name in self.orm_spawners
+            orm_spawner = self._new_orm_spawner(server_name)
         if server_name == '' and self.state:
             # migrate user.state to spawner.state
             orm_spawner.state = self.state
package.json

@@ -14,16 +14,15 @@
     "lessc": "lessc"
   },
   "devDependencies": {
-    "clean-css": "^3.4.13",
-    "less": "^2.7.1",
+    "less": "^3.9.0",
     "less-plugin-clean-css": "^1.5.1",
-    "prettier": "^1.14.2"
+    "prettier": "^1.16.4"
   },
   "dependencies": {
-    "bootstrap": "^3.4.0",
+    "bootstrap": "^3.4.1",
     "font-awesome": "^4.7.0",
-    "jquery": "^3.2.1",
+    "jquery": "^3.3.1",
-    "moment": "^2.19.3",
+    "moment": "^2.24.0",
-    "requirejs": "^2.3.4"
+    "requirejs": "^2.3.6"
   }
 }
@@ -3,7 +3,7 @@ async_generator>=1.8
 certipy>=0.1.2
 entrypoints
 jinja2
-oauthlib>=2.0,<3
+oauthlib>=3.0
 pamela
 prometheus_client>=0.0.21
 python-dateutil
@@ -126,7 +126,10 @@ require(["jquery", "bootstrap", "moment", "jhapi", "utils"], function(
     var row = getRow(el);
     var user = row.data("user");
     var serverName = row.data("server-name");
-    el.attr("href", utils.url_path_join(prefix, "hub/spawn", user, serverName));
+    el.attr(
+      "href",
+      utils.url_path_join(prefix, "hub/spawn", user, serverName)
+    );
   });
   // cannot start all servers in this case
   // since it would mean opening a bunch of tabs
@@ -5,12 +5,36 @@
 <div class="container">
   <div class="row">
     <div class="text-center">
+      {% block heading %}
+      <h1>
+        {% if failed %}
+        Spawn failed
+        {% else %}
+        Server not running
+        {% endif %}
+      </h1>
+      {% endblock %}
       {% block message %}
-      <p>Server {{ server_name }} is not running. Would you like to start it?</p>
+      <p>
+        {% if failed %}
+        The latest attempt to start your server {{ server_name }} has failed.
+        {% if failed_message %}
+        {{ failed_message }}
+        {% endif %}
+        Would you like to retry starting it?
+        {% else %}
+        Your server {{ server_name }} is not running. Would you like to start it?
+        {% endif %}
+      </p>
       {% endblock %}
       {% block start_button %}
       <a id="start" role="button" class="btn btn-lg btn-primary" href="{{ spawn_url }}">
-        Launch Server {{ server_name }}
+        {% if failed %}
+        Relaunch
+        {% else %}
+        Launch
+        {% endif %}
+        Server {{ server_name }}
       </a>
       {% endblock %}
     </div>
@@ -14,7 +14,7 @@ function get_hub_version() {
     split=( ${hub_xyz//./ } )
     hub_xy="${split[0]}.${split[1]}"
     # add .dev on hub_xy so it's 1.0.dev
-    if [[ ! -z "${split[3]}" ]]; then
+    if [[ ! -z "${split[3]:-}" ]]; then
        hub_xy="${hub_xy}.${split[3]}"
     fi
 }