Merge branch 'main' into krassowski-manage_roles
.github/workflows/test-docs.yml (2 changes, vendored)

@@ -64,7 +64,7 @@ jobs:

       - name: Install requirements
         run: |
-          pip install -r docs/requirements.txt pytest
+          pip install -e . -r docs/requirements.txt pytest

       - name: pytest docs/
         run: |
.github/workflows/test.yml (3 changes, vendored)

@@ -103,6 +103,9 @@ jobs:
           subset: singleuser
         - python: "3.11"
           browser: browser
+        - python: "3.11"
+          subdomain: subdomain
+          browser: browser
         - python: "3.12"
           main_dependencies: main_dependencies

@@ -16,7 +16,7 @@ ci:
 repos:
   # autoformat and lint Python code
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.3.2
+    rev: v0.3.5
     hooks:
       - id: ruff
         types_or:
@@ -15,6 +15,7 @@ build:

 python:
   install:
+    - path: .
     - requirements: docs/requirements.txt

 formats:
@@ -56,7 +56,7 @@ for administration of the Hub and its users.
 ### Check prerequisites

 - A Linux/Unix based system
-- [Python](https://www.python.org/downloads/) 3.6 or greater
+- [Python](https://www.python.org/downloads/) 3.8 or greater
 - [nodejs/npm](https://www.npmjs.com/)

   - If you are using **`conda`**, the nodejs and npm dependencies will be installed for
@@ -1,13 +1,6 @@
-# We install the jupyterhub package to help autodoc-traits inspect it and
-# generate documentation.
-#
-# FIXME: If there is a way for this requirements.txt file to pass a flag that
-# the build system can intercept to not build the javascript artifacts,
-# then do so so. That would mean that installing the documentation can
-# avoid needing node/npm installed.
-#
---editable .
+# docs also require jupyterhub itself to be installed
+# don't depend on it here, as that often results in a duplicate
+# installation of jupyterhub that's already installed

 autodoc-traits
 jupyterhub-sphinx-theme
 myst-parser>=0.19
@@ -70,6 +70,8 @@ myst_enable_extensions = [
 myst_substitutions = {
     # date example: Dec 07, 2022
     "date": datetime.date.today().strftime("%b %d, %Y").title(),
+    "node_min": "12",
+    "python_min": "3.8",
     "version": jupyterhub.__version__,
 }

@@ -289,6 +291,8 @@ linkcheck_ignore = [
     "https://github.com/jupyterhub/jupyterhub/compare/", # too many comparisons in changelog
     r"https?://(localhost|127.0.0.1).*", # ignore localhost references in auto-links
     r"https://linux.die.net/.*", # linux.die.net seems to block requests from CI with 403 sometimes
+    # don't check links to unpublished advisories
+    r"https://github.com/jupyterhub/jupyterhub/security/advisories/.*",
 ]
 linkcheck_anchors_ignore = [
     "/#!",
@@ -12,18 +12,18 @@ development.
 ### Install Python

 JupyterHub is written in the [Python](https://python.org) programming language and
-requires you have at least version 3.6 installed locally. If you haven’t
+requires you have at least version {{python_min}} installed locally. If you haven’t
 installed Python before, the recommended way to install it is to use
 [Miniforge](https://github.com/conda-forge/miniforge#download).

 ### Install nodejs

-[NodeJS 12+](https://nodejs.org/en/) is required for building some JavaScript components.
+[NodeJS {{node_min}}+](https://nodejs.org/en/) is required for building some JavaScript components.
 `configurable-http-proxy`, the default proxy implementation for JupyterHub, is written in Javascript.
 If you have not installed NodeJS before, we recommend installing it in the `miniconda` environment you set up for Python.
 You can do so with `conda install nodejs`.

-Many in the Jupyter community use \[`nvm`\](<https://github.com/nvm-sh/nvm>) to
+Many in the Jupyter community use [`nvm`](https://github.com/nvm-sh/nvm) to
 manage node dependencies.

 ### Install git
@@ -59,7 +59,7 @@ a more detailed discussion.
 python -V
 ```

-This should return a version number greater than or equal to 3.6.
+This should return a version number greater than or equal to {{python_min}}.

 ```bash
 npm -v
@@ -16,7 +16,8 @@ works.

 JupyterHub is designed to be a _simple multi-user server for modestly sized
 groups_ of **semi-trusted** users. While the design reflects serving
-semi-trusted users, JupyterHub can also be suitable for serving **untrusted** users.
+semi-trusted users, JupyterHub can also be suitable for serving **untrusted** users,
+but **is not suitable for untrusted users** in its default configuration.

 As a result, using JupyterHub with **untrusted** users means more work by the
 administrator, since much care is required to secure a Hub, with extra caution on
@@ -56,30 +57,63 @@ ensure that:

 If any additional services are run on the same domain as the Hub, the services
 **must never** display user-authored HTML that is neither _sanitized_ nor _sandboxed_
-(e.g. IFramed) to any user that lacks authentication as the author of a file.
+to any user that lacks authentication as the author of a file.

+### Sharing access to servers
+
+Because sharing access to servers (via `access:servers` scopes or the sharing feature in JupyterHub 5) by definition means users can serve each other files, enabling sharing is not suitable for untrusted users without also enabling per-user domains.
+
+JupyterHub does not enable any sharing by default.
+
 ## Mitigate security issues

 Several approaches to mitigating security issues with configuration
 options provided by JupyterHub include:

-### Enable subdomains
+### Enable user subdomains

 JupyterHub provides the ability to run single-user servers on their own
-subdomains. This means the cross-origin protections between servers has the
-desired effect, and user servers and the Hub are protected from each other. A
-user's single-user server will be at `username.jupyter.mydomain.com`. This also
-requires all user subdomains to point to the same address, which is most easily
-accomplished with wildcard DNS. Since this spreads the service across multiple
-domains, you will need wildcard SSL as well. Unfortunately, for many
-institutional domains, wildcard DNS and SSL are not available. **If you do plan
-to serve untrusted users, enabling subdomains is highly encouraged**, as it
-resolves the cross-site issues.
+domains. This means the cross-origin protections between servers have the
+desired effect, and user servers and the Hub are protected from each other.
+
+**Subdomains are the only way to reliably isolate user servers from each other.**
+
+To enable subdomains, set:
+
+```python
+c.JupyterHub.subdomain_host = "https://jupyter.example.org"
+```
+
+When subdomains are enabled, each user's single-user server will be at e.g. `https://username.jupyter.example.org`.
+This also requires all user subdomains to point to the same address,
+which is most easily accomplished with wildcard DNS, where a single A record points to your server and a wildcard CNAME record points to your A record:
+
+```
+A      jupyter.example.org    192.168.1.123
+CNAME  *.jupyter.example.org  jupyter.example.org
+```
+
+Since this spreads the service across multiple domains, you will likely need wildcard SSL as well,
+matching `*.jupyter.example.org`.

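Where a wildcard certificate is available, a minimal sketch of wiring it up in `jupyterhub_config.py` might look like the following; the domain and file paths are placeholders, and `ssl_cert`/`ssl_key` assume JupyterHub's proxy terminates TLS itself rather than an external load balancer:

```python
# jupyterhub_config.py -- sketch, assuming JupyterHub terminates TLS directly
c.JupyterHub.subdomain_host = "https://jupyter.example.org"
# wildcard certificate covering *.jupyter.example.org (placeholder paths)
c.JupyterHub.ssl_cert = "/etc/ssl/certs/star.jupyter.example.org.crt"
c.JupyterHub.ssl_key = "/etc/ssl/private/star.jupyter.example.org.key"
```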
+Unfortunately, for many institutional domains, wildcard DNS and SSL may not be available.
+
+We also **strongly encourage** serving JupyterHub and user content on a domain that is _not_ a subdomain of any sensitive content.
+For reasoning, see [GitHub's discussion of moving user content to github.io from \*.github.com](https://github.blog/2013-04-09-yummy-cookies-across-domains/).
+
+**If you do plan to serve untrusted users, enabling subdomains is highly encouraged**,
+as it resolves many security issues that are difficult or impossible to avoid when JupyterHub is on a single domain.
+
+:::{important}
+JupyterHub makes no guarantees about protecting users from each other unless subdomains are enabled.
+
+If you want to protect users from each other, you **_must_** enable per-user domains.
+:::

 ### Disable user config

 If subdomains are unavailable or undesirable, JupyterHub provides a
-configuration option `Spawner.disable_user_config`, which can be set to prevent
+configuration option `Spawner.disable_user_config = True`, which can be set to prevent
 the user-owned configuration files from being loaded. After implementing this
 option, `PATH`s and package installation are the other things that the
 admin must enforce.
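For example, a one-line sketch of this option in `jupyterhub_config.py`:

```python
# jupyterhub_config.py -- do not load config files from user-writable locations
c.Spawner.disable_user_config = True
```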
@@ -89,23 +123,24 @@ admin must enforce.
 For most Spawners, `PATH` is not something users can influence, but it's important that
 the Spawner should _not_ evaluate shell configuration files prior to launching the server.

-### Isolate packages using virtualenv
+### Isolate packages in a read-only environment

-Package isolation is most easily handled by running the single-user server in
-a virtualenv with disabled system-site-packages. The user should not have
-permission to install packages into this environment.
+The user must not have permission to install packages into the environment where the singleuser-server runs.
+On a shared system, package isolation is most easily handled by running the single-user server in
+a root-owned virtualenv with disabled system-site-packages.
+The user must not have permission to install packages into this environment.

 The same principle extends to the images used by container-based deployments.
-If users can select the images in which their servers run, they can disable all security.
+If users can select the images in which their servers run, they can disable all security for their own servers.

-It is important to note that the control over the environment only affects the
-single-user server, and not the environment(s) in which the user's kernel(s)
+It is important to note that the control over the environment is only required for the
+single-user server, and not the environment(s) in which the users' kernel(s)
 may run. Installing additional packages in the kernel environment does not
 pose additional risk to the web application's security.

 ### Encrypt internal connections with SSL/TLS

-By default, all communications on the server, between the proxy, hub, and single
--user notebooks are performed unencrypted. Setting the `internal_ssl` flag in
+By default, all communications within JupyterHub—between the proxy, hub, and single
+-user notebooks—are performed unencrypted. Setting the `internal_ssl` flag in
 `jupyterhub_config.py` secures the aforementioned routes. Turning this
 feature on does require that the enabled `Spawner` can use the certificates
 generated by the `Hub` (the default `LocalProcessSpawner` can, for instance).
@@ -119,6 +154,104 @@ Unix permissions to the communication sockets thereby restricting
 communication to the socket owner. The `internal_ssl` option will eventually
 extend to securing the `tcp` sockets as well.

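A minimal sketch of enabling this in `jupyterhub_config.py` (the Hub then generates and distributes the internal certificates):

```python
# jupyterhub_config.py -- encrypt hub <-> proxy <-> single-user traffic
c.JupyterHub.internal_ssl = True
```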
+### Mitigating same-origin deployments
+
+While per-user domains are **required** for robust protection of users from each other,
+you can mitigate many (but not all) cross-user issues.
+First, it is critical that users cannot modify their server environments, as described above.
+Second, it is important that users do not have `access:servers` permission to any server other than their own.
+
+If users can access each others' servers, additional security measures must be enabled, some of which come with distinct user-experience costs.
+
+Without the [Same-Origin Policy] (SOP) protecting user servers from each other,
+each user server is considered a trusted origin for requests to each other user server (and the Hub itself).
+Servers _cannot_ meaningfully distinguish requests originating from other user servers,
+because SOP implies a great deal of trust, losing many restrictions applied to cross-origin requests.
+
+That means pages served from each user server can:
+
+1. arbitrarily modify the path in the Referer
+2. make fully authorized requests with cookies
+3. access full page contents served from the hub or other servers via popups
+
+JupyterHub uses distinct xsrf tokens stored in cookies on each server path to attempt to limit requests across servers.
+This has limitations because not all requests are protected by these XSRF tokens,
+and unless additional measures are taken, the XSRF tokens from other user prefixes may be retrieved.
+
+[Same-Origin Policy]: https://developer.mozilla.org/en-US/docs/Web/Security/Same-origin_policy
+
+For example:
+
+- The `Content-Security-Policy` header must prohibit popups and iframes from the same origin.
+  The following Content-Security-Policy rules are _insecure_ and readily enable users to access each others' servers:
+
+  - `frame-ancestors: 'self'`
+  - `frame-ancestors: '*'`
+  - `sandbox allow-popups`
+
+- Ideally, pages should use the strictest `Content-Security-Policy: sandbox` available,
+  but this is not feasible in general for JupyterLab pages, which need at least `sandbox allow-same-origin allow-scripts` to work.
+
+The default Content-Security-Policy for single-user servers is
+
+```
+frame-ancestors: 'none'
+```
+
+which prohibits iframe embedding, but not pop-ups.
+
+A more secure Content-Security-Policy that has some costs to user experience is:
+
+```
+frame-ancestors: 'none'; sandbox allow-same-origin allow-scripts
+```

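One possible way to apply such a policy to single-user servers is sketched below; it assumes the servers run jupyter-server and that overriding response headers through `ServerApp.tornado_settings` is acceptable for your deployment, which you should verify against your server version:

```python
# single-user server config (e.g. jupyter_server_config.py) -- sketch, not a drop-in default
c.ServerApp.tornado_settings = {
    "headers": {
        # stricter than the default frame-ancestors 'none'; may break some UI features
        "Content-Security-Policy": "frame-ancestors 'none'; sandbox allow-same-origin allow-scripts",
    }
}
```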
+`allow-popups` is not disabled by default because disabling it breaks legitimate functionality, like "Open this in a new tab", and the "JupyterHub Control Panel" menu item.
+To reiterate, the right way to avoid these issues is to enable per-user domains, where none of these concerns come up.
+
+Note: even this level of protection requires that administrators maintain full control over the user server environment.
+If users can modify their server environment, these methods are ineffective, as users can readily disable them.
+
+### Cookie tossing
+
+Cookie tossing is a technique where another server on a subdomain or peer subdomain can set a cookie
+which will be read on another domain.
+This is not relevant unless there are other user-controlled servers on a peer domain.
+
+"Domain-locked" cookies avoid this issue, but have their own restrictions:
+
+- JupyterHub must be served over HTTPS
+- All secure cookies must be set on `/`, not on sub-paths, which means they are shared by all JupyterHub components in a single-domain deployment.
+
+As a result, this option is only recommended when per-user subdomains are enabled,
+to prevent sending all jupyterhub cookies to all user servers.
+
+To enable domain-locked cookies, set:
+
+```python
+c.JupyterHub.cookie_host_prefix_enabled = True
+```
+
+```{versionadded} 4.1
+
+```
+
+### Forced-login
+
+Jupyter servers can share links with `?token=...`.
+JupyterHub prior to 5.0 will accept this request and persist the token for future requests.
+This is useful for enabling admins to create 'fully authenticated' links bypassing login.
+However, it also means users can share their own links that will log other users into their own servers,
+enabling them to serve each other notebooks and other arbitrary HTML, depending on server configuration.
+
+```{versionadded} 4.1
+Setting environment variable `JUPYTERHUB_ALLOW_TOKEN_IN_URL=0` in the single-user environment can opt out of accepting token auth in URL parameters.
+```

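As a sketch, that variable can be set for every spawned server from `jupyterhub_config.py` via `Spawner.environment` (shown here for the opt-out on JupyterHub 4.1):

```python
# jupyterhub_config.py -- opt out of token-in-URL auth on JupyterHub 4.1
c.Spawner.environment = {"JUPYTERHUB_ALLOW_TOKEN_IN_URL": "0"}
```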
+```{versionadded} 5.0
+Accepting tokens in URLs is disabled by default, and `JUPYTERHUB_ALLOW_TOKEN_IN_URL=1` environment variable must be set to _allow_ token auth in URL parameters.
+```

 ## Security audits

 We recommend that you do periodic reviews of your deployment's security. It's
@@ -37,14 +37,19 @@ A [generic implementation](https://github.com/jupyterhub/oauthenticator/blob/mas
 ## The Dummy Authenticator

 When testing, it may be helpful to use the
-{class}`jupyterhub.auth.DummyAuthenticator`. This allows for any username and
-password unless if a global password has been set. Once set, any username will
+{class}`~.jupyterhub.auth.DummyAuthenticator`. This allows for any username and
+password unless a global password has been set. Once set, any username will
 still be accepted but the correct password will need to be provided.

+:::{versionadded} 5.0
+The DummyAuthenticator's default `allow_all` is True,
+unlike most other Authenticators.
+:::

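For local testing, a minimal sketch of enabling it (the password value is a placeholder, and this should never be used in production):

```python
# jupyterhub_config.py -- testing only
c.JupyterHub.authenticator_class = "dummy"
# optional: require a shared password; any username is still accepted
c.DummyAuthenticator.password = "some-shared-test-password"
```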
 ## Additional Authenticators

-A partial list of other authenticators is available on the
-[JupyterHub wiki](https://github.com/jupyterhub/jupyterhub/wiki/Authenticators).
+Additional authenticators can be found on GitHub
+by searching for [topic:jupyterhub topic:authenticator](https://github.com/search?q=topic%3Ajupyterhub%20topic%3Aauthenticator&type=repositories).

 ## Technical Overview of Authentication

@@ -54,9 +59,9 @@ The base authenticator uses simple username and password authentication.

 The base Authenticator has one central method:

-#### Authenticator.authenticate method
+#### Authenticator.authenticate

-Authenticator.authenticate(handler, data)
+{meth}`.Authenticator.authenticate`

 This method is passed the Tornado `RequestHandler` and the `POST data`
 from JupyterHub's login form. Unless the login form has been customized,
@@ -81,7 +86,8 @@ Writing an Authenticator that looks up passwords in a dictionary
 requires only overriding this one method:

 ```python
-from IPython.utils.traitlets import Dict
+from secrets import compare_digest
+from traitlets import Dict
 from jupyterhub.auth import Authenticator

 class DictionaryAuthenticator(Authenticator):
@@ -91,8 +97,14 @@ class DictionaryAuthenticator(Authenticator):
     )

     async def authenticate(self, handler, data):
-        if self.passwords.get(data['username']) == data['password']:
-            return data['username']
+        username = data["username"]
+        password = data["password"]
+        check_password = self.passwords.get(username, "")
+        # always call compare_digest, for timing attacks
+        if compare_digest(check_password, password) and username in self.passwords:
+            return username
+        else:
+            return None
 ```

 #### Normalize usernames
@@ -136,7 +148,7 @@ To only allow usernames that start with 'w':
 c.Authenticator.username_pattern = r'w.*'
 ```

-### How to write a custom authenticator
+## How to write a custom authenticator

 You can use custom Authenticator subclasses to enable authentication
 via other mechanisms. One such example is using [GitHub OAuth][].
@@ -148,11 +160,6 @@ and {meth}`.Authenticator.post_spawn_stop`, are hooks that can be used to do
 auth-related startup (e.g. opening PAM sessions) and cleanup
 (e.g. closing PAM sessions).

-See a list of custom Authenticators [on the wiki](https://github.com/jupyterhub/jupyterhub/wiki/Authenticators).
-
-If you are interested in writing a custom authenticator, you can read
-[this tutorial](http://jupyterhub-tutorial.readthedocs.io/en/latest/authenticators.html).
-
 ### Registering custom Authenticators via entry points

 As of JupyterHub 1.0, custom authenticators can register themselves via
@@ -188,6 +195,166 @@ Additionally, configurable attributes for your authenticator will
 appear in jupyterhub help output and auto-generated configuration files
 via `jupyterhub --generate-config`.

+(authenticator-allow)=
+
+### Allowing access
+
+When dealing with logging in, there are generally two _separate_ steps:
+
+authentication
+: identifying who is trying to log in, and
+
+authorization
+: deciding whether an authenticated user is allowed to access your JupyterHub
+
+{meth}`Authenticator.authenticate` is responsible for authenticating users.
+It is perfectly fine in the simplest cases for `Authenticator.authenticate` to be responsible for authentication _and_ authorization,
+in which case `authenticate` may return `None` if the user is not authorized.
+
+However, Authenticators also have two methods, {meth}`~.Authenticator.check_allowed` and {meth}`~.Authenticator.check_blocked_users`, which are called after successful authentication to further check if the user is allowed.
+
+If `check_blocked_users()` returns False, authorization stops and the user is not allowed.
+
+If `Authenticator.allow_all` is True OR `check_allowed()` returns True, authorization proceeds.
+
+:::{versionadded} 5.0
+{attr}`.Authenticator.allow_all` and {attr}`.Authenticator.allow_existing_users` are new in JupyterHub 5.0.
+
+By default, `allow_all` is False,
+which is a change from pre-5.0, where `allow_all` was implicitly True if `allowed_users` was empty.
+:::
+
+### Overriding `check_allowed`
+
+:::{versionchanged} 5.0
+`check_allowed()` is **not called** if `allow_all` is True.
+:::
+
+:::{versionchanged} 5.0
+Starting with 5.0, `check_allowed()` should **NOT** return True if no allow config
+is specified (`allow_all` should be used instead).
+:::
+
+The base implementation of {meth}`~.Authenticator.check_allowed` checks:
+
+- if username is in the `allowed_users` set, return True
+- else return False
+
+:::{versionchanged} 5.0
+Prior to 5.0, this would also return True if `allowed_users` was empty.
+
+For clarity, this is no longer the case. A new `allow_all` property (default False) has been added which is checked _before_ calling `check_allowed`.
+If `allow_all` is True, this takes priority over `check_allowed`, which will be ignored.
+
+If your Authenticator subclass similarly returns True when no allow config is defined,
+this is fully backward compatible for your users, but means `allow_all = False` has no real effect.
+
+You can make your Authenticator forward-compatible with JupyterHub 5 by defining `allow_all` as a boolean config trait on your class:
+
+```python
+class MyAuthenticator(Authenticator):
+
+    # backport allow_all from JupyterHub 5
+    allow_all = Bool(False, config=True)
+
+    def check_allowed(self, username, authentication):
+        if self.allow_all:
+            # replaces previous "if no auth config"
+            return True
+        ...
+```
+:::
+
+If an Authenticator defines additional sources of `allow` configuration,
+such as membership in a group or other information,
+it should override `check_allowed` to account for this.
+
+:::{note}
+`allow_` configuration should generally be _additive_,
+i.e. if access is granted by _any_ allow configuration,
+a user should be authorized.
+
+JupyterHub recommends that Authenticators applying _restrictive_ configuration should use names like `block_` or `require_`,
+and check this during `check_blocked_users` or `authenticate`, not `check_allowed`.
+:::
+
+In general, an Authenticator's skeleton should look like:
+
+```python
+class MyAuthenticator(Authenticator):
+    # backport allow_all for compatibility with JupyterHub < 5
+    allow_all = Bool(False, config=True)
+    require_something = List(config=True)
+    allowed_something = Set()
+
+    def authenticate(self, handler, data):
+        ...
+        if success:
+            return {"username": username, "auth_state": {...}}
+        else:
+            return None
+
+    def check_blocked_users(self, username, authentication=None):
+        """Apply _restrictive_ configuration"""
+
+        if self.require_something and not has_something(username, self.request_):
+            return False
+        # repeat for each restriction
+        if restriction_defined and restriction_not_met:
+            return False
+        return super().check_blocked_users(username, authentication)
+
+    def check_allowed(self, username, authentication=None):
+        """Apply _permissive_ configuration
+
+        Only called if check_blocked_users returns True
+        AND allow_all is False
+        """
+        if self.allow_all:
+            # check here to backport allow_all behavior
+            # from JupyterHub 5
+            # this branch will never be taken with jupyterhub >=5
+            return True
+        if self.allowed_something and user_has_something(username):
+            return True
+        # repeat for each allow
+        if allow_config and allow_met:
+            return True
+        # should always have this at the end
+        if self.allowed_users and username in self.allowed_users:
+            return True
+        # do not call super!
+        # super().check_allowed is not safe with JupyterHub < 5.0,
+        # as it will return True if allowed_users is empty
+        return False
+```
+
+Key points:
+
+- `allow_all` is backported from JupyterHub 5, for consistent behavior in all versions of JupyterHub (optional)
+- restrictive configuration is checked in `check_blocked_users`
+  - if any restriction is not met, `check_blocked_users` returns False
+- permissive configuration is checked in `check_allowed`
+  - if any `allow` condition is met, `check_allowed` returns True
+
+So the logical expression for a user being authorized should look like:
+
+> if ALL restrictions are met AND ANY admissions are met: user is authorized
+
+#### Custom error messages
+
+Any of these authentication and authorization methods may raise a `web.HTTPError` Exception
+
+```python
+from tornado import web
+
+raise web.HTTPError(403, "informative message")
+```
+
+if you want to show a more informative login failure message rather than the generic one.
+
 (authenticator-auth-state)=

 ### Authentication state
@@ -6,8 +6,161 @@ For detailed changes from the prior release, click on the version number, and
 its link will bring up a GitHub listing of changes. Use `git log` on the
 command line for details.

+## Versioning
+
+JupyterHub follows Intended Effort Versioning ([EffVer](https://jacobtomlinson.dev/effver/)) for versioning,
+where the version number is meant to indicate the amount of effort required to upgrade to the new version.
+
+Factors contributing to major version bumps in JupyterHub include:
+
+- Database schema changes that require migrations and are hard to roll back
+- Increasing the minimum required Python version
+- Large new features
+- Breaking changes likely to affect users
+
 ## [Unreleased]

+## 4.1
+
+### 4.1.5 - 2024-04-04
+
+([full changelog](https://github.com/jupyterhub/jupyterhub/compare/4.1.4...4.1.5))
+
+#### Bugs fixed
+
+- singleuser mixin: include check_xsrf_cookie in overrides [#4771](https://github.com/jupyterhub/jupyterhub/pull/4771) ([@minrk](https://github.com/minrk), [@consideRatio](https://github.com/consideRatio))
+
+#### Contributors to this release
+
+The following people contributed discussions, new ideas, code and documentation contributions, and review.
+See [our definition of contributors](https://github-activity.readthedocs.io/en/latest/#how-does-this-tool-define-contributions-in-the-reports).
+
+([GitHub contributors page for this release](https://github.com/jupyterhub/jupyterhub/graphs/contributors?from=2024-03-30&to=2024-04-04&type=c))
+
+@consideRatio ([activity](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3AconsideRatio+updated%3A2024-03-30..2024-04-04&type=Issues)) | @manics ([activity](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Amanics+updated%3A2024-03-30..2024-04-04&type=Issues)) | @minrk ([activity](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aminrk+updated%3A2024-03-30..2024-04-04&type=Issues))
+
+### 4.1.4 - 2024-03-30
+
+([full changelog](https://github.com/jupyterhub/jupyterhub/compare/4.1.3...4.1.4))
+
+#### Bugs fixed
+
+- avoid xsrf check on navigate GET requests [#4759](https://github.com/jupyterhub/jupyterhub/pull/4759) ([@minrk](https://github.com/minrk), [@consideRatio](https://github.com/consideRatio))
+
+#### Contributors to this release
+
+The following people contributed discussions, new ideas, code and documentation contributions, and review.
+See [our definition of contributors](https://github-activity.readthedocs.io/en/latest/#how-does-this-tool-define-contributions-in-the-reports).
+
+([GitHub contributors page for this release](https://github.com/jupyterhub/jupyterhub/graphs/contributors?from=2024-03-26&to=2024-03-30&type=c))
+
+@consideRatio ([activity](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3AconsideRatio+updated%3A2024-03-26..2024-03-30&type=Issues)) | @minrk ([activity](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aminrk+updated%3A2024-03-26..2024-03-30&type=Issues))
+
+### 4.1.3 - 2024-03-26
+
+([full changelog](https://github.com/jupyterhub/jupyterhub/compare/4.1.2...4.1.3))
+
+#### Bugs fixed
+
+- respect jupyter-server disable_check_xsrf setting [#4753](https://github.com/jupyterhub/jupyterhub/pull/4753) ([@minrk](https://github.com/minrk), [@consideRatio](https://github.com/consideRatio))
+
+#### Contributors to this release
+
+The following people contributed discussions, new ideas, code and documentation contributions, and review.
+See [our definition of contributors](https://github-activity.readthedocs.io/en/latest/#how-does-this-tool-define-contributions-in-the-reports).
+
+([GitHub contributors page for this release](https://github.com/jupyterhub/jupyterhub/graphs/contributors?from=2024-03-25&to=2024-03-26&type=c))
+
+@consideRatio ([activity](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3AconsideRatio+updated%3A2024-03-25..2024-03-26&type=Issues)) | @minrk ([activity](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aminrk+updated%3A2024-03-25..2024-03-26&type=Issues))
+
+### 4.1.2 - 2024-03-25
+
+4.1.2 fixes a regression in 4.1.0 affecting named servers.
+
+([full changelog](https://github.com/jupyterhub/jupyterhub/compare/4.1.1...4.1.2))
+
+#### Bugs fixed
+
+- rework handling of multiple xsrf tokens [#4750](https://github.com/jupyterhub/jupyterhub/pull/4750) ([@minrk](https://github.com/minrk), [@consideRatio](https://github.com/consideRatio))
+
+#### Contributors to this release
+
+The following people contributed discussions, new ideas, code and documentation contributions, and review.
+See [our definition of contributors](https://github-activity.readthedocs.io/en/latest/#how-does-this-tool-define-contributions-in-the-reports).
+
+([GitHub contributors page for this release](https://github.com/jupyterhub/jupyterhub/graphs/contributors?from=2024-03-23&to=2024-03-25&type=c))
+
+@consideRatio ([activity](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3AconsideRatio+updated%3A2024-03-23..2024-03-25&type=Issues)) | @minrk ([activity](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aminrk+updated%3A2024-03-23..2024-03-25&type=Issues))
+
+### 4.1.1 - 2024-03-23
+
+4.1.1 fixes a compatibility regression in 4.1.0 for some extensions,
+particularly jupyter-server-proxy.
+
+([full changelog](https://github.com/jupyterhub/jupyterhub/compare/4.1.0...4.1.1))
+
+#### Bugs fixed
+
+- allow subclasses to override xsrf check [#4745](https://github.com/jupyterhub/jupyterhub/pull/4745) ([@minrk](https://github.com/minrk), [@consideRatio](https://github.com/consideRatio))
+
+#### Contributors to this release
+
+The following people contributed discussions, new ideas, code and documentation contributions, and review.
+See [our definition of contributors](https://github-activity.readthedocs.io/en/latest/#how-does-this-tool-define-contributions-in-the-reports).
+
+([GitHub contributors page for this release](https://github.com/jupyterhub/jupyterhub/graphs/contributors?from=2024-03-20&to=2024-03-23&type=c))
+
+@consideRatio ([activity](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3AconsideRatio+updated%3A2024-03-20..2024-03-23&type=Issues)) | @minrk ([activity](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aminrk+updated%3A2024-03-20..2024-03-23&type=Issues))
+
+### 4.1.0 - 2024-03-20
+
+JupyterHub 4.1 is a security release, fixing [CVE-2024-28233].
+All JupyterHub deployments are encouraged to upgrade,
+especially those with other user content on peer domains to JupyterHub.
+
+As always, JupyterHub deployments are especially encouraged to enable per-user domains if protecting users from each other is a concern.
+
+For more information on securely deploying JupyterHub, see the [web security documentation](web-security).
+
+[CVE-2024-28233]: https://github.com/jupyterhub/jupyterhub/security/advisories/GHSA-7r3h-4ph8-w38g
+
+([full changelog](https://github.com/jupyterhub/jupyterhub/compare/4.0.2...4.1.0))
+
+#### Enhancements made
+
+- Backport PR #4628 on branch 4.x (Include LDAP groups in local spawner gids) [#4735](https://github.com/jupyterhub/jupyterhub/pull/4735) ([@minrk](https://github.com/minrk))
+- Backport PR #4561 on branch 4.x (Improve debugging when waiting for servers) [#4714](https://github.com/jupyterhub/jupyterhub/pull/4714) ([@minrk](https://github.com/minrk))
+- Backport PR #4563 on branch 4.x (only set 'domain' field on session-id cookie) [#4707](https://github.com/jupyterhub/jupyterhub/pull/4707) ([@minrk](https://github.com/minrk))
+
+#### Bugs fixed
+
+- Backport PR #4733 on branch 4.x (Catch ValueError while waiting for server to be reachable) [#4734](https://github.com/jupyterhub/jupyterhub/pull/4734) ([@minrk](https://github.com/minrk))
+- Backport PR #4679 on branch 4.x (Unescape jinja username) [#4705](https://github.com/jupyterhub/jupyterhub/pull/4705) ([@minrk](https://github.com/minrk))
+- Backport PR #4630: avoid setting unused oauth state cookies on API requests [#4697](https://github.com/jupyterhub/jupyterhub/pull/4697) ([@minrk](https://github.com/minrk))
+- Backport PR #4632: simplify, avoid errors in parsing accept headers [#4696](https://github.com/jupyterhub/jupyterhub/pull/4696) ([@minrk](https://github.com/minrk))
+- Backport PR #4677 on branch 4.x (Improve validation, docs for token.expires_in) [#4692](https://github.com/jupyterhub/jupyterhub/pull/4692) ([@minrk](https://github.com/minrk))
+- Backport PR #4570 on branch 4.x (fix mutation of frozenset in scope intersection) [#4691](https://github.com/jupyterhub/jupyterhub/pull/4691) ([@minrk](https://github.com/minrk))
+- Backport PR #4562 on branch 4.x (Use `user.stop` to cleanup spawners that stopped while Hub was down) [#4690](https://github.com/jupyterhub/jupyterhub/pull/4690) ([@minrk](https://github.com/minrk))
+- Backport PR #4542 on branch 4.x (Fix include_stopped_servers in paginated next_url) [#4689](https://github.com/jupyterhub/jupyterhub/pull/4689) ([@minrk](https://github.com/minrk))
+- Backport PR #4651 on branch 4.x (avoid attempting to patch removed IPythonHandler with notebook v7) [#4688](https://github.com/jupyterhub/jupyterhub/pull/4688) ([@minrk](https://github.com/minrk))
+- Backport PR #4560 on branch 4.x (singleuser extension: persist token from ?token=... url in cookie) [#4687](https://github.com/jupyterhub/jupyterhub/pull/4687) ([@minrk](https://github.com/minrk))
+
+#### Maintenance and upkeep improvements
+
+- Backport quay.io publishing [#4698](https://github.com/jupyterhub/jupyterhub/pull/4698) ([@minrk](https://github.com/minrk))
+- Backport PR #4617: try to improve reliability of test_external_proxy [#4695](https://github.com/jupyterhub/jupyterhub/pull/4695) ([@minrk](https://github.com/minrk))
+- Backport PR #4618 on branch 4.x (browser test: wait for token request to finish before reloading) [#4694](https://github.com/jupyterhub/jupyterhub/pull/4694) ([@minrk](https://github.com/minrk))
+- preparing 4.x branch [#4685](https://github.com/jupyterhub/jupyterhub/pull/4685) ([@minrk](https://github.com/minrk), [@consideRatio](https://github.com/consideRatio))
+
+#### Contributors to this release
+
+The following people contributed discussions, new ideas, code and documentation contributions, and review.
+See [our definition of contributors](https://github-activity.readthedocs.io/en/latest/#how-does-this-tool-define-contributions-in-the-reports).
+
+([GitHub contributors page for this release](https://github.com/jupyterhub/jupyterhub/graphs/contributors?from=2023-08-10&to=2024-03-19&type=c))
+
+@Achele ([activity](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3AAchele+updated%3A2023-08-10..2024-03-19&type=Issues)) | @akashthedeveloper ([activity](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aakashthedeveloper+updated%3A2023-08-10..2024-03-19&type=Issues)) | @balajialg ([activity](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Abalajialg+updated%3A2023-08-10..2024-03-19&type=Issues)) | @BhavyaT-135 ([activity](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3ABhavyaT-135+updated%3A2023-08-10..2024-03-19&type=Issues)) | @blink1073 ([activity](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Ablink1073+updated%3A2023-08-10..2024-03-19&type=Issues)) | @consideRatio ([activity](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3AconsideRatio+updated%3A2023-08-10..2024-03-19&type=Issues)) | @fcollonval ([activity](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Afcollonval+updated%3A2023-08-10..2024-03-19&type=Issues)) | @I-Am-D-B ([activity](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3AI-Am-D-B+updated%3A2023-08-10..2024-03-19&type=Issues)) | @jakirkham ([activity](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Ajakirkham+updated%3A2023-08-10..2024-03-19&type=Issues)) | @ktaletsk ([activity](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aktaletsk+updated%3A2023-08-10..2024-03-19&type=Issues)) | @kzgrzendek ([activity](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Akzgrzendek+updated%3A2023-08-10..2024-03-19&type=Issues)) | @lumberbot-app ([activity](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Alumberbot-app+updated%3A2023-08-10..2024-03-19&type=Issues)) | @manics ([activity](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Amanics+updated%3A2023-08-10..2024-03-19&type=Issues)) | @mbiette ([activity](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Ambiette+updated%3A2023-08-10..2024-03-19&type=Issues)) | @minrk ([activity](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aminrk+updated%3A2023-08-10..2024-03-19&type=Issues)) | @rcthomas ([activity](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Arcthomas+updated%3A2023-08-10..2024-03-19&type=Issues)) | @ryanlovett ([activity](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aryanlovett+updated%3A2023-08-10..2024-03-19&type=Issues)) | @sgaist ([activity](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Asgaist+updated%3A2023-08-10..2024-03-19&type=Issues)) | @shubham0473 ([activity](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Ashubham0473+updated%3A2023-08-10..2024-03-19&type=Issues)) | @Temidayo32 ([activity](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3ATemidayo32+updated%3A2023-08-10..2024-03-19&type=Issues)) | @willingc ([activity](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Awillingc+updated%3A2023-08-10..2024-03-19&type=Issues)) | @yuvipanda ([activity](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Ayuvipanda+updated%3A2023-08-10..2024-03-19&type=Issues))
+
 ## 4.0

 ### 4.0.2 - 2023-08-10

@@ -6,21 +6,58 @@ The default Authenticator uses [PAM][] (Pluggable Authentication Module) to auth
 their usernames and passwords. With the default Authenticator, any user
 with an account and password on the system will be allowed to login.

-## Create a set of allowed users (`allowed_users`)
+## Deciding who is allowed
+
+In the base Authenticator, there are 3 configuration options for granting users access to your Hub:
+
+1. `allow_all` grants any user who can successfully authenticate access to the Hub
+2. `allowed_users` defines a set of users who can access the Hub
+3. `allow_existing_users` enables managing users via the JupyterHub API or admin page
+
+These options should apply to all Authenticators.
+Your chosen Authenticator may add additional configuration options to admit users, such as team membership, course enrollment, etc.
+
+:::{important}
+You should always specify at least one allow configuration if you want people to be able to access your Hub!
+In most cases, this looks like:
+
+```python
+c.Authenticator.allow_all = True
+# or
+c.Authenticator.allowed_users = {"name", ...}
+```
+
+:::
+
+:::{versionchanged} 5.0
+If no allow config is specified, then by default **nobody will have access to your Hub**.
+Prior to 5.0, the opposite was true; effectively `allow_all = True` if no other allow config was specified.
+:::

 You can restrict which users are allowed to login with a set,
 `Authenticator.allowed_users`:

 ```python
 c.Authenticator.allowed_users = {'mal', 'zoe', 'inara', 'kaylee'}
+# c.Authenticator.allow_all = False
+c.Authenticator.allow_existing_users = False
 ```

-Users in the `allowed_users` set are added to the Hub database when the Hub is
-started.
+Users in the `allowed_users` set are added to the Hub database when the Hub is started.

-```{warning}
-If this configuration value is not set, then **all authenticated users will be allowed into your hub**.
-```
+:::{versionchanged} 5.0
+{attr}`.Authenticator.allow_all` and {attr}`.Authenticator.allow_existing_users` are new in JupyterHub 5.0
+to enable explicit configuration of previously implicit behavior.
+
+Prior to 5.0, `allow_all` was implicitly True if `allowed_users` was empty.
+Starting with 5.0, to allow all authenticated users by default,
+`allow_all` must be explicitly set to True.
+
+By default, `allow_existing_users` is True when `allowed_users` is not empty,
+to ensure backward-compatibility.
+To make the `allowed_users` set _restrictive_,
+set `allow_existing_users = False`.
+:::

 ## One Time Passwords ( request_otp )

@@ -42,7 +79,7 @@ c.Authenticator.otp_prompt = 'Google Authenticator:'
 ```{note}
 As of JupyterHub 2.0, the full permissions of `admin_users`
 should not be required.
-Instead, you can assign [roles](define-role-target) to users or groups
+Instead, it is best to assign [roles](define-role-target) to users or groups
 with only the scopes they require.
 ```

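For example, a sketch of granting a limited role instead of full `admin_users`; the role name, user list, and exact scope set here are illustrative placeholders:

```python
# jupyterhub_config.py -- grant only the scopes that are actually needed
c.JupyterHub.load_roles = [
    {
        "name": "user-admins",
        "scopes": ["admin-ui", "list:users", "admin:servers"],
        "users": ["ops-user"],
    }
]
```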
@@ -68,26 +105,55 @@ group. For example, we can let any user in the `wheel` group be an admin:
 c.PAMAuthenticator.admin_groups = {'wheel'}
 ```

-## Give admin access to other users' notebook servers (`admin_access`)
+## Give some users access to other users' notebook servers

-Since the default `JupyterHub.admin_access` setting is `False`, the admins
-do not have permission to log in to the single user notebook servers
-owned by _other users_. If `JupyterHub.admin_access` is set to `True`,
-then admins have permission to log in _as other users_ on their
-respective machines for debugging. **As a courtesy, you should make
-sure your users know if admin_access is enabled.**
+The `access:servers` scope can be granted to users to give them permission to visit other users' servers.
+For example, to give members of the `teachers` group access to the servers of members of the `students` group:
+
+```python
+c.JupyterHub.load_roles = [
+    {
+        "name": "teachers",
+        "scopes": [
+            "admin-ui",
+            "list:users",
+            "access:servers!group=students",
+        ],
+        "groups": ["teachers"],
+    }
+]
+```
+
+By default, only the deprecated `admin` role has global `access` permissions.
+**As a courtesy, you should make sure your users know if admin access is enabled.**

 ## Add or remove users from the Hub

+:::{versionadded} 5.0
+`c.Authenticator.allow_existing_users` is added in 5.0 and True by default _if_ any `allowed_users` are specified.
+
+Prior to 5.0, this behavior was not optional.
+:::
+
 Users can be added to and removed from the Hub via the admin
-panel or the REST API. When a user is **added**, the user will be
-automatically added to the `allowed_users` set and database. Restarting the Hub
-will not require manually updating the `allowed_users` set in your config file,
+panel or the REST API.
+
+To enable this behavior, set:
+
+```python
+c.Authenticator.allow_existing_users = True
+```
+
+When a user is **added**, the user will be
+automatically added to the `allowed_users` set and database.
+If `allow_existing_users` is True, restarting the Hub will not require manually updating the `allowed_users` set in your config file,
 as the users will be loaded from the database.
+If `allow_existing_users` is False, users not granted access by configuration such as `allowed_users` will not be permitted to login,
+even if they are present in the database.

After starting the Hub once, it is not sufficient to **remove** a user
|
After starting the Hub once, it is not sufficient to **remove** a user
|
||||||
from the allowed users set in your config file. You must also remove the user
|
from the allowed users set in your config file. You must also remove the user
|
||||||
from the Hub's database, either by deleting the user from JupyterHub's
|
from the Hub's database, either by deleting the user via JupyterHub's
|
||||||
admin page, or you can clear the `jupyterhub.sqlite` database and start
|
admin page, or you can clear the `jupyterhub.sqlite` database and start
|
||||||
fresh.
|
fresh.
|
||||||
|
|
||||||
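As a rough sketch of the REST API route for this (the Hub URL, token, and username are placeholders; the token needs the `admin:users` scope):

```python
import requests

hub_api = "http://127.0.0.1:8081/hub/api"    # placeholder Hub API URL
headers = {"Authorization": "token ABC123"}  # placeholder API token with admin:users scope

# add a user (also added to allowed_users when allow_existing_users is True)
requests.post(f"{hub_api}/users/newuser", headers=headers).raise_for_status()

# remove the user again
requests.delete(f"{hub_api}/users/newuser", headers=headers).raise_for_status()
```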
@@ -5,11 +5,11 @@
 Before installing JupyterHub, you will need:

 - a Linux/Unix-based system
-- [Python](https://www.python.org/downloads/) 3.6 or greater. An understanding
+- [Python {{python_min}}](https://www.python.org/downloads/) or greater. An understanding
   of using [`pip`](https://pip.pypa.io) or
   [`conda`](https://docs.conda.io/projects/conda/en/latest/user-guide/getting-started.html) for
   installing Python packages is helpful.
-- [nodejs/npm](https://www.npmjs.com/). [Install nodejs/npm](https://docs.npmjs.com/getting-started/installing-node),
+- [Node.js {{node_min}}](https://www.npmjs.com/) or greater, along with npm. [Install Node.js/npm](https://docs.npmjs.com/getting-started/installing-node),
   using your operating system's package manager.

 - If you are using **`conda`**, the nodejs and npm dependencies will be installed for
@@ -24,7 +24,7 @@ Before installing JupyterHub, you will need:
   ```

 [nodesource][] is a great resource to get more recent versions of the nodejs runtime,
-if your system package manager only has an old version of Node.js (e.g. 10 or older).
+if your system package manager only has an old version of Node.js.

 - A [pluggable authentication module (PAM)](https://en.wikipedia.org/wiki/Pluggable_authentication_module)
   to use the [default Authenticator](authenticators).
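A quick way to confirm the Python and Node.js toolchain before installing (a small standalone sketch, not part of JupyterHub):

```python
# quick prerequisite check; adjust for your environment
import shutil
import subprocess
import sys

print("Python:", sys.version.split()[0])  # should be at or above the documented minimum

for tool in ("node", "npm"):
    path = shutil.which(tool)
    if path is None:
        print(f"{tool}: not found on PATH")
    else:
        version = subprocess.run(
            [path, "--version"], capture_output=True, text=True
        ).stdout.strip()
        print(f"{tool}: {version} ({path})")
```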
62  jsx/package-lock.json  generated
@@ -3599,12 +3599,13 @@
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/body-parser": {
|
"node_modules/body-parser": {
|
||||||
"version": "1.20.1",
|
"version": "1.20.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.2.tgz",
|
||||||
|
"integrity": "sha512-ml9pReCu3M61kGlqoTm2umSXTlRTuGTx0bfYj+uIUKKYycG5NtSbeetV3faSU6R7ajOPw0g/J1PvK4qNy7s5bA==",
|
||||||
"dev": true,
|
"dev": true,
|
||||||
"license": "MIT",
|
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"bytes": "3.1.2",
|
"bytes": "3.1.2",
|
||||||
"content-type": "~1.0.4",
|
"content-type": "~1.0.5",
|
||||||
"debug": "2.6.9",
|
"debug": "2.6.9",
|
||||||
"depd": "2.0.0",
|
"depd": "2.0.0",
|
||||||
"destroy": "1.2.0",
|
"destroy": "1.2.0",
|
||||||
@@ -3612,7 +3613,7 @@
|
|||||||
"iconv-lite": "0.4.24",
|
"iconv-lite": "0.4.24",
|
||||||
"on-finished": "2.4.1",
|
"on-finished": "2.4.1",
|
||||||
"qs": "6.11.0",
|
"qs": "6.11.0",
|
||||||
"raw-body": "2.5.1",
|
"raw-body": "2.5.2",
|
||||||
"type-is": "~1.6.18",
|
"type-is": "~1.6.18",
|
||||||
"unpipe": "1.0.0"
|
"unpipe": "1.0.0"
|
||||||
},
|
},
|
||||||
@@ -3623,16 +3624,18 @@
|
|||||||
},
|
},
|
||||||
"node_modules/body-parser/node_modules/debug": {
|
"node_modules/body-parser/node_modules/debug": {
|
||||||
"version": "2.6.9",
|
"version": "2.6.9",
|
||||||
|
"resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz",
|
||||||
|
"integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==",
|
||||||
"dev": true,
|
"dev": true,
|
||||||
"license": "MIT",
|
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"ms": "2.0.0"
|
"ms": "2.0.0"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/body-parser/node_modules/iconv-lite": {
|
"node_modules/body-parser/node_modules/iconv-lite": {
|
||||||
"version": "0.4.24",
|
"version": "0.4.24",
|
||||||
|
"resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz",
|
||||||
|
"integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==",
|
||||||
"dev": true,
|
"dev": true,
|
||||||
"license": "MIT",
|
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"safer-buffer": ">= 2.1.2 < 3"
|
"safer-buffer": ">= 2.1.2 < 3"
|
||||||
},
|
},
|
||||||
@@ -3642,8 +3645,9 @@
|
|||||||
},
|
},
|
||||||
"node_modules/body-parser/node_modules/ms": {
|
"node_modules/body-parser/node_modules/ms": {
|
||||||
"version": "2.0.0",
|
"version": "2.0.0",
|
||||||
"dev": true,
|
"resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz",
|
||||||
"license": "MIT"
|
"integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==",
|
||||||
|
"dev": true
|
||||||
},
|
},
|
||||||
"node_modules/bonjour-service": {
|
"node_modules/bonjour-service": {
|
||||||
"version": "1.1.1",
|
"version": "1.1.1",
|
||||||
@@ -3739,8 +3743,9 @@
|
|||||||
},
|
},
|
||||||
"node_modules/bytes": {
|
"node_modules/bytes": {
|
||||||
"version": "3.1.2",
|
"version": "3.1.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz",
|
||||||
|
"integrity": "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==",
|
||||||
"dev": true,
|
"dev": true,
|
||||||
"license": "MIT",
|
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": ">= 0.8"
|
"node": ">= 0.8"
|
||||||
}
|
}
|
||||||
@@ -4051,8 +4056,9 @@
|
|||||||
},
|
},
|
||||||
"node_modules/content-type": {
|
"node_modules/content-type": {
|
||||||
"version": "1.0.5",
|
"version": "1.0.5",
|
||||||
|
"resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.5.tgz",
|
||||||
|
"integrity": "sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA==",
|
||||||
"dev": true,
|
"dev": true,
|
||||||
"license": "MIT",
|
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": ">= 0.6"
|
"node": ">= 0.6"
|
||||||
}
|
}
|
||||||
@@ -4063,9 +4069,10 @@
|
|||||||
"license": "MIT"
|
"license": "MIT"
|
||||||
},
|
},
|
||||||
"node_modules/cookie": {
|
"node_modules/cookie": {
|
||||||
"version": "0.5.0",
|
"version": "0.6.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/cookie/-/cookie-0.6.0.tgz",
|
||||||
|
"integrity": "sha512-U71cyTamuh1CRNCfpGY6to28lxvNwPG4Guz/EVjgf3Jmzv0vlDp1atT9eS5dDjMYHucpHbWns6Lwf3BKz6svdw==",
|
||||||
"dev": true,
|
"dev": true,
|
||||||
"license": "MIT",
|
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": ">= 0.6"
|
"node": ">= 0.6"
|
||||||
}
|
}
|
||||||
@@ -5071,16 +5078,17 @@
|
|||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/express": {
|
"node_modules/express": {
|
||||||
"version": "4.18.2",
|
"version": "4.19.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/express/-/express-4.19.2.tgz",
|
||||||
|
"integrity": "sha512-5T6nhjsT+EOMzuck8JjBHARTHfMht0POzlA60WV2pMD3gyXw2LZnZ+ueGdNxG+0calOJcWKbpFcuzLZ91YWq9Q==",
|
||||||
"dev": true,
|
"dev": true,
|
||||||
"license": "MIT",
|
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"accepts": "~1.3.8",
|
"accepts": "~1.3.8",
|
||||||
"array-flatten": "1.1.1",
|
"array-flatten": "1.1.1",
|
||||||
"body-parser": "1.20.1",
|
"body-parser": "1.20.2",
|
||||||
"content-disposition": "0.5.4",
|
"content-disposition": "0.5.4",
|
||||||
"content-type": "~1.0.4",
|
"content-type": "~1.0.4",
|
||||||
"cookie": "0.5.0",
|
"cookie": "0.6.0",
|
||||||
"cookie-signature": "1.0.6",
|
"cookie-signature": "1.0.6",
|
||||||
"debug": "2.6.9",
|
"debug": "2.6.9",
|
||||||
"depd": "2.0.0",
|
"depd": "2.0.0",
|
||||||
@@ -7319,8 +7327,9 @@
|
|||||||
},
|
},
|
||||||
"node_modules/media-typer": {
|
"node_modules/media-typer": {
|
||||||
"version": "0.3.0",
|
"version": "0.3.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz",
|
||||||
|
"integrity": "sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ==",
|
||||||
"dev": true,
|
"dev": true,
|
||||||
"license": "MIT",
|
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": ">= 0.6"
|
"node": ">= 0.6"
|
||||||
}
|
}
|
||||||
@@ -8120,8 +8129,9 @@
|
|||||||
},
|
},
|
||||||
"node_modules/qs": {
|
"node_modules/qs": {
|
||||||
"version": "6.11.0",
|
"version": "6.11.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/qs/-/qs-6.11.0.tgz",
|
||||||
|
"integrity": "sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q==",
|
||||||
"dev": true,
|
"dev": true,
|
||||||
"license": "BSD-3-Clause",
|
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"side-channel": "^1.0.4"
|
"side-channel": "^1.0.4"
|
||||||
},
|
},
|
||||||
@@ -8173,9 +8183,10 @@
|
|||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/raw-body": {
|
"node_modules/raw-body": {
|
||||||
"version": "2.5.1",
|
"version": "2.5.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.5.2.tgz",
|
||||||
|
"integrity": "sha512-8zGqypfENjCIqGhgXToC8aB2r7YrBX+AQAfIPs/Mlk+BtPTztOvTS01NRW/3Eh60J+a48lt8qsCzirQ6loCVfA==",
|
||||||
"dev": true,
|
"dev": true,
|
||||||
"license": "MIT",
|
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"bytes": "3.1.2",
|
"bytes": "3.1.2",
|
||||||
"http-errors": "2.0.0",
|
"http-errors": "2.0.0",
|
||||||
@@ -8188,8 +8199,9 @@
|
|||||||
},
|
},
|
||||||
"node_modules/raw-body/node_modules/iconv-lite": {
|
"node_modules/raw-body/node_modules/iconv-lite": {
|
||||||
"version": "0.4.24",
|
"version": "0.4.24",
|
||||||
|
"resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz",
|
||||||
|
"integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==",
|
||||||
"dev": true,
|
"dev": true,
|
||||||
"license": "MIT",
|
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"safer-buffer": ">= 2.1.2 < 3"
|
"safer-buffer": ">= 2.1.2 < 3"
|
||||||
},
|
},
|
||||||
@@ -9468,8 +9480,9 @@
|
|||||||
},
|
},
|
||||||
"node_modules/type-is": {
|
"node_modules/type-is": {
|
||||||
"version": "1.6.18",
|
"version": "1.6.18",
|
||||||
|
"resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.18.tgz",
|
||||||
|
"integrity": "sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==",
|
||||||
"dev": true,
|
"dev": true,
|
||||||
"license": "MIT",
|
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"media-typer": "0.3.0",
|
"media-typer": "0.3.0",
|
||||||
"mime-types": "~2.1.24"
|
"mime-types": "~2.1.24"
|
||||||
@@ -9797,9 +9810,10 @@
|
|||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/webpack-dev-middleware": {
|
"node_modules/webpack-dev-middleware": {
|
||||||
"version": "5.3.3",
|
"version": "5.3.4",
|
||||||
|
"resolved": "https://registry.npmjs.org/webpack-dev-middleware/-/webpack-dev-middleware-5.3.4.tgz",
|
||||||
|
"integrity": "sha512-BVdTqhhs+0IfoeAf7EoH5WE+exCmqGerHfDM0IL096Px60Tq2Mn9MAbnaGUe6HiMa41KMCYF19gyzZmBcq/o4Q==",
|
||||||
"dev": true,
|
"dev": true,
|
||||||
"license": "MIT",
|
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"colorette": "^2.0.10",
|
"colorette": "^2.0.10",
|
||||||
"memfs": "^3.4.3",
|
"memfs": "^3.4.3",
|
||||||
|
247  jupyterhub/_xsrf_utils.py  Normal file
@@ -0,0 +1,247 @@
|
"""utilities for XSRF
|
||||||
|
|
||||||
|
Extends tornado's xsrf token checks with the following:
|
||||||
|
|
||||||
|
- only set xsrf cookie on navigation requests (cannot be fetched)
|
||||||
|
|
||||||
|
This utility file enables the consistent reuse of these functions
|
||||||
|
in both Hub and single-user code
|
||||||
|
"""
|
||||||
|
|
||||||
|
import base64
|
||||||
|
import hashlib
|
||||||
|
from http.cookies import SimpleCookie
|
||||||
|
|
||||||
|
from tornado import web
|
||||||
|
from tornado.log import app_log
|
||||||
|
|
||||||
|
|
||||||
|
def _get_signed_value_urlsafe(handler, name, b64_value):
|
||||||
|
"""Like get_signed_value (used in get_secure_cookie), but for urlsafe values
|
||||||
|
|
||||||
|
Decodes urlsafe_base64-encoded signed values
|
||||||
|
|
||||||
|
Returns None if any decoding failed
|
||||||
|
"""
|
||||||
|
if b64_value is None:
|
||||||
|
return None
|
||||||
|
|
||||||
|
if isinstance(b64_value, str):
|
||||||
|
try:
|
||||||
|
b64_value = b64_value.encode("ascii")
|
||||||
|
except UnicodeEncodeError:
|
||||||
|
app_log.warning("Invalid value %r", b64_value)
|
||||||
|
return None
|
||||||
|
# re-pad, since we stripped padding in _create_signed_value
|
||||||
|
remainder = len(b64_value) % 4
|
||||||
|
if remainder:
|
||||||
|
b64_value = b64_value + (b'=' * (4 - remainder))
|
||||||
|
try:
|
||||||
|
value = base64.urlsafe_b64decode(b64_value)
|
||||||
|
except ValueError:
|
||||||
|
app_log.warning("Invalid base64 value %r", b64_value)
|
||||||
|
return None
|
||||||
|
|
||||||
|
return web.decode_signed_value(
|
||||||
|
handler.application.settings["cookie_secret"],
|
||||||
|
name,
|
||||||
|
value,
|
||||||
|
max_age_days=31,
|
||||||
|
min_version=2,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def _create_signed_value_urlsafe(handler, name, value):
|
||||||
|
"""Like tornado's create_signed_value (used in set_secure_cookie), but returns urlsafe bytes"""
|
||||||
|
|
||||||
|
signed_value = handler.create_signed_value(name, value)
|
||||||
|
return base64.urlsafe_b64encode(signed_value).rstrip(b"=")
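The strip/re-pad handling of urlsafe base64 above is the part most easily gotten wrong; a tiny standalone sketch of the same round trip (names are illustrative, independent of tornado):

```python
import base64

raw = b"example-signed-value"
# encode and strip padding, as _create_signed_value_urlsafe does
token = base64.urlsafe_b64encode(raw).rstrip(b"=")

# re-pad before decoding, as _get_signed_value_urlsafe does
remainder = len(token) % 4
if remainder:
    token = token + b"=" * (4 - remainder)
assert base64.urlsafe_b64decode(token) == raw
```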
|
||||||
|
|
||||||
|
|
||||||
|
def _get_xsrf_token_cookie(handler):
|
||||||
|
"""
|
||||||
|
Get the _valid_ XSRF token and id from Cookie
|
||||||
|
|
||||||
|
Returns (xsrf_token, xsrf_id) found in Cookies header.
|
||||||
|
|
||||||
|
multiple xsrf cookies may be set on multiple paths;
|
||||||
|
|
||||||
|
RFC 6265 states that they should be in order of more specific path to less,
|
||||||
|
but ALSO states that servers should never rely on order.
|
||||||
|
|
||||||
|
Tornado (6.4) and stdlib (3.12) SimpleCookie explicitly use the _last_ value,
|
||||||
|
which means the cookie with the _least_ specific prefix will be used if more than one is present.
|
||||||
|
|
||||||
|
Because we sign values, we can get the first valid cookie and not worry about order too much.
|
||||||
|
|
||||||
|
This is simplified from tornado's HTTPRequest.cookies property
|
||||||
|
only looking for a single cookie.
|
||||||
|
"""
|
||||||
|
|
||||||
|
if "Cookie" not in handler.request.headers:
|
||||||
|
return (None, None)
|
||||||
|
|
||||||
|
for chunk in handler.request.headers["Cookie"].split(";"):
|
||||||
|
key = chunk.partition("=")[0].strip()
|
||||||
|
if key != "_xsrf":
|
||||||
|
# we are only looking for the _xsrf cookie
|
||||||
|
# ignore everything else
|
||||||
|
continue
|
||||||
|
|
||||||
|
# use stdlib parsing to handle quotes, validation, etc.
|
||||||
|
try:
|
||||||
|
xsrf_token = SimpleCookie(chunk)[key].value.encode("ascii")
|
||||||
|
except (ValueError, KeyError):
|
||||||
|
continue
|
||||||
|
|
||||||
|
xsrf_token_id = _get_signed_value_urlsafe(handler, "_xsrf", xsrf_token)
|
||||||
|
|
||||||
|
if xsrf_token_id:
|
||||||
|
# only return if we found a _valid_ xsrf cookie
|
||||||
|
# otherwise, keep looking
|
||||||
|
return (xsrf_token, xsrf_token_id)
|
||||||
|
# no valid token found
|
||||||
|
return (None, None)
|
||||||
|
|
||||||
|
|
||||||
|
def _set_xsrf_cookie(handler, xsrf_id, *, cookie_path="", authenticated=None):
|
||||||
|
"""Set xsrf token cookie"""
|
||||||
|
xsrf_token = _create_signed_value_urlsafe(handler, "_xsrf", xsrf_id)
|
||||||
|
xsrf_cookie_kwargs = {}
|
||||||
|
xsrf_cookie_kwargs.update(handler.settings.get('xsrf_cookie_kwargs', {}))
|
||||||
|
xsrf_cookie_kwargs.setdefault("path", cookie_path)
|
||||||
|
if authenticated is None:
|
||||||
|
try:
|
||||||
|
current_user = handler.current_user
|
||||||
|
except Exception:
|
||||||
|
authenticated = False
|
||||||
|
else:
|
||||||
|
authenticated = bool(current_user)
|
||||||
|
if not authenticated:
|
||||||
|
# limit anonymous xsrf cookies to one hour
|
||||||
|
xsrf_cookie_kwargs.pop("expires", None)
|
||||||
|
xsrf_cookie_kwargs.pop("expires_days", None)
|
||||||
|
xsrf_cookie_kwargs["max_age"] = 3600
|
||||||
|
app_log.info(
|
||||||
|
"Setting new xsrf cookie for %r %r",
|
||||||
|
xsrf_id,
|
||||||
|
xsrf_cookie_kwargs,
|
||||||
|
)
|
||||||
|
handler.set_cookie("_xsrf", xsrf_token, **xsrf_cookie_kwargs)
|
||||||
|
|
||||||
|
|
||||||
|
def get_xsrf_token(handler, cookie_path=""):
|
||||||
|
"""Override tornado's xsrf token to add further restrictions
|
||||||
|
|
||||||
|
- only set cookie for regular pages (not API requests)
|
||||||
|
- include login info in xsrf token
|
||||||
|
- verify signature
|
||||||
|
"""
|
||||||
|
# original: https://github.com/tornadoweb/tornado/blob/v6.4.0/tornado/web.py#L1455
|
||||||
|
if hasattr(handler, "_xsrf_token"):
|
||||||
|
return handler._xsrf_token
|
||||||
|
|
||||||
|
_set_cookie = False
|
||||||
|
# the raw cookie is the token
|
||||||
|
xsrf_token, xsrf_id_cookie = _get_xsrf_token_cookie(handler)
|
||||||
|
cookie_token = xsrf_token
|
||||||
|
|
||||||
|
# check the decoded, signed value for validity
|
||||||
|
xsrf_id = handler._xsrf_token_id
|
||||||
|
if xsrf_id_cookie != xsrf_id:
|
||||||
|
# this will usually happen on the first page request after login,
|
||||||
|
# which changes the inputs to the token id
|
||||||
|
if xsrf_id_cookie:
|
||||||
|
app_log.debug("xsrf id mismatch %r != %r", xsrf_id_cookie, xsrf_id)
|
||||||
|
# generate new value
|
||||||
|
xsrf_token = _create_signed_value_urlsafe(handler, "_xsrf", xsrf_id)
|
||||||
|
# only set cookie on regular navigation pages
|
||||||
|
# i.e. not API requests, etc.
|
||||||
|
# insecure URLs (public hostname/ip, no https)
|
||||||
|
# don't set Sec-Fetch-Mode.
|
||||||
|
# consequence of assuming 'navigate': setting a cookie unnecessarily
|
||||||
|
# consequence of assuming not 'navigate': xsrf never set, nothing works
|
||||||
|
_set_cookie = (
|
||||||
|
handler.request.headers.get("Sec-Fetch-Mode", "navigate") == "navigate"
|
||||||
|
)
|
||||||
|
if xsrf_id_cookie and not _set_cookie:
|
||||||
|
# if we aren't setting a cookie here but we got one,
|
||||||
|
# this means things probably aren't going to work
|
||||||
|
app_log.warning(
|
||||||
|
"Not accepting incorrect xsrf token id in cookie on %s",
|
||||||
|
handler.request.path,
|
||||||
|
)
|
||||||
|
|
||||||
|
if _set_cookie:
|
||||||
|
_set_xsrf_cookie(handler, xsrf_id, cookie_path=cookie_path)
|
||||||
|
handler._xsrf_token = xsrf_token
|
||||||
|
return xsrf_token
|
||||||
|
|
||||||
|
|
||||||
|
def _needs_check_xsrf(handler):
|
||||||
|
"""Does the given cookie-authenticated request need to check xsrf?"""
|
||||||
|
|
||||||
|
if getattr(handler, "_token_authenticated", False):
|
||||||
|
return False
|
||||||
|
|
||||||
|
fetch_mode = handler.request.headers.get("Sec-Fetch-Mode", "unspecified")
|
||||||
|
if fetch_mode in {"websocket", "no-cors"} or (
|
||||||
|
fetch_mode in {"navigate", "unspecified"}
|
||||||
|
and handler.request.method.lower() in {"get", "head", "options"}
|
||||||
|
):
|
||||||
|
# no xsrf check needed for regular page views or no-cors
|
||||||
|
# or websockets after allow_websocket_cookie_auth passes
|
||||||
|
if fetch_mode == "unspecified":
|
||||||
|
app_log.warning(
|
||||||
|
f"Skipping XSRF check for insecure request {handler.request.method} {handler.request.path}"
|
||||||
|
)
|
||||||
|
return False
|
||||||
|
else:
|
||||||
|
return True
|
||||||
|
|
||||||
|
|
||||||
|
def check_xsrf_cookie(handler):
|
||||||
|
"""Check that xsrf cookie matches xsrf token in request"""
|
||||||
|
# overrides tornado's implementation
|
||||||
|
# because we changed what a correct value should be in xsrf_token
|
||||||
|
if not _needs_check_xsrf(handler):
|
||||||
|
# don't require XSRF for regular page views
|
||||||
|
return
|
||||||
|
|
||||||
|
token = (
|
||||||
|
handler.get_argument("_xsrf", None)
|
||||||
|
or handler.request.headers.get("X-Xsrftoken")
|
||||||
|
or handler.request.headers.get("X-Csrftoken")
|
||||||
|
)
|
||||||
|
|
||||||
|
if not token:
|
||||||
|
raise web.HTTPError(
|
||||||
|
403, f"'_xsrf' argument missing from {handler.request.method}"
|
||||||
|
)
|
||||||
|
|
||||||
|
try:
|
||||||
|
token = token.encode("utf8")
|
||||||
|
except UnicodeEncodeError:
|
||||||
|
raise web.HTTPError(403, "'_xsrf' argument invalid")
|
||||||
|
|
||||||
|
if token != handler.xsrf_token:
|
||||||
|
raise web.HTTPError(
|
||||||
|
403, f"XSRF cookie does not match {handler.request.method.upper()} argument"
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def _anonymous_xsrf_id(handler):
|
||||||
|
"""Generate an appropriate xsrf token id for an anonymous request
|
||||||
|
|
||||||
|
Currently uses hash of request ip and user-agent
|
||||||
|
|
||||||
|
These are typically used only for the initial login page,
|
||||||
|
so only need to be valid for a few seconds to a few minutes
|
||||||
|
(enough to submit a login form with MFA).
|
||||||
|
"""
|
||||||
|
hasher = hashlib.sha256()
|
||||||
|
hasher.update(handler.request.remote_ip.encode("ascii"))
|
||||||
|
hasher.update(
|
||||||
|
handler.request.headers.get("User-Agent", "").encode("utf8", "replace")
|
||||||
|
)
|
||||||
|
return base64.urlsafe_b64encode(hasher.digest()).decode("ascii")
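From a client's perspective, the effect of these checks is that any state-changing request must send the value of the `_xsrf` cookie back, either as an `X-XSRFToken` header or an `_xsrf` argument. A hedged sketch with the `requests` library (URL and credentials are placeholders, assuming a locally running Hub with form-based login):

```python
import requests

session = requests.Session()
hub = "http://127.0.0.1:8000"  # placeholder public Hub URL

# a navigation-style GET lets the server set the _xsrf cookie
session.get(f"{hub}/hub/login", headers={"Sec-Fetch-Mode": "navigate"})
xsrf = session.cookies.get("_xsrf")

# non-GET requests must echo the token back as a header or an _xsrf argument
resp = session.post(
    f"{hub}/hub/login",
    headers={"X-XSRFToken": xsrf},
    data={"username": "test", "password": "test", "_xsrf": xsrf},
)
print(resp.status_code)
```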
|
@@ -76,15 +76,8 @@ class APIHandler(BaseHandler):
|
|||||||
|
|
||||||
return True
|
return True
|
||||||
|
|
||||||
async def prepare(self):
|
# we also check xsrf on GETs to API endpoints
|
||||||
await super().prepare()
|
_xsrf_safe_methods = {"HEAD", "OPTIONS"}
|
||||||
# tornado only checks xsrf on non-GET
|
|
||||||
# we also check xsrf on GETs to API endpoints
|
|
||||||
# make sure this runs after auth, which happens in super().prepare()
|
|
||||||
if self.request.method not in {"HEAD", "OPTIONS"} and self.settings.get(
|
|
||||||
"xsrf_cookies"
|
|
||||||
):
|
|
||||||
self.check_xsrf_cookie()
|
|
||||||
|
|
||||||
def check_xsrf_cookie(self):
|
def check_xsrf_cookie(self):
|
||||||
if not hasattr(self, '_jupyterhub_user'):
|
if not hasattr(self, '_jupyterhub_user'):
|
||||||
|
@@ -402,6 +402,25 @@ class JupyterHub(Application):
|
|||||||
Useful for daemonizing JupyterHub.
|
Useful for daemonizing JupyterHub.
|
||||||
""",
|
""",
|
||||||
).tag(config=True)
|
).tag(config=True)
|
||||||
|
|
||||||
|
cookie_host_prefix_enabled = Bool(
|
||||||
|
False,
|
||||||
|
help="""Enable `__Host-` prefix on authentication cookies.
|
||||||
|
|
||||||
|
The `__Host-` prefix on JupyterHub cookies provides further
|
||||||
|
protection against cookie tossing when untrusted servers
|
||||||
|
may control subdomains of your jupyterhub deployment.
|
||||||
|
|
||||||
|
_However_, it also requires that cookies be set on the path `/`,
|
||||||
|
which means they are shared by all JupyterHub components,
|
||||||
|
so a compromised server component will have access to _all_ JupyterHub-related
|
||||||
|
cookies of the visiting browser.
|
||||||
|
It is recommended to only combine `__Host-` cookies with per-user domains.
|
||||||
|
|
||||||
|
.. versionadded:: 4.1
|
||||||
|
""",
|
||||||
|
).tag(config=True)
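A hedged configuration sketch combining this option with per-user subdomains, as the help text recommends (the hostname is a placeholder):

```python
# jupyterhub_config.py, sketch only; the domain below is a placeholder
c.JupyterHub.subdomain_host = "https://hub.example.org"  # per-user subdomains
c.JupyterHub.cookie_host_prefix_enabled = True           # cookies become __Host-...
```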
|
||||||
|
|
||||||
cookie_max_age_days = Float(
|
cookie_max_age_days = Float(
|
||||||
14,
|
14,
|
||||||
help="""Number of days for a login cookie to be valid.
|
help="""Number of days for a login cookie to be valid.
|
||||||
@@ -2034,6 +2053,8 @@ class JupyterHub(Application):
|
|||||||
hub_args['port'] = self.hub_port
|
hub_args['port'] = self.hub_port
|
||||||
|
|
||||||
self.hub = Hub(**hub_args)
|
self.hub = Hub(**hub_args)
|
||||||
|
if self.cookie_host_prefix_enabled:
|
||||||
|
self.hub.cookie_name = "__Host-" + self.hub.cookie_name
|
||||||
|
|
||||||
if not self.subdomain_host:
|
if not self.subdomain_host:
|
||||||
api_prefix = url_path_join(self.hub.base_url, "api/")
|
api_prefix = url_path_join(self.hub.base_url, "api/")
|
||||||
@@ -2077,6 +2098,9 @@ class JupyterHub(Application):
|
|||||||
"auth_state is enabled, but encryption is not available: %s" % e
|
"auth_state is enabled, but encryption is not available: %s" % e
|
||||||
)
|
)
|
||||||
|
|
||||||
|
# give the authenticator a chance to check its own config
|
||||||
|
self.authenticator.check_allow_config()
|
||||||
|
|
||||||
if self.admin_users and not self.authenticator.admin_users:
|
if self.admin_users and not self.authenticator.admin_users:
|
||||||
self.log.warning(
|
self.log.warning(
|
||||||
"\nJupyterHub.admin_users is deprecated since version 0.7.2."
|
"\nJupyterHub.admin_users is deprecated since version 0.7.2."
|
||||||
@@ -2104,9 +2128,9 @@ class JupyterHub(Application):
|
|||||||
new_users.append(user)
|
new_users.append(user)
|
||||||
else:
|
else:
|
||||||
user.admin = True
|
user.admin = True
|
||||||
|
|
||||||
# the admin_users config variable will never be used after this point.
|
# the admin_users config variable will never be used after this point.
|
||||||
# only the database values will be referenced.
|
# only the database values will be referenced.
|
||||||
|
|
||||||
allowed_users = [
|
allowed_users = [
|
||||||
self.authenticator.normalize_username(name)
|
self.authenticator.normalize_username(name)
|
||||||
for name in self.authenticator.allowed_users
|
for name in self.authenticator.allowed_users
|
||||||
@@ -2116,10 +2140,10 @@ class JupyterHub(Application):
|
|||||||
if not self.authenticator.validate_username(username):
|
if not self.authenticator.validate_username(username):
|
||||||
raise ValueError("username %r is not valid" % username)
|
raise ValueError("username %r is not valid" % username)
|
||||||
|
|
||||||
if not allowed_users:
|
if self.authenticator.allowed_users and self.authenticator.admin_users:
|
||||||
self.log.info(
|
# make sure admin users are in the allowed_users set, if defined,
|
||||||
"Not using allowed_users. Any authenticated user will be allowed."
|
# otherwise they won't be able to login
|
||||||
)
|
self.authenticator.allowed_users |= self.authenticator.admin_users
|
||||||
|
|
||||||
# add allowed users to the db
|
# add allowed users to the db
|
||||||
for name in allowed_users:
|
for name in allowed_users:
|
||||||
@@ -3161,6 +3185,7 @@ class JupyterHub(Application):
|
|||||||
default_url=self.default_url,
|
default_url=self.default_url,
|
||||||
public_url=urlparse(self.public_url) if self.public_url else "",
|
public_url=urlparse(self.public_url) if self.public_url else "",
|
||||||
cookie_secret=self.cookie_secret,
|
cookie_secret=self.cookie_secret,
|
||||||
|
cookie_host_prefix_enabled=self.cookie_host_prefix_enabled,
|
||||||
cookie_max_age_days=self.cookie_max_age_days,
|
cookie_max_age_days=self.cookie_max_age_days,
|
||||||
redirect_to_server=self.redirect_to_server,
|
redirect_to_server=self.redirect_to_server,
|
||||||
login_url=login_url,
|
login_url=login_url,
|
||||||
|
@@ -121,6 +121,55 @@ class Authenticator(LoggingConfigurable):
|
|||||||
"""
|
"""
|
||||||
).tag(config=True)
|
).tag(config=True)
|
||||||
|
|
||||||
|
any_allow_config = Bool(
|
||||||
|
False,
|
||||||
|
help="""Is there any allow config?
|
||||||
|
|
||||||
|
Used to show a warning if it looks like nobody can access the Hub,
|
||||||
|
which can happen when upgrading to JupyterHub 5,
|
||||||
|
now that `allow_all` defaults to False.
|
||||||
|
|
||||||
|
Deployments can set this explicitly to True to suppress
|
||||||
|
the "No allow config found" warning.
|
||||||
|
|
||||||
|
Will be True if any config tagged with `.tag(allow_config=True)`
|
||||||
|
or starts with `allow` is truthy.
|
||||||
|
|
||||||
|
.. versionadded:: 5.0
|
||||||
|
""",
|
||||||
|
).tag(config=True)
|
||||||
|
|
||||||
|
@default("any_allow_config")
|
||||||
|
def _default_any_allowed(self):
|
||||||
|
for trait_name, trait in self.traits(config=True).items():
|
||||||
|
if trait.metadata.get("allow_config", False) or trait_name.startswith(
|
||||||
|
"allow"
|
||||||
|
):
|
||||||
|
# this is only used for a helpful warning, so not the biggest deal if it's imperfect
|
||||||
|
if getattr(self, trait_name):
|
||||||
|
return True
|
||||||
|
return False
|
||||||
|
|
||||||
|
def check_allow_config(self):
|
||||||
|
"""Log a warning if no allow config can be found.
|
||||||
|
|
||||||
|
Could get a false positive if _only_ unrecognized allow config is used.
|
||||||
|
Authenticators can apply `.tag(allow_config=True)` to label this config
|
||||||
|
to make sure it is found.
|
||||||
|
|
||||||
|
Subclasses can override to perform additional checks and warn about likely
|
||||||
|
authenticator configuration problems.
|
||||||
|
|
||||||
|
.. versionadded:: 5.0
|
||||||
|
"""
|
||||||
|
if not self.any_allow_config:
|
||||||
|
self.log.warning(
|
||||||
|
"No allow config found, it's possible that nobody can login to your Hub!\n"
|
||||||
|
"You can set `c.Authenticator.allow_all = True` to allow any user who can login to access the Hub,\n"
|
||||||
|
"or e.g. `allowed_users` to a set of users who should have access.\n"
|
||||||
|
"You may suppress this warning by setting c.Authenticator.any_allow_config = True."
|
||||||
|
)
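For custom authenticators, a minimal sketch of how a non-standard allow option can take part in this check (the class, trait, and helper below are hypothetical):

```python
from jupyterhub.auth import Authenticator
from traitlets import Set


class MyTeamAuthenticator(Authenticator):
    """Illustrative subclass only, not a real authenticator."""

    # hypothetical allow option; the allow_config tag lets
    # check_allow_config() count it as "some allow config found"
    allowed_teams = Set(
        help="Team names whose members may log in (illustrative only)",
    ).tag(config=True, allow_config=True)

    def user_in_allowed_team(self, username):
        # membership lookup is left abstract in this sketch
        return False

    def check_allowed(self, username, authentication=None):
        if super().check_allowed(username, authentication):
            return True
        return self.user_in_allowed_team(username)
```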
|
||||||
|
|
||||||
whitelist = Set(
|
whitelist = Set(
|
||||||
help="Deprecated, use `Authenticator.allowed_users`",
|
help="Deprecated, use `Authenticator.allowed_users`",
|
||||||
config=True,
|
config=True,
|
||||||
@@ -144,6 +193,83 @@ class Authenticator(LoggingConfigurable):
|
|||||||
"""
|
"""
|
||||||
).tag(config=True)
|
).tag(config=True)
|
||||||
|
|
||||||
|
allow_all = Bool(
|
||||||
|
False,
|
||||||
|
config=True,
|
||||||
|
help="""
|
||||||
|
Allow every user who can successfully authenticate to access JupyterHub.
|
||||||
|
|
||||||
|
False by default, which means for most Authenticators,
|
||||||
|
_some_ allow-related configuration is required to allow users to log in.
|
||||||
|
|
||||||
|
Authenticator subclasses may override the default with e.g.::
|
||||||
|
|
||||||
|
@default("allow_all")
|
||||||
|
def _default_allow_all(self):
|
||||||
|
# if _any_ auth config (depends on the Authenticator)
|
||||||
|
if self.allowed_users or self.allowed_groups or self.allow_existing_users:
|
||||||
|
return False
|
||||||
|
else:
|
||||||
|
return True
|
||||||
|
|
||||||
|
.. versionadded:: 5.0
|
||||||
|
|
||||||
|
.. versionchanged:: 5.0
|
||||||
|
Prior to 5.0, `allow_all` wasn't defined on its own,
|
||||||
|
and was instead implicitly True when no allow config was provided,
|
||||||
|
i.e. `allowed_users` unspecified or empty on the base Authenticator class.
|
||||||
|
|
||||||
|
To preserve pre-5.0 behavior,
|
||||||
|
set `allow_all = True` if you have no other allow configuration.
|
||||||
|
""",
|
||||||
|
).tag(allow_config=True)
|
||||||
|
|
||||||
|
allow_existing_users = Bool(
|
||||||
|
# dynamic default computed from allowed_users
|
||||||
|
config=True,
|
||||||
|
help="""
|
||||||
|
Allow existing users to login.
|
||||||
|
|
||||||
|
Defaults to True if `allowed_users` is set for historical reasons, and
|
||||||
|
False otherwise.
|
||||||
|
|
||||||
|
With this enabled, all users present in the JupyterHub database are allowed to login.
|
||||||
|
This has the effect of any user who has _previously_ been allowed to login
|
||||||
|
via any means will continue to be allowed until the user is deleted via the /hub/admin page
|
||||||
|
or REST API.
|
||||||
|
|
||||||
|
.. warning::
|
||||||
|
|
||||||
|
Before enabling this you should review the existing users in the
|
||||||
|
JupyterHub admin panel at `/hub/admin`. You may find users existing
|
||||||
|
there because they have previously been declared in config such as
|
||||||
|
`allowed_users` or allowed to sign in.
|
||||||
|
|
||||||
|
.. warning::
|
||||||
|
|
||||||
|
When this is enabled and you wish to remove access for one or more
|
||||||
|
users previously allowed, you must make sure that they
|
||||||
|
are removed from the jupyterhub database. This can be tricky to do
|
||||||
|
if you stop allowing an externally managed group of users for example.
|
||||||
|
|
||||||
|
With this enabled, JupyterHub admin users can visit `/hub/admin` or use
|
||||||
|
JupyterHub's REST API to add and remove users to manage who can login.
|
||||||
|
|
||||||
|
.. versionadded:: 5.0
|
||||||
|
""",
|
||||||
|
).tag(allow_config=True)
|
||||||
|
|
||||||
|
@default("allow_existing_users")
|
||||||
|
def _allow_existing_users_default(self):
|
||||||
|
"""
|
||||||
|
Computes the default value of allow_existing_users based on whether
allowed_users is set, to align with the original behavior and not introduce
a breaking change.
|
||||||
|
"""
|
||||||
|
if self.allowed_users:
|
||||||
|
return True
|
||||||
|
return False
|
||||||
|
|
||||||
blocked_users = Set(
|
blocked_users = Set(
|
||||||
help="""
|
help="""
|
||||||
Set of usernames that are not allowed to log in.
|
Set of usernames that are not allowed to log in.
|
||||||
@@ -472,13 +598,12 @@ class Authenticator(LoggingConfigurable):
|
|||||||
web.HTTPError(403):
|
web.HTTPError(403):
|
||||||
Raising HTTPErrors directly allows customizing the message shown to the user.
|
Raising HTTPErrors directly allows customizing the message shown to the user.
|
||||||
"""
|
"""
|
||||||
if not self.allowed_users:
|
if self.allow_all:
|
||||||
# No allowed set means any name is allowed
|
|
||||||
return True
|
return True
|
||||||
return username in self.allowed_users
|
return username in self.allowed_users
|
||||||
|
|
||||||
def check_blocked_users(self, username, authentication=None):
|
def check_blocked_users(self, username, authentication=None):
|
||||||
"""Check if a username is blocked to authenticate based on Authenticator.blocked configuration
|
"""Check if a username is blocked to authenticate based on Authenticator.blocked_users configuration
|
||||||
|
|
||||||
Return True if username is allowed, False otherwise.
|
Return True if username is allowed, False otherwise.
|
||||||
No block list means any username is allowed.
|
No block list means any username is allowed.
|
||||||
@@ -525,8 +650,9 @@ class Authenticator(LoggingConfigurable):
|
|||||||
The various stages can be overridden separately:
|
The various stages can be overridden separately:
|
||||||
- `authenticate` turns formdata into a username
|
- `authenticate` turns formdata into a username
|
||||||
- `normalize_username` normalizes the username
|
- `normalize_username` normalizes the username
|
||||||
- `check_allowed` checks against the allowed usernames
|
|
||||||
- `check_blocked_users` check against the blocked usernames
|
- `check_blocked_users` check against the blocked usernames
|
||||||
|
- `allow_all` is checked
|
||||||
|
- `check_allowed` checks against the allowed usernames
|
||||||
- `is_admin` check if a user is an admin
|
- `is_admin` check if a user is an admin
|
||||||
|
|
||||||
.. versionchanged:: 0.8
|
.. versionchanged:: 0.8
|
||||||
@@ -560,7 +686,11 @@ class Authenticator(LoggingConfigurable):
|
|||||||
self.log.warning("User %r blocked. Stop authentication", username)
|
self.log.warning("User %r blocked. Stop authentication", username)
|
||||||
return
|
return
|
||||||
|
|
||||||
allowed_pass = await maybe_future(self.check_allowed(username, authenticated))
|
allowed_pass = self.allow_all
|
||||||
|
if not allowed_pass:
|
||||||
|
allowed_pass = await maybe_future(
|
||||||
|
self.check_allowed(username, authenticated)
|
||||||
|
)
|
||||||
|
|
||||||
if allowed_pass:
|
if allowed_pass:
|
||||||
if authenticated['admin'] is None:
|
if authenticated['admin'] is None:
|
||||||
@@ -697,25 +827,31 @@ class Authenticator(LoggingConfigurable):
|
|||||||
"""Hook called when a user is added to JupyterHub
|
"""Hook called when a user is added to JupyterHub
|
||||||
|
|
||||||
This is called:
|
This is called:
|
||||||
- When a user first authenticates
|
- When a user first authenticates, _after_ all allow and block checks have passed
|
||||||
- When the hub restarts, for all users.
|
- When the hub restarts, for all users in the database (i.e. users previously allowed)
|
||||||
|
- When a user is added to the database, either via configuration or REST API
|
||||||
|
|
||||||
This method may be a coroutine.
|
This method may be a coroutine.
|
||||||
|
|
||||||
By default, this just adds the user to the allowed_users set.
|
By default, this adds the user to the allowed_users set if
|
||||||
|
allow_existing_users is true.
|
||||||
|
|
||||||
Subclasses may do more extensive things, such as adding actual unix users,
|
Subclasses may do more extensive things, such as creating actual system users,
|
||||||
but they should call super to ensure the allowed_users set is updated.
|
but they should call super to ensure the allowed_users set is updated.
|
||||||
|
|
||||||
Note that this should be idempotent, since it is called whenever the hub restarts
|
Note that this should be idempotent, since it is called whenever the hub restarts
|
||||||
for all users.
|
for all users.
|
||||||
|
|
||||||
|
.. versionchanged:: 5.0
|
||||||
|
Now adds users to the allowed_users set if allow_all is False and allow_existing_users is True,
|
||||||
|
instead of if allowed_users is not empty.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
user (User): The User wrapper object
|
user (User): The User wrapper object
|
||||||
"""
|
"""
|
||||||
if not self.validate_username(user.name):
|
if not self.validate_username(user.name):
|
||||||
raise ValueError("Invalid username: %s" % user.name)
|
raise ValueError("Invalid username: %s" % user.name)
|
||||||
if self.allowed_users:
|
if self.allow_existing_users and not self.allow_all:
|
||||||
self.allowed_users.add(user.name)
|
self.allowed_users.add(user.name)
|
||||||
|
|
||||||
def delete_user(self, user):
|
def delete_user(self, user):
|
||||||
@@ -962,23 +1098,16 @@ class LocalAuthenticator(Authenticator):
|
|||||||
help="""
|
help="""
|
||||||
Allow login from all users in these UNIX groups.
|
Allow login from all users in these UNIX groups.
|
||||||
|
|
||||||
If set, allowed username set is ignored.
|
.. versionchanged:: 5.0
|
||||||
|
`allowed_groups` may be specified together with allowed_users,
|
||||||
|
to grant access by group OR name.
|
||||||
"""
|
"""
|
||||||
).tag(config=True)
|
).tag(config=True, allow_config=True)
|
||||||
|
|
||||||
@observe('allowed_groups')
|
|
||||||
def _allowed_groups_changed(self, change):
|
|
||||||
"""Log a warning if mutually exclusive user and group allowed sets are specified."""
|
|
||||||
if self.allowed_users:
|
|
||||||
self.log.warning(
|
|
||||||
"Ignoring Authenticator.allowed_users set because Authenticator.allowed_groups supplied!"
|
|
||||||
)
|
|
||||||
|
|
||||||
def check_allowed(self, username, authentication=None):
|
def check_allowed(self, username, authentication=None):
|
||||||
if self.allowed_groups:
|
if self.check_allowed_groups(username, authentication):
|
||||||
return self.check_allowed_groups(username, authentication)
|
return True
|
||||||
else:
|
return super().check_allowed(username, authentication)
|
||||||
return super().check_allowed(username, authentication)
|
|
||||||
|
|
||||||
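A short configuration sketch of the combined behavior this enables in 5.0 (group and user names below are placeholders):

```python
# sketch: allow members of the "jupyter" UNIX group OR the named users
c.LocalAuthenticator.allowed_groups = {"jupyter"}
c.LocalAuthenticator.allowed_users = {"alice", "bob"}
# with JupyterHub >= 5.0 both sets grant access (group OR name),
# instead of allowed_groups overriding allowed_users
```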
def check_allowed_groups(self, username, authentication=None):
|
def check_allowed_groups(self, username, authentication=None):
|
||||||
"""
|
"""
|
||||||
@@ -1308,8 +1437,20 @@ class DummyAuthenticator(Authenticator):
|
|||||||
if it logs in with that password.
|
if it logs in with that password.
|
||||||
|
|
||||||
.. versionadded:: 1.0
|
.. versionadded:: 1.0
|
||||||
|
|
||||||
|
.. versionadded:: 5.0
|
||||||
|
`allow_all` defaults to True,
|
||||||
|
preserving default behavior.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
@default("allow_all")
|
||||||
|
def _allow_all_default(self):
|
||||||
|
if self.allowed_users:
|
||||||
|
return False
|
||||||
|
else:
|
||||||
|
# allow all by default
|
||||||
|
return True
|
||||||
|
|
||||||
password = Unicode(
|
password = Unicode(
|
||||||
config=True,
|
config=True,
|
||||||
help="""
|
help="""
|
||||||
@@ -1319,6 +1460,12 @@ class DummyAuthenticator(Authenticator):
|
|||||||
""",
|
""",
|
||||||
)
|
)
|
||||||
|
|
||||||
|
def check_allow_config(self):
|
||||||
|
super().check_allow_config()
|
||||||
|
self.log.warning(
|
||||||
|
f"Using testing authenticator {self.__class__.__name__}! This is not meant for production!"
|
||||||
|
)
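For local testing, a typical non-production configuration sketch using this authenticator:

```python
# jupyterhub_config.py, testing only; never use in production
c.JupyterHub.authenticator_class = "dummy"
# any username is accepted; if a password is set, that password is required
c.DummyAuthenticator.password = "a-shared-test-password"
```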
|
||||||
|
|
||||||
async def authenticate(self, handler, data):
|
async def authenticate(self, handler, data):
|
||||||
"""Checks against a global password if it's been set. If not, allow any user/pass combo"""
|
"""Checks against a global password if it's been set. If not, allow any user/pass combo"""
|
||||||
if self.password:
|
if self.password:
|
||||||
|
@@ -24,6 +24,12 @@ from tornado.log import app_log
|
|||||||
from tornado.web import RequestHandler, addslash
|
from tornado.web import RequestHandler, addslash
|
||||||
|
|
||||||
from .. import __version__, orm, roles, scopes
|
from .. import __version__, orm, roles, scopes
|
||||||
|
from .._xsrf_utils import (
|
||||||
|
_anonymous_xsrf_id,
|
||||||
|
_set_xsrf_cookie,
|
||||||
|
check_xsrf_cookie,
|
||||||
|
get_xsrf_token,
|
||||||
|
)
|
||||||
from ..metrics import (
|
from ..metrics import (
|
||||||
PROXY_ADD_DURATION_SECONDS,
|
PROXY_ADD_DURATION_SECONDS,
|
||||||
PROXY_DELETE_DURATION_SECONDS,
|
PROXY_DELETE_DURATION_SECONDS,
|
||||||
@@ -38,7 +44,6 @@ from ..metrics import (
|
|||||||
ServerStopStatus,
|
ServerStopStatus,
|
||||||
)
|
)
|
||||||
from ..objects import Server
|
from ..objects import Server
|
||||||
from ..scopes import needs_scope
|
|
||||||
from ..spawner import LocalProcessSpawner
|
from ..spawner import LocalProcessSpawner
|
||||||
from ..user import User
|
from ..user import User
|
||||||
from ..utils import (
|
from ..utils import (
|
||||||
@@ -100,7 +105,14 @@ class BaseHandler(RequestHandler):
|
|||||||
self.log.error("Rolling back session due to database error")
|
self.log.error("Rolling back session due to database error")
|
||||||
self.db.rollback()
|
self.db.rollback()
|
||||||
self._resolve_roles_and_scopes()
|
self._resolve_roles_and_scopes()
|
||||||
return await maybe_future(super().prepare())
|
await maybe_future(super().prepare())
|
||||||
|
# run xsrf check after prepare
|
||||||
|
# because our version takes auth info into account
|
||||||
|
if (
|
||||||
|
self.request.method not in self._xsrf_safe_methods
|
||||||
|
and self.application.settings.get("xsrf_cookies")
|
||||||
|
):
|
||||||
|
self.check_xsrf_cookie()
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def log(self):
|
def log(self):
|
||||||
@@ -205,9 +217,13 @@ class BaseHandler(RequestHandler):
|
|||||||
"""The default Content-Security-Policy header
|
"""The default Content-Security-Policy header
|
||||||
|
|
||||||
Can be overridden by defining Content-Security-Policy in settings['headers']
|
Can be overridden by defining Content-Security-Policy in settings['headers']
|
||||||
|
|
||||||
|
..versionchanged:: 4.1
|
||||||
|
|
||||||
|
Change default frame-ancestors from 'self' to 'none'
|
||||||
"""
|
"""
|
||||||
return '; '.join(
|
return '; '.join(
|
||||||
["frame-ancestors 'self'", "report-uri " + self.csp_report_uri]
|
["frame-ancestors 'none'", "report-uri " + self.csp_report_uri]
|
||||||
)
|
)
|
||||||
|
|
||||||
def get_content_type(self):
|
def get_content_type(self):
|
||||||
@@ -217,7 +233,6 @@ class BaseHandler(RequestHandler):
|
|||||||
"""
|
"""
|
||||||
Set any headers passed as tornado_settings['headers'].
|
Set any headers passed as tornado_settings['headers'].
|
||||||
|
|
||||||
By default sets Content-Security-Policy of frame-ancestors 'self'.
|
|
||||||
Also responsible for setting content-type header
|
Also responsible for setting content-type header
|
||||||
"""
|
"""
|
||||||
# wrap in HTTPHeaders for case-insensitivity
|
# wrap in HTTPHeaders for case-insensitivity
|
||||||
@@ -239,17 +254,63 @@ class BaseHandler(RequestHandler):
|
|||||||
# Login and cookie-related
|
# Login and cookie-related
|
||||||
# ---------------------------------------------------------------
|
# ---------------------------------------------------------------
|
||||||
|
|
||||||
|
_xsrf_safe_methods = {"GET", "HEAD", "OPTIONS"}
|
||||||
|
|
||||||
|
@property
|
||||||
|
def _xsrf_token_id(self):
|
||||||
|
"""Value to be signed/encrypted for xsrf token
|
||||||
|
|
||||||
|
include login info in xsrf token
|
||||||
|
this means xsrf tokens are tied to logged-in users,
|
||||||
|
and change after a user logs in.
|
||||||
|
|
||||||
|
While the user is not yet logged in,
|
||||||
|
an anonymous value is used, to prevent portability.
|
||||||
|
These anonymous values are short-lived.
|
||||||
|
"""
|
||||||
|
# cases:
|
||||||
|
# 1. logged in, session id (session_id:user_id)
|
||||||
|
# 2. logged in, no session id (anonymous_id:user_id)
|
||||||
|
# 3. not logged in, session id (session_id:anonymous_id)
|
||||||
|
# 4. no cookies at all, use single anonymous value (:anonymous_id)
|
||||||
|
session_id = self.get_session_cookie()
|
||||||
|
if self.current_user:
|
||||||
|
if isinstance(self.current_user, User):
|
||||||
|
user_id = self.current_user.cookie_id
|
||||||
|
else:
|
||||||
|
# this shouldn't happen, but may if e.g. a Service attempts to fetch a page,
|
||||||
|
# which usually won't work, but this method should not be what raises
|
||||||
|
user_id = ""
|
||||||
|
if not session_id:
|
||||||
|
# no session id, use non-portable anonymous id
|
||||||
|
session_id = _anonymous_xsrf_id(self)
|
||||||
|
else:
|
||||||
|
# not logged in yet, use non-portable anonymous id
|
||||||
|
user_id = _anonymous_xsrf_id(self)
|
||||||
|
xsrf_id = f"{session_id}:{user_id}".encode("utf8", "replace")
|
||||||
|
return xsrf_id
|
||||||
|
|
||||||
|
@property
|
||||||
|
def xsrf_token(self):
|
||||||
|
"""Override tornado's xsrf token with further restrictions
|
||||||
|
|
||||||
|
- only set cookie for regular pages
|
||||||
|
- include login info in xsrf token
|
||||||
|
- verify signature
|
||||||
|
"""
|
||||||
|
return get_xsrf_token(self, cookie_path=self.hub.base_url)
|
||||||
|
|
||||||
def check_xsrf_cookie(self):
|
def check_xsrf_cookie(self):
|
||||||
try:
|
"""Check that xsrf cookie matches xsrf token in request"""
|
||||||
return super().check_xsrf_cookie()
|
# overrides tornado's implementation
|
||||||
except web.HTTPError as e:
|
# because we changed what a correct value should be in xsrf_token
|
||||||
# ensure _jupyterhub_user is defined on rejected requests
|
|
||||||
if not hasattr(self, "_jupyterhub_user"):
|
if not hasattr(self, "_jupyterhub_user"):
|
||||||
self._jupyterhub_user = None
|
# run too early to check the value
|
||||||
self._resolve_roles_and_scopes()
|
# tornado runs this before 'prepare',
|
||||||
# rewrite message because we use this on methods other than POST
|
# but we run it again after so auth info is available, which happens in 'prepare'
|
||||||
e.log_message = e.log_message.replace("POST", self.request.method)
|
return None
|
||||||
raise
|
return check_xsrf_cookie(self)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def admin_users(self):
|
def admin_users(self):
|
||||||
@@ -526,6 +587,16 @@ class BaseHandler(RequestHandler):
|
|||||||
user = self._user_from_orm(u)
|
user = self._user_from_orm(u)
|
||||||
return user
|
return user
|
||||||
|
|
||||||
|
def clear_cookie(self, cookie_name, **kwargs):
|
||||||
|
"""Clear a cookie
|
||||||
|
|
||||||
|
overrides RequestHandler to always handle __Host- prefix correctly
|
||||||
|
"""
|
||||||
|
if cookie_name.startswith("__Host-"):
|
||||||
|
kwargs["path"] = "/"
|
||||||
|
kwargs["secure"] = True
|
||||||
|
return super().clear_cookie(cookie_name, **kwargs)
|
||||||
|
|
||||||
def clear_login_cookie(self, name=None):
|
def clear_login_cookie(self, name=None):
|
||||||
kwargs = {}
|
kwargs = {}
|
||||||
user = self.get_current_user_cookie()
|
user = self.get_current_user_cookie()
|
||||||
@@ -597,6 +668,11 @@ class BaseHandler(RequestHandler):
|
|||||||
kwargs.update(self.settings.get('cookie_options', {}))
|
kwargs.update(self.settings.get('cookie_options', {}))
|
||||||
kwargs.update(overrides)
|
kwargs.update(overrides)
|
||||||
|
|
||||||
|
if key.startswith("__Host-"):
|
||||||
|
# __Host- cookies must be secure and on /
|
||||||
|
kwargs["path"] = "/"
|
||||||
|
kwargs["secure"] = True
|
||||||
|
|
||||||
if encrypted:
|
if encrypted:
|
||||||
set_cookie = self.set_secure_cookie
|
set_cookie = self.set_secure_cookie
|
||||||
else:
|
else:
|
||||||
@@ -626,7 +702,9 @@ class BaseHandler(RequestHandler):
|
|||||||
Session id cookie is *not* encrypted,
|
Session id cookie is *not* encrypted,
|
||||||
so other services on this domain can read it.
|
so other services on this domain can read it.
|
||||||
"""
|
"""
|
||||||
session_id = uuid.uuid4().hex
|
if not hasattr(self, "_session_id"):
|
||||||
|
self._session_id = uuid.uuid4().hex
|
||||||
|
session_id = self._session_id
|
||||||
# if using subdomains, set session cookie on the domain,
|
# if using subdomains, set session cookie on the domain,
|
||||||
# which allows it to be shared by subdomains.
|
# which allows it to be shared by subdomains.
|
||||||
# if domain is unspecified, it is _more_ restricted to only the setting domain
|
# if domain is unspecified, it is _more_ restricted to only the setting domain
|
||||||
@@ -666,10 +744,20 @@ class BaseHandler(RequestHandler):
|
|||||||
if not self.get_session_cookie():
|
if not self.get_session_cookie():
|
||||||
self.set_session_cookie()
|
self.set_session_cookie()
|
||||||
|
|
||||||
# create and set a new cookie token for the hub
|
# create and set a new cookie for the hub
|
||||||
if not self.get_current_user_cookie():
|
cookie_user = self.get_current_user_cookie()
|
||||||
|
if cookie_user is None or cookie_user.id != user.id:
|
||||||
|
if cookie_user:
|
||||||
|
self.log.info(f"User {cookie_user.name} is logging in as {user.name}")
|
||||||
self.set_hub_cookie(user)
|
self.set_hub_cookie(user)
|
||||||
|
|
||||||
|
# make sure xsrf cookie is updated
|
||||||
|
# this avoids needing a second request to set the right xsrf cookie
|
||||||
|
self._jupyterhub_user = user
|
||||||
|
_set_xsrf_cookie(
|
||||||
|
self, self._xsrf_token_id, cookie_path=self.hub.base_url, authenticated=True
|
||||||
|
)
|
||||||
|
|
||||||
def authenticate(self, data):
|
def authenticate(self, data):
|
||||||
return maybe_future(self.authenticator.get_authenticated_user(self, data))
|
return maybe_future(self.authenticator.get_authenticated_user(self, data))
|
||||||
|
|
||||||
@@ -1426,7 +1514,7 @@ class BaseHandler(RequestHandler):
|
|||||||
try:
|
try:
|
||||||
html = self.render_template('%s.html' % status_code, sync=True, **ns)
|
html = self.render_template('%s.html' % status_code, sync=True, **ns)
|
||||||
except TemplateNotFound:
|
except TemplateNotFound:
|
||||||
self.log.debug("No template for %d", status_code)
|
self.log.debug("Using default error template for %d", status_code)
|
||||||
try:
|
try:
|
||||||
html = self.render_template('error.html', sync=True, **ns)
|
html = self.render_template('error.html', sync=True, **ns)
|
||||||
except Exception:
|
except Exception:
|
||||||
@@ -1561,10 +1649,28 @@ class UserUrlHandler(BaseHandler):
|
|||||||
delete = non_get
|
delete = non_get
|
||||||
|
|
||||||
@web.authenticated
|
@web.authenticated
|
||||||
@needs_scope("access:servers")
|
|
||||||
async def get(self, user_name, user_path):
|
async def get(self, user_name, user_path):
|
||||||
if not user_path:
|
if not user_path:
|
||||||
user_path = '/'
|
user_path = '/'
|
||||||
|
path_parts = user_path.split("/", 2)
|
||||||
|
server_names = [""]
|
||||||
|
if len(path_parts) >= 3:
|
||||||
|
# second part _may_ be a server name
|
||||||
|
server_names.append(path_parts[1])
|
||||||
|
|
||||||
|
access_scopes = [
|
||||||
|
f"access:servers!server={user_name}/{server_name}"
|
||||||
|
for server_name in server_names
|
||||||
|
]
|
||||||
|
if not any(self.has_scope(scope) for scope in access_scopes):
|
||||||
|
self.log.warning(
|
||||||
|
"Not authorizing access to %s. Requires any of [%s], not derived from scopes [%s]",
|
||||||
|
self.request.path,
|
||||||
|
", ".join(access_scopes),
|
||||||
|
", ".join(self.expanded_scopes),
|
||||||
|
)
|
||||||
|
raise web.HTTPError(404, "No access to resources or resources not found")
|
||||||
|
|
||||||
current_user = self.current_user
|
current_user = self.current_user
|
||||||
if user_name != current_user.name:
|
if user_name != current_user.name:
|
||||||
user = self.find_user(user_name)
|
user = self.find_user(user_name)
|
||||||
|
@@ -657,7 +657,7 @@ class ProxyErrorHandler(BaseHandler):
|
|||||||
try:
|
try:
|
||||||
html = await self.render_template('%s.html' % status_code, **ns)
|
html = await self.render_template('%s.html' % status_code, **ns)
|
||||||
except TemplateNotFound:
|
except TemplateNotFound:
|
||||||
self.log.debug("No template for %d", status_code)
|
self.log.debug("Using default error template for %d", status_code)
|
||||||
html = await self.render_template('error.html', **ns)
|
html = await self.render_template('error.html', **ns)
|
||||||
|
|
||||||
self.write(html)
|
self.write(html)
|
||||||
|
@@ -35,6 +35,7 @@ import socket
|
|||||||
import string
|
import string
|
||||||
import time
|
import time
|
||||||
import warnings
|
import warnings
|
||||||
|
from functools import partial
|
||||||
from http import HTTPStatus
|
from http import HTTPStatus
|
||||||
from unittest import mock
|
from unittest import mock
|
||||||
from urllib.parse import urlencode, urlparse
|
from urllib.parse import urlencode, urlparse
|
||||||
@@ -43,8 +44,10 @@ from tornado.httpclient import AsyncHTTPClient, HTTPRequest
|
|||||||
from tornado.httputil import url_concat
|
from tornado.httputil import url_concat
|
||||||
from tornado.log import app_log
|
from tornado.log import app_log
|
||||||
from tornado.web import HTTPError, RequestHandler
|
from tornado.web import HTTPError, RequestHandler
|
||||||
|
from tornado.websocket import WebSocketHandler
|
||||||
from traitlets import (
|
from traitlets import (
|
||||||
Any,
|
Any,
|
||||||
|
Bool,
|
||||||
Dict,
|
Dict,
|
||||||
Instance,
|
Instance,
|
||||||
Integer,
|
Integer,
|
||||||
@@ -56,8 +59,15 @@ from traitlets import (
|
|||||||
)
|
)
|
||||||
from traitlets.config import SingletonConfigurable
|
from traitlets.config import SingletonConfigurable
|
||||||
|
|
||||||
|
from .._xsrf_utils import (
|
||||||
|
_anonymous_xsrf_id,
|
||||||
|
_needs_check_xsrf,
|
||||||
|
_set_xsrf_cookie,
|
||||||
|
check_xsrf_cookie,
|
||||||
|
get_xsrf_token,
|
||||||
|
)
|
||||||
from ..scopes import _intersect_expanded_scopes
|
from ..scopes import _intersect_expanded_scopes
|
||||||
from ..utils import get_browser_protocol, url_path_join
|
from ..utils import _bool_env, get_browser_protocol, url_path_join
|
||||||
|
|
||||||
|
|
||||||
def check_scopes(required_scopes, scopes):
|
def check_scopes(required_scopes, scopes):
|
||||||
@@ -356,6 +366,46 @@ class HubAuth(SingletonConfigurable):
         """,
     ).tag(config=True)

+    allow_token_in_url = Bool(
+        _bool_env("JUPYTERHUB_ALLOW_TOKEN_IN_URL", default=False),
+        help="""Allow requests to pages with ?token=... in the URL
+
+        This allows starting a user session by sharing a URL with credentials,
+        bypassing authentication with the Hub.
+
+        If False, tokens in URLs will be ignored by the server,
+        except on websocket requests.
+
+        Has no effect on websocket requests,
+        which can only reliably authenticate via token in the URL,
+        as recommended by browser Websocket implementations.
+
+        This will default to False in JupyterHub 5.
+
+        .. versionadded:: 4.1
+
+        .. versionchanged:: 5.0
+            default changed to False
+        """,
+    ).tag(config=True)
+
+    allow_websocket_cookie_auth = Bool(
+        _bool_env("JUPYTERHUB_ALLOW_WEBSOCKET_COOKIE_AUTH", default=True),
+        help="""Allow websocket requests with only cookie for authentication
+
+        Cookie-authenticated websockets cannot be protected from other user servers unless per-user domains are used.
+        Disabling cookie auth on websockets protects user servers from each other,
+        but may break some user applications.
+        Per-user domains eliminate the need to lock this down.
+
+        JupyterLab 4.1.2 and Notebook 6.5.6, 7.1.0 will not work
+        because they rely on cookie authentication without
+        API or XSRF tokens.
+
+        .. versionadded:: 4.1
+        """,
+    ).tag(config=True)
+
     cookie_options = Dict(
         help="""Additional options to pass when setting cookies.

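For readers wiring this up: the two new traits are normally driven by the environment of the single-user server rather than set directly. A minimal, hypothetical jupyterhub_config.py sketch (the env-var names come from the help strings above; the values follow _bool_env's rule that "" and "0" mean False):

    # hypothetical deployment snippet, not part of this commit
    c.Spawner.environment = {
        # opt back into ?token=... URLs for regular page loads
        "JUPYTERHUB_ALLOW_TOKEN_IN_URL": "1",
        # forbid cookie-only auth on websockets (stricter than the default)
        "JUPYTERHUB_ALLOW_WEBSOCKET_COOKIE_AUTH": "0",
    }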
@@ -374,6 +424,40 @@ class HubAuth(SingletonConfigurable):
         else:
             return {}

+    cookie_host_prefix_enabled = Bool(
+        False,
+        help="""Enable `__Host-` prefix on authentication cookies.
+
+        The `__Host-` prefix on JupyterHub cookies provides further
+        protection against cookie tossing when untrusted servers
+        may control subdomains of your jupyterhub deployment.
+
+        _However_, it also requires that cookies be set on the path `/`,
+        which means they are shared by all JupyterHub components,
+        so a compromised server component will have access to _all_ JupyterHub-related
+        cookies of the visiting browser.
+        It is recommended to only combine `__Host-` cookies with per-user domains.
+
+        Set via $JUPYTERHUB_COOKIE_HOST_PREFIX_ENABLED
+        """,
+    ).tag(config=True)
+
+    @default("cookie_host_prefix_enabled")
+    def _default_cookie_host_prefix_enabled(self):
+        return _bool_env("JUPYTERHUB_COOKIE_HOST_PREFIX_ENABLED")
+
+    @property
+    def cookie_path(self):
+        """
+        Path prefix on which to set cookies
+
+        self.base_url, but '/' when cookie_host_prefix_enabled is True
+        """
+        if self.cookie_host_prefix_enabled:
+            return "/"
+        else:
+            return self.base_url
+
     cookie_cache_max_age = Integer(help="DEPRECATED. Use cache_max_age")

     @observe('cookie_cache_max_age')
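As a quick illustration of what the new property computes (a sketch under the assumption that base_url is "/user/alice/"; the snippet is not taken from the commit itself):

    from jupyterhub.services.auth import HubAuth

    auth = HubAuth(base_url="/user/alice/")
    print(auth.cookie_path)                  # "/user/alice/"
    auth.cookie_host_prefix_enabled = True
    print(auth.cookie_path)                  # "/" -- __Host- cookies must be set on path "/"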
@@ -636,6 +720,17 @@ class HubAuth(SingletonConfigurable):
     auth_header_name = 'Authorization'
     auth_header_pat = re.compile(r'(?:token|bearer)\s+(.+)', re.IGNORECASE)

+    def _get_token_url(self, handler):
+        """Get the token from the URL
+
+        Always run for websockets,
+        otherwise run only if self.allow_token_in_url
+        """
+        fetch_mode = handler.request.headers.get("Sec-Fetch-Mode", "unspecified")
+        if self.allow_token_in_url or fetch_mode == "websocket":
+            return handler.get_argument("token", "")
+        return ""
+
     def get_token(self, handler, in_cookie=True):
         """Get the token authenticating a request

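The docstring above boils down to one rule: websocket requests may always carry ?token=..., everything else only when allow_token_in_url is enabled. A standalone sketch of that rule (the helper name and signature are illustrative, not the actual method):

    def token_from_url(query_token, fetch_mode, allow_token_in_url):
        # mirrors _get_token_url: browsers send Sec-Fetch-Mode: websocket on WS upgrades
        if allow_token_in_url or fetch_mode == "websocket":
            return query_token
        return ""

    assert token_from_url("abc", "websocket", allow_token_in_url=False) == "abc"
    assert token_from_url("abc", "navigate", allow_token_in_url=False) == ""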
@@ -651,8 +746,7 @@ class HubAuth(SingletonConfigurable):
         Args:
             handler (tornado.web.RequestHandler): the current request handler
         """
-        user_token = handler.get_argument('token', '')
+        user_token = self._get_token_url(handler)
         if not user_token:
             # get it from Authorization header
             m = self.auth_header_pat.match(
@@ -702,6 +796,14 @@ class HubAuth(SingletonConfigurable):
         """
         return self._call_coroutine(sync, self._get_user, handler)

+    def _patch_xsrf(self, handler):
+        """Overridden in HubOAuth
+
+        HubAuth base class doesn't handle xsrf,
+        which is only relevant for cookie-based auth
+        """
+        return
+
     async def _get_user(self, handler):
         # only allow this to be called once per handler
         # avoids issues if an error is raised,
@@ -709,6 +811,9 @@ class HubAuth(SingletonConfigurable):
         if hasattr(handler, '_cached_hub_user'):
             return handler._cached_hub_user

+        # patch XSRF checks, which will apply after user check
+        self._patch_xsrf(handler)
+
         handler._cached_hub_user = user_model = None
         session_id = self.get_session_id(handler)

@@ -758,6 +863,10 @@ class HubAuth(SingletonConfigurable):
         if not hasattr(self, 'set_cookie'):
             # only HubOAuth can persist cookies
             return
+        fetch_mode = handler.request.headers.get("Sec-Fetch-Mode", "navigate")
+        if isinstance(handler, WebSocketHandler) or fetch_mode != "navigate":
+            # don't do this on websockets or non-navigate requests
+            return
         self.log.info(
             "Storing token from url in cookie for %s",
             handler.request.remote_ip,
@@ -794,7 +903,10 @@ class HubOAuth(HubAuth):
         because we don't want to use the same cookie name
         across OAuth clients.
         """
-        return self.oauth_client_id
+        cookie_name = self.oauth_client_id
+        if self.cookie_host_prefix_enabled:
+            cookie_name = "__Host-" + cookie_name
+        return cookie_name

     @property
     def state_cookie_name(self):
@@ -806,22 +918,115 @@ class HubOAuth(HubAuth):

     def _get_token_cookie(self, handler):
         """Base class doesn't store tokens in cookies"""
+        if hasattr(handler, "_hub_auth_token_cookie"):
+            return handler._hub_auth_token_cookie
+
+        fetch_mode = handler.request.headers.get("Sec-Fetch-Mode", "unset")
+        if fetch_mode == "websocket" and not self.allow_websocket_cookie_auth:
+            # disallow cookie auth on websockets
+            return None
+
         token = handler.get_secure_cookie(self.cookie_name)
         if token:
             # decode cookie bytes
             token = token.decode('ascii', 'replace')
         return token

-    async def _get_user_cookie(self, handler):
+    def _get_xsrf_token_id(self, handler):
+        """Get contents for xsrf token for a given Handler
+
+        This is the value to be encrypted & signed in the xsrf token
+        """
         token = self._get_token_cookie(handler)
         session_id = self.get_session_id(handler)
+        if token:
+            token_hash = hashlib.sha256(token.encode("ascii", "replace")).hexdigest()
+            if not session_id:
+                session_id = _anonymous_xsrf_id(handler)
+        else:
+            token_hash = _anonymous_xsrf_id(handler)
+        return f"{session_id}:{token_hash}".encode("ascii", "replace")
+
+    def _patch_xsrf(self, handler):
+        """Patch handler to inject JuptyerHub xsrf token behavior"""
+        if isinstance(handler, HubAuthenticated):
+            # doesn't need patch
+            return
+
+        # patch in our xsrf token handling
+        # overrides tornado and jupyter_server defaults,
+        # but not others.
+        # subclasses will still inherit our overridden behavior,
+        # but their overrides (if any) will take precedence over ours
+        # such as jupyter-server-proxy
+        for cls in handler.__class__.__mro__:
+            # search for the nearest parent class defined
+            # in one of the 'base' Handler-defining packages.
+            # In current implementations, this will
+            # generally be jupyter_server.base.handlers.JupyterHandler
+            # or tornado.web.RequestHandler,
+            # but doing it this way ensures consistent results
+            if (cls.__module__ or '').partition('.')[0] not in {
+                "jupyter_server",
+                "notebook",
+                "tornado",
+            }:
+                continue
+            # override check_xsrf_cookie where it's defined
+            if "check_xsrf_cookie" in cls.__dict__:
+                if "_get_xsrf_token_id" in cls.__dict__:
+                    # already patched
+                    return
+                cls._xsrf_token_id = property(self._get_xsrf_token_id)
+                cls.xsrf_token = property(
+                    partial(get_xsrf_token, cookie_path=self.base_url)
+                )
+                cls.check_xsrf_cookie = lambda handler: self.check_xsrf_cookie(handler)
+
+    def check_xsrf_cookie(self, handler):
+        """check_xsrf_cookie patch
+
+        Applies JupyterHub check_xsrf_cookie if not token authenticated
+        """
+        if getattr(handler, '_token_authenticated', False) or handler.settings.get(
+            "disable_check_xsrf", False
+        ):
+            return
+        check_xsrf_cookie(handler)
+
+    def _clear_cookie(self, handler, cookie_name, **kwargs):
+        """Clear a cookie, handling __Host- prefix"""
+        # Set-Cookie is rejected without 'secure',
+        # this includes clearing cookies!
+        if cookie_name.startswith("__Host-"):
+            kwargs["path"] = "/"
+            kwargs["secure"] = True
+        return handler.clear_cookie(cookie_name, **kwargs)
+
+    async def _get_user_cookie(self, handler):
+        # check xsrf if needed
+        token = self._get_token_cookie(handler)
+        session_id = self.get_session_id(handler)
+        if token and _needs_check_xsrf(handler):
+            # call handler.check_xsrf_cookie instead of self.check_xsrf_cookie
+            # to allow subclass overrides
+            try:
+                handler.check_xsrf_cookie()
+            except HTTPError as e:
+                self.log.debug(
+                    f"Not accepting cookie auth on {handler.request.method} {handler.request.path}: {e.log_message}"
+                )
+                # don't proceed with cookie auth unless xsrf is okay
+                # don't raise either, because that makes a mess
+                return None
+
         if token:
             user_model = await self.user_for_token(
                 token, session_id=session_id, sync=False
             )
             if user_model is None:
                 app_log.warning("Token stored in cookie may have expired")
-                handler.clear_cookie(self.cookie_name)
+                self._clear_cookie(handler, self.cookie_name, path=self.cookie_path)
         return user_model

     # HubOAuth API
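The `_get_xsrf_token_id` logic above is the heart of the per-server xsrf isolation: the token id binds the session id to a SHA-256 hash of the auth token, falling back to an anonymous id when either is missing. A self-contained sketch of the same derivation (the anonymous_id argument stands in for _anonymous_xsrf_id, which is not reproduced here):

    import hashlib

    def xsrf_token_id(session_id, token, anonymous_id):
        # token present: hash it, and substitute the anonymous id for a missing session id
        if token:
            token_hash = hashlib.sha256(token.encode("ascii", "replace")).hexdigest()
            session_id = session_id or anonymous_id
        else:
            token_hash = anonymous_id
        return f"{session_id}:{token_hash}".encode("ascii", "replace")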
@@ -962,7 +1167,7 @@ class HubOAuth(HubAuth):
         cookie_name = self.state_cookie_name
         state_id = self.generate_state(next_url, **extra_state)
         kwargs = {
-            'path': self.base_url,
+            'path': self.cookie_path,
             'httponly': True,
             # Expire oauth state cookie in ten minutes.
             # Usually this will be cleared by completed login
@@ -1020,9 +1225,10 @@ class HubOAuth(HubAuth):
         """Clear persisted oauth state"""
         for cookie_name, cookie in handler.request.cookies.items():
             if cookie_name.startswith(self.state_cookie_name):
-                handler.clear_cookie(
+                self._clear_cookie(
+                    handler,
                     cookie_name,
-                    path=self.base_url,
+                    path=self.cookie_path,
                 )

     def _decode_state(self, state_id, /):
@@ -1044,8 +1250,11 @@ class HubOAuth(HubAuth):

     def set_cookie(self, handler, access_token):
         """Set a cookie recording OAuth result"""
-        kwargs = {'path': self.base_url, 'httponly': True}
-        if get_browser_protocol(handler.request) == 'https':
+        kwargs = {'path': self.cookie_path, 'httponly': True}
+        if (
+            get_browser_protocol(handler.request) == 'https'
+            or self.cookie_host_prefix_enabled
+        ):
             kwargs['secure'] = True
         # load user cookie overrides
         kwargs.update(self.cookie_options)
@@ -1056,6 +1265,15 @@ class HubOAuth(HubAuth):
             kwargs,
         )
         handler.set_secure_cookie(self.cookie_name, access_token, **kwargs)
+        # set updated xsrf token cookie,
+        # which changes after login
+        handler._hub_auth_token_cookie = access_token
+        _set_xsrf_cookie(
+            handler,
+            handler._xsrf_token_id,
+            cookie_path=self.base_url,
+            authenticated=True,
+        )

     def clear_cookie(self, handler):
         """Clear the OAuth cookie
@@ -1063,7 +1281,7 @@ class HubOAuth(HubAuth):
         Args:
             handler (tornado.web.RequestHandler): the current request handler
         """
-        handler.clear_cookie(self.cookie_name, path=self.base_url)
+        self._clear_cookie(handler, self.cookie_name, path=self.cookie_path)


 class UserNotAllowed(Exception):
@@ -1275,7 +1493,7 @@ class HubAuthenticated:
             return
         try:
             self._hub_auth_user_cache = self.check_hub_user(user_model)
-        except UserNotAllowed as e:
+        except UserNotAllowed:
             # cache None, in case get_user is called again while processing the error
             self._hub_auth_user_cache = None

@@ -1297,6 +1515,25 @@ class HubAuthenticated:
         self.hub_auth._persist_url_token_if_set(self)
         return self._hub_auth_user_cache

+    @property
+    def _xsrf_token_id(self):
+        if hasattr(self, "__xsrf_token_id"):
+            return self.__xsrf_token_id
+        if not isinstance(self.hub_auth, HubOAuth):
+            return ""
+        return self.hub_auth._get_xsrf_token_id(self)
+
+    @_xsrf_token_id.setter
+    def _xsrf_token_id(self, value):
+        self.__xsrf_token_id = value
+
+    @property
+    def xsrf_token(self):
+        return get_xsrf_token(self, cookie_path=self.hub_auth.base_url)
+
+    def check_xsrf_cookie(self):
+        return self.hub_auth.check_xsrf_cookie(self)
+

 class HubOAuthenticated(HubAuthenticated):
     """Simple subclass of HubAuthenticated using OAuth instead of old shared cookies"""
@@ -1332,7 +1569,7 @@ class HubOAuthCallbackHandler(HubOAuthenticated, RequestHandler):
         cookie_state = self.get_secure_cookie(cookie_name)
         # clear cookie state now that we've consumed it
         if cookie_state:
-            self.clear_cookie(cookie_name, path=self.hub_auth.base_url)
+            self.hub_auth.clear_oauth_state_cookies(self)
         else:
             # completing oauth with stale state, but already logged in.
             # stop here and redirect to default URL
@@ -1349,8 +1586,13 @@ class HubOAuthCallbackHandler(HubOAuthenticated, RequestHandler):

         # check that state matches
         if arg_state != cookie_state:
-            app_log.warning("oauth state %r != %r", arg_state, cookie_state)
-            raise HTTPError(403, "OAuth state does not match. Try logging in again.")
+            app_log.warning(
+                "oauth state argument %r != cookie %s=%r",
+                arg_state,
+                cookie_name,
+                cookie_state,
+            )
+            raise HTTPError(403, "oauth state does not match. Try logging in again.")
         next_url = self.hub_auth.get_next_url(cookie_state)
         # clear consumed state from _oauth_states cache now that we're done with it
         self.hub_auth.clear_oauth_state(cookie_state)
jupyterhub/singleuser/_decorator.py (new file, 14 lines)
@@ -0,0 +1,14 @@
+from typing import Any, Callable, TypeVar
+
+try:
+    from jupyter_server.auth.decorator import allow_unauthenticated
+except ImportError:
+    FuncT = TypeVar("FuncT", bound=Callable[..., Any])
+
+    # if using an older jupyter-server version this can be a no-op,
+    # as these do not support marking endpoints anyways
+    def allow_unauthenticated(method: FuncT) -> FuncT:
+        return method
+
+
+__all__ = ["allow_unauthenticated"]
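Because the real decorator only marks the wrapped method (and the fallback above returns it unchanged), it can be applied to any handler method. A hypothetical example of the usage pattern the rest of this commit follows (handler name invented for illustration):

    from tornado.web import RequestHandler
    from jupyterhub.singleuser._decorator import allow_unauthenticated

    class PingHandler(RequestHandler):
        @allow_unauthenticated  # reachable without login on new jupyter-server; no-op on old
        def get(self):
            self.write("ok")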
@@ -44,28 +44,19 @@ from jupyterhub._version import __version__, _check_version
 from jupyterhub.log import log_request
 from jupyterhub.services.auth import HubOAuth, HubOAuthCallbackHandler
 from jupyterhub.utils import (
+    _bool_env,
     exponential_backoff,
     isoformat,
     make_ssl_context,
     url_path_join,
 )

+from ._decorator import allow_unauthenticated
 from ._disable_user_config import _disable_user_config

 SINGLEUSER_TEMPLATES_DIR = str(Path(__file__).parent.resolve().joinpath("templates"))


-def _bool_env(key):
-    """Cast an environment variable to bool
-
-    0, empty, or unset is False; All other values are True.
-    """
-    if os.environ.get(key, "") in {"", "0"}:
-        return False
-    else:
-        return True
-
-
 def _exclude_home(path_list):
     """Filter out any entries in a path list that are in my home directory.

@@ -78,6 +69,7 @@ def _exclude_home(path_list):


 class JupyterHubLogoutHandler(LogoutHandler):
+    @allow_unauthenticated
     def get(self):
         hub_auth = self.identity_provider.hub_auth
         # clear token stored in single-user cookie (set by hub_auth)
@@ -105,6 +97,10 @@ class JupyterHubOAuthCallbackHandler(HubOAuthCallbackHandler):
     def initialize(self, hub_auth):
         self.hub_auth = hub_auth

+    @allow_unauthenticated
+    async def get(self):
+        return await super().get()
+

 class JupyterHubIdentityProvider(IdentityProvider):
     """Identity Provider for JupyterHub OAuth
@@ -127,6 +123,9 @@ class JupyterHubIdentityProvider(IdentityProvider):
         # HubAuth gets most of its config from the environment
         return HubOAuth(parent=self)

+    def _patch_xsrf(self, handler):
+        self.hub_auth._patch_xsrf(handler)
+
     def _patch_get_login_url(self, handler):
         original_get_login_url = handler.get_login_url

@@ -161,6 +160,7 @@ class JupyterHubIdentityProvider(IdentityProvider):
         if hasattr(handler, "_jupyterhub_user"):
             return handler._jupyterhub_user
         self._patch_get_login_url(handler)
+        self._patch_xsrf(handler)
         user = await self.hub_auth.get_user(handler, sync=False)
         if user is None:
             handler._jupyterhub_user = None
@@ -632,6 +632,9 @@ class JupyterHubSingleUser(ExtensionApp):
         app.web_app.settings["page_config_hook"] = (
             app.identity_provider.page_config_hook
         )
+        # disable xsrf_cookie checks by Tornado, which run too early
+        # checks in Jupyter Server are unconditional
+        app.web_app.settings["xsrf_cookies"] = False
         # if the user has configured a log function in the tornado settings, do not override it
         if not 'log_function' in app.config.ServerApp.get('tornado_settings', {}):
             app.web_app.settings["log_function"] = log_request
@@ -642,6 +645,9 @@ class JupyterHubSingleUser(ExtensionApp):
         # check jupyterhub version
         app.io_loop.run_sync(self.check_hub_version)

+        # set default CSP to prevent iframe embedding across jupyterhub components
+        headers.setdefault("Content-Security-Policy", "frame-ancestors 'none'")
+
         async def _start_activity():
             self._activity_task = asyncio.ensure_future(self.keep_activity_updated())

@@ -45,21 +45,16 @@ from traitlets.config import Configurable
 from .._version import __version__, _check_version
 from ..log import log_request
 from ..services.auth import HubOAuth, HubOAuthCallbackHandler, HubOAuthenticated
-from ..utils import exponential_backoff, isoformat, make_ssl_context, url_path_join
+from ..utils import (
+    _bool_env,
+    exponential_backoff,
+    isoformat,
+    make_ssl_context,
+    url_path_join,
+)
+from ._decorator import allow_unauthenticated
 from ._disable_user_config import _disable_user_config, _exclude_home

-
-def _bool_env(key):
-    """Cast an environment variable to bool
-
-    0, empty, or unset is False; All other values are True.
-    """
-    if os.environ.get(key, "") in {"", "0"}:
-        return False
-    else:
-        return True
-
-
 # Authenticate requests with the Hub

@@ -138,6 +133,7 @@ class JupyterHubLoginHandlerMixin:


 class JupyterHubLogoutHandlerMixin:
+    @allow_unauthenticated
     def get(self):
         self.settings['hub_auth'].clear_cookie(self)
         self.redirect(
@@ -153,6 +149,10 @@ class OAuthCallbackHandlerMixin(HubOAuthCallbackHandler):
     def hub_auth(self):
         return self.settings['hub_auth']

+    @allow_unauthenticated
+    async def get(self):
+        return await super().get()
+

 # register new hub related command-line aliases
 aliases = {
@@ -683,10 +683,10 @@ class SingleUserNotebookAppMixin(Configurable):
         )
         headers = s.setdefault('headers', {})
         headers['X-JupyterHub-Version'] = __version__
-        # set CSP header directly to workaround bugs in jupyter/notebook 5.0
+        # set default CSP to prevent iframe embedding across jupyterhub components
         headers.setdefault(
             'Content-Security-Policy',
-            ';'.join(["frame-ancestors 'self'", "report-uri " + csp_report_uri]),
+            ';'.join(["frame-ancestors 'none'", "report-uri " + csp_report_uri]),
         )
         super().init_webapp()

@@ -833,7 +833,7 @@ def patch_base_handler(BaseHandler, log=None):
    # but we also need to ensure BaseHandler *itself* doesn't
    # override the public tornado API methods we have inserted.
    # If they are defined in BaseHandler, explicitly replace them with our methods.
-    for name in ("get_current_user", "get_login_url"):
+    for name in ("get_current_user", "get_login_url", "check_xsrf_cookie"):
        if name in BaseHandler.__dict__:
            log.debug(
                f"Overriding {BaseHandler}.{name} with HubAuthenticatedHandler.{name}"
@@ -163,6 +163,7 @@ class Spawner(LoggingConfigurable):
     hub = Any()
     orm_spawner = Any()
     cookie_options = Dict()
+    cookie_host_prefix_enabled = Bool()
     public_url = Unicode(help="Public URL of this spawner's server")
     public_hub_url = Unicode(help="Public URL of the Hub itself")

@@ -1006,6 +1007,10 @@ class Spawner(LoggingConfigurable):
         env['JUPYTERHUB_CLIENT_ID'] = self.oauth_client_id
         if self.cookie_options:
             env['JUPYTERHUB_COOKIE_OPTIONS'] = json.dumps(self.cookie_options)
+
+        env["JUPYTERHUB_COOKIE_HOST_PREFIX_ENABLED"] = str(
+            int(self.cookie_host_prefix_enabled)
+        )
         env['JUPYTERHUB_HOST'] = self.hub.public_host
         env['JUPYTERHUB_OAUTH_CALLBACK_URL'] = url_path_join(
             self.user.url, url_escape_path(self.name), 'oauth_callback'
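The str(int(...)) dance above exists so the child process can recover the flag with _bool_env, which treats "" and "0" as False and anything else as True. A rough sketch of that round trip (the _bool_env signature here is inferred from its uses in this diff, not copied from jupyterhub.utils):

    import os

    def _bool_env(key, default=False):
        # assumed behavior: unset -> default, "" or "0" -> False, anything else -> True
        value = os.environ.get(key, None)
        if value is None:
            return default
        return value not in {"", "0"}

    os.environ["JUPYTERHUB_COOKIE_HOST_PREFIX_ENABLED"] = str(int(True))  # "1"
    assert _bool_env("JUPYTERHUB_COOKIE_HOST_PREFIX_ENABLED") is True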
@@ -1,6 +1,8 @@
 """Tests for the Playwright Python"""

+import asyncio
 import json
+import pprint
 import re
 from unittest import mock
 from urllib.parse import parse_qs, urlparse
@@ -11,7 +13,8 @@ from tornado.escape import url_escape
 from tornado.httputil import url_concat

 from jupyterhub import orm, roles, scopes
-from jupyterhub.tests.utils import public_host, public_url, ujoin
+from jupyterhub.tests.test_named_servers import named_servers  # noqa
+from jupyterhub.tests.utils import async_requests, public_host, public_url, ujoin
 from jupyterhub.utils import url_escape_path, url_path_join

 pytestmark = pytest.mark.browser
@@ -44,7 +47,7 @@ async def test_submit_login_form(app, browser, user_special_chars):
     login_url = url_path_join(public_host(app), app.hub.base_url, "login")
     await browser.goto(login_url)
     await login(browser, user.name, password=user.name)
-    expected_url = ujoin(public_url(app), f"/user/{user_special_chars.urlname}/")
+    expected_url = public_url(app, user)
     await expect(browser).to_have_url(expected_url)


@@ -56,7 +59,7 @@ async def test_submit_login_form(app, browser, user_special_chars):
             # will encode given parameters for an unauthenticated URL in the next url
             # the next parameter will contain the app base URL (replaces BASE_URL in tests)
             'spawn',
-            [('param', 'value')],
+            {'param': 'value'},
             '/hub/login?next={{BASE_URL}}hub%2Fspawn%3Fparam%3Dvalue',
             '/hub/login?next={{BASE_URL}}hub%2Fspawn%3Fparam%3Dvalue',
         ),
@@ -64,15 +67,15 @@ async def test_submit_login_form(app, browser, user_special_chars):
             # login?param=fromlogin&next=encoded(/hub/spawn?param=value)
             # will drop parameters given to the login page, passing only the next url
             'login',
-            [('param', 'fromlogin'), ('next', '/hub/spawn?param=value')],
-            '/hub/login?param=fromlogin&next=%2Fhub%2Fspawn%3Fparam%3Dvalue',
-            '/hub/login?next=%2Fhub%2Fspawn%3Fparam%3Dvalue',
+            {'param': 'fromlogin', 'next': '/hub/spawn?param=value'},
+            '/hub/login?param=fromlogin&next={{BASE_URL}}hub%2Fspawn%3Fparam%3Dvalue',
+            '/hub/login?next={{BASE_URL}}hub%2Fspawn%3Fparam%3Dvalue',
         ),
         (
             # login?param=value&anotherparam=anothervalue
             # will drop parameters given to the login page, and use an empty next url
             'login',
-            [('param', 'value'), ('anotherparam', 'anothervalue')],
+            {'param': 'value', 'anotherparam': 'anothervalue'},
             '/hub/login?param=value&anotherparam=anothervalue',
             '/hub/login?next=',
         ),
@@ -80,7 +83,7 @@ async def test_submit_login_form(app, browser, user_special_chars):
             # login
             # simplest case, accessing the login URL, gives an empty next url
             'login',
-            [],
+            {},
             '/hub/login',
             '/hub/login?next=',
         ),
@@ -98,6 +101,8 @@ async def test_open_url_login(
     user = user_special_chars.user
     login_url = url_path_join(public_host(app), app.hub.base_url, url)
     await browser.goto(login_url)
+    if params.get("next"):
+        params["next"] = url_path_join(app.base_url, params["next"])
     url_new = url_path_join(public_host(app), app.hub.base_url, url_concat(url, params))
     print(url_new)
     await browser.goto(url_new)
@@ -853,12 +858,15 @@ async def test_oauth_page(
     oauth_client.allowed_scopes = sorted(roles.roles_to_scopes([service_role]))
     app.db.commit()
     # open the service url in the browser
-    service_url = url_path_join(public_url(app, service) + 'owhoami/?arg=x')
+    service_url = url_path_join(public_url(app, service), 'owhoami/?arg=x')
     await browser.goto(service_url)

-    expected_redirect_url = url_path_join(
-        app.base_url + f"services/{service.name}/oauth_callback"
-    )
+    if app.subdomain_host:
+        expected_redirect_url = url_path_join(
+            public_url(app, service), "oauth_callback"
+        )
+    else:
+        expected_redirect_url = url_path_join(service.prefix, "oauth_callback")
     expected_client_id = f"service-{service.name}"

     # decode the URL
@@ -1236,3 +1244,266 @@ async def test_start_stop_server_on_admin_page(
     await expect(browser.get_by_role("button", name="Spawn Page")).to_have_count(
         len(users_list)
     )
+
+
+@pytest.mark.parametrize(
+    "case",
+    [
+        "fresh",
+        "invalid",
+        "valid-prefix-invalid-root",
+        "valid-prefix-invalid-other-prefix",
+    ],
+)
+async def test_login_xsrf_initial_cookies(app, browser, case, username):
+    """Test that login works with various initial states for xsrf tokens
+
+    Page will be reloaded with correct values
+    """
+    hub_root = public_host(app)
+    hub_url = url_path_join(public_host(app), app.hub.base_url)
+    hub_parent = hub_url.rstrip("/").rsplit("/", 1)[0] + "/"
+    login_url = url_path_join(
+        hub_url, url_concat("login", {"next": url_path_join(app.base_url, "/hub/home")})
+    )
+    # start with all cookies cleared
+    await browser.context.clear_cookies()
+    if case == "invalid":
+        await browser.context.add_cookies(
+            [{"name": "_xsrf", "value": "invalid-hub-prefix", "url": hub_url}]
+        )
+    elif case.startswith("valid-prefix"):
+        if "invalid-root" in case:
+            invalid_url = hub_root
+        else:
+            invalid_url = hub_parent
+        await browser.goto(login_url)
+        # first visit sets valid xsrf cookie
+        cookies = await browser.context.cookies()
+        assert len(cookies) == 1
+        # second visit is also made with invalid xsrf on `/`
+        # handling of this behavior is undefined in HTTP itself!
+        # _either_ the invalid cookie on / is ignored
+        # _or_ both will be cleared
+        # currently, this test assumes the observed behavior,
+        # which is that the invalid cookie on `/` has _higher_ priority
+        await browser.context.add_cookies(
+            [{"name": "_xsrf", "value": "invalid-root", "url": invalid_url}]
+        )
+        cookies = await browser.context.cookies()
+        assert len(cookies) == 2
+
+    # after visiting page, cookies get re-established
+    await browser.goto(login_url)
+    cookies = await browser.context.cookies()
+    print(cookies)
+    cookie = cookies[0]
+    assert cookie['name'] == '_xsrf'
+    assert cookie["path"] == app.hub.base_url
+
+    # next page visit, cookies don't change
+    await browser.goto(login_url)
+    cookies_2 = await browser.context.cookies()
+    assert cookies == cookies_2
+    # login is successful
+    await login(browser, username, username)
+
+
+def _cookie_dict(cookie_list):
+    """Convert list of cookies to dict of the form
+
+    { 'path': {'key': {cookie} } }
+    """
+    cookie_dict = {}
+    for cookie in cookie_list:
+        path_cookies = cookie_dict.setdefault(cookie['path'], {})
+        path_cookies[cookie['name']] = cookie
+    return cookie_dict
+
+
+async def test_singleuser_xsrf(
+    app,
+    browser,
+    user,
+    create_user_with_scopes,
+    full_spawn,
+    named_servers,  # noqa: F811
+):
+    # full login process, checking XSRF handling
+    # start two servers
+    target_user = user
+    target_start = asyncio.ensure_future(target_user.spawn())
+
+    browser_user = create_user_with_scopes("self", "access:servers")
+    # login browser_user
+    login_url = url_path_join(public_host(app), app.hub.base_url, "login")
+    await browser.goto(login_url)
+    await login(browser, browser_user.name, browser_user.name)
+    # end up at single-user
+    await expect(browser).to_have_url(re.compile(rf".*/user/{browser_user.name}/.*"))
+    # wait for target user to start, too
+    await target_start
+    await app.proxy.add_user(target_user)
+
+    # visit target user, sets credentials for second server
+    await browser.goto(public_url(app, target_user))
+    await expect(browser).to_have_url(re.compile(r".*/oauth2/authorize"))
+    auth_button = browser.locator('//input[@type="submit"]')
+    await expect(auth_button).to_be_enabled()
+    await auth_button.click()
+    await expect(browser).to_have_url(re.compile(rf".*/user/{target_user.name}/.*"))
+
+    # at this point, we are on a page served by target_user,
+    # logged in as browser_user
+    # basic check that xsrf isolation works
+    cookies = await browser.context.cookies()
+    cookie_dict = _cookie_dict(cookies)
+    pprint.pprint(cookie_dict)
+
+    # we should have xsrf tokens for both singleuser servers and the hub
+    target_prefix = target_user.prefix
+    user_prefix = browser_user.prefix
+    hub_prefix = app.hub.base_url
+    assert target_prefix in cookie_dict
+    assert user_prefix in cookie_dict
+    assert hub_prefix in cookie_dict
+    target_xsrf = cookie_dict[target_prefix].get("_xsrf", {}).get("value")
+    assert target_xsrf
+    user_xsrf = cookie_dict[user_prefix].get("_xsrf", {}).get("value")
+    assert user_xsrf
+    hub_xsrf = cookie_dict[hub_prefix].get("_xsrf", {}).get("value")
+    assert hub_xsrf
+    assert hub_xsrf != target_xsrf
+    assert hub_xsrf != user_xsrf
+    assert target_xsrf != user_xsrf
+
+    # we are on a page served by target_user
+    # check that we can't access
+
+    async def fetch_user_page(path, params=None):
+        url = url_path_join(public_url(app, browser_user), path)
+        if params:
+            url = url_concat(url, params)
+        status = await browser.evaluate(
+            """
+            async (user_url) => {
+              try {
+                response = await fetch(user_url);
+              } catch (e) {
+                return 'error';
+              }
+              return response.status;
+            }
+            """,
+            url,
+        )
+        return status
+
+    if app.subdomain_host:
+        expected_status = 'error'
+    else:
+        expected_status = 403
+    status = await fetch_user_page("/api/contents")
+    assert status == expected_status
+    status = await fetch_user_page("/api/contents", params={"_xsrf": target_xsrf})
+    assert status == expected_status
+
+    if not app.subdomain_host:
+        expected_status = 200
+    status = await fetch_user_page("/api/contents", params={"_xsrf": user_xsrf})
+    assert status == expected_status
+
+    # check that we can't iframe the other user's page
+    async def iframe(src):
+        return await browser.evaluate(
+            """
+            async (src) => {
+              const frame = document.createElement("iframe");
+              frame.src = src;
+              return new Promise((resolve, reject) => {
+                frame.addEventListener("load", (event) => {
+                  if (frame.contentDocument) {
+                    resolve("got document!");
+                  } else {
+                    resolve("blocked")
+                  }
+                });
+                setTimeout(() => {
+                  // some browsers (firefox) never fire load event
+                  // despite spec appasrently stating it must always do so,
+                  // even for rejected frames
+                  resolve("timeout")
+                }, 3000)
+
+                document.body.appendChild(frame);
+              });
+            }
+            """,
+            src,
+        )
+
+    hub_iframe = await iframe(url_path_join(public_url(app), "hub/admin"))
+    assert hub_iframe in {"timeout", "blocked"}
+    user_iframe = await iframe(public_url(app, browser_user))
+    assert user_iframe in {"timeout", "blocked"}
+
+    # check that server page can still connect to its own kernels
+    token = target_user.new_api_token(scopes=["access:servers!user"])
+
+    async def test_kernel(kernels_url):
+        headers = {"Authorization": f"Bearer {token}"}
+        r = await async_requests.post(kernels_url, headers=headers)
+        r.raise_for_status()
+        kernel = r.json()
+        kernel_id = kernel["id"]
+        kernel_url = url_path_join(kernels_url, kernel_id)
+        kernel_ws_url = "ws" + url_path_join(kernel_url, "channels")[4:]
+        try:
+            result = await browser.evaluate(
+                """
+                async (ws_url) => {
+                  ws = new WebSocket(ws_url);
+                  finished = await new Promise((resolve, reject) => {
+                    ws.onerror = (err) => {
+                      reject(err);
+                    };
+                    ws.onopen = () => {
+                      resolve("ok");
+                    };
+                  });
+                  return finished;
+                }
+                """,
+                kernel_ws_url,
+            )
+        finally:
+            r = await async_requests.delete(kernel_url, headers=headers)
+            r.raise_for_status()
+        assert result == "ok"
+
+    kernels_url = url_path_join(public_url(app, target_user), "/api/kernels")
+    await test_kernel(kernels_url)
+
+    # final check: make sure named servers work.
+    # first, visit spawn page to launch server,
+    # will issue cookies, etc.
+    server_name = "named"
+    url = url_path_join(
+        public_host(app),
+        url_path_join(app.base_url, f"hub/spawn/{browser_user.name}/{server_name}"),
+    )
+    await browser.goto(url)
+    await expect(browser).to_have_url(
+        re.compile(rf".*/user/{browser_user.name}/{server_name}/.*")
+    )
+    # from named server URL, make sure we can talk to a kernel
+    token = browser_user.new_api_token(scopes=["access:servers!user"])
+    # named-server URL
+    kernels_url = url_path_join(
+        public_url(app, browser_user), server_name, "api/kernels"
+    )
+    await test_kernel(kernels_url)
+    # go back to user's own page, test again
+    # make sure we didn't break anything
+    await browser.goto(public_url(app, browser_user))
+    await test_kernel(url_path_join(public_url(app, browser_user), "api/kernels"))
@@ -502,8 +502,6 @@ def create_user_with_scopes(app, create_temp_role):
         return app.users[orm_user.id]

     yield temp_user_creator
-    for user in temp_users:
-        app.users.delete(user)


 @fixture
@@ -44,8 +44,8 @@ from .. import metrics, orm, roles
 from ..app import JupyterHub
 from ..auth import PAMAuthenticator
 from ..spawner import SimpleLocalProcessSpawner
-from ..utils import random_port, utcnow
-from .utils import async_requests, public_url, ssl_setup
+from ..utils import random_port, url_path_join, utcnow
+from .utils import AsyncSession, public_url, ssl_setup


 def mock_authenticate(username, password, service, encoding):
@@ -243,6 +243,8 @@ class MockHub(JupyterHub):
         cert_location = kwargs['internal_certs_location']
         kwargs['external_certs'] = ssl_setup(cert_location, 'hub-ca')
         super().__init__(*args, **kwargs)
+        if 'allow_all' not in self.config.Authenticator:
+            self.config.Authenticator.allow_all = True

     @default('subdomain_host')
     def _subdomain_host_default(self):
@@ -372,29 +374,32 @@ class MockHub(JupyterHub):
     async def login_user(self, name):
         """Login a user by name, returning her cookies."""
         base_url = public_url(self)
-        external_ca = None
+        s = AsyncSession()
         if self.internal_ssl:
-            external_ca = self.external_certs['files']['ca']
+            s.verify = self.external_certs['files']['ca']
         login_url = base_url + 'hub/login'
-        r = await async_requests.get(login_url)
+        r = await s.get(login_url)
         r.raise_for_status()
         xsrf = r.cookies['_xsrf']

-        r = await async_requests.post(
+        r = await s.post(
             url_concat(login_url, {"_xsrf": xsrf}),
-            cookies=r.cookies,
             data={'username': name, 'password': name},
             allow_redirects=False,
-            verify=external_ca,
         )
         r.raise_for_status()
-        r.cookies["_xsrf"] = xsrf
-        assert sorted(r.cookies.keys()) == [
+        # make second request to get updated xsrf cookie
+        r2 = await s.get(
+            url_path_join(base_url, "hub/home"),
+            allow_redirects=False,
+        )
+        assert r2.status_code == 200
+        assert sorted(s.cookies.keys()) == [
            '_xsrf',
            'jupyterhub-hub-login',
            'jupyterhub-session-id',
         ]
-        return r.cookies
+        return s.cookies


 class InstrumentedSpawner(MockSpawner):
@@ -99,7 +99,7 @@ async def test_post_content_type(app, content_type, status):
     assert r.status_code == status


-@mark.parametrize("xsrf_in_url", [True, False])
+@mark.parametrize("xsrf_in_url", [True, False, "invalid"])
 @mark.parametrize(
     "method, path",
     [
@@ -110,6 +110,13 @@ async def test_post_content_type(app, content_type, status):
 async def test_xsrf_check(app, username, method, path, xsrf_in_url):
     cookies = await app.login_user(username)
     xsrf = cookies['_xsrf']
+    if xsrf_in_url == "invalid":
+        cookies.pop("_xsrf")
+        # a valid old-style tornado xsrf token is no longer valid
+        xsrf = cookies['_xsrf'] = (
+            "2|7329b149|d837ced983e8aac7468bc7a61ce3d51a|1708610065"
+        )
+
     url = path.format(username=username)
     if xsrf_in_url:
         url = f"{url}?_xsrf={xsrf}"
@@ -120,7 +127,7 @@ async def test_xsrf_check(app, username, method, path, xsrf_in_url):
         noauth=True,
         cookies=cookies,
     )
-    if xsrf_in_url:
+    if xsrf_in_url is True:
         assert r.status_code == 200
     else:
         assert r.status_code == 403
@@ -153,7 +160,7 @@ async def test_permission_error_messages(app, user, auth, expected_message):
         params["_xsrf"] = cookies["_xsrf"]
     if auth == "cookie_xsrf_mismatch":
         params["_xsrf"] = "somethingelse"
+        headers['Sec-Fetch-Mode'] = 'cors'
     r = await async_requests.get(url, **kwargs)
     assert r.status_code == 403
     response = r.json()
@@ -475,6 +475,7 @@ async def test_user_creation(tmpdir, request):
     ]

     cfg = Config()
+    cfg.Authenticator.allow_all = False
     cfg.Authenticator.allowed_users = allowed_users
     cfg.JupyterHub.load_groups = groups
     cfg.JupyterHub.load_roles = roles
@@ -3,12 +3,13 @@
 # Copyright (c) Jupyter Development Team.
 # Distributed under the terms of the Modified BSD License.
 import logging
+from itertools import chain
 from unittest import mock
 from urllib.parse import urlparse

 import pytest
 from requests import HTTPError
-from traitlets import Any
+from traitlets import Any, Tuple
 from traitlets.config import Config

 from jupyterhub import auth, crypto, orm
@@ -18,7 +19,7 @@ from .utils import add_user, async_requests, get_page, public_url


 async def test_pam_auth():
-    authenticator = MockPAMAuthenticator()
+    authenticator = MockPAMAuthenticator(allow_all=True)
     authorized = await authenticator.get_authenticated_user(
         None, {'username': 'match', 'password': 'match'}
     )
@@ -37,7 +38,7 @@ async def test_pam_auth():


 async def test_pam_auth_account_check_disabled():
-    authenticator = MockPAMAuthenticator(check_account=False)
+    authenticator = MockPAMAuthenticator(allow_all=True, check_account=False)
     authorized = await authenticator.get_authenticated_user(
         None, {'username': 'allowedmatch', 'password': 'allowedmatch'}
     )
@@ -82,7 +83,9 @@ async def test_pam_auth_admin_groups():
         return user_group_map[name]

     authenticator = MockPAMAuthenticator(
-        admin_groups={'jh_admins', 'wheel'}, admin_users={'override_admin'}
+        admin_groups={'jh_admins', 'wheel'},
+        admin_users={'override_admin'},
+        allow_all=True,
     )

     # Check admin_group applies as expected
@@ -141,7 +144,10 @@ async def test_pam_auth_admin_groups():
|
|||||||
|
|
||||||
|
|
||||||
async def test_pam_auth_allowed():
|
async def test_pam_auth_allowed():
|
||||||
authenticator = MockPAMAuthenticator(allowed_users={'wash', 'kaylee'})
|
authenticator = MockPAMAuthenticator(
|
||||||
|
allowed_users={'wash', 'kaylee'}, allow_all=False
|
||||||
|
)
|
||||||
|
|
||||||
authorized = await authenticator.get_authenticated_user(
|
authorized = await authenticator.get_authenticated_user(
|
||||||
None, {'username': 'kaylee', 'password': 'kaylee'}
|
None, {'username': 'kaylee', 'password': 'kaylee'}
|
||||||
)
|
)
|
||||||
@@ -162,7 +168,7 @@ async def test_pam_auth_allowed_groups():
|
|||||||
def getgrnam(name):
|
def getgrnam(name):
|
||||||
return MockStructGroup('grp', ['kaylee'])
|
return MockStructGroup('grp', ['kaylee'])
|
||||||
|
|
||||||
authenticator = MockPAMAuthenticator(allowed_groups={'group'})
|
authenticator = MockPAMAuthenticator(allowed_groups={'group'}, allow_all=False)
|
||||||
|
|
||||||
with mock.patch.object(authenticator, '_getgrnam', getgrnam):
|
with mock.patch.object(authenticator, '_getgrnam', getgrnam):
|
||||||
authorized = await authenticator.get_authenticated_user(
|
authorized = await authenticator.get_authenticated_user(
|
||||||
@@ -179,14 +185,14 @@ async def test_pam_auth_allowed_groups():
|
|||||||
|
|
||||||
async def test_pam_auth_blocked():
|
async def test_pam_auth_blocked():
|
||||||
# Null case compared to next case
|
# Null case compared to next case
|
||||||
authenticator = MockPAMAuthenticator()
|
authenticator = MockPAMAuthenticator(allow_all=True)
|
||||||
authorized = await authenticator.get_authenticated_user(
|
authorized = await authenticator.get_authenticated_user(
|
||||||
None, {'username': 'wash', 'password': 'wash'}
|
None, {'username': 'wash', 'password': 'wash'}
|
||||||
)
|
)
|
||||||
assert authorized['name'] == 'wash'
|
assert authorized['name'] == 'wash'
|
||||||
|
|
||||||
# Blacklist basics
|
# Blocklist basics
|
||||||
authenticator = MockPAMAuthenticator(blocked_users={'wash'})
|
authenticator = MockPAMAuthenticator(blocked_users={'wash'}, allow_all=True)
|
||||||
authorized = await authenticator.get_authenticated_user(
|
authorized = await authenticator.get_authenticated_user(
|
||||||
None, {'username': 'wash', 'password': 'wash'}
|
None, {'username': 'wash', 'password': 'wash'}
|
||||||
)
|
)
|
||||||
@@ -194,7 +200,9 @@ async def test_pam_auth_blocked():
|
|||||||
|
|
||||||
# User in both allowed and blocked: default deny. Make error someday?
|
# User in both allowed and blocked: default deny. Make error someday?
|
||||||
authenticator = MockPAMAuthenticator(
|
authenticator = MockPAMAuthenticator(
|
||||||
blocked_users={'wash'}, allowed_users={'wash', 'kaylee'}
|
blocked_users={'wash'},
|
||||||
|
allowed_users={'wash', 'kaylee'},
|
||||||
|
allow_all=True,
|
||||||
)
|
)
|
||||||
authorized = await authenticator.get_authenticated_user(
|
authorized = await authenticator.get_authenticated_user(
|
||||||
None, {'username': 'wash', 'password': 'wash'}
|
None, {'username': 'wash', 'password': 'wash'}
|
||||||
@@ -203,7 +211,8 @@ async def test_pam_auth_blocked():
|
|||||||
|
|
||||||
# User not in blocked set can log in
|
# User not in blocked set can log in
|
||||||
authenticator = MockPAMAuthenticator(
|
authenticator = MockPAMAuthenticator(
|
||||||
blocked_users={'wash'}, allowed_users={'wash', 'kaylee'}
|
blocked_users={'wash'},
|
||||||
|
allowed_users={'wash', 'kaylee'},
|
||||||
)
|
)
|
||||||
authorized = await authenticator.get_authenticated_user(
|
authorized = await authenticator.get_authenticated_user(
|
||||||
None, {'username': 'kaylee', 'password': 'kaylee'}
|
None, {'username': 'kaylee', 'password': 'kaylee'}
|
||||||
@@ -221,7 +230,8 @@ async def test_pam_auth_blocked():
|
|||||||
|
|
||||||
# User in neither list
|
# User in neither list
|
||||||
authenticator = MockPAMAuthenticator(
|
authenticator = MockPAMAuthenticator(
|
||||||
blocked_users={'mal'}, allowed_users={'wash', 'kaylee'}
|
blocked_users={'mal'},
|
||||||
|
allowed_users={'wash', 'kaylee'},
|
||||||
)
|
)
|
||||||
authorized = await authenticator.get_authenticated_user(
|
authorized = await authenticator.get_authenticated_user(
|
||||||
None, {'username': 'simon', 'password': 'simon'}
|
None, {'username': 'simon', 'password': 'simon'}
|
||||||
@@ -257,7 +267,9 @@ async def test_deprecated_signatures():
|
|||||||
|
|
||||||
|
|
||||||
async def test_pam_auth_no_such_group():
|
async def test_pam_auth_no_such_group():
|
||||||
authenticator = MockPAMAuthenticator(allowed_groups={'nosuchcrazygroup'})
|
authenticator = MockPAMAuthenticator(
|
||||||
|
allowed_groups={'nosuchcrazygroup'},
|
||||||
|
)
|
||||||
authorized = await authenticator.get_authenticated_user(
|
authorized = await authenticator.get_authenticated_user(
|
||||||
None, {'username': 'kaylee', 'password': 'kaylee'}
|
None, {'username': 'kaylee', 'password': 'kaylee'}
|
||||||
)
|
)
|
||||||
@@ -405,7 +417,7 @@ async def test_auth_state_disabled(app, auth_state_unavailable):
|
|||||||
|
|
||||||
|
|
||||||
async def test_normalize_names():
|
async def test_normalize_names():
|
||||||
a = MockPAMAuthenticator()
|
a = MockPAMAuthenticator(allow_all=True)
|
||||||
authorized = await a.get_authenticated_user(
|
authorized = await a.get_authenticated_user(
|
||||||
None, {'username': 'ZOE', 'password': 'ZOE'}
|
None, {'username': 'ZOE', 'password': 'ZOE'}
|
||||||
)
|
)
|
||||||
@@ -428,7 +440,7 @@ async def test_normalize_names():
|
|||||||
|
|
||||||
|
|
||||||
async def test_username_map():
|
async def test_username_map():
|
||||||
a = MockPAMAuthenticator(username_map={'wash': 'alpha'})
|
a = MockPAMAuthenticator(username_map={'wash': 'alpha'}, allow_all=True)
|
||||||
authorized = await a.get_authenticated_user(
|
authorized = await a.get_authenticated_user(
|
||||||
None, {'username': 'WASH', 'password': 'WASH'}
|
None, {'username': 'WASH', 'password': 'WASH'}
|
||||||
)
|
)
|
||||||
@@ -458,7 +470,7 @@ async def test_post_auth_hook():
|
|||||||
authentication['testkey'] = 'testvalue'
|
authentication['testkey'] = 'testvalue'
|
||||||
return authentication
|
return authentication
|
||||||
|
|
||||||
a = MockPAMAuthenticator(post_auth_hook=test_auth_hook)
|
a = MockPAMAuthenticator(allow_all=True, post_auth_hook=test_auth_hook)
|
||||||
|
|
||||||
authorized = await a.get_authenticated_user(
|
authorized = await a.get_authenticated_user(
|
||||||
None, {'username': 'test_user', 'password': 'test_user'}
|
None, {'username': 'test_user', 'password': 'test_user'}
|
||||||
@@ -566,6 +578,7 @@ async def test_auth_managed_groups(
|
|||||||
parent=app,
|
parent=app,
|
||||||
authenticated_groups=authenticated_groups,
|
authenticated_groups=authenticated_groups,
|
||||||
refresh_groups=refresh_groups,
|
refresh_groups=refresh_groups,
|
||||||
|
allow_all=True,
|
||||||
)
|
)
|
||||||
|
|
||||||
user.groups.append(group)
|
user.groups.append(group)
|
||||||
@@ -1019,3 +1032,193 @@ async def test_auth_manage_roles_description_handling(app, user, role_spec, expe
     assert not app.db.dirty
     roles = {role.name: role for role in user.roles}
     assert roles[name].description == expected
+
+
+@pytest.mark.parametrize(
+    "allowed_users, allow_existing_users",
+    [
+        ('specified', True),
+        ('', False),
+    ],
+)
+async def test_allow_defaults(app, user, allowed_users, allow_existing_users):
+    if allowed_users:
+        allowed_users = set(allowed_users.split(','))
+    else:
+        allowed_users = set()
+    authenticator = auth.Authenticator(allowed_users=allowed_users)
+    authenticator.authenticate = lambda handler, data: data["username"]
+    assert authenticator.allow_all is False
+    assert authenticator.allow_existing_users == allow_existing_users
+
+    # user was already in the database
+    # this happens during hub startup
+    authenticator.add_user(user)
+    if allowed_users:
+        assert user.name in authenticator.allowed_users
+    else:
+        authenticator.allowed_users == set()
+
+    specified_allowed = await authenticator.get_authenticated_user(
+        None, {"username": "specified"}
+    )
+    if "specified" in allowed_users:
+        assert specified_allowed is not None
+    else:
+        assert specified_allowed is None
+
+    existing_allowed = await authenticator.get_authenticated_user(
+        None, {"username": user.name}
+    )
+    if allow_existing_users:
+        assert existing_allowed is not None
+    else:
+        assert existing_allowed is None
+
+
+@pytest.mark.parametrize("allow_all", [None, True, False])
+@pytest.mark.parametrize("allow_existing_users", [None, True, False])
+@pytest.mark.parametrize("allowed_users", ["existing", ""])
+def test_allow_existing_users(
+    app, user, allowed_users, allow_all, allow_existing_users
+):
+    if allowed_users:
+        allowed_users = set(allowed_users.split(','))
+    else:
+        allowed_users = set()
+    authenticator = auth.Authenticator(
+        allowed_users=allowed_users,
+    )
+    if allow_all is None:
+        # default allow_all
+        allow_all = authenticator.allow_all
+    else:
+        authenticator.allow_all = allow_all
+    if allow_existing_users is None:
+        # default allow_all
+        allow_existing_users = authenticator.allow_existing_users
+    else:
+        authenticator.allow_existing_users = allow_existing_users
+
+    # first, nobody in the database
+    assert authenticator.check_allowed("newuser") == allow_all
+
+    # user was already in the database
+    # this happens during hub startup
+    authenticator.add_user(user)
+    if allow_existing_users or allow_all:
+        assert authenticator.check_allowed(user.name)
+    else:
+        assert not authenticator.check_allowed(user.name)
+    for username in allowed_users:
+        assert authenticator.check_allowed(username)
+
+    assert authenticator.check_allowed("newuser") == allow_all
+
+
+@pytest.mark.parametrize("allow_all", [True, False])
+@pytest.mark.parametrize("allow_existing_users", [True, False])
+def test_allow_existing_users_first_time(user, allow_all, allow_existing_users):
+    # make sure that calling add_user doesn't change results
+    authenticator = auth.Authenticator(
+        allow_all=allow_all,
+        allow_existing_users=allow_existing_users,
+    )
+    allowed_before_one = authenticator.check_allowed(user.name)
+    allowed_before_two = authenticator.check_allowed("newuser")
+    # add_user is called after successful login
+    # it shouldn't change results (e.g. by switching .allowed_users from empty to non-empty)
+    if allowed_before_one:
+        authenticator.add_user(user)
+    assert authenticator.check_allowed(user.name) == allowed_before_one
+    assert authenticator.check_allowed("newuser") == allowed_before_two
+
+
+class AllowAllIgnoringAuthenticator(auth.Authenticator):
+    """Test authenticator with custom check_allowed
+
+    not updated for allow_all, allow_existing_users
+
+    Make sure new config doesn't break backward-compatibility
+    or grant unintended access for Authenticators written before JupyterHub 5.
+    """
+
+    allowed_letters = Tuple(config=True, help="Initial letters to allow")
+
+    def authenticate(self, handler, data):
+        return {"name": data["username"]}
+
+    def check_allowed(self, username, auth=None):
+        if not self.allowed_users and not self.allowed_letters:
+            # this subclass doesn't know about the JupyterHub 5 allow_all config
+            # no allow config, allow all!
+            return True
+        if self.allowed_users and username in self.allowed_users:
+            return True
+        if self.allowed_letters and username.startswith(self.allowed_letters):
+            return True
+        return False
+
+
+# allow_all is not recognized by Authenticator subclass
+# make sure it doesn't make anything more permissive, at least
+@pytest.mark.parametrize("allow_all", [True, False])
+@pytest.mark.parametrize(
+    "allowed_users, allowed_letters, allow_existing_users, allowed, not_allowed",
+    [
+        ("", "", None, "anyone,should-be,allowed,existing", ""),
+        ("", "a,b", None, "alice,bebe", "existing,other"),
+        ("", "a,b", False, "alice,bebe", "existing,other"),
+        ("", "a,b", True, "alice,bebe,existing", "other"),
+        ("specified", "a,b", None, "specified,alice,bebe,existing", "other"),
+        ("specified", "a,b", False, "specified,alice,bebe", "existing,other"),
+        ("specified", "a,b", True, "specified,alice,bebe,existing", "other"),
+    ],
+)
+async def test_authenticator_without_allow_all(
+    app,
+    allowed_users,
+    allowed_letters,
+    allow_existing_users,
+    allowed,
+    not_allowed,
+    allow_all,
+):
+    kwargs = {}
+    if allow_all is not None:
+        kwargs["allow_all"] = allow_all
+    if allow_existing_users is not None:
+        kwargs["allow_existing_users"] = allow_existing_users
+    if allowed_users:
+        kwargs["allowed_users"] = set(allowed_users.split(','))
+    if allowed_letters:
+        kwargs["allowed_letters"] = tuple(allowed_letters.split(','))
+
+    authenticator = AllowAllIgnoringAuthenticator(**kwargs)
+
+    # load one user from db
+    existing_user = add_user(app.db, app, name="existing")
+    authenticator.add_user(existing_user)
+
+    if allowed:
+        allowed = allowed.split(",")
+    if not_allowed:
+        not_allowed = not_allowed.split(",")
+
+    expected_allowed = sorted(allowed)
+    expected_not_allowed = sorted(not_allowed)
+    to_check = list(chain(expected_allowed, expected_not_allowed))
+    if allow_all:
+        expected_allowed = to_check
+        expected_not_allowed = []
+
+    are_allowed = []
+    are_not_allowed = []
+    for username in to_check:
+        if await authenticator.get_authenticated_user(None, {"username": username}):
+            are_allowed.append(username)
+        else:
+            are_not_allowed.append(username)
+
+    assert are_allowed == expected_allowed
+    assert are_not_allowed == expected_not_allowed
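The tests above exercise the explicit allow configuration (allow_all, allowed_users, allow_existing_users) that an Authenticator consults in check_allowed. A minimal sketch of how a deployment might set the same traits, assuming a standard jupyterhub_config.py; the option names come from the diff above, while the specific usernames are placeholders:

    # jupyterhub_config.py -- a minimal sketch, assuming JupyterHub 5-style allow config
    c = get_config()  # noqa: provided by JupyterHub when it loads this file

    # nobody is allowed unless some allow_* rule matches
    c.Authenticator.allow_all = False
    # explicitly allowed usernames (placeholders)
    c.Authenticator.allowed_users = {"wash", "kaylee"}
    # also keep allowing users already present in the Hub database
    c.Authenticator.allow_existing_users = True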
@@ -213,7 +213,9 @@ async def test_spawn_handler_access(app):
     r.raise_for_status()


-@pytest.mark.parametrize("has_access", ["all", "user", "group", False])
+@pytest.mark.parametrize(
+    "has_access", ["all", "user", (pytest.param("group", id="in-group")), False]
+)
 async def test_spawn_other_user(
     app, user, username, group, create_temp_role, has_access
 ):

@@ -300,7 +302,9 @@ async def test_spawn_page_falsy_callable(app):
     assert history[1] == ujoin(public_url(app), "hub/spawn-pending/erik")


-@pytest.mark.parametrize("has_access", ["all", "user", "group", False])
+@pytest.mark.parametrize(
+    "has_access", ["all", "user", (pytest.param("group", id="in-group")), False]
+)
 async def test_spawn_page_access(
     app, has_access, group, username, user, create_temp_role
 ):

@@ -403,7 +407,9 @@ async def test_spawn_form(app):
     }


-@pytest.mark.parametrize("has_access", ["all", "user", "group", False])
+@pytest.mark.parametrize(
+    "has_access", ["all", "user", (pytest.param("group", id="in-group")), False]
+)
 async def test_spawn_form_other_user(
     app, username, user, group, create_temp_role, has_access
 ):

@@ -624,7 +630,9 @@ async def test_user_redirect_hook(app, username):
     assert redirected_url.path == ujoin(app.base_url, 'user', username, 'terminals/1')


-@pytest.mark.parametrize("has_access", ["all", "user", "group", False])
+@pytest.mark.parametrize(
+    "has_access", ["all", "user", (pytest.param("group", id="in-group")), False]
+)
 async def test_other_user_url(app, username, user, group, create_temp_role, has_access):
     """Test accessing /user/someonelse/ URLs when the server is not running


@@ -685,11 +693,10 @@ async def test_other_user_url(app, username, user, group, create_temp_role, has_
     ],
 )
 async def test_page_with_token(app, user, url, token_in):
-    cookies = await app.login_user(user.name)
     token = user.new_api_token()
     if token_in == "url":
         url = url_concat(url, {"token": token})
-        headers = None
+        headers = {}
     elif token_in == "header":
         headers = {
             "Authorization": f"token {token}",

@@ -734,14 +741,13 @@ async def test_login_strip(app, form_user, auth_user, form_password):
     """Test that login form strips space form usernames, but not passwords"""
     form_data = {"username": form_user, "password": form_password}
     expected_auth = {"username": auth_user, "password": form_password}
-    base_url = public_url(app)
     called_with = []

     async def mock_authenticate(handler, data):
         called_with.append(data)

     with mock.patch.object(app.authenticator, 'authenticate', mock_authenticate):
-        r = await async_requests.get(base_url + 'hub/login')
+        r = await get_page('login', app)
         r.raise_for_status()
         cookies = r.cookies
         xsrf = cookies['_xsrf']

@@ -922,17 +928,19 @@ async def test_auto_login(app, request):
 async def test_auto_login_logout(app):
     name = 'burnham'
     cookies = await app.login_user(name)
+    s = AsyncSession()
+    s.cookies = cookies

     with mock.patch.dict(
         app.tornado_settings, {'authenticator': Authenticator(auto_login=True)}
     ):
-        r = await async_requests.get(
+        r = await s.get(
             public_host(app) + app.tornado_settings['logout_url'], cookies=cookies
         )
     r.raise_for_status()
     logout_url = public_host(app) + app.tornado_settings['logout_url']
     assert r.url == logout_url
-    assert r.cookies == {}
+    assert list(s.cookies.keys()) == ["_xsrf"]
     # don't include logged-out user in page:
     try:
         idx = r.text.index(name)

@@ -946,19 +954,23 @@ async def test_auto_login_logout(app):
 async def test_logout(app):
     name = 'wash'
     cookies = await app.login_user(name)
-    r = await async_requests.get(
-        public_host(app) + app.tornado_settings['logout_url'], cookies=cookies
+    s = AsyncSession()
+    s.cookies = cookies
+    r = await s.get(
+        public_host(app) + app.tornado_settings['logout_url'],
     )
     r.raise_for_status()
     login_url = public_host(app) + app.tornado_settings['login_url']
     assert r.url == login_url
-    assert r.cookies == {}
+    assert list(s.cookies.keys()) == ["_xsrf"]


 @pytest.mark.parametrize('shutdown_on_logout', [True, False])
 async def test_shutdown_on_logout(app, shutdown_on_logout):
     name = 'shutitdown'
     cookies = await app.login_user(name)
+    s = AsyncSession()
+    s.cookies = cookies
     user = app.users[name]

     # start the user's server

@@ -978,14 +990,14 @@ async def test_shutdown_on_logout(app, shutdown_on_logout):
     with mock.patch.dict(
         app.tornado_settings, {'shutdown_on_logout': shutdown_on_logout}
     ):
-        r = await async_requests.get(
+        r = await s.get(
             public_host(app) + app.tornado_settings['logout_url'], cookies=cookies
         )
     r.raise_for_status()

     login_url = public_host(app) + app.tornado_settings['login_url']
     assert r.url == login_url
-    assert r.cookies == {}
+    assert list(s.cookies.keys()) == ["_xsrf"]

     # wait for any pending state to resolve
     for i in range(50):
@@ -86,17 +86,9 @@ async def test_hubauth_token(app, mockservice_url, create_user_with_scopes):
     sub_reply = {key: reply.get(key, 'missing') for key in ['name', 'admin']}
     assert sub_reply == {'name': u.name, 'admin': False}

-    # token in ?token parameter
+    # token in ?token parameter is not allowed by default
     r = await async_requests.get(
-        public_url(app, mockservice_url) + '/whoami/?token=%s' % token
-    )
-    r.raise_for_status()
-    reply = r.json()
-    sub_reply = {key: reply.get(key, 'missing') for key in ['name', 'admin']}
-    assert sub_reply == {'name': u.name, 'admin': False}
-
-    r = await async_requests.get(
-        public_url(app, mockservice_url) + '/whoami/?token=no-such-token',
+        public_url(app, mockservice_url) + '/whoami/?token=%s' % token,
         allow_redirects=False,
     )
     assert r.status_code == 302

@@ -180,21 +172,9 @@ async def test_hubauth_service_token(request, app, mockservice_url, scopes, allo
     else:
         assert r.status_code == 403

-    # token in ?token parameter
+    # token in ?token parameter is not allowed by default
     r = await async_requests.get(
-        public_url(app, mockservice_url) + 'whoami/?token=%s' % token
-    )
-    if allowed:
-        r.raise_for_status()
-        assert r.status_code == 200
-        reply = r.json()
-        assert service_model.items() <= reply.items()
-        assert not r.cookies
-    else:
-        assert r.status_code == 403
-
-    r = await async_requests.get(
-        public_url(app, mockservice_url) + 'whoami/?token=no-such-token',
+        public_url(app, mockservice_url) + 'whoami/?token=%s' % token,
         allow_redirects=False,
     )
     assert r.status_code == 302
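The two hunks above stop treating a ?token= URL parameter as an accepted login for Hub-authenticated services; the remaining request only verifies that such a request is redirected (302). A minimal sketch of the header-based form these tests rely on instead, reusing this suite's async_requests/public_url helpers and the "token <value>" Authorization scheme that appears elsewhere in the diff:

    # a minimal sketch; u, token, app, and mockservice_url come from the surrounding test fixtures
    r = await async_requests.get(
        public_url(app, mockservice_url) + '/whoami/',
        headers={"Authorization": f"token {token}"},
    )
    r.raise_for_status()
    assert r.json()['name'] == u.name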
@@ -385,20 +365,14 @@ async def test_oauth_service_roles(

     # token-authenticated request to HubOAuth
     token = app.users[name].new_api_token()
-    # token in ?token parameter
-    r = await async_requests.get(url_concat(url, {'token': token}))
+    s.headers["Authorization"] = f"Bearer {token}"
+    r = await async_requests.get(url, headers=s.headers)
     r.raise_for_status()
     reply = r.json()
     assert reply['name'] == name

-    # verify that ?token= requests set a cookie
-    assert len(r.cookies) != 0
-    # ensure cookie works in future requests
-    r = await async_requests.get(url, cookies=r.cookies, allow_redirects=False)
-    r.raise_for_status()
-    assert r.url == url
-    reply = r.json()
-    assert reply['name'] == name
+    # tokens in headers don't set cookies
+    assert len(r.cookies) == 0


 @pytest.mark.parametrize(

@@ -578,9 +552,8 @@ async def test_oauth_cookie_collision(
     else:
         raise ValueError(f"finish_first should be 1 or 2, not {finish_first!r}")
     # submit the oauth form to complete authorization
-    r = await s.post(
-        oauth.url, data={'scopes': ['identify'], "_xsrf": s.cookies["_xsrf"]}
-    )
+    hub_xsrf = s.cookies.get("_xsrf", path=app.hub.base_url)
+    r = await s.post(oauth.url, data={'scopes': ['identify'], "_xsrf": hub_xsrf})
     r.raise_for_status()
     assert r.url == expected_url
     # after finishing, state cookies are all cleared

@@ -596,9 +569,7 @@ async def test_oauth_cookie_collision(
     assert service_cookie

     # finish other oauth
-    r = await s.post(
-        second_oauth.url, data={'scopes': ['identify'], "_xsrf": s.cookies["_xsrf"]}
-    )
+    r = await s.post(second_oauth.url, data={'scopes': ['identify'], "_xsrf": hub_xsrf})
     r.raise_for_status()

     # second oauth doesn't complete,

@@ -664,7 +635,7 @@ async def test_oauth_logout(app, mockservice_url, create_user_with_scopes):
     r = await s.get(public_url(app, path='hub/logout'))
     r.raise_for_status()
     # verify that all cookies other than the service cookie are cleared
-    assert sorted(s.cookies.keys()) == ["_xsrf", service_cookie_name]
+    assert sorted(set(s.cookies.keys())) == ["_xsrf", service_cookie_name]
     # verify that clearing session id invalidates service cookie
     # i.e. redirect back to login page
     r = await s.get(url)

@@ -159,7 +159,9 @@ def expand_scopes(scope_str, user, group=None, share_with=None):
     return scopes


-@pytest.mark.parametrize("share_with", ["user", "group"])
+@pytest.mark.parametrize(
+    "share_with", ["user", pytest.param("group", id="share_with=group")]
+)
 def test_create_share(app, user, share_user, group, share_with):
     db = app.db
     spawner = user.spawner.orm_spawner

@@ -427,7 +429,7 @@ def test_share_code_expires(app, user, share_user):
     "kind",
     [
         ("user"),
-        ("group"),
+        (pytest.param("group", id="kind=group")),
     ],
 )
 async def test_shares_api_user_group_doesnt_exist(
@@ -2,6 +2,7 @@

 import os
 import sys
+import warnings
 from contextlib import nullcontext
 from pathlib import Path
 from pprint import pprint

@@ -75,18 +76,20 @@ async def test_singleuser_auth(
     spawner = user.spawners[server_name]
     url = url_path_join(public_url(app, user), server_name)

+    s = AsyncSession()
+
     # no cookies, redirects to login page
-    r = await async_requests.get(url)
+    r = await s.get(url)
     r.raise_for_status()
     assert '/hub/login' in r.url

     # unauthenticated /api/ should 403, not redirect
     api_url = url_path_join(url, "api/status")
-    r = await async_requests.get(api_url, allow_redirects=False)
+    r = await s.get(api_url, allow_redirects=False)
     assert r.status_code == 403

     # with cookies, login successful
-    r = await async_requests.get(url, cookies=cookies)
+    r = await s.get(url, cookies=cookies)
     r.raise_for_status()
     assert (
         urlparse(r.url)

@@ -100,7 +103,7 @@ async def test_singleuser_auth(
     assert r.status_code == 200

     # logout
-    r = await async_requests.get(url_path_join(url, 'logout'), cookies=cookies)
+    r = await s.get(url_path_join(url, 'logout'))
     assert len(r.cookies) == 0

     # accessing another user's server hits the oauth confirmation page

@@ -149,6 +152,8 @@ async def test_singleuser_auth(
 async def test_disable_user_config(request, app, tmp_path, full_spawn):
     # login, start the server
     cookies = await app.login_user('nandy')
+    s = AsyncSession()
+    s.cookies = cookies
     user = app.users['nandy']
     # stop spawner, if running:
     if user.running:

@@ -180,10 +185,11 @@ async def test_disable_user_config(request, app, tmp_path, full_spawn):
     url = public_url(app, user)

     # with cookies, login successful
-    r = await async_requests.get(url, cookies=cookies)
+    r = await s.get(url)
     r.raise_for_status()
     assert r.url.endswith('/user/nandy/jupyterhub-test-info')
     assert r.status_code == 200
+
     info = r.json()
     pprint(info)
     assert info['disable_user_config']

@@ -286,6 +292,57 @@ async def test_notebook_dir(
     raise ValueError(f"No contents check for {notebook_dir=}")


+@pytest.mark.parametrize("extension", [True, False])
+@pytest.mark.skipif(IS_JUPYVERSE, reason="jupyverse has no auth configuration")
+async def test_forbid_unauthenticated_access(
+    request, app, tmp_path, user, full_spawn, extension
+):
+    try:
+        from jupyter_server.auth.decorator import allow_unauthenticated  # noqa
+    except ImportError:
+        pytest.skip("needs jupyter-server 2.13")
+
+    from jupyter_server.utils import JupyterServerAuthWarning
+
+    # login, start the server
+    cookies = await app.login_user('nandy')
+    s = AsyncSession()
+    s.cookies = cookies
+    user = app.users['nandy']
+    # stop spawner, if running:
+    if user.running:
+        await user.stop()
+    # start with new config:
+    user.spawner.default_url = "/jupyterhub-test-info"
+
+    if extension:
+        user.spawner.environment["JUPYTERHUB_SINGLEUSER_EXTENSION"] = "1"
+    else:
+        user.spawner.environment["JUPYTERHUB_SINGLEUSER_EXTENSION"] = "0"
+
+    # make sure it's resolved to start
+    tmp_path = tmp_path.resolve()
+    real_home_dir = tmp_path / "realhome"
+    real_home_dir.mkdir()
+    # make symlink to test resolution
+    home_dir = tmp_path / "home"
+    home_dir.symlink_to(real_home_dir)
+    # home_dir is defined on SimpleSpawner
+    user.spawner.home_dir = str(home_dir)
+    jupyter_config_dir = home_dir / ".jupyter"
+    jupyter_config_dir.mkdir()
+    # verify config paths
+    with (jupyter_config_dir / "jupyter_server_config.py").open("w") as f:
+        f.write("c.ServerApp.allow_unauthenticated_access = False")
+
+    # If there are core endpoints (added by jupyterhub) without decorators,
+    # spawn will error out. If there are extension endpoints without decorators
+    # these will be logged as warnings.
+    with warnings.catch_warnings():
+        warnings.simplefilter("error", JupyterServerAuthWarning)
+        await user.spawn()
+
+
 @pytest.mark.skipif(IS_JUPYVERSE, reason="jupyverse has no --help-all")
 def test_help_output():
     out = check_output(

@@ -385,20 +442,31 @@ async def test_nbclassic_control_panel(app, user, full_spawn):
 @pytest.mark.skipif(
     IS_JUPYVERSE, reason="jupyverse doesn't implement token authentication"
 )
-async def test_token_url_cookie(app, user, full_spawn):
+@pytest.mark.parametrize("accept_token_in_url", ["1", "0", ""])
+async def test_token_url_cookie(app, user, full_spawn, accept_token_in_url):
+    if accept_token_in_url:
+        user.spawner.environment["JUPYTERHUB_ALLOW_TOKEN_IN_URL"] = accept_token_in_url
+    should_accept = accept_token_in_url == "1"
+
     await user.spawn()
     await app.proxy.add_user(user)

     token = user.new_api_token(scopes=["access:servers!user"])
     url = url_path_join(public_url(app, user), user.spawner.default_url or "/tree/")

     # first request: auth with token in URL
-    r = await async_requests.get(url + f"?token={token}", allow_redirects=False)
+    s = AsyncSession()
+    r = await s.get(url + f"?token={token}", allow_redirects=False)
     print(r.url, r.status_code)
+    if not should_accept:
+        assert r.status_code == 302
+        return
+
     assert r.status_code == 200
-    assert r.cookies
+    assert s.cookies
     # second request, use cookies set by first response,
     # no token in URL
-    r = await async_requests.get(url, cookies=r.cookies, allow_redirects=False)
+    r = await s.get(url, allow_redirects=False)
     assert r.status_code == 200

     await user.stop()

@@ -409,7 +477,8 @@ async def test_api_403_no_cookie(app, user, full_spawn):
     await user.spawn()
     await app.proxy.add_user(user)
     url = url_path_join(public_url(app, user), "/api/contents/")
-    r = await async_requests.get(url, allow_redirects=False)
+    s = AsyncSession()
+    r = await s.get(url, allow_redirects=False)
     # 403, not redirect
     assert r.status_code == 403
     # no state cookie set
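The parametrized test above opts single-user servers back into token-in-URL authentication through the spawner environment. A minimal sketch of the corresponding deployment configuration, assuming the JUPYTERHUB_ALLOW_TOKEN_IN_URL variable is honored the same way the test sets it ("1" enables, "0" disables):

    # jupyterhub_config.py -- a minimal sketch, assuming the env var is read as in the test
    c = get_config()  # noqa: provided by JupyterHub when it loads this file

    # re-enable ?token=... authentication on single-user servers
    c.Spawner.environment = {"JUPYTERHUB_ALLOW_TOKEN_IN_URL": "1"}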
@@ -42,6 +42,13 @@ async_requests = _AsyncRequests()
 class AsyncSession(requests.Session):
     """requests.Session object that runs in the background thread"""

+    def __init__(self, **kwargs):
+        super().__init__(**kwargs)
+        # session requests are for cookie authentication
+        # and should look like regular page views,
+        # so set Sec-Fetch-Mode: navigate
+        self.headers.setdefault("Sec-Fetch-Mode", "navigate")
+
     def request(self, *args, **kwargs):
         return async_requests.executor.submit(super().request, *args, **kwargs)


@@ -157,6 +164,7 @@ async def api_request(
     else:
         base_url = public_url(app, path='hub')
     headers = kwargs.setdefault('headers', {})
+    headers.setdefault("Sec-Fetch-Mode", "cors")
     if 'Authorization' not in headers and not noauth and 'cookies' not in kwargs:
         # make a copy to avoid modifying arg in-place
         kwargs['headers'] = h = {}

@@ -176,7 +184,7 @@ async def api_request(
         kwargs['cert'] = (app.internal_ssl_cert, app.internal_ssl_key)
         kwargs["verify"] = app.internal_ssl_ca
     resp = await f(url, **kwargs)
-    assert "frame-ancestors 'self'" in resp.headers['Content-Security-Policy']
+    assert "frame-ancestors 'none'" in resp.headers['Content-Security-Policy']
     assert (
         ujoin(app.hub.base_url, "security/csp-report")
         in resp.headers['Content-Security-Policy']

@@ -197,6 +205,9 @@ def get_page(path, app, hub=True, **kw):
     else:
         prefix = app.base_url
     base_url = ujoin(public_host(app), prefix)
+    # Sec-Fetch-Mode=navigate to look like a regular page view
+    headers = kw.setdefault("headers", {})
+    headers.setdefault("Sec-Fetch-Mode", "navigate")
     return async_requests.get(ujoin(base_url, path), **kw)
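The helper changes above tag test requests with a Sec-Fetch-Mode header: "navigate" for page views that go through cookie and xsrf handling, "cors" for API calls. A minimal sketch of the same distinction outside the helpers, using plain requests against a running Hub; hub_url and token are hypothetical placeholders:

    import requests

    # a minimal sketch; hub_url and token are hypothetical placeholders
    hub_url = "http://127.0.0.1:8000"
    token = "abc123"

    # browser-like page view: cookie auth, xsrf checks apply
    page = requests.get(hub_url + "/hub/home", headers={"Sec-Fetch-Mode": "navigate"})

    # API-style call: token auth, no xsrf cookie expected
    api = requests.get(
        hub_url + "/hub/api/user",
        headers={"Authorization": f"token {token}", "Sec-Fetch-Mode": "cors"},
    )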
@@ -526,6 +526,9 @@ class User:
             _deprecated_db_session=self.db,
             oauth_client_id=client_id,
             cookie_options=self.settings.get('cookie_options', {}),
+            cookie_host_prefix_enabled=self.settings.get(
+                "cookie_host_prefix_enabled", False
+            ),
             trusted_alt_names=trusted_alt_names,
             user_options=orm_spawner.user_options or {},
         )
@@ -8,6 +8,7 @@ import errno
 import functools
 import hashlib
 import inspect
+import os
 import random
 import re
 import secrets

@@ -34,6 +35,21 @@ from tornado.httpclient import AsyncHTTPClient, HTTPError
 from tornado.log import app_log


+def _bool_env(key, default=False):
+    """Cast an environment variable to bool
+
+    If unset or empty, return `default`
+    `0` is False; all other values are True.
+    """
+    value = os.environ.get(key, "")
+    if value == "":
+        return default
+    if value.lower() in {"0", "false"}:
+        return False
+    else:
+        return True
+
+
 # Deprecated aliases: no longer needed now that we require 3.7
 def asyncio_all_tasks(loop=None):
     warnings.warn(
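The new _bool_env helper turns environment flags into booleans: unset or empty returns the default, "0" and "false" (any case) return False, anything else returns True. A short usage sketch of that behavior; the variable name is taken from the single-user tests above, and whether _bool_env is the actual reader of that variable elsewhere in the codebase is an assumption:

    # a minimal sketch of _bool_env behavior; the env var name is borrowed from the tests,
    # and its use with _bool_env here is an assumption for illustration
    import os

    os.environ["JUPYTERHUB_ALLOW_TOKEN_IN_URL"] = "0"
    assert _bool_env("JUPYTERHUB_ALLOW_TOKEN_IN_URL") is False

    os.environ["JUPYTERHUB_ALLOW_TOKEN_IN_URL"] = "1"
    assert _bool_env("JUPYTERHUB_ALLOW_TOKEN_IN_URL") is True

    del os.environ["JUPYTERHUB_ALLOW_TOKEN_IN_URL"]
    assert _bool_env("JUPYTERHUB_ALLOW_TOKEN_IN_URL", default=True) is True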
@@ -6,11 +6,6 @@
   <h1 class="sr-only">Manage JupyterHub Tokens</h1>
   <div class="row">
     <form id="request-token-form" class="col-md-offset-3 col-md-6">
-      <div class="text-center">
-        <button type="submit" class="btn btn-lg btn-jupyter">
-          Request new API token
-        </button>
-      </div>
       <div class="form-group">
         <label for="token-note">Note</label>
         <input

@@ -44,6 +39,11 @@
           See the <a href="https://jupyterhub.readthedocs.io/en/stable/rbac/scopes.html#available-scopes">JupyterHub documentation for a list of available scopes</a>.
         </small>
       </div>
+      <div class="text-center">
+        <button type="submit" class="btn btn-lg btn-jupyter">
+          Request new API token
+        </button>
+      </div>
     </form>
   </div>