Mirror of https://github.com/jupyterhub/jupyterhub.git, synced 2025-10-07 10:04:07 +00:00.
Compare commits
177 Commits
SHA1:
220eb87bce f9e9150abc 8074469ad7 46d2455aff 72e4119e1a faa1754645
318f739ba9 20b3229249 f0862f1d10 3c5f9b255e b6d9d5c120 bccd0e2ff1
a2d39c693d 76e65da9ff eb9bb71655 a39ef8f163 f4727cba47 14dfa65c75
9f23bc2959 24e8362401 c4c662843c 6d5b13962c fe64595d75 a3c93088a8
834229622d 44a1ea42de 3879a96b67 d40627d397 057cdbc9e9 75390d2e46
f5e4846cfa 3dc115a829 af4ddbfc58 50a4d1e34d 86a238334c dacb9d1668
95cc170383 437a9d150f c9616d6f11 61aed70c4d 9abb573d47 b074304834
201e7ca3d8 fa8cd90793 7dafae29fb 89a6c745b5 821d9e229d db7619fa7a
1ed9423530 147a578f7a 3a59a15164 1b7aded7f9 bc45d77365 1b3b005ca4
e0be811b2c 3627251246 8d056170d7 3590d16e30 572d258cd2 11d0954551
650d47d5c1 945fc824d8 a8aa737b00 cd689a1fab b3f04e7c66 fbcf857991
6c5e5452bc 2f5ba7ba30 a045eefa64 6ea4f2af0d 3d3ad2929c 00287ff5ba
805d063d1d e6bacf7109 33ccfa7963 593404f558 e7bc282c80 b939b482a1
8afc2c9ae9 d11eda14ed ab79251fe2 484dbf48de 6eb526d08a e0a17db5f1
45132b7244 c23cddeb51 672e19a22a 4a6c9c3a01 2b79bc44da 7861662e17
4a1842bf8a 8f18303e50 bcad6e287d 9de1951952 99cb1f17f0 10d5157e95
2fc4f26832 f6230001bb 960f7cbeb9 76f06a6b55 9c498aa5d4 a0b60f9118
27cb56429b b1ffd4b10b a9ea064202 687a41a467 5348451b2e 55f0579dcc
a3ea0f0449 78492a4a8e f22203f50e 500b354a00 9d4093782f 43b3cebfff
63c381431d bf41767b33 83d6e4e993 d64a2ddd95 392176d873 58420b3307
a5e3b66dee a9fbe5c9f6 71bbbe4a67 3843885382 25ea559e0d c18815de91
50d53667ce 68e2baf4aa 6fc9d40e51 0b25694b40 bf750e488f 359f9055fc
b84dd5d735 3ed345f496 6633f8ef28 757053a9ec 36cad38ddf 1e9a1cb621
9f051d3172 53576c8f82 bb5ec39b2f 4c54c6dcc8 39da98f133 29e69aa880
0c315f31b7 508842a68c 4b31615a05 17b64280e8 88be7a9967 4ca2344af7
4c050cf165 5e2ccb81fa b8dc3befab 2f29848757 4f3d6cdd0c 67733ef928
e657754e7f 2d6087959c 08a913707f 9c8a4f287a 64d6f0222c 538abdf084
144abcb965 6e5c307edb 67ebe0b0cf dcf21d53fd f5bb0a2622 704712cc81
f86d53a234 5466224988 f9fa21bfd7 e4855c30f5 f1c4fdd5a2 e58cf06706
91f4918cff b15ccfa4ae 5102fde2f0
.github/workflows/test-jsx.yml (new file, vendored, 108 lines)
@@ -0,0 +1,108 @@
# This is a GitHub workflow defining a set of jobs with a set of steps.
# ref: https://docs.github.com/en/actions/learn-github-actions/workflow-syntax-for-github-actions
#
name: Test jsx (admin-react.js)

on:
  pull_request:
    paths:
      - "jsx/**"
      - ".github/workflows/test-jsx.yml"
  push:
    paths:
      - "jsx/**"
      - ".github/workflows/test-jsx.yml"
    branches-ignore:
      - "dependabot/**"
      - "pre-commit-ci-update-config"
    tags:
      - "**"
  workflow_dispatch:

jobs:
  # The ./jsx folder contains React based source code files that are to compile
  # to share/jupyterhub/static/js/admin-react.js. The ./jsx folder also includes
  # tests that this job is meant to run with `yarn test`,
  # according to the documentation in jsx/README.md.
  test-jsx-admin-react:
    runs-on: ubuntu-20.04
    timeout-minutes: 5

    steps:
      - uses: actions/checkout@v2
      - uses: actions/setup-node@v1
        with:
          node-version: "14"

      - name: Install yarn
        run: |
          npm install -g yarn

      - name: yarn
        run: |
          cd jsx
          yarn

      - name: yarn test
        run: |
          cd jsx
          yarn test

  # The ./jsx folder contains React based source files that are to compile to
  # share/jupyterhub/static/js/admin-react.js. This job makes sure that whatever
  # we have in jsx/src matches the compiled asset that we package and
  # distribute.
  #
  # This job's purpose is to make sure we don't forget to compile changes and to
  # verify nobody sneaks in a change in the hard to review compiled asset.
  #
  # NOTE: In the future we may want to stop version controlling the compiled
  #       artifact and instead generate it whenever we package JupyterHub. If we
  #       do this, we are required to setup node and compile the source code
  #       more often, at the same time we could avoid having this check be made.
  #
  compile-jsx-admin-react:
    runs-on: ubuntu-20.04
    timeout-minutes: 5

    steps:
      - uses: actions/checkout@v2
      - uses: actions/setup-node@v1
        with:
          node-version: "14"

      - name: Install yarn
        run: |
          npm install -g yarn

      - name: yarn
        run: |
          cd jsx
          yarn

      - name: yarn build
        run: |
          cd jsx
          yarn build

      - name: yarn place
        run: |
          cd jsx
          yarn place

      - name: Verify compiled jsx/src matches version controlled artifact
        run: |
          if [[ `git status --porcelain=v1` ]]; then
            echo "The source code in ./jsx compiles to something different than found in ./share/jupyterhub/static/js/admin-react.js!"
            echo
            echo "Please re-compile the source code in ./jsx with the following commands:"
            echo
            echo "yarn"
            echo "yarn build"
            echo "yarn place"
            echo
            echo "See ./jsx/README.md for more details."
            exit 1
          else
            echo "Compilation of jsx/src to share/jupyterhub/static/js/admin-react.js didn't lead to changes."
          fi
.github/workflows/test.yml (vendored, 27 lines changed)
@@ -31,33 +31,6 @@ env:
  PYTEST_ADDOPTS: "--verbose --color=yes"

jobs:
  jstest:
    # Run javascript tests
    runs-on: ubuntu-20.04
    timeout-minutes: 5
    steps:
      - uses: actions/checkout@v2
      # NOTE: actions/setup-node@v1 make use of a cache within the GitHub base
      #       environment and setup in a fraction of a second.
      - name: Install Node
        uses: actions/setup-node@v1
        with:
          node-version: "14"

      - name: Install Node dependencies
        run: |
          npm install -g yarn

      - name: Run yarn
        run: |
          cd jsx
          yarn

      - name: yarn test
        run: |
          cd jsx
          yarn test

  # Run "pytest jupyterhub/tests" in various configurations
  pytest:
    runs-on: ubuntu-20.04
@@ -1,30 +1,52 @@
# pre-commit is a tool to perform a predefined set of tasks manually and/or
# automatically before git commits are made.
#
# Config reference: https://pre-commit.com/#pre-commit-configyaml---top-level
#
# Common tasks
#
# - Run on all files: pre-commit run --all-files
# - Register git hooks: pre-commit install --install-hooks
#
repos:
  # Autoformat: Python code, syntax patterns are modernized
  - repo: https://github.com/asottile/pyupgrade
    rev: v2.31.0
    rev: v2.32.1
    hooks:
      - id: pyupgrade
        args:
          - --py36-plus

  # Autoformat: Python code
  - repo: https://github.com/asottile/reorder_python_imports
    rev: v2.6.0
    rev: v3.1.0
    hooks:
      - id: reorder-python-imports

  # Autoformat: Python code
  - repo: https://github.com/psf/black
    rev: 21.12b0
    rev: 22.3.0
    hooks:
      - id: black

  # Autoformat: markdown, yaml, javascript (see the file .prettierignore)
  - repo: https://github.com/pre-commit/mirrors-prettier
    rev: v2.5.1
    rev: v2.6.2
    hooks:
      - id: prettier

  # Autoformat and linting, misc. details
  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v4.2.0
    hooks:
      - id: end-of-file-fixer
        exclude: share/jupyterhub/static/js/admin-react.js
      - id: requirements-txt-fixer
      - id: check-case-conflict
      - id: check-executables-have-shebangs

  # Linting: Python code (see the file .flake8)
  - repo: https://github.com/PyCQA/flake8
    rev: "4.0.1"
    hooks:
      - id: flake8
  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v4.1.0
    hooks:
      - id: end-of-file-fixer
      - id: check-case-conflict
      - id: check-executables-have-shebangs
      - id: requirements-txt-fixer
@@ -4,10 +4,12 @@ sphinx:
  configuration: docs/source/conf.py

build:
  image: latest
  os: ubuntu-20.04
  tools:
    nodejs: "16"
    python: "3.9"

python:
  version: 3.7
  install:
    - method: pip
      path: .
@@ -59,7 +59,7 @@ JupyterHub also provides a
[REST API][]
for administration of the Hub and its users.

[rest api]: https://juptyerhub.readthedocs.io/en/latest/reference/rest-api.html
[rest api]: https://jupyterhub.readthedocs.io/en/latest/reference/rest-api.html

## Installation
@@ -9,9 +9,14 @@ cryptography
html5lib # needed for beautifulsoup
jupyterlab >=3
mock
# nbclassic provides the '/tree/' handler, which we use in tests
# it is a transitive dependency via jupyterlab,
# but depend on it directly
nbclassic
pre-commit
pytest>=3.3
pytest-asyncio
pytest-asyncio; python_version < "3.7"
pytest-asyncio>=0.17; python_version >= "3.7"
pytest-cov
requests-mock
tbump
@@ -6,7 +6,7 @@ info:
  description: The REST API for JupyterHub
  license:
    name: BSD-3-Clause
  version: 2.0.2
  version: 2.3.2.dev
servers:
  - url: /hub/api
security:
@@ -1419,3 +1419,4 @@ components:
        Read information about the proxy’s routing table, sync the Hub
        with the proxy and notify the Hub about a new proxy.
      shutdown: Shutdown the hub.
      read:metrics: Read prometheus metrics.
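The spec above exposes the API under `/hub/api` and guards endpoints with scopes such as reading the proxy routing table. A minimal sketch of calling that API with a token, assuming a locally reachable hub; the hub address and token value are placeholders, not taken from this changeset:

```python
# Hedged sketch: query the JupyterHub REST API described by the spec above.
import requests

hub_api = "http://127.0.0.1:8081/hub/api"  # assumption: default hub API address
token = "<api-token-whose-role-grants-the-needed-scope>"

# JupyterHub expects the "token <value>" Authorization header format.
resp = requests.get(
    f"{hub_api}/proxy",
    headers={"Authorization": f"token {token}"},
)
resp.raise_for_status()
print(resp.json())  # the proxy's routing table, per the scope description above
```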
docs/source/admin/log-messages.md (new file, 72 lines)
@@ -0,0 +1,72 @@
# Interpreting common log messages

When debugging errors and outages, looking at the logs emitted by
JupyterHub is very helpful. This document tries to document some common
log messages, and what they mean.

## Failing suspected API request to not-running server

### Example

Your logs might be littered with lines that might look slightly scary

```
[W 2022-03-10 17:25:19.774 JupyterHub base:1349] Failing suspected API request to not-running server: /hub/user/<user-name>/api/metrics/v1
```

### Most likely cause

This likely means that the user's server has stopped running but they
still have a browser tab open. For example, you might have 3 tabs open, and shut
your server down via one. Or you closed your laptop, your server was
culled for inactivity, and then you reopened your laptop! The
client side code (JupyterLab, Classic Notebook, etc) does not know
yet that the server is dead, and continues to make some API requests.
JupyterHub's architecture means that the proxy routes all requests that
don't go to a running user server to the hub process itself. The hub
process then explicitly returns a failure response, so the client knows
that the server is not running anymore. This is used by JupyterLab to
tell you your server is not running anymore, and offer you the option
to restart it.

Most commonly, you'll see this in reference to the `/api/metrics/v1`
URL, used by [jupyter-resource-usage](https://github.com/jupyter-server/jupyter-resource-usage).

### Actions you can take

This log message is benign, and there is usually no action for you to take.

## JupyterHub Singleuser Version mismatch

### Example

```
jupyterhub version 1.5.0 != jupyterhub-singleuser version 1.3.0. This could cause failure to authenticate and result in redirect loops!
```

### Cause

JupyterHub requires the `jupyterhub` python package installed inside the image or
environment the user server starts in. This message indicates that the version of
the `jupyterhub` package installed inside the user image or environment is not
the same version as the JupyterHub server itself. This is not necessarily always a
problem - some version drift is mostly acceptable, and the only two known cases of
breakage are across the 0.7 and 2.0 version releases. In those cases, issues pop
up immediately after upgrading your version of JupyterHub, so **always check the JupyterHub
changelog before upgrading!** The primary problems this _could_ cause are:

1. Infinite redirect loops after the user server starts
2. Missing expected environment variables in the user server once it starts
3. Failure for the started user server to authenticate with the JupyterHub server -
   note that this is _not_ the same as _user authentication_ failing!

However, for the most part, unless you are seeing these specific issues, the log
message should be counted as a warning to get the `jupyterhub` package versions
aligned, rather than as an indicator of an existing problem.

### Actions you can take

Upgrade the version of the `jupyterhub` package in your user environment or image
so it matches the version of JupyterHub running your JupyterHub server! If you
are using the [zero-to-jupyterhub](https://z2jh.jupyter.org) helm chart, you can find the appropriate
version of the `jupyterhub` package to install in your user image [here](https://jupyterhub.github.io/helm-chart/).
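The version-mismatch warning above compares the Hub's own version with the `jupyterhub` package inside the user environment. A minimal sketch for checking both by hand, assuming you can run Python in each environment; the helper and the example version strings are illustrative:

```python
# Run this in the Hub environment and again inside the user image/env,
# then compare the two printed versions.
import jupyterhub

print(jupyterhub.__version__)

# Loose check mirroring the "some drift is mostly acceptable" note above:
def same_minor(a: str, b: str) -> bool:
    """Return True if two version strings share major.minor."""
    return a.split(".")[:2] == b.split(".")[:2]

print(same_minor("1.5.0", "1.3.0"))  # False -> likely to trigger the warning
```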
File diff suppressed because one or more lines are too long
@@ -21,6 +21,7 @@ extensions = [
    'myst_parser',
]

myst_heading_anchors = 2
myst_enable_extensions = [
    'colon_fence',
    'deflist',
@@ -147,6 +148,13 @@ html_theme_options = {
    "navbar_align": "left",
}

html_context = {
    "github_user": "jupyterhub",
    "github_repo": "jupyterhub",
    "github_version": "main",
    "doc_path": "docs",
}

# -- Options for LaTeX output ---------------------------------------------

latex_elements = {
@@ -10,4 +10,5 @@ well as other information relevant to running your own JupyterHub over time.

troubleshooting
admin/upgrading
admin/log-messages
changelog
@@ -7,7 +7,7 @@ JupyterHub provides four roles that are available by default:
```{admonition} **Default roles**
- `user` role provides a {ref}`default user scope <default-user-scope-target>` `self` that grants access to the user's own resources.
- `admin` role contains all available scopes and grants full rights to all actions. This role **cannot be edited**.
- `token` role provides a {ref}`default token scope <default-token-scope-target>` `all` that resolves to the same permissions as the owner of the token has.
- `token` role provides a {ref}`default token scope <default-token-scope-target>` `inherit` that resolves to the same permissions as the owner of the token has.
- `server` role allows for posting activity of "itself" only.

**These roles cannot be deleted.**

@@ -38,7 +38,7 @@ By adding a scope to an existing role, all role bearers will gain the associated
Metascopes do not follow the general scope syntax. Instead, a metascope resolves to a set of scopes, which can refer to different resources, based on their owning entity. In JupyterHub, there are currently two metascopes:

1. default user scope `self`, and
2. default token scope `all`.
2. default token scope `inherit`.

(default-user-scope-target)=

@@ -57,11 +57,11 @@ The `self` scope is only valid for user entities. In other cases (e.g., for serv

### Default token scope

The token metascope `all` covers the same scopes as the token owner's scopes during requests. For example, if a token owner has roles containing the scopes `read:groups` and `read:users`, the `all` scope resolves to the set of scopes `{read:groups, read:users}`.
The token metascope `inherit` causes the token to have the same permissions as the token's owner. For example, if a token owner has roles containing the scopes `read:groups` and `read:users`, the `inherit` scope resolves to the set of scopes `{read:groups, read:users}`.

If the token owner has default `user` role, the `all` scope resolves to `self`, which will subsequently be expanded to include all the user-specific scopes (or empty set in the case of services).
If the token owner has default `user` role, the `inherit` scope resolves to `self`, which will subsequently be expanded to include all the user-specific scopes (or empty set in the case of services).

If the token owner is a member of any group with roles, the group scopes will also be included in resolving the `all` scope.
If the token owner is a member of any group with roles, the group scopes will also be included in resolving the `inherit` scope.

(horizontal-filtering-target)=
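The role and scope descriptions above are declarative; a hedged `jupyterhub_config.py` sketch of a custom role built from scopes follows. The role name and the group it is assigned to are illustrative, not part of this changeset:

```python
# jupyterhub_config.py sketch (role and group names are hypothetical).
# Grants a group read-only access to user and group listings via RBAC scopes.
c.JupyterHub.load_roles = [
    {
        "name": "group-reader",
        "description": "Read-only access to users and groups",
        "scopes": ["read:users", "read:groups"],
        "groups": ["instructors"],
    }
]
```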
@@ -49,6 +49,6 @@ API tokens can also be issued to users via API ([_/hub/token_](../reference/urls

### With RBAC

The RBAC framework allows for granting tokens different levels of permissions via scopes attached to roles. The 'only identify' purpose of the separate OAuth tokens is no longer required. API tokens can be used used for every action, including the login and authentication, for which an API token with no role (i.e., no scope in {ref}`available-scopes-target`) is used.
The RBAC framework allows for granting tokens different levels of permissions via scopes attached to roles. The 'only identify' purpose of the separate OAuth tokens is no longer required. API tokens can be used for every action, including the login and authentication, for which an API token with no role (i.e., no scope in {ref}`available-scopes-target`) is used.

OAuth tokens are therefore dropped from the Hub upgraded with the RBAC framework.
@@ -1,6 +1,6 @@
# Authenticators

The [Authenticator][] is the mechanism for authorizing users to use the
The {class}`.Authenticator` is the mechanism for authorizing users to use the
Hub and single user notebook servers.

## The default PAM Authenticator
@@ -137,8 +137,8 @@ via other mechanisms. One such example is using [GitHub OAuth][].

Because the username is passed from the Authenticator to the Spawner,
a custom Authenticator and Spawner are often used together.
For example, the Authenticator methods, [pre_spawn_start(user, spawner)][]
and [post_spawn_stop(user, spawner)][], are hooks that can be used to do
For example, the Authenticator methods, {meth}`.Authenticator.pre_spawn_start`
and {meth}`.Authenticator.post_spawn_stop`, are hooks that can be used to do
auth-related startup (e.g. opening PAM sessions) and cleanup
(e.g. closing PAM sessions).

@@ -223,7 +223,7 @@ If there are multiple keys present, the **first** key is always used to persist

Typically, if `auth_state` is persisted it is desirable to affect the Spawner environment in some way.
This may mean defining environment variables, placing certificate in the user's home directory, etc.
The `Authenticator.pre_spawn_start` method can be used to pass information from authenticator state
The {meth}`Authenticator.pre_spawn_start` method can be used to pass information from authenticator state
to Spawner environment:

```python
@@ -247,10 +247,42 @@ class MyAuthenticator(Authenticator):
        spawner.environment['UPSTREAM_TOKEN'] = auth_state['upstream_token']
```

(authenticator-groups)=

## Authenticator-managed group membership

:::{versionadded} 2.2
:::

Some identity providers may have their own concept of group membership that you would like to preserve in JupyterHub.
This is now possible with `Authenticator.managed_groups`.

You can set the config:

```python
c.Authenticator.manage_groups = True
```

to enable this behavior.
The default is False for Authenticators that ship with JupyterHub,
but may be True for custom Authenticators.
Check your Authenticator's documentation for manage_groups support.

If True, {meth}`.Authenticator.authenticate` and {meth}`.Authenticator.refresh_user` may include a field `groups`
which is a list of group names the user should be a member of:

- Membership will be added for any group in the list
- Membership in any groups not in the list will be revoked
- Any groups not already present in the database will be created
- If `None` is returned, no changes are made to the user's group membership

If authenticator-managed groups are enabled,
all group-management via the API is disabled.

## pre_spawn_start and post_spawn_stop hooks

Authenticators uses two hooks, [pre_spawn_start(user, spawner)][] and
[post_spawn_stop(user, spawner)][] to add pass additional state information
Authenticators uses two hooks, {meth}`.Authenticator.pre_spawn_start` and
{meth}`.Authenticator.post_spawn_stop(user, spawner)` to add pass additional state information
between the authenticator and a spawner. These hooks are typically used auth-related
startup, i.e. opening a PAM session, and auth-related cleanup, i.e. closing a
PAM session.
@@ -259,10 +291,7 @@ PAM session.

Beginning with version 0.8, JupyterHub is an OAuth provider.

[authenticator]: https://github.com/jupyterhub/jupyterhub/blob/HEAD/jupyterhub/auth.py
[pam]: https://en.wikipedia.org/wiki/Pluggable_authentication_module
[oauth]: https://en.wikipedia.org/wiki/OAuth
[github oauth]: https://developer.github.com/v3/oauth/
[oauthenticator]: https://github.com/jupyterhub/oauthenticator
[pre_spawn_start(user, spawner)]: https://jupyterhub.readthedocs.io/en/latest/api/auth.html#jupyterhub.auth.Authenticator.pre_spawn_start
[post_spawn_stop(user, spawner)]: https://jupyterhub.readthedocs.io/en/latest/api/auth.html#jupyterhub.auth.Authenticator.post_spawn_stop
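A minimal sketch of an Authenticator that reports group membership when `manage_groups` is enabled, following the `groups` field described in the section above. The class name, username check, and group names are illustrative only, not taken from this changeset:

```python
# Sketch only: a custom Authenticator returning the `groups` field described above.
from jupyterhub.auth import Authenticator


class GroupReportingAuthenticator(Authenticator):
    # equivalent to c.Authenticator.manage_groups = True
    manage_groups = True

    async def authenticate(self, handler, data):
        username = data["username"]
        # In a real deployment the groups would come from the identity provider.
        groups = ["staff"] if username.endswith("@example.org") else []
        return {
            "name": username,
            # membership is synced to this list; groups not listed are revoked
            "groups": groups,
        }
```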
@@ -165,7 +165,7 @@ As with nginx above, you can use [Apache](https://httpd.apache.org) as the rever

First, we will need to enable the apache modules that we are going to need:

```bash
a2enmod ssl rewrite proxy proxy_http proxy_wstunnel
a2enmod ssl rewrite proxy headers proxy_http proxy_wstunnel
```

Our Apache configuration is equivalent to the nginx configuration above:
@@ -188,13 +188,24 @@ Listen 443

ServerName HUB.DOMAIN.TLD

# enable HTTP/2, if available
Protocols h2 http/1.1

# HTTP Strict Transport Security (mod_headers is required) (63072000 seconds)
Header always set Strict-Transport-Security "max-age=63072000"

# configure SSL
SSLEngine on
SSLCertificateFile /etc/letsencrypt/live/HUB.DOMAIN.TLD/fullchain.pem
SSLCertificateKeyFile /etc/letsencrypt/live/HUB.DOMAIN.TLD/privkey.pem
SSLProtocol All -SSLv2 -SSLv3
SSLOpenSSLConfCmd DHParameters /etc/ssl/certs/dhparam.pem
SSLCipherSuite EECDH+AESGCM:EDH+AESGCM:AES256+EECDH:AES256+EDH

# intermediate configuration from ssl-config.mozilla.org (2022-03-03)
# Please note, that this configuration might be out-dated - please update it accordingly using https://ssl-config.mozilla.org/
SSLProtocol all -SSLv3 -TLSv1 -TLSv1.1
SSLCipherSuite ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:DHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384
SSLHonorCipherOrder off
SSLSessionTickets off

# Use RewriteEngine to handle websocket connection upgrades
RewriteEngine On
@@ -208,6 +219,7 @@ Listen 443
# proxy to JupyterHub
ProxyPass http://127.0.0.1:8000/
ProxyPassReverse http://127.0.0.1:8000/
RequestHeader set "X-Forwarded-Proto" expr=%{REQUEST_SCHEME}
</Location>
</VirtualHost>
```
@@ -113,7 +113,6 @@ c.JupyterHub.load_roles = [
        "scopes": [
            # specify the permissions the token should have
            "admin:users",
            "admin:services",
        ],
        "services": [
            # assign the service the above permissions
@@ -83,6 +83,7 @@ c.JupyterHub.load_roles = [
            # 'admin:users' # needed if culling idle users as well
        ]
    }
]

c.JupyterHub.services = [
    {
@@ -208,23 +209,23 @@ can be used by services. You may go beyond this reference implementation and
create custom hub-authenticating clients and services. We describe the process
below.

The reference, or base, implementation is the [`HubAuth`][hubauth] class,
The reference, or base, implementation is the {class}`.HubAuth` class,
which implements the API requests to the Hub that resolve a token to a User model.

There are two levels of authentication with the Hub:

- [`HubAuth`][hubauth] - the most basic authentication,
- {class}`.HubAuth` - the most basic authentication,
  for services that should only accept API requests authorized with a token.

- [`HubOAuth`][huboauth] - For services that should use oauth to authenticate with the Hub.
- {class}`.HubOAuth` - For services that should use oauth to authenticate with the Hub.
  This should be used for any service that serves pages that should be visited with a browser.

To use HubAuth, you must set the `.api_token`, either programmatically when constructing the class,
or via the `JUPYTERHUB_API_TOKEN` environment variable.

Most of the logic for authentication implementation is found in the
[`HubAuth.user_for_token`][hubauth.user_for_token]
methods, which makes a request of the Hub, and returns:
{meth}`.HubAuth.user_for_token` methods,
which makes a request of the Hub, and returns:

- None, if no user could be identified, or
- a dict of the following form:
@@ -245,6 +246,19 @@ action.
HubAuth also caches the Hub's response for a number of seconds,
configurable by the `cookie_cache_max_age` setting (default: five minutes).

If your service would like to make further requests _on behalf of users_,
it should use the token issued by this OAuth process.
If you are using tornado,
you can access the token authenticating the current request with {meth}`.HubAuth.get_token`.

:::{versionchanged} 2.2

{meth}`.HubAuth.get_token` adds support for retrieving
tokens stored in tornado cookies after completion of OAuth.
Previously, it only retrieved tokens from URL parameters or the Authorization header.
Passing `get_token(handler, in_cookie=False)` preserves this behavior.
:::

### Flask Example

For example, you have a Flask service that returns information about a user.
@@ -370,11 +384,6 @@ section on securing the notebook viewer.

[requests]: http://docs.python-requests.org/en/master/
[services_auth]: ../api/services.auth.html
[hubauth]: ../api/services.auth.html#jupyterhub.services.auth.HubAuth
[huboauth]: ../api/services.auth.html#jupyterhub.services.auth.HubOAuth
[hubauth.user_for_token]: ../api/services.auth.html#jupyterhub.services.auth.HubAuth.user_for_token
[hubauthenticated]: ../api/services.auth.html#jupyterhub.services.auth.HubAuthenticated
[huboauthenticated]: ../api/services.auth.html#jupyterhub.services.auth.HubOAuthenticated
[nbviewer example]: https://github.com/jupyter/nbviewer#securing-the-notebook-viewer
[fastapi example]: https://github.com/jupyterhub/jupyterhub/tree/HEAD/examples/service-fastapi
[fastapi]: https://fastapi.tiangolo.com
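A hedged sketch of the token-resolution flow described above using `HubAuth`: set the API token, pass a request token to `user_for_token`, and handle the None-or-dict result. The token value is a placeholder and would normally come from the incoming request:

```python
# Sketch of the HubAuth flow described in the section above.
import os

from jupyterhub.services.auth import HubAuth

auth = HubAuth(
    # as noted above, .api_token must be set, here from the environment
    api_token=os.environ["JUPYTERHUB_API_TOKEN"],
)

token = "<token taken from the incoming request>"  # placeholder
user_model = auth.user_for_token(token)

if user_model is None:
    print("no user could be identified")
else:
    print("request authenticated as", user_model["name"])
```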
@@ -108,6 +108,16 @@ class MySpawner(Spawner):
        return url
```

#### Exception handling

When `Spawner.start` raises an Exception, a message can be passed on to the user via the exception via a `.jupyterhub_html_message` or `.jupyterhub_message` attribute.

When the Exception has a `.jupyterhub_html_message` attribute, it will be rendered as HTML to the user.

Alternatively `.jupyterhub_message` is rendered as unformatted text.

If both attributes are not present, the Exception will be shown to the user as unformatted text.

### Spawner.poll

`Spawner.poll` should check if the spawner is still running.
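A small sketch of the exception attributes described above. The spawner class, the disk-space check, and the path are illustrative assumptions, not part of this changeset:

```python
# Sketch: surface a start failure to the user via the attributes described above.
import shutil

from jupyterhub.spawner import LocalProcessSpawner


class DiskCheckingSpawner(LocalProcessSpawner):
    async def start(self):
        # assumption: home directories live under /home
        free_gb = shutil.disk_usage("/home").free / 1e9
        if free_gb < 1:
            e = RuntimeError("not enough disk space")
            # rendered as HTML on the spawn-failure page
            e.jupyterhub_html_message = "<b>Not enough disk space</b> to start your server."
            # plain-text fallback
            e.jupyterhub_message = "Not enough disk space to start your server."
            raise e
        return await super().start()
```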
@@ -275,7 +275,7 @@ where `ssl_cert` is example-chained.crt and ssl_key to your private key.

Then restart JupyterHub.

See also [JupyterHub SSL encryption](./getting-started/security-basics.html#ssl-encryption).
See also {ref}`ssl-encryption`.

### Install JupyterHub without a network connection
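The troubleshooting entry above refers to the `ssl_cert` and `ssl_key` settings; a short `jupyterhub_config.py` sketch with placeholder paths:

```python
# jupyterhub_config.py sketch; the certificate and key paths are placeholders.
c.JupyterHub.ssl_cert = "/etc/jupyterhub/example-chained.crt"
c.JupyterHub.ssl_key = "/etc/jupyterhub/example.key"
# Restart JupyterHub after changing these, as the section above notes.
```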
examples/azuread-with-group-management/jupyterhub_config.py (new file, 30 lines)
@@ -0,0 +1,30 @@
"""sample jupyterhub config file for testing

configures jupyterhub with dummyauthenticator and simplespawner
to enable testing without administrative privileges.
"""

c = get_config()  # noqa
c.Application.log_level = 'DEBUG'

from oauthenticator.azuread import AzureAdOAuthenticator
import os

c.JupyterHub.authenticator_class = AzureAdOAuthenticator

c.AzureAdOAuthenticator.client_id = os.getenv("AAD_CLIENT_ID")
c.AzureAdOAuthenticator.client_secret = os.getenv("AAD_CLIENT_SECRET")
c.AzureAdOAuthenticator.oauth_callback_url = os.getenv("AAD_CALLBACK_URL")
c.AzureAdOAuthenticator.tenant_id = os.getenv("AAD_TENANT_ID")
c.AzureAdOAuthenticator.username_claim = "email"
c.AzureAdOAuthenticator.authorize_url = os.getenv("AAD_AUTHORIZE_URL")
c.AzureAdOAuthenticator.token_url = os.getenv("AAD_TOKEN_URL")
c.Authenticator.manage_groups = True
c.Authenticator.refresh_pre_spawn = True

# Optionally set a global password that all users must use
# c.DummyAuthenticator.password = "your_password"

from jupyterhub.spawner import SimpleLocalProcessSpawner

c.JupyterHub.spawner_class = SimpleLocalProcessSpawner
examples/azuread-with-group-management/requirements.txt (new file, 2 lines)
@@ -0,0 +1,2 @@
oauthenticator
pyjwt
@@ -5,12 +5,12 @@ object-assign
*/

/*!
  Copyright (c) 2017 Jed Watson.
  Copyright (c) 2018 Jed Watson.
  Licensed under the MIT License (MIT), see
  http://jedwatson.github.io/classnames
*/

/** @license React v0.20.1
/** @license React v0.20.2
 * scheduler.production.min.js
 *
 * Copyright (c) Facebook, Inc. and its affiliates.
@@ -28,7 +28,7 @@ object-assign
 * LICENSE file in the root directory of this source tree.
 */

/** @license React v17.0.1
/** @license React v17.0.2
 * react-dom.production.min.js
 *
 * Copyright (c) Facebook, Inc. and its affiliates.
@@ -37,7 +37,16 @@ object-assign
 * LICENSE file in the root directory of this source tree.
 */

/** @license React v17.0.1
/** @license React v17.0.2
 * react-jsx-runtime.production.min.js
 *
 * Copyright (c) Facebook, Inc. and its affiliates.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 */

/** @license React v17.0.2
 * react.production.min.js
 *
 * Copyright (c) Facebook, Inc. and its affiliates.
@@ -40,17 +40,20 @@
    "eslint-plugin-unused-imports": "^1.1.1",
    "file-loader": "^6.2.0",
    "history": "^5.0.0",
    "lodash.debounce": "^4.0.8",
    "prop-types": "^15.7.2",
    "react": "^17.0.1",
    "react-bootstrap": "^1.4.0",
    "react-bootstrap": "^2.1.1",
    "react-dom": "^17.0.1",
    "react-icons": "^4.1.0",
    "react-multi-select-component": "^3.0.7",
    "react-object-table-viewer": "^1.0.7",
    "react-redux": "^7.2.2",
    "react-router": "^5.2.0",
    "react-router-dom": "^5.2.0",
    "recompose": "^0.30.0",
    "redux": "^4.0.5",
    "regenerator-runtime": "^0.13.9",
    "style-loader": "^2.0.0",
    "webpack": "^5.6.0",
    "webpack-cli": "^3.3.4",
@@ -65,6 +68,7 @@
    "eslint-plugin-react": "^7.22.0",
    "identity-obj-proxy": "^3.0.0",
    "jest": "^26.6.3",
    "prettier": "^2.2.1"
    "prettier": "^2.2.1",
    "sinon": "^13.0.1"
  }
}
@@ -1,6 +1,7 @@
export const initialState = {
  user_data: undefined,
  user_page: 0,
  name_filter: "",
  groups_data: undefined,
  groups_page: 0,
  limit: window.api_page_limit,
@@ -13,6 +14,7 @@ export const reducers = (state = initialState, action) => {
      return Object.assign({}, state, {
        user_page: action.value.page,
        user_data: action.value.data,
        name_filter: action.value.name_filter || "",
      });

    // Updates the client group model data and stores the page
@@ -60,7 +60,10 @@ const AddUser = (props) => {
            placeholder="usernames separated by line"
            data-testid="user-textarea"
            onBlur={(e) => {
              let split_users = e.target.value.split("\n");
              let split_users = e.target.value
                .split("\n")
                .map((u) => u.trim())
                .filter((u) => u.length > 0);
              setUsers(split_users);
            }}
          ></textarea>
@@ -88,17 +91,7 @@ const AddUser = (props) => {
            data-testid="submit"
            className="btn btn-primary"
            onClick={() => {
              let filtered_users = users.filter(
                (e) =>
                  e.length > 2 &&
                  /[!@#$%^&*(),.?":{}|<>]/g.test(e) == false
              );
              if (filtered_users.length < users.length) {
                setUsers(filtered_users);
                failRegexEvent();
              }

              addUsers(filtered_users, admin)
              addUsers(users, admin)
                .then((data) =>
                  data.status < 300
                    ? updateUsers(0, limit)
@@ -70,12 +70,12 @@ test("Removes users when they fail Regex", async () => {
  let textarea = screen.getByTestId("user-textarea");
  let submit = screen.getByTestId("submit");

  fireEvent.blur(textarea, { target: { value: "foo\nbar\n!!*&*" } });
  fireEvent.blur(textarea, { target: { value: "foo \n bar\na@b.co\n \n\n" } });
  await act(async () => {
    fireEvent.click(submit);
  });

  expect(callbackSpy).toHaveBeenCalledWith(["foo", "bar"], false);
  expect(callbackSpy).toHaveBeenCalledWith(["foo", "bar", "a@b.co"], false);
});

test("Correctly submits admin", async () => {
@@ -59,7 +59,7 @@ const CreateGroup = (props) => {
            value={groupName}
            placeholder="group name..."
            onChange={(e) => {
              setGroupName(e.target.value);
              setGroupName(e.target.value.trim());
            }}
          ></input>
        </div>
@@ -1,8 +1,19 @@
|
||||
import React, { useState } from "react";
|
||||
import regeneratorRuntime from "regenerator-runtime";
|
||||
import { useSelector, useDispatch } from "react-redux";
|
||||
import PropTypes from "prop-types";
|
||||
|
||||
import { Button } from "react-bootstrap";
|
||||
import {
|
||||
Button,
|
||||
Col,
|
||||
Row,
|
||||
FormControl,
|
||||
Card,
|
||||
CardGroup,
|
||||
Collapse,
|
||||
} from "react-bootstrap";
|
||||
import ReactObjectTableViewer from "react-object-table-viewer";
|
||||
|
||||
import { Link } from "react-router-dom";
|
||||
import { FaSort, FaSortUp, FaSortDown } from "react-icons/fa";
|
||||
|
||||
@@ -10,7 +21,16 @@ import "./server-dashboard.css";
|
||||
import { timeSince } from "../../util/timeSince";
|
||||
import PaginationFooter from "../PaginationFooter/PaginationFooter";
|
||||
|
||||
const AccessServerButton = ({ url }) => (
|
||||
<a href={url || ""}>
|
||||
<button className="btn btn-primary btn-xs" style={{ marginRight: 20 }}>
|
||||
Access Server
|
||||
</button>
|
||||
</a>
|
||||
);
|
||||
|
||||
const ServerDashboard = (props) => {
|
||||
let base_url = window.base_url;
|
||||
// sort methods
|
||||
var usernameDesc = (e) => e.sort((a, b) => (a.name > b.name ? 1 : -1)),
|
||||
usernameAsc = (e) => e.sort((a, b) => (a.name < b.name ? 1 : -1)),
|
||||
@@ -29,14 +49,17 @@ const ServerDashboard = (props) => {
|
||||
|
||||
var [errorAlert, setErrorAlert] = useState(null);
|
||||
var [sortMethod, setSortMethod] = useState(null);
|
||||
var [disabledButtons, setDisabledButtons] = useState({});
|
||||
const [collapseStates, setCollapseStates] = useState({});
|
||||
|
||||
var user_data = useSelector((state) => state.user_data),
|
||||
user_page = useSelector((state) => state.user_page),
|
||||
limit = useSelector((state) => state.limit),
|
||||
name_filter = useSelector((state) => state.name_filter),
|
||||
page = parseInt(new URLSearchParams(props.location.search).get("page"));
|
||||
|
||||
page = isNaN(page) ? 0 : page;
|
||||
var slice = [page * limit, limit];
|
||||
var slice = [page * limit, limit, name_filter];
|
||||
|
||||
const dispatch = useDispatch();
|
||||
|
||||
@@ -50,12 +73,13 @@ const ServerDashboard = (props) => {
|
||||
history,
|
||||
} = props;
|
||||
|
||||
var dispatchPageUpdate = (data, page) => {
|
||||
var dispatchPageUpdate = (data, page, name_filter) => {
|
||||
dispatch({
|
||||
type: "USER_PAGE",
|
||||
value: {
|
||||
data: data,
|
||||
page: page,
|
||||
name_filter: name_filter,
|
||||
},
|
||||
});
|
||||
};
|
||||
@@ -65,13 +89,243 @@ const ServerDashboard = (props) => {
|
||||
}
|
||||
|
||||
if (page != user_page) {
|
||||
updateUsers(...slice).then((data) => dispatchPageUpdate(data, page));
|
||||
updateUsers(...slice).then((data) =>
|
||||
dispatchPageUpdate(data, page, name_filter)
|
||||
);
|
||||
}
|
||||
|
||||
var debounce = require("lodash.debounce");
|
||||
const handleSearch = debounce(async (event) => {
|
||||
// setNameFilter(event.target.value);
|
||||
updateUsers(page * limit, limit, event.target.value).then((data) =>
|
||||
dispatchPageUpdate(data, page, name_filter)
|
||||
);
|
||||
}, 300);
|
||||
|
||||
if (sortMethod != null) {
|
||||
user_data = sortMethod(user_data);
|
||||
}
|
||||
|
||||
const StopServerButton = ({ serverName, userName }) => {
|
||||
var [isDisabled, setIsDisabled] = useState(false);
|
||||
return (
|
||||
<button
|
||||
className="btn btn-danger btn-xs stop-button"
|
||||
disabled={isDisabled}
|
||||
onClick={() => {
|
||||
setIsDisabled(true);
|
||||
stopServer(userName, serverName)
|
||||
.then((res) => {
|
||||
if (res.status < 300) {
|
||||
updateUsers(...slice)
|
||||
.then((data) => {
|
||||
dispatchPageUpdate(data, page, name_filter);
|
||||
})
|
||||
.catch(() => {
|
||||
setIsDisabled(false);
|
||||
setErrorAlert(`Failed to update users list.`);
|
||||
});
|
||||
} else {
|
||||
setErrorAlert(`Failed to stop server.`);
|
||||
setIsDisabled(false);
|
||||
}
|
||||
return res;
|
||||
})
|
||||
.catch(() => {
|
||||
setErrorAlert(`Failed to stop server.`);
|
||||
setIsDisabled(false);
|
||||
});
|
||||
}}
|
||||
>
|
||||
Stop Server
|
||||
</button>
|
||||
);
|
||||
};
|
||||
|
||||
const StartServerButton = ({ serverName, userName }) => {
|
||||
var [isDisabled, setIsDisabled] = useState(false);
|
||||
return (
|
||||
<button
|
||||
className="btn btn-success btn-xs start-button"
|
||||
disabled={isDisabled}
|
||||
onClick={() => {
|
||||
setIsDisabled(true);
|
||||
startServer(userName, serverName)
|
||||
.then((res) => {
|
||||
if (res.status < 300) {
|
||||
updateUsers(...slice)
|
||||
.then((data) => {
|
||||
dispatchPageUpdate(data, page, name_filter);
|
||||
})
|
||||
.catch(() => {
|
||||
setErrorAlert(`Failed to update users list.`);
|
||||
setIsDisabled(false);
|
||||
});
|
||||
} else {
|
||||
setErrorAlert(`Failed to start server.`);
|
||||
setIsDisabled(false);
|
||||
}
|
||||
return res;
|
||||
})
|
||||
.catch(() => {
|
||||
setErrorAlert(`Failed to start server.`);
|
||||
setIsDisabled(false);
|
||||
});
|
||||
}}
|
||||
>
|
||||
Start Server
|
||||
</button>
|
||||
);
|
||||
};
|
||||
|
||||
const EditUserCell = ({ user }) => {
|
||||
return (
|
||||
<td>
|
||||
<button
|
||||
className="btn btn-primary btn-xs"
|
||||
style={{ marginRight: 20 }}
|
||||
onClick={() =>
|
||||
history.push({
|
||||
pathname: "/edit-user",
|
||||
state: {
|
||||
username: user.name,
|
||||
has_admin: user.admin,
|
||||
},
|
||||
})
|
||||
}
|
||||
>
|
||||
Edit User
|
||||
</button>
|
||||
</td>
|
||||
);
|
||||
};
|
||||
|
||||
const ServerRowTable = ({ data }) => {
|
||||
return (
|
||||
<ReactObjectTableViewer
|
||||
className="table-striped table-bordered"
|
||||
style={{
|
||||
padding: "3px 6px",
|
||||
margin: "auto",
|
||||
}}
|
||||
keyStyle={{
|
||||
padding: "4px",
|
||||
}}
|
||||
valueStyle={{
|
||||
padding: "4px",
|
||||
}}
|
||||
data={data}
|
||||
/>
|
||||
);
|
||||
};
|
||||
|
||||
const serverRow = (user, server) => {
|
||||
const { servers, ...userNoServers } = user;
|
||||
const serverNameDash = server.name ? `-${server.name}` : "";
|
||||
const userServerName = user.name + serverNameDash;
|
||||
const open = collapseStates[userServerName] || false;
|
||||
return [
|
||||
<tr key={`${userServerName}-row`} className="user-row">
|
||||
<td data-testid="user-row-name">
|
||||
<span>
|
||||
<Button
|
||||
onClick={() =>
|
||||
setCollapseStates({
|
||||
...collapseStates,
|
||||
[userServerName]: !open,
|
||||
})
|
||||
}
|
||||
aria-controls={`${userServerName}-collapse`}
|
||||
aria-expanded={open}
|
||||
data-testid={`${userServerName}-collapse-button`}
|
||||
variant={open ? "secondary" : "primary"}
|
||||
size="sm"
|
||||
>
|
||||
<span className="caret"></span>
|
||||
</Button>{" "}
|
||||
</span>
|
||||
<span data-testid={`user-name-div-${userServerName}`}>
|
||||
{user.name}
|
||||
</span>
|
||||
</td>
|
||||
<td data-testid="user-row-admin">{user.admin ? "admin" : ""}</td>
|
||||
|
||||
<td data-testid="user-row-server">
|
||||
{server.name ? (
|
||||
<p className="text-secondary">{server.name}</p>
|
||||
) : (
|
||||
<p style={{ color: "lightgrey" }}>[MAIN]</p>
|
||||
)}
|
||||
</td>
|
||||
<td data-testid="user-row-last-activity">
|
||||
{server.last_activity ? timeSince(server.last_activity) : "Never"}
|
||||
</td>
|
||||
<td data-testid="user-row-server-activity">
|
||||
{server.started ? (
|
||||
// Stop Single-user server
|
||||
<>
|
||||
<StopServerButton serverName={server.name} userName={user.name} />
|
||||
<AccessServerButton url={server.url} />
|
||||
</>
|
||||
) : (
|
||||
// Start Single-user server
|
||||
<>
|
||||
<StartServerButton
|
||||
serverName={server.name}
|
||||
userName={user.name}
|
||||
style={{ marginRight: 20 }}
|
||||
/>
|
||||
<a
|
||||
href={`${base_url}spawn/${user.name}${
|
||||
server.name && "/" + server.name
|
||||
}`}
|
||||
>
|
||||
<button
|
||||
className="btn btn-secondary btn-xs"
|
||||
style={{ marginRight: 20 }}
|
||||
>
|
||||
Spawn Page
|
||||
</button>
|
||||
</a>
|
||||
</>
|
||||
)}
|
||||
</td>
|
||||
<EditUserCell user={user} />
|
||||
</tr>,
|
||||
<tr>
|
||||
<td
|
||||
colSpan={6}
|
||||
style={{ padding: 0 }}
|
||||
data-testid={`${userServerName}-td`}
|
||||
>
|
||||
<Collapse in={open} data-testid={`${userServerName}-collapse`}>
|
||||
<CardGroup
|
||||
id={`${userServerName}-card-group`}
|
||||
style={{ width: "100%", margin: "0 auto", float: "none" }}
|
||||
>
|
||||
<Card style={{ width: "100%", padding: 3, margin: "0 auto" }}>
|
||||
<Card.Title>User</Card.Title>
|
||||
<ServerRowTable data={userNoServers} />
|
||||
</Card>
|
||||
<Card style={{ width: "100%", padding: 3, margin: "0 auto" }}>
|
||||
<Card.Title>Server</Card.Title>
|
||||
<ServerRowTable data={server} />
|
||||
</Card>
|
||||
</CardGroup>
|
||||
</Collapse>
|
||||
</td>
|
||||
</tr>,
|
||||
];
|
||||
};
|
||||
|
||||
let servers = user_data.flatMap((user) => {
|
||||
let userServers = Object.values({
|
||||
"": user.server || {},
|
||||
...(user.servers || {}),
|
||||
});
|
||||
return userServers.map((server) => [user, server]);
|
||||
});
|
||||
|
||||
return (
|
||||
<div className="container" data-testid="container">
|
||||
{errorAlert != null ? (
|
||||
@@ -92,11 +346,24 @@ const ServerDashboard = (props) => {
|
||||
) : (
|
||||
<></>
|
||||
)}
|
||||
<div className="manage-groups" style={{ float: "right", margin: "20px" }}>
|
||||
<Link to="/groups">{"> Manage Groups"}</Link>
|
||||
</div>
|
||||
<div className="server-dashboard-container">
|
||||
<table className="table table-striped table-bordered table-hover">
|
||||
<Row>
|
||||
<Col md={4}>
|
||||
<FormControl
|
||||
type="text"
|
||||
name="user_search"
|
||||
placeholder="Search users"
|
||||
aria-label="user-search"
|
||||
defaultValue={name_filter}
|
||||
onChange={handleSearch}
|
||||
/>
|
||||
</Col>
|
||||
|
||||
<Col md="auto" style={{ float: "right", margin: 15 }}>
|
||||
<Link to="/groups">{"> Manage Groups"}</Link>
|
||||
</Col>
|
||||
</Row>
|
||||
<table className="table table-bordered table-hover">
|
||||
<thead className="admin-table-head">
|
||||
<tr>
|
||||
<th id="user-header">
|
||||
@@ -115,6 +382,14 @@ const ServerDashboard = (props) => {
|
||||
testid="admin-sort"
|
||||
/>
|
||||
</th>
|
||||
<th id="server-header">
|
||||
Server{" "}
|
||||
<SortHandler
|
||||
sorts={{ asc: usernameAsc, desc: usernameDesc }}
|
||||
callback={(method) => setSortMethod(() => method)}
|
||||
testid="server-sort"
|
||||
/>
|
||||
</th>
|
||||
<th id="last-activity-header">
|
||||
Last Activity{" "}
|
||||
<SortHandler
|
||||
@@ -167,7 +442,7 @@ const ServerDashboard = (props) => {
|
||||
.then((res) => {
|
||||
updateUsers(...slice)
|
||||
.then((data) => {
|
||||
dispatchPageUpdate(data, page);
|
||||
dispatchPageUpdate(data, page, name_filter);
|
||||
})
|
||||
.catch(() =>
|
||||
setErrorAlert(`Failed to update users list.`)
|
||||
@@ -203,7 +478,7 @@ const ServerDashboard = (props) => {
|
||||
.then((res) => {
|
||||
updateUsers(...slice)
|
||||
.then((data) => {
|
||||
dispatchPageUpdate(data, page);
|
||||
dispatchPageUpdate(data, page, name_filter);
|
||||
})
|
||||
.catch(() =>
|
||||
setErrorAlert(`Failed to update users list.`)
|
||||
@@ -227,88 +502,7 @@ const ServerDashboard = (props) => {
|
||||
</Button>
|
||||
</td>
|
||||
</tr>
|
||||
{user_data.map((e, i) => (
|
||||
<tr key={i + "row"} className="user-row">
|
||||
<td data-testid="user-row-name">{e.name}</td>
|
||||
<td data-testid="user-row-admin">{e.admin ? "admin" : ""}</td>
|
||||
<td data-testid="user-row-last-activity">
|
||||
{e.last_activity ? timeSince(e.last_activity) : "Never"}
|
||||
</td>
|
||||
<td data-testid="user-row-server-activity">
|
||||
{e.server != null ? (
|
||||
// Stop Single-user server
|
||||
<button
|
||||
className="btn btn-danger btn-xs stop-button"
|
||||
onClick={() =>
|
||||
stopServer(e.name)
|
||||
.then((res) => {
|
||||
if (res.status < 300) {
|
||||
updateUsers(...slice)
|
||||
.then((data) => {
|
||||
dispatchPageUpdate(data, page);
|
||||
})
|
||||
.catch(() =>
|
||||
setErrorAlert(`Failed to update users list.`)
|
||||
);
|
||||
} else {
|
||||
setErrorAlert(`Failed to stop server.`);
|
||||
}
|
||||
return res;
|
||||
})
|
||||
.catch(() => setErrorAlert(`Failed to stop server.`))
|
||||
}
|
||||
>
|
||||
Stop Server
|
||||
</button>
|
||||
) : (
|
||||
// Start Single-user server
|
||||
<button
|
||||
className="btn btn-primary btn-xs start-button"
|
||||
onClick={() =>
|
||||
startServer(e.name)
|
||||
.then((res) => {
|
||||
if (res.status < 300) {
|
||||
updateUsers(...slice)
|
||||
.then((data) => {
|
||||
dispatchPageUpdate(data, page);
|
||||
})
|
||||
.catch(() =>
|
||||
setErrorAlert(`Failed to update users list.`)
|
||||
);
|
||||
} else {
|
||||
setErrorAlert(`Failed to start server.`);
|
||||
}
|
||||
return res;
|
||||
})
|
||||
.catch(() => {
|
||||
setErrorAlert(`Failed to start server.`);
|
||||
})
|
||||
}
|
||||
>
|
||||
Start Server
|
||||
</button>
|
||||
)}
|
||||
</td>
|
||||
<td>
|
||||
{/* Edit User */}
|
||||
<button
|
||||
className="btn btn-primary btn-xs"
|
||||
style={{ marginRight: 20 }}
|
||||
onClick={() =>
|
||||
history.push({
|
||||
pathname: "/edit-user",
|
||||
state: {
|
||||
username: e.name,
|
||||
has_admin: e.admin,
|
||||
},
|
||||
})
|
||||
}
|
||||
>
|
||||
edit user
|
||||
</button>
|
||||
</td>
|
||||
</tr>
|
||||
))}
|
||||
{servers.flatMap(([user, server]) => serverRow(user, server))}
|
||||
</tbody>
|
||||
</table>
|
||||
<PaginationFooter
|
||||
|
@@ -1,6 +1,7 @@
|
||||
import React from "react";
|
||||
import "@testing-library/jest-dom";
|
||||
import { act } from "react-dom/test-utils";
|
||||
import userEvent from "@testing-library/user-event";
|
||||
import { render, screen, fireEvent } from "@testing-library/react";
|
||||
import { HashRouter, Switch } from "react-router-dom";
|
||||
import { Provider, useSelector } from "react-redux";
|
||||
@@ -9,6 +10,9 @@ import { createStore } from "redux";
|
||||
import regeneratorRuntime from "regenerator-runtime";
|
||||
|
||||
import ServerDashboard from "./ServerDashboard";
|
||||
import * as sinon from "sinon";
|
||||
|
||||
let clock;
|
||||
|
||||
jest.mock("react-redux", () => ({
|
||||
...jest.requireActual("react-redux"),
|
||||
@@ -45,6 +49,7 @@ var mockAppState = () => ({
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
clock = sinon.useFakeTimers();
|
||||
useSelector.mockImplementation((callback) => {
|
||||
return callback(mockAppState());
|
||||
});
|
||||
@@ -52,6 +57,7 @@ beforeEach(() => {
|
||||
|
||||
afterEach(() => {
|
||||
useSelector.mockClear();
|
||||
clock.restore();
|
||||
});
|
||||
|
||||
test("Renders", async () => {
|
||||
@@ -71,8 +77,8 @@ test("Renders users from props.user_data into table", async () => {
|
||||
render(serverDashboardJsx(callbackSpy));
|
||||
});
|
||||
|
||||
let foo = screen.getByText("foo");
|
||||
let bar = screen.getByText("bar");
|
||||
let foo = screen.getByTestId("user-name-div-foo");
|
||||
let bar = screen.getByTestId("user-name-div-bar");
|
||||
|
||||
expect(foo).toBeVisible();
|
||||
expect(bar).toBeVisible();
|
||||
@@ -151,12 +157,12 @@ test("Sorts according to username", async () => {
|
||||
fireEvent.click(handler);
|
||||
|
||||
let first = screen.getAllByTestId("user-row-name")[0];
|
||||
expect(first.textContent).toBe("bar");
|
||||
expect(first.textContent).toContain("bar");
|
||||
|
||||
fireEvent.click(handler);
|
||||
|
||||
first = screen.getAllByTestId("user-row-name")[0];
|
||||
expect(first.textContent).toBe("foo");
|
||||
expect(first.textContent).toContain("foo");
|
||||
});
|
||||
|
||||
test("Sorts according to admin", async () => {
|
||||
@@ -189,12 +195,12 @@ test("Sorts according to last activity", async () => {
|
||||
fireEvent.click(handler);
|
||||
|
||||
let first = screen.getAllByTestId("user-row-name")[0];
|
||||
expect(first.textContent).toBe("foo");
|
||||
expect(first.textContent).toContain("foo");
|
||||
|
||||
fireEvent.click(handler);
|
||||
|
||||
first = screen.getAllByTestId("user-row-name")[0];
|
||||
expect(first.textContent).toBe("bar");
|
||||
expect(first.textContent).toContain("bar");
|
||||
});
|
||||
|
||||
test("Sorts according to server status (running/not running)", async () => {
|
||||
@@ -208,12 +214,53 @@ test("Sorts according to server status (running/not running)", async () => {
|
||||
fireEvent.click(handler);
|
||||
|
||||
let first = screen.getAllByTestId("user-row-name")[0];
|
||||
expect(first.textContent).toBe("foo");
|
||||
expect(first.textContent).toContain("foo");
|
||||
|
||||
fireEvent.click(handler);
|
||||
|
||||
first = screen.getAllByTestId("user-row-name")[0];
|
||||
expect(first.textContent).toBe("bar");
|
||||
expect(first.textContent).toContain("bar");
|
||||
});
|
||||
|
||||
test("Shows server details with button click", async () => {
|
||||
let callbackSpy = mockAsync();
|
||||
|
||||
await act(async () => {
|
||||
render(serverDashboardJsx(callbackSpy));
|
||||
});
|
||||
let button = screen.getByTestId("foo-collapse-button");
|
||||
let collapse = screen.getByTestId("foo-collapse");
|
||||
let collapseBar = screen.getByTestId("bar-collapse");
|
||||
|
||||
// expect().toBeVisible does not work here with collapse.
|
||||
expect(collapse).toHaveClass("collapse");
|
||||
expect(collapse).not.toHaveClass("show");
|
||||
expect(collapseBar).not.toHaveClass("show");
|
||||
|
||||
await act(async () => {
|
||||
fireEvent.click(button);
|
||||
});
|
||||
clock.tick(400);
|
||||
|
||||
expect(collapse).toHaveClass("collapse show");
|
||||
expect(collapseBar).not.toHaveClass("show");
|
||||
|
||||
await act(async () => {
|
||||
fireEvent.click(button);
|
||||
});
|
||||
clock.tick(400);
|
||||
|
||||
expect(collapse).toHaveClass("collapse");
|
||||
expect(collapse).not.toHaveClass("show");
|
||||
expect(collapseBar).not.toHaveClass("show");
|
||||
|
||||
await act(async () => {
|
||||
fireEvent.click(button);
|
||||
});
|
||||
clock.tick(400);
|
||||
|
||||
expect(collapse).toHaveClass("collapse show");
|
||||
expect(collapseBar).not.toHaveClass("show");
|
||||
});
|
||||
|
||||
test("Renders nothing if required data is not available", async () => {
|
||||
@@ -435,3 +482,42 @@ test("Shows a UI error dialogue when stop user server returns an improper status
|
||||
|
||||
expect(errorDialog).toBeVisible();
|
||||
});
|
||||
|
||||
test("Search for user calls updateUsers with name filter", async () => {
|
||||
let spy = mockAsync();
|
||||
let mockUpdateUsers = jest.fn((offset, limit, name_filter) => {
|
||||
return Promise.resolve([]);
|
||||
});
|
||||
await act(async () => {
|
||||
render(
|
||||
<Provider store={createStore(() => {}, {})}>
|
||||
<HashRouter>
|
||||
<Switch>
|
||||
<ServerDashboard
|
||||
updateUsers={mockUpdateUsers}
|
||||
shutdownHub={spy}
|
||||
startServer={spy}
|
||||
stopServer={spy}
|
||||
startAll={spy}
|
||||
stopAll={spy}
|
||||
/>
|
||||
</Switch>
|
||||
</HashRouter>
|
||||
</Provider>
|
||||
);
|
||||
});
|
||||
|
||||
let search = screen.getByLabelText("user-search");
|
||||
|
||||
userEvent.type(search, "a");
|
||||
expect(search.value).toEqual("a");
|
||||
clock.tick(400);
|
||||
expect(mockUpdateUsers.mock.calls[1][2]).toEqual("a");
|
||||
expect(mockUpdateUsers.mock.calls).toHaveLength(2);
|
||||
|
||||
userEvent.type(search, "b");
|
||||
expect(search.value).toEqual("ab");
|
||||
clock.tick(400);
|
||||
expect(mockUpdateUsers.mock.calls[2][2]).toEqual("ab");
|
||||
expect(mockUpdateUsers.mock.calls).toHaveLength(3);
|
||||
});
|
||||
|
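The Jest changes above cover the admin UI's new debounced user search: the test types into the search box, advances a fake clock by 400 ms, and then asserts that updateUsers was called with the typed text as a name filter. As a hedged illustration only (base URL and token are placeholders, not taken from the diff), the request this ultimately corresponds to on the Hub side can be reproduced with a few lines of Python:

import requests

api_url = "http://127.0.0.1:8081/hub/api"  # default Hub API address, adjust as needed
token = "REPLACE_WITH_ADMIN_TOKEN"         # an API token allowed to list users

r = requests.get(
    f"{api_url}/users",
    params={"offset": 0, "limit": 50, "name_filter": "a"},
    headers={"Authorization": f"token {token}"},
)
r.raise_for_status()
print([u["name"] for u in r.json()])  # users whose names contain "a"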
@@ -2,17 +2,20 @@ import { withProps } from "recompose";
import { jhapiRequest } from "./jhapiUtil";

const withAPI = withProps(() => ({
  updateUsers: (offset, limit) =>
    jhapiRequest(`/users?offset=${offset}&limit=${limit}`, "GET").then((data) =>
      data.json()
    ),
  updateUsers: (offset, limit, name_filter) =>
    jhapiRequest(
      `/users?offset=${offset}&limit=${limit}&name_filter=${name_filter || ""}`,
      "GET"
    ).then((data) => data.json()),
  updateGroups: (offset, limit) =>
    jhapiRequest(`/groups?offset=${offset}&limit=${limit}`, "GET").then(
      (data) => data.json()
    ),
  shutdownHub: () => jhapiRequest("/shutdown", "POST"),
  startServer: (name) => jhapiRequest("/users/" + name + "/server", "POST"),
  stopServer: (name) => jhapiRequest("/users/" + name + "/server", "DELETE"),
  startServer: (name, serverName = "") =>
    jhapiRequest("/users/" + name + "/servers/" + (serverName || ""), "POST"),
  stopServer: (name, serverName = "") =>
    jhapiRequest("/users/" + name + "/servers/" + (serverName || ""), "DELETE"),
  startAll: (names) =>
    names.map((e) => jhapiRequest("/users/" + e + "/server", "POST")),
  stopAll: (names) =>
jsx/yarn.lock: 4846 changes; file diff suppressed because it is too large.
@@ -2,7 +2,7 @@
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
# version_info updated by running `tbump`
version_info = (2, 0, 2, "", "")
version_info = (2, 3, 2, "", "dev")

# pep 440 version: no dot before beta/rc, but before .dev
# 0.1.0rc1
@@ -55,8 +55,15 @@ def run_migrations_offline():
    script output.

    """
    url = config.get_main_option("sqlalchemy.url")
    context.configure(url=url, target_metadata=target_metadata, literal_binds=True)
    connectable = config.attributes.get('connection', None)

    if connectable is None:
        url = config.get_main_option("sqlalchemy.url")
        context.configure(url=url, target_metadata=target_metadata, literal_binds=True)
    else:
        context.configure(
            connection=connectable, target_metadata=target_metadata, literal_binds=True
        )

    with context.begin_transaction():
        context.run_migrations()
@@ -69,11 +76,14 @@ def run_migrations_online():
    and associate a connection with the context.

    """
    connectable = engine_from_config(
        config.get_section(config.config_ini_section),
        prefix='sqlalchemy.',
        poolclass=pool.NullPool,
    )
    connectable = config.attributes.get('connection', None)

    if connectable is None:
        connectable = engine_from_config(
            config.get_section(config.config_ini_section),
            prefix='sqlalchemy.',
            poolclass=pool.NullPool,
        )

    with connectable.connect() as connection:
        context.configure(connection=connection, target_metadata=target_metadata)
|
||||
|
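The alembic/env.py changes above let a caller hand Alembic an already-open connection through config.attributes instead of having env.py build its own engine. A minimal sketch of driving a migration that way (database URL and ini path are assumptions, not taken from the diff):

from alembic import command
from alembic.config import Config
from sqlalchemy import create_engine

engine = create_engine("sqlite:///jupyterhub.sqlite")  # placeholder URL
with engine.begin() as connection:
    cfg = Config("alembic.ini")  # assumed location of the Alembic config
    # env.py now looks for config.attributes['connection'] before creating an engine
    cfg.attributes["connection"] = connection
    command.upgrade(cfg, "head")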
@@ -33,6 +33,11 @@ class _GroupAPIHandler(APIHandler):
|
||||
raise web.HTTPError(404, "No such group: %s", group_name)
|
||||
return group
|
||||
|
||||
def check_authenticator_managed_groups(self):
|
||||
"""Raise error on group-management APIs if Authenticator is managing groups"""
|
||||
if self.authenticator.manage_groups:
|
||||
raise web.HTTPError(400, "Group management via API is disabled")
|
||||
|
||||
|
||||
class GroupListAPIHandler(_GroupAPIHandler):
|
||||
@needs_scope('list:groups')
|
||||
@@ -45,7 +50,7 @@ class GroupListAPIHandler(_GroupAPIHandler):
|
||||
# the only valid filter is group=...
|
||||
# don't expand invalid !server=x to all groups!
|
||||
self.log.warning(
|
||||
"Invalid filter on list:group for {self.current_user}: {sub_scope}"
|
||||
f"Invalid filter on list:group for {self.current_user}: {sub_scope}"
|
||||
)
|
||||
raise web.HTTPError(403)
|
||||
query = query.filter(orm.Group.name.in_(sub_scope['group']))
|
||||
@@ -68,6 +73,9 @@ class GroupListAPIHandler(_GroupAPIHandler):
|
||||
@needs_scope('admin:groups')
|
||||
async def post(self):
|
||||
"""POST creates Multiple groups"""
|
||||
|
||||
self.check_authenticator_managed_groups()
|
||||
|
||||
model = self.get_json_body()
|
||||
if not model or not isinstance(model, dict) or not model.get('groups'):
|
||||
raise web.HTTPError(400, "Must specify at least one group to create")
|
||||
@@ -106,6 +114,7 @@ class GroupAPIHandler(_GroupAPIHandler):
|
||||
@needs_scope('admin:groups')
|
||||
async def post(self, group_name):
|
||||
"""POST creates a group by name"""
|
||||
self.check_authenticator_managed_groups()
|
||||
model = self.get_json_body()
|
||||
if model is None:
|
||||
model = {}
|
||||
@@ -132,6 +141,7 @@ class GroupAPIHandler(_GroupAPIHandler):
|
||||
@needs_scope('delete:groups')
|
||||
def delete(self, group_name):
|
||||
"""Delete a group by name"""
|
||||
self.check_authenticator_managed_groups()
|
||||
group = self.find_group(group_name)
|
||||
self.log.info("Deleting group %s", group_name)
|
||||
self.db.delete(group)
|
||||
@@ -145,6 +155,7 @@ class GroupUsersAPIHandler(_GroupAPIHandler):
|
||||
@needs_scope('groups')
|
||||
def post(self, group_name):
|
||||
"""POST adds users to a group"""
|
||||
self.check_authenticator_managed_groups()
|
||||
group = self.find_group(group_name)
|
||||
data = self.get_json_body()
|
||||
self._check_group_model(data)
|
||||
@@ -163,6 +174,7 @@ class GroupUsersAPIHandler(_GroupAPIHandler):
|
||||
@needs_scope('groups')
|
||||
async def delete(self, group_name):
|
||||
"""DELETE removes users from a group"""
|
||||
self.check_authenticator_managed_groups()
|
||||
group = self.find_group(group_name)
|
||||
data = self.get_json_body()
|
||||
self._check_group_model(data)
|
||||
|
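With the check added above, every group-management endpoint refuses requests once the authenticator owns group membership. A sketch of the opt-in and the resulting behavior (jupyterhub_config.py style; the endpoints are listed for illustration):

c = get_config()  # noqa
c.Authenticator.manage_groups = True

# With this set, the handlers patched above answer with HTTP 400, e.g.:
#   POST   /hub/api/groups             -> 400 "Group management via API is disabled"
#   POST   /hub/api/groups/newgroup    -> 400
#   DELETE /hub/api/groups/somegroup   -> 400
#   POST/DELETE /hub/api/groups/<name>/users -> 400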
@@ -47,9 +47,8 @@ class ShutdownAPIHandler(APIHandler):
|
||||
self.set_status(202)
|
||||
self.finish(json.dumps({"message": "Shutting down Hub"}))
|
||||
|
||||
# stop the eventloop, which will trigger cleanup
|
||||
loop = IOLoop.current()
|
||||
loop.add_callback(loop.stop)
|
||||
# instruct the app to stop, which will trigger cleanup
|
||||
app.stop()
class RootAPIHandler(APIHandler):
|
||||
|
@@ -84,6 +84,7 @@ class UserListAPIHandler(APIHandler):
|
||||
@needs_scope('list:users')
|
||||
def get(self):
|
||||
state_filter = self.get_argument("state", None)
|
||||
name_filter = self.get_argument("name_filter", None)
|
||||
offset, limit = self.get_api_pagination()
# post_filter
|
||||
@@ -130,7 +131,7 @@ class UserListAPIHandler(APIHandler):
|
||||
if not set(sub_scope).issubset({'group', 'user'}):
|
||||
# don't expand invalid !server=x filter to all users!
|
||||
self.log.warning(
|
||||
"Invalid filter on list:user for {self.current_user}: {sub_scope}"
|
||||
f"Invalid filter on list:user for {self.current_user}: {sub_scope}"
|
||||
)
|
||||
raise web.HTTPError(403)
|
||||
filters = []
|
||||
@@ -148,6 +149,9 @@ class UserListAPIHandler(APIHandler):
|
||||
else:
|
||||
query = query.filter(or_(*filters))
|
||||
|
||||
if name_filter:
|
||||
query = query.filter(orm.User.name.ilike(f'%{name_filter}%'))
|
||||
|
||||
full_query = query
|
||||
query = query.order_by(orm.User.id.asc()).offset(offset).limit(limit)
|
||||
|
||||
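The name_filter argument is applied as a case-insensitive substring match before pagination. A standalone sketch of the same SQLAlchemy pattern (the model and session here are illustrative, not JupyterHub's own ORM setup):

from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()

class User(Base):
    __tablename__ = "users"
    id = Column(Integer, primary_key=True)
    name = Column(String)

engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add_all([User(name=n) for n in ("q", "qr", "qrs", "other")])
    session.commit()
    name_filter = "qr"
    # case-insensitive substring filter, applied before offset/limit pagination
    query = (
        session.query(User)
        .filter(User.name.ilike(f"%{name_filter}%"))
        .order_by(User.id.asc())
        .offset(0)
        .limit(50)
    )
    print([u.name for u in query])  # ['qr', 'qrs']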
@@ -515,7 +519,7 @@ class UserServerAPIHandler(APIHandler):
|
||||
user_name, self.named_server_limit_per_user
|
||||
),
|
||||
)
|
||||
spawner = user.spawners[server_name]
|
||||
spawner = user.get_spawner(server_name, replace_failed=True)
|
||||
pending = spawner.pending
|
||||
if pending == 'spawn':
|
||||
self.set_header('Content-Type', 'text/plain')
|
||||
@@ -714,7 +718,12 @@ class SpawnProgressAPIHandler(APIHandler):
|
||||
# check if spawner has just failed
|
||||
f = spawn_future
|
||||
if f and f.done() and f.exception():
|
||||
failed_event['message'] = "Spawn failed: %s" % f.exception()
|
||||
exc = f.exception()
|
||||
message = getattr(exc, "jupyterhub_message", str(exc))
|
||||
failed_event['message'] = f"Spawn failed: {message}"
|
||||
html_message = getattr(exc, "jupyterhub_html_message", "")
|
||||
if html_message:
|
||||
failed_event['html_message'] = html_message
|
||||
await self.send_event(failed_event)
|
||||
return
|
||||
else:
|
||||
@@ -747,7 +756,12 @@ class SpawnProgressAPIHandler(APIHandler):
|
||||
# what happened? Maybe spawn failed?
|
||||
f = spawn_future
|
||||
if f and f.done() and f.exception():
|
||||
failed_event['message'] = "Spawn failed: %s" % f.exception()
|
||||
exc = f.exception()
|
||||
message = getattr(exc, "jupyterhub_message", str(exc))
|
||||
failed_event['message'] = f"Spawn failed: {message}"
|
||||
html_message = getattr(exc, "jupyterhub_html_message", "")
|
||||
if html_message:
|
||||
failed_event['html_message'] = html_message
|
||||
else:
|
||||
self.log.warning(
|
||||
"Server %s didn't start for unknown reason", spawner._log_name
|
||||
|
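Both progress-handler hunks above forward jupyterhub_message and jupyterhub_html_message from the spawn exception into the failure event, so users see a readable reason instead of a bare traceback string. A sketch of a spawner raising such an exception (the class and the quota condition are made up for illustration):

from jupyterhub.spawner import LocalProcessSpawner

class QuotaSpawner(LocalProcessSpawner):
    async def start(self):
        if self.user.name == "overquota":  # placeholder condition
            e = RuntimeError("disk quota exceeded")
            # picked up by the progress handler as 'message' / 'html_message'
            e.jupyterhub_message = "Your disk quota is exhausted."
            e.jupyterhub_html_message = "Your <b>disk quota</b> is exhausted."
            raise e
        return await super().start()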
@@ -1689,7 +1689,9 @@ class JupyterHub(Application):
|
||||
for authority, files in self.internal_ssl_authorities.items():
|
||||
if files:
|
||||
self.log.info("Adding CA for %s", authority)
|
||||
certipy.store.add_record(authority, is_ca=True, files=files)
|
||||
certipy.store.add_record(
|
||||
authority, is_ca=True, files=files, overwrite=True
|
||||
)
|
||||
|
||||
self.internal_trust_bundles = certipy.trust_from_graph(
|
||||
self.internal_ssl_components_trust
|
||||
@@ -2001,6 +2003,9 @@ class JupyterHub(Application):
|
||||
async def init_groups(self):
|
||||
"""Load predefined groups into the database"""
|
||||
db = self.db
|
||||
|
||||
if self.authenticator.manage_groups and self.load_groups:
|
||||
raise ValueError("Group management has been offloaded to the authenticator")
|
||||
for name, usernames in self.load_groups.items():
|
||||
group = orm.Group.find(db, name)
|
||||
if group is None:
|
||||
@@ -3147,7 +3152,12 @@ class JupyterHub(Application):
|
||||
self.last_activity_callback = pc
|
||||
pc.start()
|
||||
|
||||
self.log.info("JupyterHub is now running at %s", self.proxy.public_url)
|
||||
if self.proxy.should_start:
|
||||
self.log.info("JupyterHub is now running at %s", self.proxy.public_url)
|
||||
else:
|
||||
self.log.info(
|
||||
"JupyterHub is now running, internal Hub API at %s", self.hub.url
|
||||
)
|
||||
# Use atexit for Windows, it doesn't have signal handling support
|
||||
if _mswindows:
|
||||
atexit.register(self.atexit)
|
||||
@@ -3233,9 +3243,15 @@ class JupyterHub(Application):
|
||||
loop.make_current()
|
||||
loop.run_sync(self.cleanup)
|
||||
|
||||
async def shutdown_cancel_tasks(self, sig):
|
||||
async def shutdown_cancel_tasks(self, sig=None):
|
||||
"""Cancel all other tasks of the event loop and initiate cleanup"""
|
||||
self.log.critical("Received signal %s, initiating shutdown...", sig.name)
|
||||
if sig is None:
|
||||
self.log.critical("Initiating shutdown...")
|
||||
else:
|
||||
self.log.critical("Received signal %s, initiating shutdown...", sig.name)
|
||||
|
||||
await self.cleanup()
|
||||
|
||||
tasks = [t for t in asyncio_all_tasks() if t is not asyncio_current_task()]
|
||||
|
||||
if tasks:
|
||||
@@ -3252,7 +3268,6 @@ class JupyterHub(Application):
|
||||
tasks = [t for t in asyncio_all_tasks()]
|
||||
for t in tasks:
|
||||
self.log.debug("Task status: %s", t)
|
||||
await self.cleanup()
|
||||
asyncio.get_event_loop().stop()
|
||||
|
||||
def stop(self):
|
||||
@@ -3260,7 +3275,7 @@ class JupyterHub(Application):
|
||||
return
|
||||
if self.http_server:
|
||||
self.http_server.stop()
|
||||
self.io_loop.add_callback(self.io_loop.stop)
|
||||
self.io_loop.add_callback(self.shutdown_cancel_tasks)
|
||||
|
||||
async def start_show_config(self):
|
||||
"""Async wrapper around base start_show_config method"""
|
||||
|
@@ -582,9 +582,13 @@ class Authenticator(LoggingConfigurable):
|
||||
or None if Authentication failed.
|
||||
|
||||
The Authenticator may return a dict instead, which MUST have a
|
||||
key `name` holding the username, and MAY have two optional keys
|
||||
set: `auth_state`, a dictionary of of auth state that will be
|
||||
persisted; and `admin`, the admin setting value for the user.
|
||||
key `name` holding the username, and MAY have additional keys:
|
||||
|
||||
- `auth_state`, a dictionary of of auth state that will be
|
||||
persisted;
|
||||
- `admin`, the admin setting value for the user
|
||||
- `groups`, the list of group names the user should be a member of,
|
||||
if Authenticator.manage_groups is True.
|
||||
"""
|
||||
|
||||
def pre_spawn_start(self, user, spawner):
|
||||
@@ -635,6 +639,19 @@ class Authenticator(LoggingConfigurable):
|
||||
"""
|
||||
self.allowed_users.discard(user.name)
|
||||
|
||||
manage_groups = Bool(
|
||||
False,
|
||||
config=True,
|
||||
help="""Let authenticator manage user groups
|
||||
|
||||
If True, Authenticator.authenticate and/or .refresh_user
|
||||
may return a list of group names in the 'groups' field,
|
||||
which will be assigned to the user.
|
||||
|
||||
All group-assignment APIs are disabled if this is True.
|
||||
""",
|
||||
)
|
||||
|
||||
auto_login = Bool(
|
||||
False,
|
||||
config=True,
|
||||
@@ -958,16 +975,24 @@ class PAMAuthenticator(LocalAuthenticator):
|
||||
).tag(config=True)
|
||||
|
||||
open_sessions = Bool(
|
||||
True,
|
||||
False,
|
||||
help="""
|
||||
Whether to open a new PAM session when spawners are started.
|
||||
|
||||
This may trigger things like mounting shared filsystems,
|
||||
loading credentials, etc. depending on system configuration,
|
||||
but it does not always work.
|
||||
This may trigger things like mounting shared filesystems,
|
||||
loading credentials, etc. depending on system configuration.
|
||||
|
||||
The lifecycle of PAM sessions is not correct,
|
||||
so many PAM session configurations will not work.
|
||||
|
||||
If any errors are encountered when opening/closing PAM sessions,
|
||||
this is automatically set to False.
|
||||
|
||||
.. versionchanged:: 2.2
|
||||
|
||||
Due to longstanding problems in the session lifecycle,
|
||||
this is now disabled by default.
|
||||
You may opt-in to opening sessions by setting this to True.
|
||||
""",
|
||||
).tag(config=True)
|
||||
|
||||
|
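The new Authenticator.manage_groups flag lets authenticate() and refresh_user() return a 'groups' list alongside 'name', which the Hub then syncs into its group membership; the PAM open_sessions default also flips to False here. A hedged sketch of an authenticator using the groups field (credentials and group names are placeholders):

from jupyterhub.auth import Authenticator

class DemoGroupAuthenticator(Authenticator):
    manage_groups = True

    async def authenticate(self, handler, data):
        if data.get("password") != "demo":  # placeholder check, not for real use
            return None
        return {
            "name": data["username"],
            # synced to the user's Hub groups on login / refresh
            "groups": ["demo-users"],
        }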
@@ -45,6 +45,7 @@ from ..metrics import ServerSpawnStatus
|
||||
from ..metrics import ServerStopStatus
|
||||
from ..metrics import TOTAL_USERS
|
||||
from ..objects import Server
|
||||
from ..scopes import needs_scope
|
||||
from ..spawner import LocalProcessSpawner
|
||||
from ..user import User
|
||||
from ..utils import AnyTimeoutError
|
||||
@@ -525,10 +526,16 @@ class BaseHandler(RequestHandler):
|
||||
path=url_path_join(self.base_url, 'services'),
|
||||
**kwargs,
|
||||
)
|
||||
# clear tornado cookie
|
||||
# clear_cookie only accepts a subset of set_cookie's kwargs
|
||||
clear_xsrf_cookie_kwargs = {
|
||||
key: value
|
||||
for key, value in self.settings.get('xsrf_cookie_kwargs', {}).items()
|
||||
if key in {"path", "domain"}
|
||||
}
|
||||
|
||||
self.clear_cookie(
|
||||
'_xsrf',
|
||||
**self.settings.get('xsrf_cookie_kwargs', {}),
|
||||
**clear_xsrf_cookie_kwargs,
|
||||
)
|
||||
# Reset _jupyterhub_user
|
||||
self._jupyterhub_user = None
|
||||
@@ -635,29 +642,32 @@ class BaseHandler(RequestHandler):
|
||||
next_url = next_url.replace('\\', '%5C')
|
||||
proto = get_browser_protocol(self.request)
|
||||
host = self.request.host
|
||||
if next_url.startswith("///"):
|
||||
# strip more than 2 leading // down to 2
|
||||
# because urlparse treats that as empty netloc,
|
||||
# whereas browsers treat more than two leading // the same as //,
|
||||
# so netloc is the first non-/ bit
|
||||
next_url = "//" + next_url.lstrip("/")
|
||||
parsed_next_url = urlparse(next_url)
|
||||
|
||||
if (next_url + '/').startswith((f'{proto}://{host}/', f'//{host}/',)) or (
|
||||
self.subdomain_host
|
||||
and urlparse(next_url).netloc
|
||||
and ("." + urlparse(next_url).netloc).endswith(
|
||||
and parsed_next_url.netloc
|
||||
and ("." + parsed_next_url.netloc).endswith(
|
||||
"." + urlparse(self.subdomain_host).netloc
|
||||
)
|
||||
):
|
||||
# treat absolute URLs for our host as absolute paths:
|
||||
# below, redirects that aren't strictly paths
|
||||
parsed = urlparse(next_url)
|
||||
next_url = parsed.path
|
||||
if parsed.query:
|
||||
next_url = next_url + '?' + parsed.query
|
||||
if parsed.fragment:
|
||||
next_url = next_url + '#' + parsed.fragment
|
||||
# below, redirects that aren't strictly paths are rejected
|
||||
next_url = parsed_next_url.path
|
||||
if parsed_next_url.query:
|
||||
next_url = next_url + '?' + parsed_next_url.query
|
||||
if parsed_next_url.fragment:
|
||||
next_url = next_url + '#' + parsed_next_url.fragment
|
||||
parsed_next_url = urlparse(next_url)
|
||||
|
||||
# if it still has host info, it didn't match our above check for *this* host
|
||||
if next_url and (
|
||||
'://' in next_url
|
||||
or next_url.startswith('//')
|
||||
or not next_url.startswith('/')
|
||||
):
|
||||
if next_url and (parsed_next_url.netloc or not next_url.startswith('/')):
|
||||
self.log.warning("Disallowing redirect outside JupyterHub: %r", next_url)
|
||||
next_url = ''
|
||||
|
||||
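The rewritten block parses next_url once, reduces same-host absolute URLs to plain paths, and rejects anything that still carries host information. A simplified, self-contained sketch of that policy (the real handler additionally handles subdomain hosts and browser protocol detection):

from urllib.parse import urlparse

def sanitize_next_url(next_url, host):
    """Return a safe path for ?next=..., or '' if it points off this host."""
    if next_url.startswith("///"):
        # browsers treat three or more leading slashes like '//'
        next_url = "//" + next_url.lstrip("/")
    parsed = urlparse(next_url)
    if parsed.netloc == host:
        # same host: keep only path, query and fragment
        next_url = parsed.path
        if parsed.query:
            next_url += "?" + parsed.query
        if parsed.fragment:
            next_url += "#" + parsed.fragment
        parsed = urlparse(next_url)
    if parsed.netloc or not next_url.startswith("/"):
        return ""  # disallow redirects outside JupyterHub
    return next_url

print(sanitize_next_url("https://hub.example.org/hub/home?x=1", "hub.example.org"))  # /hub/home?x=1
print(sanitize_next_url("//evil.example.com/phish", "hub.example.org"))              # ''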
@@ -773,13 +783,22 @@ class BaseHandler(RequestHandler):
|
||||
# always ensure default roles ('user', 'admin' if admin) are assigned
|
||||
# after a successful login
|
||||
roles.assign_default_roles(self.db, entity=user)
|
||||
|
||||
# apply authenticator-managed groups
|
||||
if self.authenticator.manage_groups:
|
||||
group_names = authenticated.get("groups")
|
||||
if group_names is not None:
|
||||
user.sync_groups(group_names)
|
||||
|
||||
# always set auth_state and commit,
|
||||
# because there could be key-rotation or clearing of previous values
|
||||
# going on.
|
||||
if not self.authenticator.enable_auth_state:
|
||||
# auth_state is not enabled. Force None.
|
||||
auth_state = None
|
||||
|
||||
await user.save_auth_state(auth_state)
|
||||
|
||||
return user
|
||||
|
||||
async def login_user(self, data=None):
|
||||
@@ -793,6 +812,7 @@ class BaseHandler(RequestHandler):
|
||||
self.set_login_cookie(user)
|
||||
self.statsd.incr('login.success')
|
||||
self.statsd.timing('login.authenticate.success', auth_timer.ms)
|
||||
|
||||
self.log.info("User logged in: %s", user.name)
|
||||
user._auth_refreshed = time.monotonic()
|
||||
return user
|
||||
@@ -1448,54 +1468,24 @@ class UserUrlHandler(BaseHandler):
|
||||
delete = non_get
|
||||
|
||||
@web.authenticated
|
||||
@needs_scope("access:servers")
|
||||
async def get(self, user_name, user_path):
|
||||
if not user_path:
|
||||
user_path = '/'
|
||||
current_user = self.current_user
|
||||
|
||||
if (
|
||||
current_user
|
||||
and current_user.name != user_name
|
||||
and current_user.admin
|
||||
and self.settings.get('admin_access', False)
|
||||
):
|
||||
# allow admins to spawn on behalf of users
|
||||
if user_name != current_user.name:
|
||||
user = self.find_user(user_name)
|
||||
if user is None:
|
||||
# no such user
|
||||
raise web.HTTPError(404, "No such user %s" % user_name)
|
||||
raise web.HTTPError(404, f"No such user {user_name}")
|
||||
self.log.info(
|
||||
"Admin %s requesting spawn on behalf of %s",
|
||||
current_user.name,
|
||||
user.name,
|
||||
f"User {current_user.name} requesting spawn on behalf of {user.name}"
|
||||
)
|
||||
admin_spawn = True
|
||||
should_spawn = True
|
||||
redirect_to_self = False
|
||||
else:
|
||||
user = current_user
|
||||
admin_spawn = False
|
||||
# For non-admins, spawn if the user requested is the current user
|
||||
# otherwise redirect users to their own server
|
||||
should_spawn = current_user and current_user.name == user_name
|
||||
redirect_to_self = not should_spawn
|
||||
|
||||
if redirect_to_self:
|
||||
# logged in as a different non-admin user, redirect to user's own server
|
||||
# this is only a stop-gap for a common mistake,
|
||||
# because the same request will be a 403
|
||||
# if the requested server is running
|
||||
self.statsd.incr('redirects.user_to_user', 1)
|
||||
self.log.warning(
|
||||
"User %s requested server for %s, which they don't own",
|
||||
current_user.name,
|
||||
user_name,
|
||||
)
|
||||
target = url_path_join(current_user.url, user_path or '')
|
||||
if self.request.query:
|
||||
target = url_concat(target, parse_qsl(self.request.query))
|
||||
self.redirect(target)
|
||||
return
|
||||
|
||||
# If people visit /user/:user_name directly on the Hub,
|
||||
# the redirects will just loop, because the proxy is bypassed.
|
||||
@@ -1539,14 +1529,10 @@ class UserUrlHandler(BaseHandler):
|
||||
|
||||
# if request is expecting JSON, assume it's an API request and fail with 503
|
||||
# because it won't like the redirect to the pending page
|
||||
if (
|
||||
get_accepted_mimetype(
|
||||
self.request.headers.get('Accept', ''),
|
||||
choices=['application/json', 'text/html'],
|
||||
)
|
||||
== 'application/json'
|
||||
or 'api' in user_path.split('/')
|
||||
):
|
||||
if get_accepted_mimetype(
|
||||
self.request.headers.get('Accept', ''),
|
||||
choices=['application/json', 'text/html'],
|
||||
) == 'application/json' or 'api' in user_path.split('/'):
|
||||
self._fail_api_request(user_name, server_name)
|
||||
return
|
||||
|
||||
@@ -1628,7 +1614,7 @@ class UserUrlHandler(BaseHandler):
|
||||
if redirects:
|
||||
self.log.warning("Redirect loop detected on %s", self.request.uri)
|
||||
# add capped exponential backoff where cap is 10s
|
||||
await asyncio.sleep(min(1 * (2 ** redirects), 10))
|
||||
await asyncio.sleep(min(1 * (2**redirects), 10))
|
||||
# rewrite target url with new `redirects` query value
|
||||
url_parts = urlparse(target)
|
||||
query_parts = parse_qs(url_parts.query)
|
||||
|
@@ -12,6 +12,8 @@ class MetricsHandler(BaseHandler):
|
||||
Handler to serve Prometheus metrics
|
||||
"""
|
||||
|
||||
_accept_token_auth = True
@metrics_authentication
|
||||
async def get(self):
|
||||
self.set_header('Content-Type', CONTENT_TYPE_LATEST)
|
||||
|
@@ -106,22 +106,27 @@ class SpawnHandler(BaseHandler):
|
||||
)
|
||||
|
||||
@web.authenticated
|
||||
async def get(self, for_user=None, server_name=''):
|
||||
def get(self, user_name=None, server_name=''):
|
||||
"""GET renders form for spawning with user-specified options
|
||||
|
||||
or triggers spawn via redirect if there is no form.
|
||||
"""
|
||||
# two-stage to get the right signature for @require_scopes filter on user_name
|
||||
if user_name is None:
|
||||
user_name = self.current_user.name
|
||||
if server_name is None:
|
||||
server_name = ""
|
||||
return self._get(user_name=user_name, server_name=server_name)
|
||||
|
||||
@needs_scope("servers")
|
||||
async def _get(self, user_name, server_name):
|
||||
for_user = user_name
|
||||
|
||||
user = current_user = self.current_user
|
||||
if for_user is not None and for_user != user.name:
|
||||
if not user.admin:
|
||||
raise web.HTTPError(
|
||||
403, "Only admins can spawn on behalf of other users"
|
||||
)
|
||||
|
||||
if for_user != user.name:
|
||||
user = self.find_user(for_user)
|
||||
if user is None:
|
||||
raise web.HTTPError(404, "No such user: %s" % for_user)
|
||||
raise web.HTTPError(404, f"No such user: {for_user}")
|
||||
|
||||
if server_name:
|
||||
if not self.allow_named_servers:
|
||||
@@ -141,15 +146,12 @@ class SpawnHandler(BaseHandler):
|
||||
)
|
||||
|
||||
if not self.allow_named_servers and user.running:
|
||||
url = self.get_next_url(user, default=user.server_url(server_name))
|
||||
url = self.get_next_url(user, default=user.server_url(""))
|
||||
self.log.info("User is running: %s", user.name)
|
||||
self.redirect(url)
|
||||
return
|
||||
|
||||
if server_name is None:
|
||||
server_name = ''
|
||||
|
||||
spawner = user.spawners[server_name]
|
||||
spawner = user.get_spawner(server_name, replace_failed=True)
|
||||
|
||||
pending_url = self._get_pending_url(user, server_name)
|
||||
|
||||
@@ -189,7 +191,6 @@ class SpawnHandler(BaseHandler):
|
||||
spawner._log_name,
|
||||
)
|
||||
options = await maybe_future(spawner.options_from_query(query_options))
|
||||
pending_url = self._get_pending_url(user, server_name)
|
||||
return await self._wrap_spawn_single_user(
|
||||
user, server_name, spawner, pending_url, options
|
||||
)
|
||||
@@ -219,19 +220,24 @@ class SpawnHandler(BaseHandler):
|
||||
)
|
||||
|
||||
@web.authenticated
|
||||
async def post(self, for_user=None, server_name=''):
|
||||
def post(self, user_name=None, server_name=''):
|
||||
"""POST spawns with user-specified options"""
|
||||
if user_name is None:
|
||||
user_name = self.current_user.name
|
||||
if server_name is None:
|
||||
server_name = ""
|
||||
return self._post(user_name=user_name, server_name=server_name)
|
||||
|
||||
@needs_scope("servers")
|
||||
async def _post(self, user_name, server_name):
|
||||
for_user = user_name
|
||||
user = current_user = self.current_user
|
||||
if for_user is not None and for_user != user.name:
|
||||
if not user.admin:
|
||||
raise web.HTTPError(
|
||||
403, "Only admins can spawn on behalf of other users"
|
||||
)
|
||||
if for_user != user.name:
|
||||
user = self.find_user(for_user)
|
||||
if user is None:
|
||||
raise web.HTTPError(404, "No such user: %s" % for_user)
|
||||
|
||||
spawner = user.spawners[server_name]
|
||||
spawner = user.get_spawner(server_name, replace_failed=True)
|
||||
|
||||
if spawner.ready:
|
||||
raise web.HTTPError(400, "%s is already running" % (spawner._log_name))
|
||||
@@ -249,7 +255,7 @@ class SpawnHandler(BaseHandler):
|
||||
self.log.debug(
|
||||
"Triggering spawn with supplied form options for %s", spawner._log_name
|
||||
)
|
||||
options = await maybe_future(spawner.options_from_form(form_options))
|
||||
options = await maybe_future(spawner.run_options_from_form(form_options))
|
||||
pending_url = self._get_pending_url(user, server_name)
|
||||
return await self._wrap_spawn_single_user(
|
||||
user, server_name, spawner, pending_url, options
|
||||
@@ -337,13 +343,11 @@ class SpawnPendingHandler(BaseHandler):
|
||||
"""
|
||||
|
||||
@web.authenticated
|
||||
async def get(self, for_user, server_name=''):
|
||||
@needs_scope("servers")
|
||||
async def get(self, user_name, server_name=''):
|
||||
for_user = user_name
|
||||
user = current_user = self.current_user
|
||||
if for_user is not None and for_user != current_user.name:
|
||||
if not current_user.admin:
|
||||
raise web.HTTPError(
|
||||
403, "Only admins can spawn on behalf of other users"
|
||||
)
|
||||
if for_user != current_user.name:
|
||||
user = self.find_user(for_user)
|
||||
if user is None:
|
||||
raise web.HTTPError(404, "No such user: %s" % for_user)
|
||||
@@ -365,13 +369,9 @@ class SpawnPendingHandler(BaseHandler):
|
||||
auth_state = await user.get_auth_state()
|
||||
|
||||
# First, check for previous failure.
|
||||
if (
|
||||
not spawner.active
|
||||
and spawner._spawn_future
|
||||
and spawner._spawn_future.done()
|
||||
and spawner._spawn_future.exception()
|
||||
):
|
||||
# Condition: spawner not active and _spawn_future exists and contains an Exception
|
||||
if not spawner.active and spawner._failed:
|
||||
# Condition: spawner not active and last spawn failed
|
||||
# (failure is available as spawner._spawn_future.exception()).
|
||||
# Implicit spawn on /user/:name is not allowed if the user's last spawn failed.
|
||||
# We should point the user to Home if the most recent spawn failed.
|
||||
exc = spawner._spawn_future.exception()
|
||||
@@ -387,6 +387,7 @@ class SpawnPendingHandler(BaseHandler):
|
||||
server_name=server_name,
|
||||
spawn_url=spawn_url,
|
||||
failed=True,
|
||||
failed_html_message=getattr(exc, 'jupyterhub_html_message', ''),
|
||||
failed_message=getattr(exc, 'jupyterhub_message', ''),
|
||||
exception=exc,
|
||||
)
|
||||
@@ -497,7 +498,7 @@ class TokenPageHandler(BaseHandler):
|
||||
continue
|
||||
if not token.client_id:
|
||||
# token should have been deleted when client was deleted
|
||||
self.log.warning("Deleting stale oauth token {token}")
|
||||
self.log.warning(f"Deleting stale oauth token {token}")
|
||||
self.db.delete(token)
|
||||
self.db.commit()
|
||||
continue
|
||||
|
@@ -536,9 +536,7 @@ class Hashed(Expiring):
|
||||
prefix = token[: cls.prefix_length]
|
||||
# since we can't filter on hashed values, filter on prefix
|
||||
# so we aren't comparing with all tokens
|
||||
prefix_match = db.query(cls).filter(
|
||||
bindparam('prefix', prefix).startswith(cls.prefix)
|
||||
)
|
||||
prefix_match = db.query(cls).filter_by(prefix=prefix)
|
||||
prefix_match = prefix_match.filter(
|
||||
or_(cls.expires_at == None, cls.expires_at >= cls.now())
|
||||
)
|
||||
|
@@ -45,6 +45,7 @@ def get_default_roles():
|
||||
'access:services',
|
||||
'access:servers',
|
||||
'read:roles',
|
||||
'read:metrics',
|
||||
],
|
||||
},
|
||||
{
|
||||
@@ -402,6 +403,10 @@ def _token_allowed_role(db, token, role):
|
||||
if owner is None:
|
||||
raise ValueError(f"Owner not found for {token}")
|
||||
|
||||
if role in owner.roles:
|
||||
# shortcut: token is assigned an exact role the owner has
|
||||
return True
|
||||
|
||||
expanded_scopes = _get_subscopes(role, owner=owner)
|
||||
|
||||
implicit_permissions = {'inherit', 'read:inherit'}
|
||||
|
@@ -131,6 +131,9 @@ scope_definitions = {
|
||||
'description': 'Read information about the proxy’s routing table, sync the Hub with the proxy and notify the Hub about a new proxy.'
|
||||
},
|
||||
'shutdown': {'description': 'Shutdown the hub.'},
|
||||
'read:metrics': {
|
||||
'description': "Read prometheus metrics.",
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
|
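These hunks add a read:metrics scope (included in the default admin role's scopes above) and require it for /hub/metrics when authenticate_prometheus is enabled. A jupyterhub_config.py sketch granting that scope to a monitoring service (role name, service name, and token are placeholders):

c = get_config()  # noqa

c.JupyterHub.services = [
    {
        "name": "prometheus",
        "api_token": "REPLACE_WITH_SECRET_TOKEN",
    }
]
c.JupyterHub.load_roles = [
    {
        "name": "metrics-scraper",
        "scopes": ["read:metrics"],
        "services": ["prometheus"],
    }
]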
@@ -501,11 +501,17 @@ class HubAuth(SingletonConfigurable):
|
||||
auth_header_name = 'Authorization'
|
||||
auth_header_pat = re.compile(r'(?:token|bearer)\s+(.+)', re.IGNORECASE)
|
||||
|
||||
def get_token(self, handler):
|
||||
"""Get the user token from a request
|
||||
def get_token(self, handler, in_cookie=True):
|
||||
"""Get the token authenticating a request
|
||||
|
||||
.. versionchanged:: 2.2
|
||||
in_cookie added.
|
||||
Previously, only URL params and header were considered.
|
||||
Pass `in_cookie=False` to preserve that behavior.
|
||||
|
||||
- in URL parameters: ?token=<token>
|
||||
- in header: Authorization: token <token>
|
||||
- in cookie (stored after oauth), if in_cookie is True
|
||||
"""
|
||||
|
||||
user_token = handler.get_argument('token', '')
|
||||
@@ -516,8 +522,14 @@ class HubAuth(SingletonConfigurable):
|
||||
)
|
||||
if m:
|
||||
user_token = m.group(1)
|
||||
if not user_token and in_cookie:
|
||||
user_token = self._get_token_cookie(handler)
|
||||
return user_token
|
||||
|
||||
def _get_token_cookie(self, handler):
|
||||
"""Base class doesn't store tokens in cookies"""
|
||||
return None
|
||||
|
||||
def _get_user_cookie(self, handler):
|
||||
"""Get the user model from a cookie"""
|
||||
# overridden in HubOAuth to store the access token after oauth
|
||||
@@ -553,8 +565,10 @@ class HubAuth(SingletonConfigurable):
|
||||
handler._cached_hub_user = user_model = None
|
||||
session_id = self.get_session_id(handler)
|
||||
|
||||
# check token first
|
||||
token = self.get_token(handler)
|
||||
# check token first, ignoring cookies
|
||||
# because some checks are different when a request
|
||||
# is token-authenticated (CORS-related)
|
||||
token = self.get_token(handler, in_cookie=False)
|
||||
if token:
|
||||
user_model = self.user_for_token(token, session_id=session_id)
|
||||
if user_model:
|
||||
@@ -614,11 +628,18 @@ class HubOAuth(HubAuth):
|
||||
"""
|
||||
return self.cookie_name + '-oauth-state'
|
||||
|
||||
def _get_user_cookie(self, handler):
|
||||
def _get_token_cookie(self, handler):
|
||||
"""Base class doesn't store tokens in cookies"""
|
||||
token = handler.get_secure_cookie(self.cookie_name)
|
||||
if token:
|
||||
# decode cookie bytes
|
||||
token = token.decode('ascii', 'replace')
|
||||
return token
|
||||
|
||||
def _get_user_cookie(self, handler):
|
||||
token = self._get_token_cookie(handler)
|
||||
session_id = self.get_session_id(handler)
|
||||
if token:
|
||||
token = token.decode('ascii', 'replace')
|
||||
user_model = self.user_for_token(token, session_id=session_id)
|
||||
if user_model is None:
|
||||
app_log.warning("Token stored in cookie may have expired")
|
||||
|
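HubAuth.get_token now also falls back to the OAuth cookie unless in_cookie=False, and HubOAuth gains the matching _get_token_cookie. A small sketch of how a Hub service might use it (the handler is whatever tornado handler the service is processing; HubOAuth configuration via environment variables is assumed):

from jupyterhub.services.auth import HubOAuth

hub_auth = HubOAuth()  # reads JUPYTERHUB_API_TOKEN etc. from the environment

def token_for_request(handler):
    # URL parameter, Authorization header, and (new in 2.2) the oauth cookie
    token = hub_auth.get_token(handler)
    # pre-2.2 behavior, e.g. for CORS-sensitive checks: skip the cookie
    header_token_only = hub_auth.get_token(handler, in_cookie=False)
    return token or header_token_only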
@@ -29,9 +29,9 @@ else:
|
||||
try:
|
||||
App = import_item(JUPYTERHUB_SINGLEUSER_APP)
|
||||
except ImportError as e:
|
||||
continue
|
||||
if _import_error is None:
|
||||
_import_error = e
|
||||
continue
|
||||
else:
|
||||
break
|
||||
if App is None:
|
||||
|
@@ -16,7 +16,6 @@ import random
|
||||
import secrets
|
||||
import sys
|
||||
import warnings
|
||||
from datetime import datetime
|
||||
from datetime import timezone
|
||||
from importlib import import_module
|
||||
from textwrap import dedent
|
||||
@@ -183,6 +182,7 @@ page_template = """
|
||||
|
||||
<span>
|
||||
<a href='{{hub_control_panel_url}}'
|
||||
id='jupyterhub-control-panel-link'
|
||||
class='btn btn-default btn-sm navbar-btn pull-right'
|
||||
style='margin-right: 4px; margin-left: 2px;'>
|
||||
Control Panel
|
||||
@@ -493,7 +493,7 @@ class SingleUserNotebookAppMixin(Configurable):
|
||||
i,
|
||||
RETRIES,
|
||||
)
|
||||
await asyncio.sleep(min(2 ** i, 16))
|
||||
await asyncio.sleep(min(2**i, 16))
|
||||
else:
|
||||
break
|
||||
else:
|
||||
@@ -634,8 +634,15 @@ class SingleUserNotebookAppMixin(Configurable):
|
||||
# disable trash by default
|
||||
# this can be re-enabled by config
|
||||
self.config.FileContentsManager.delete_to_trash = False
|
||||
# load default-url env at higher priority than `@default`,
|
||||
# which may have their own _defaults_ which should not override explicit default_url config
|
||||
# via e.g. c.Spawner.default_url. Seen in jupyterlab's SingleUserLabApp.
|
||||
default_url = os.environ.get("JUPYTERHUB_DEFAULT_URL")
|
||||
if default_url:
|
||||
self.config[self.__class__.__name__].default_url = default_url
|
||||
self._log_app_versions()
|
||||
return super().initialize(argv)
|
||||
super().initialize(argv)
|
||||
self.patch_templates()
|
||||
|
||||
def start(self):
|
||||
self.log.info("Starting jupyterhub-singleuser server version %s", __version__)
|
||||
@@ -680,6 +687,7 @@ class SingleUserNotebookAppMixin(Configurable):
|
||||
s['hub_prefix'] = self.hub_prefix
|
||||
s['hub_host'] = self.hub_host
|
||||
s['hub_auth'] = self.hub_auth
|
||||
s['page_config_hook'] = self.page_config_hook
|
||||
csp_report_uri = s['csp_report_uri'] = self.hub_host + url_path_join(
|
||||
self.hub_prefix, 'security/csp-report'
|
||||
)
|
||||
@@ -705,7 +713,18 @@ class SingleUserNotebookAppMixin(Configurable):
|
||||
|
||||
# apply X-JupyterHub-Version to *all* request handlers (even redirects)
|
||||
self.patch_default_headers()
|
||||
self.patch_templates()
|
||||
|
||||
def page_config_hook(self, handler, page_config):
|
||||
"""JupyterLab page config hook
|
||||
|
||||
Adds JupyterHub info to page config.
|
||||
|
||||
Places the JupyterHub API token in PageConfig.token.
|
||||
|
||||
Only has effect on jupyterlab_server >=2.9
|
||||
"""
|
||||
page_config["token"] = self.hub_auth.get_token(handler) or ""
|
||||
return page_config
|
||||
|
||||
def patch_default_headers(self):
|
||||
if hasattr(RequestHandler, '_orig_set_default_headers'):
|
||||
@@ -726,19 +745,32 @@ class SingleUserNotebookAppMixin(Configurable):
|
||||
)
|
||||
self.jinja_template_vars['hub_host'] = self.hub_host
|
||||
self.jinja_template_vars['hub_prefix'] = self.hub_prefix
|
||||
env = self.web_app.settings['jinja2_env']
|
||||
self.jinja_template_vars[
|
||||
'hub_control_panel_url'
|
||||
] = self.hub_host + url_path_join(self.hub_prefix, 'home')
|
||||
|
||||
env.globals['hub_control_panel_url'] = self.hub_host + url_path_join(
|
||||
self.hub_prefix, 'home'
|
||||
)
|
||||
settings = self.web_app.settings
|
||||
# patch classic notebook jinja env
|
||||
jinja_envs = []
|
||||
if 'jinja2_env' in settings:
|
||||
# default jinja env (should we do this on jupyter-server, or only notebook?)
|
||||
jinja_envs.append(settings['jinja2_env'])
|
||||
for ext_name in ("notebook", "nbclassic"):
|
||||
env_name = f"{ext_name}_jinja2_env"
|
||||
if env_name in settings:
|
||||
# when running with jupyter-server, classic notebook (nbclassic server extension or notebook v7)
|
||||
# gets its own jinja env, which needs the same patch
|
||||
jinja_envs.append(settings[env_name])
|
||||
|
||||
# patch jinja env loading to modify page template
|
||||
# patch jinja env loading to get modified template, only for base page.html
|
||||
def get_page(name):
|
||||
if name == 'page.html':
|
||||
return page_template
|
||||
|
||||
orig_loader = env.loader
|
||||
env.loader = ChoiceLoader([FunctionLoader(get_page), orig_loader])
|
||||
for jinja_env in jinja_envs:
|
||||
jinja_env.loader = ChoiceLoader(
|
||||
[FunctionLoader(get_page), jinja_env.loader]
|
||||
)
|
||||
|
||||
def load_server_extensions(self):
|
||||
# Loading LabApp sets $JUPYTERHUB_API_TOKEN on load, which is incorrect
|
||||
|
@@ -11,6 +11,7 @@ import shutil
|
||||
import signal
|
||||
import sys
|
||||
import warnings
|
||||
from inspect import signature
|
||||
from subprocess import Popen
|
||||
from tempfile import mkdtemp
|
||||
from urllib.parse import urlparse
|
||||
@@ -96,10 +97,15 @@ class Spawner(LoggingConfigurable):
|
||||
|
||||
Used in logging for consistency with named servers.
|
||||
"""
|
||||
if self.name:
|
||||
return f'{self.user.name}:{self.name}'
|
||||
if self.user:
|
||||
user_name = self.user.name
|
||||
else:
|
||||
return self.user.name
|
||||
# no user, only happens in mock tests
|
||||
user_name = "(no user)"
|
||||
if self.name:
|
||||
return f"{user_name}:{self.name}"
|
||||
else:
|
||||
return user_name
|
||||
|
||||
@property
|
||||
def _failed(self):
|
||||
@@ -183,17 +189,38 @@ class Spawner(LoggingConfigurable):
|
||||
def last_activity(self):
|
||||
return self.orm_spawner.last_activity
|
||||
|
||||
# Spawner.server is a wrapper of the ORM orm_spawner.server
|
||||
# make sure it's always in sync with the underlying state
|
||||
# this is harder to do with traitlets,
|
||||
# which do not run on every access, only on set and first-get
|
||||
_server = None
|
||||
|
||||
@property
|
||||
def server(self):
|
||||
if hasattr(self, '_server'):
|
||||
# always check that we're in sync with orm_spawner
|
||||
if not self.orm_spawner:
|
||||
# no ORM spawner, nothing to check
|
||||
return self._server
|
||||
if self.orm_spawner and self.orm_spawner.server:
|
||||
return Server(orm_server=self.orm_spawner.server)
|
||||
|
||||
orm_server = self.orm_spawner.server
|
||||
|
||||
if orm_server is not None and (
|
||||
self._server is None or orm_server is not self._server.orm_server
|
||||
):
|
||||
# self._server is not connected to orm_spawner
|
||||
self._server = Server(orm_server=self.orm_spawner.server)
|
||||
elif orm_server is None:
|
||||
# no ORM server, clear it
|
||||
self._server = None
|
||||
return self._server
|
||||
|
||||
@server.setter
|
||||
def server(self, server):
|
||||
self._server = server
|
||||
if self.orm_spawner:
|
||||
if self.orm_spawner is not None:
|
||||
if server is not None and server.orm_server == self.orm_spawner.server:
|
||||
# no change
|
||||
return
|
||||
if self.orm_spawner.server is not None:
|
||||
# delete the old value
|
||||
db = inspect(self.orm_spawner.server).session
|
||||
@@ -201,7 +228,13 @@ class Spawner(LoggingConfigurable):
|
||||
if server is None:
|
||||
self.orm_spawner.server = None
|
||||
else:
|
||||
if server.orm_server is None:
|
||||
self.log.warning(f"No ORM server for {self._log_name}")
|
||||
self.orm_spawner.server = server.orm_server
|
||||
elif server is not None:
|
||||
self.log.warning(
|
||||
f"Setting Spawner.server for {self._log_name} with no underlying orm_spawner"
|
||||
)
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
@@ -424,6 +457,13 @@ class Spawner(LoggingConfigurable):
|
||||
def _default_options_from_form(self, form_data):
|
||||
return form_data
|
||||
|
||||
def run_options_from_form(self, form_data):
|
||||
sig = signature(self.options_from_form)
|
||||
if 'spawner' in sig.parameters:
|
||||
return self.options_from_form(form_data, spawner=self)
|
||||
else:
|
||||
return self.options_from_form(form_data)
|
||||
|
||||
def options_from_query(self, query_data):
|
||||
"""Interpret query arguments passed to /spawn
|
||||
|
||||
@@ -836,9 +876,6 @@ class Spawner(LoggingConfigurable):
|
||||
|
||||
if self.server:
|
||||
base_url = self.server.base_url
|
||||
if self.ip or self.port:
|
||||
self.server.ip = self.ip
|
||||
self.server.port = self.port
|
||||
env['JUPYTERHUB_SERVICE_PREFIX'] = self.server.base_url
|
||||
else:
|
||||
# this should only occur in mock/testing scenarios
|
||||
|
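run_options_from_form inspects the signature of options_from_form and passes spawner= only when the callable accepts it, so existing single-argument hooks keep working. A sketch of a two-argument hook (jupyterhub_config.py style; the form field and option names are made up, and configuring the hook this way is an assumption):

def options_from_form(form_data, spawner=None):
    # form values arrive as lists of strings
    options = {"image": form_data.get("image", [""])[0]}
    if spawner is not None:
        # the spawner (and through it the user) is now available to the hook
        options["requested_by"] = spawner.user.name
    return options

c = get_config()  # noqa
c.Spawner.options_from_form = options_from_form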
@@ -57,12 +57,14 @@ from .utils import add_user
|
||||
_db = None
|
||||
|
||||
|
||||
def pytest_collection_modifyitems(items):
|
||||
def _pytest_collection_modifyitems(items):
|
||||
"""This function is automatically run by pytest passing all collected test
|
||||
functions.
|
||||
|
||||
We use it to add asyncio marker to all async tests and assert we don't use
|
||||
test functions that are async generators which wouldn't make sense.
|
||||
|
||||
It is no longer required with pytest-asyncio >= 0.17
|
||||
"""
|
||||
for item in items:
|
||||
if inspect.iscoroutinefunction(item.obj):
|
||||
@@ -70,6 +72,13 @@ def pytest_collection_modifyitems(items):
|
||||
assert not inspect.isasyncgenfunction(item.obj)
|
||||
|
||||
|
||||
if sys.version_info < (3, 7):
|
||||
# apply pytest-asyncio's 'auto' mode on Python 3.6.
|
||||
# 'auto' mode is new in pytest-asyncio 0.17,
|
||||
# which requires Python 3.7.
|
||||
pytest_collection_modifyitems = _pytest_collection_modifyitems
|
||||
|
||||
|
||||
@fixture(scope='module')
|
||||
def ssl_tmpdir(tmpdir_factory):
|
||||
return tmpdir_factory.mktemp('ssl')
|
||||
@@ -182,6 +191,8 @@ def cleanup_after(request, io_loop):
|
||||
if not MockHub.initialized():
|
||||
return
|
||||
app = MockHub.instance()
|
||||
if app.db_file.closed:
|
||||
return
|
||||
for uid, user in list(app.users.items()):
|
||||
for name, spawner in list(user.spawners.items()):
|
||||
if spawner.active:
|
||||
|
@@ -333,26 +333,28 @@ class MockHub(JupyterHub):
|
||||
roles.assign_default_roles(self.db, entity=user)
|
||||
self.db.commit()
|
||||
|
||||
def stop(self):
|
||||
super().stop()
|
||||
_stop_called = False
|
||||
|
||||
def stop(self):
|
||||
if self._stop_called:
|
||||
return
|
||||
self._stop_called = True
|
||||
# run cleanup in a background thread
|
||||
# to avoid multiple eventloops in the same thread errors from asyncio
|
||||
|
||||
def cleanup():
|
||||
asyncio.set_event_loop(asyncio.new_event_loop())
|
||||
loop = IOLoop.current()
|
||||
loop.run_sync(self.cleanup)
|
||||
loop = asyncio.new_event_loop()
|
||||
loop.run_until_complete(self.cleanup())
|
||||
loop.close()
|
||||
|
||||
pool = ThreadPoolExecutor(1)
|
||||
f = pool.submit(cleanup)
|
||||
# wait for cleanup to finish
|
||||
f.result()
|
||||
pool.shutdown()
|
||||
with ThreadPoolExecutor(1) as pool:
|
||||
f = pool.submit(cleanup)
|
||||
# wait for cleanup to finish
|
||||
f.result()
|
||||
|
||||
# ignore the call that will fire in atexit
|
||||
self.cleanup = lambda: None
|
||||
# prevent redundant atexit from running
|
||||
self._atexit_ran = True
|
||||
super().stop()
|
||||
self.db_file.close()
|
||||
|
||||
async def login_user(self, name):
|
||||
|
@@ -471,6 +471,42 @@ async def test_get_users_state_filter(app, state):
|
||||
assert usernames == expected
@mark.user
|
||||
async def test_get_users_name_filter(app):
|
||||
db = app.db
|
||||
|
||||
add_user(db, app=app, name='q')
|
||||
add_user(db, app=app, name='qr')
|
||||
add_user(db, app=app, name='qrs')
|
||||
add_user(db, app=app, name='qrst')
|
||||
added_usernames = {'q', 'qr', 'qrs', 'qrst'}
|
||||
|
||||
r = await api_request(app, 'users')
|
||||
assert r.status_code == 200
|
||||
response_users = [u.get("name") for u in r.json()]
|
||||
assert added_usernames.intersection(response_users) == added_usernames
|
||||
|
||||
r = await api_request(app, 'users?name_filter=q')
|
||||
assert r.status_code == 200
|
||||
response_users = [u.get("name") for u in r.json()]
|
||||
assert response_users == ['q', 'qr', 'qrs', 'qrst']
|
||||
|
||||
r = await api_request(app, 'users?name_filter=qr')
|
||||
assert r.status_code == 200
|
||||
response_users = [u.get("name") for u in r.json()]
|
||||
assert response_users == ['qr', 'qrs', 'qrst']
|
||||
|
||||
r = await api_request(app, 'users?name_filter=qrs')
|
||||
assert r.status_code == 200
|
||||
response_users = [u.get("name") for u in r.json()]
|
||||
assert response_users == ['qrs', 'qrst']
|
||||
|
||||
r = await api_request(app, 'users?name_filter=qrst')
|
||||
assert r.status_code == 200
|
||||
response_users = [u.get("name") for u in r.json()]
|
||||
assert response_users == ['qrst']
|
||||
|
||||
|
||||
@mark.user
|
||||
async def test_get_self(app):
|
||||
db = app.db
|
||||
@@ -1030,7 +1066,7 @@ async def test_never_spawn(app, no_patience, never_spawn):
|
||||
assert not app_user.spawner._spawn_pending
|
||||
status = await app_user.spawner.poll()
|
||||
assert status is not None
|
||||
# failed spawn should decrements pending count
|
||||
# failed spawn should decrement pending count
|
||||
assert app.users.count_active_users()['pending'] == 0
|
||||
|
||||
|
||||
@@ -1039,9 +1075,16 @@ async def test_bad_spawn(app, bad_spawn):
|
||||
name = 'prim'
|
||||
user = add_user(db, app=app, name=name)
|
||||
r = await api_request(app, 'users', name, 'server', method='post')
|
||||
# check that we don't re-use spawners that failed
|
||||
user.spawners[''].reused = True
|
||||
assert r.status_code == 500
|
||||
assert app.users.count_active_users()['pending'] == 0
|
||||
|
||||
r = await api_request(app, 'users', name, 'server', method='post')
|
||||
# check that we don't re-use spawners that failed
|
||||
spawner = user.spawners['']
|
||||
assert not getattr(spawner, 'reused', False)
|
||||
|
||||
|
||||
async def test_spawn_nosuch_user(app):
|
||||
r = await api_request(app, 'users', "nosuchuser", 'server', method='post')
|
||||
@@ -1806,6 +1849,38 @@ async def test_group_add_delete_users(app):
|
||||
assert sorted(u.name for u in group.users) == sorted(names[2:])
@mark.group
|
||||
async def test_auth_managed_groups(request, app, group, user):
|
||||
group.users.append(user)
|
||||
app.db.commit()
|
||||
app.authenticator.manage_groups = True
|
||||
request.addfinalizer(lambda: setattr(app.authenticator, "manage_groups", False))
|
||||
# create groups
|
||||
r = await api_request(app, 'groups', method='post')
|
||||
assert r.status_code == 400
|
||||
r = await api_request(app, 'groups/newgroup', method='post')
|
||||
assert r.status_code == 400
|
||||
# delete groups
|
||||
r = await api_request(app, f'groups/{group.name}', method='delete')
|
||||
assert r.status_code == 400
|
||||
# add users to group
|
||||
r = await api_request(
|
||||
app,
|
||||
f'groups/{group.name}/users',
|
||||
method='post',
|
||||
data=json.dumps({"users": [user.name]}),
|
||||
)
|
||||
assert r.status_code == 400
|
||||
# remove users from group
|
||||
r = await api_request(
|
||||
app,
|
||||
f'groups/{group.name}/users',
|
||||
method='delete',
|
||||
data=json.dumps({"users": [user.name]}),
|
||||
)
|
||||
assert r.status_code == 400
|
||||
|
||||
|
||||
# -----------------
|
||||
# Service API tests
|
||||
# -----------------
|
||||
@@ -2029,14 +2104,23 @@ def test_shutdown(app):
|
||||
)
|
||||
return r
|
||||
|
||||
real_stop = loop.stop
|
||||
real_stop = loop.asyncio_loop.stop
|
||||
|
||||
def stop():
|
||||
stop.called = True
|
||||
loop.call_later(1, real_stop)
|
||||
|
||||
with mock.patch.object(loop, 'stop', stop):
|
||||
real_cleanup = app.cleanup
|
||||
|
||||
def cleanup():
|
||||
cleanup.called = True
|
||||
return real_cleanup()
|
||||
|
||||
app.cleanup = cleanup
|
||||
|
||||
with mock.patch.object(loop.asyncio_loop, 'stop', stop):
|
||||
r = loop.run_sync(shutdown, timeout=5)
|
||||
r.raise_for_status()
|
||||
reply = r.json()
|
||||
assert cleanup.called
|
||||
assert stop.called
|
||||
|
@@ -6,7 +6,6 @@ import os
|
||||
import re
|
||||
import sys
|
||||
import time
|
||||
from distutils.version import LooseVersion as V
|
||||
from subprocess import check_output
|
||||
from subprocess import PIPE
|
||||
from subprocess import Popen
|
||||
@@ -33,7 +32,7 @@ def test_help_all():
|
||||
assert '--JupyterHub.ip' in out
|
||||
|
||||
|
||||
@pytest.mark.skipif(V(traitlets.__version__) < V('5'), reason="requires traitlets 5")
|
||||
@pytest.mark.skipif(traitlets.version_info < (5,), reason="requires traitlets 5")
|
||||
def test_show_config(tmpdir):
|
||||
tmpdir.chdir()
|
||||
p = Popen(
|
||||
|
@@ -7,6 +7,7 @@ from urllib.parse import urlparse
|
||||
|
||||
import pytest
|
||||
from requests import HTTPError
|
||||
from traitlets import Any
|
||||
from traitlets.config import Config
|
||||
|
||||
from .mocking import MockPAMAuthenticator
|
||||
@@ -14,6 +15,7 @@ from .mocking import MockStructGroup
|
||||
from .mocking import MockStructPasswd
|
||||
from .utils import add_user
|
||||
from .utils import async_requests
|
||||
from .utils import get_page
|
||||
from .utils import public_url
|
||||
from jupyterhub import auth
|
||||
from jupyterhub import crypto
|
||||
@@ -527,3 +529,71 @@ async def test_nullauthenticator(app):
|
||||
r = await async_requests.get(public_url(app))
|
||||
assert urlparse(r.url).path.endswith("/hub/login")
|
||||
assert r.status_code == 403
|
||||
|
||||
|
||||
class MockGroupsAuthenticator(auth.Authenticator):
|
||||
authenticated_groups = Any()
|
||||
refresh_groups = Any()
|
||||
|
||||
manage_groups = True
|
||||
|
||||
def authenticate(self, handler, data):
|
||||
return {
|
||||
"name": data["username"],
|
||||
"groups": self.authenticated_groups,
|
||||
}
|
||||
|
||||
async def refresh_user(self, user, handler):
|
||||
return {
|
||||
"name": user.name,
|
||||
"groups": self.refresh_groups,
|
||||
}
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"authenticated_groups, refresh_groups",
|
||||
[
|
||||
(None, None),
|
||||
(["auth1"], None),
|
||||
(None, ["auth1"]),
|
||||
(["auth1"], ["auth1", "auth2"]),
|
||||
(["auth1", "auth2"], ["auth1"]),
|
||||
(["auth1", "auth2"], ["auth3"]),
|
||||
(["auth1", "auth2"], ["auth3"]),
|
||||
],
|
||||
)
|
||||
async def test_auth_managed_groups(
|
||||
app, user, group, authenticated_groups, refresh_groups
|
||||
):
|
||||
|
||||
authenticator = MockGroupsAuthenticator(
|
||||
parent=app,
|
||||
authenticated_groups=authenticated_groups,
|
||||
refresh_groups=refresh_groups,
|
||||
)
|
||||
|
||||
user.groups.append(group)
|
||||
app.db.commit()
|
||||
before_groups = [group.name]
|
||||
if authenticated_groups is None:
|
||||
expected_authenticated_groups = before_groups
|
||||
else:
|
||||
expected_authenticated_groups = authenticated_groups
|
||||
if refresh_groups is None:
|
||||
expected_refresh_groups = expected_authenticated_groups
|
||||
else:
|
||||
expected_refresh_groups = refresh_groups
|
||||
|
||||
with mock.patch.dict(app.tornado_settings, {"authenticator": authenticator}):
|
||||
cookies = await app.login_user(user.name)
|
||||
assert not app.db.dirty
|
||||
groups = sorted(g.name for g in user.groups)
|
||||
assert groups == expected_authenticated_groups
|
||||
|
||||
# force refresh_user on next request
|
||||
user._auth_refreshed -= 10 + app.authenticator.auth_refresh_age
|
||||
r = await get_page('home', app, cookies=cookies, allow_redirects=False)
|
||||
assert r.status_code == 200
|
||||
assert not app.db.dirty
|
||||
groups = sorted(g.name for g in user.groups)
|
||||
assert groups == expected_refresh_groups
|
||||
|
@@ -1,9 +1,13 @@
|
||||
import json
|
||||
from unittest import mock
|
||||
|
||||
import pytest
|
||||
|
||||
from .utils import add_user
|
||||
from .utils import api_request
|
||||
from .utils import get_page
|
||||
from jupyterhub import metrics
|
||||
from jupyterhub import orm
|
||||
from jupyterhub import roles
|
||||
|
||||
|
||||
async def test_total_users(app):
|
||||
@@ -32,3 +36,42 @@ async def test_total_users(app):
|
||||
|
||||
sample = metrics.TOTAL_USERS.collect()[0].samples[0]
|
||||
assert sample.value == num_users
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"authenticate_prometheus, authenticated, authorized, success",
|
||||
[
|
||||
(True, True, True, True),
|
||||
(True, True, False, False),
|
||||
(True, False, False, False),
|
||||
(False, True, True, True),
|
||||
(False, False, False, True),
|
||||
],
|
||||
)
|
||||
async def test_metrics_auth(
|
||||
app,
|
||||
authenticate_prometheus,
|
||||
authenticated,
|
||||
authorized,
|
||||
success,
|
||||
create_temp_role,
|
||||
user,
|
||||
):
|
||||
if authorized:
|
||||
role = create_temp_role(["read:metrics"])
|
||||
roles.grant_role(app.db, user, role)
|
||||
|
||||
headers = {}
|
||||
if authenticated:
|
||||
token = user.new_api_token()
|
||||
headers["Authorization"] = f"token {token}"
|
||||
|
||||
with mock.patch.dict(
|
||||
app.tornado_settings, {"authenticate_prometheus": authenticate_prometheus}
|
||||
):
|
||||
r = await get_page("metrics", app, headers=headers)
|
||||
if success:
|
||||
assert r.status_code == 200
|
||||
else:
|
||||
assert r.status_code == 403
|
||||
assert 'read:metrics' in r.text
|
||||
|
@@ -12,6 +12,7 @@ from tornado.escape import url_escape
|
||||
from tornado.httputil import url_concat
|
||||
|
||||
from .. import orm
|
||||
from .. import roles
|
||||
from .. import scopes
|
||||
from ..auth import Authenticator
|
||||
from ..handlers import BaseHandler
|
||||
@@ -20,7 +21,6 @@ from ..utils import url_path_join as ujoin
|
||||
from .mocking import FalsyCallableFormSpawner
|
||||
from .mocking import FormSpawner
|
||||
from .test_api import next_event
|
||||
from .utils import add_user
|
||||
from .utils import api_request
|
||||
from .utils import async_requests
|
||||
from .utils import AsyncSession
|
||||
@@ -48,16 +48,16 @@ async def test_root_auth(app):
|
||||
# if spawning was quick, there will be one more entry that's public_url(user)
|
||||
|
||||
|
||||
async def test_root_redirect(app):
|
||||
async def test_root_redirect(app, user):
|
||||
name = 'wash'
|
||||
cookies = await app.login_user(name)
|
||||
next_url = ujoin(app.base_url, 'user/other/test.ipynb')
|
||||
next_url = ujoin(app.base_url, f'user/{user.name}/test.ipynb')
|
||||
url = '/?' + urlencode({'next': next_url})
|
||||
r = await get_page(url, app, cookies=cookies)
|
||||
path = urlparse(r.url).path
|
||||
assert path == ujoin(app.base_url, 'hub/user/%s/test.ipynb' % name)
|
||||
# serve "server not running" page, which has status 424
|
||||
assert r.status_code == 424
|
||||
assert path == ujoin(app.base_url, f'hub/user/{user.name}/test.ipynb')
|
||||
# preserves choice to requested user, which 404s as unavailable without access
|
||||
assert r.status_code == 404
|
||||
|
||||
|
||||
async def test_root_default_url_noauth(app):
|
||||
@@ -128,11 +128,20 @@ async def test_admin_sort(app, sort):
    assert r.status_code == 200


async def test_spawn_redirect(app):
@pytest.mark.parametrize("last_failed", [True, False])
async def test_spawn_redirect(app, last_failed):
    name = 'wash'
    cookies = await app.login_user(name)
    u = app.users[orm.User.find(app.db, name)]

    if last_failed:
        # mock a failed spawn
        last_spawner = u.spawners['']
        last_spawner._spawn_future = asyncio.Future()
        last_spawner._spawn_future.set_exception(RuntimeError("I failed!"))
    else:
        last_spawner = None

    status = await u.spawner.poll()
    assert status is not None

@@ -141,6 +150,10 @@ async def test_spawn_redirect(app):
    r.raise_for_status()
    print(urlparse(r.url))
    path = urlparse(r.url).path

    # ensure we got a new spawner
    assert u.spawners[''] is not last_spawner

    # make sure we visited hub/spawn-pending after spawn
    # if spawn was really quick, we might get redirected all the way to the running server,
    # so check history instead of r.url
@@ -203,13 +216,34 @@ async def test_spawn_handler_access(app):
    r.raise_for_status()


async def test_spawn_admin_access(app, admin_access):
    """GET /user/:name as admin with admin-access spawns user's server"""
    cookies = await app.login_user('admin')
    name = 'mariel'
    user = add_user(app.db, app=app, name=name)
    app.db.commit()
@pytest.mark.parametrize("has_access", ["all", "user", "group", False])
async def test_spawn_other_user(
    app, user, username, group, create_temp_role, has_access
):
    """GET /user/:name as another user with access to spawns user's server"""
    cookies = await app.login_user(username)
    requester = app.users[username]
    name = user.name

    if has_access:
        if has_access == "group":
            group.users.append(user)
            app.db.commit()
            scopes = [
                f"access:servers!group={group.name}",
                f"servers!group={group.name}",
            ]
        elif has_access == "all":
            scopes = ["access:servers", "servers"]
        elif has_access == "user":
            scopes = [f"access:servers!user={user.name}", f"servers!user={user.name}"]
        role = create_temp_role(scopes)
        roles.grant_role(app.db, requester, role)

    r = await get_page('spawn/' + name, app, cookies=cookies)
    if not has_access:
        assert r.status_code == 404
        return
    r.raise_for_status()

    while '/spawn-pending/' in r.url:
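Editor's note: the `access:servers` / `servers` scopes above can be filtered with `!user=` or `!group=`, or left unfiltered for all servers. Outside the test fixtures, a deployment could grant the same access with a role in `jupyterhub_config.py`; the sketch below uses made-up role and group names.

# jupyterhub_config.py -- illustrative only; names are hypothetical
c.JupyterHub.load_roles = [
    {
        # let members of the 'teachers' group access and manage
        # servers belonging to members of the 'students' group
        "name": "teacher-access",
        "scopes": [
            "access:servers!group=students",
            "servers!group=students",
        ],
        "groups": ["teachers"],
    }
]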
@@ -237,6 +271,25 @@ async def test_spawn_page(app):
    assert FormSpawner.options_form in r.text


async def test_spawn_page_after_failed(app, user):
    cookies = await app.login_user(user.name)

    # mock a failed spawn
    last_spawner = user.spawners['']
    last_spawner._spawn_future = asyncio.Future()
    last_spawner._spawn_future.set_exception(RuntimeError("I failed!"))

    with mock.patch.dict(app.users.settings, {'spawner_class': FormSpawner}):
        r = await get_page('spawn', app, cookies=cookies)
        spawner = user.spawners['']
        # make sure we didn't reuse last spawner
        assert isinstance(spawner, FormSpawner)
        assert spawner is not last_spawner
        assert r.url.endswith('/spawn')
        spawner = user.spawners['']
        assert FormSpawner.options_form in r.text


async def test_spawn_page_falsy_callable(app):
    with mock.patch.dict(
        app.users.settings, {'spawner_class': FalsyCallableFormSpawner}
@@ -248,14 +301,36 @@ async def test_spawn_page_falsy_callable(app):
    assert history[1] == ujoin(public_url(app), "hub/spawn-pending/erik")


async def test_spawn_page_admin(app, admin_access):
@pytest.mark.parametrize("has_access", ["all", "user", "group", False])
async def test_spawn_page_access(
    app, has_access, group, username, user, create_temp_role
):
    cookies = await app.login_user(username)
    requester = app.users[username]
    if has_access:
        if has_access == "group":
            group.users.append(user)
            app.db.commit()
            scopes = [
                f"access:servers!group={group.name}",
                f"servers!group={group.name}",
            ]
        elif has_access == "all":
            scopes = ["access:servers", "servers"]
        elif has_access == "user":
            scopes = [f"access:servers!user={user.name}", f"servers!user={user.name}"]
        role = create_temp_role(scopes)
        roles.grant_role(app.db, requester, role)

    with mock.patch.dict(app.users.settings, {'spawner_class': FormSpawner}):
        cookies = await app.login_user('admin')
        u = add_user(app.db, app=app, name='melanie')
        r = await get_page('spawn/' + u.name, app, cookies=cookies)
        assert r.url.endswith('/spawn/' + u.name)
        r = await get_page('spawn/' + user.name, app, cookies=cookies)
        if not has_access:
            assert r.status_code == 404
            return
        assert r.status_code == 200
        assert r.url.endswith('/spawn/' + user.name)
        assert FormSpawner.options_form in r.text
        assert f"Spawning server for {u.name}" in r.text
        assert f"Spawning server for {user.name}" in r.text


async def test_spawn_with_query_arguments(app):
@@ -322,18 +397,39 @@ async def test_spawn_form(app):
    }


async def test_spawn_form_admin_access(app, admin_access):
@pytest.mark.parametrize("has_access", ["all", "user", "group", False])
async def test_spawn_form_other_user(
    app, username, user, group, create_temp_role, has_access
):
    cookies = await app.login_user(username)
    requester = app.users[username]
    if has_access:
        if has_access == "group":
            group.users.append(user)
            app.db.commit()
            scopes = [
                f"access:servers!group={group.name}",
                f"servers!group={group.name}",
            ]
        elif has_access == "all":
            scopes = ["access:servers", "servers"]
        elif has_access == "user":
            scopes = [f"access:servers!user={user.name}", f"servers!user={user.name}"]
        role = create_temp_role(scopes)
        roles.grant_role(app.db, requester, role)

    with mock.patch.dict(app.tornado_settings, {'spawner_class': FormSpawner}):
        base_url = ujoin(public_host(app), app.hub.base_url)
        cookies = await app.login_user('admin')
        u = add_user(app.db, app=app, name='martha')
        next_url = ujoin(app.base_url, 'user', u.name, 'tree')
        next_url = ujoin(app.base_url, 'user', user.name, 'tree')

        r = await async_requests.post(
            url_concat(ujoin(base_url, 'spawn', u.name), {'next': next_url}),
            url_concat(ujoin(base_url, 'spawn', user.name), {'next': next_url}),
            cookies=cookies,
            data={'bounds': ['-3', '3'], 'energy': '938MeV'},
        )
        if not has_access:
            assert r.status_code == 404
            return
        r.raise_for_status()

        while '/spawn-pending/' in r.url:
@@ -342,8 +438,8 @@ async def test_spawn_form_admin_access(app, admin_access):
            r.raise_for_status()

        assert r.history
        assert r.url.startswith(public_url(app, u))
        assert u.spawner.user_options == {
        assert r.url.startswith(public_url(app, user))
        assert user.spawner.user_options == {
            'energy': '938MeV',
            'bounds': [-3, 3],
            'notspecified': 5,
@@ -498,31 +594,54 @@ async def test_user_redirect_hook(app, username):
    assert redirected_url.path == ujoin(app.base_url, 'user', username, 'terminals/1')


async def test_user_redirect_deprecated(app, username):
    """redirecting from /user/someonelse/ URLs (deprecated)"""
@pytest.mark.parametrize("has_access", ["all", "user", "group", False])
async def test_other_user_url(app, username, user, group, create_temp_role, has_access):
    """Test accessing /user/someonelse/ URLs when the server is not running

    Used to redirect to your own server,
    which produced inconsistent behavior depending on whether the server was running.
    """
    name = username
    cookies = await app.login_user(name)
    other_user = user
    requester = app.users[name]
    other_user_url = f"/user/{other_user.name}"
    if has_access:
        if has_access == "group":
            group.users.append(other_user)
            app.db.commit()
            scopes = [f"access:servers!group={group.name}"]
        elif has_access == "all":
            scopes = ["access:servers"]
        elif has_access == "user":
            scopes = [f"access:servers!user={other_user.name}"]
        role = create_temp_role(scopes)
        roles.grant_role(app.db, requester, role)
        status = 424
    else:
        # 404 - access denied without revealing if the user exists
        status = 404

    r = await get_page('/user/baduser', app, cookies=cookies, hub=False)
    r = await get_page(other_user_url, app, cookies=cookies, hub=False)
    print(urlparse(r.url))
    path = urlparse(r.url).path
    assert path == ujoin(app.base_url, 'hub/user/%s/' % name)
    assert r.status_code == 424
    assert path == ujoin(app.base_url, f'hub/user/{other_user.name}/')
    assert r.status_code == status

    r = await get_page('/user/baduser/test.ipynb', app, cookies=cookies, hub=False)
    r = await get_page(f'{other_user_url}/test.ipynb', app, cookies=cookies, hub=False)
    print(urlparse(r.url))
    path = urlparse(r.url).path
    assert path == ujoin(app.base_url, 'hub/user/%s/test.ipynb' % name)
    assert r.status_code == 424
    assert path == ujoin(app.base_url, f'hub/user/{other_user.name}/test.ipynb')
    assert r.status_code == status

    r = await get_page('/user/baduser/test.ipynb', app, hub=False)
    r = await get_page(f'{other_user_url}/test.ipynb', app, hub=False)
    r.raise_for_status()
    print(urlparse(r.url))
    path = urlparse(r.url).path
    assert path == ujoin(app.base_url, '/hub/login')
    query = urlparse(r.url).query
    assert query == urlencode(
        {'next': ujoin(app.base_url, '/hub/user/baduser/test.ipynb')}
        {'next': ujoin(app.base_url, f'/hub/user/{other_user.name}/test.ipynb')}
    )

@@ -652,6 +771,10 @@ async def test_login_strip(app):
        (False, '/user/other', '/hub/user/other', None),
        (False, '/absolute', '/absolute', None),
        (False, '/has?query#andhash', '/has?query#andhash', None),
        # :// in query string or fragment
        (False, '/has?repo=https/host.git', '/has?repo=https/host.git', None),
        (False, '/has?repo=https://host.git', '/has?repo=https://host.git', None),
        (False, '/has#repo=https://host.git', '/has#repo=https://host.git', None),
        # next_url outside is not allowed
        (False, 'relative/path', '', None),
        (False, 'https://other.domain', '', None),
@@ -691,7 +814,9 @@ async def test_login_redirect(app, running, next_url, location, params):
    if params:
        url = url_concat(url, params)
    if next_url:
        if '//' not in next_url and next_url.startswith('/'):
        if next_url.startswith('/') and not (
            next_url.startswith("//") or urlparse(next_url).netloc
        ):
            next_url = ujoin(app.base_url, next_url, '')
        url = url_concat(url, dict(next=next_url))

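Editor's note: the tightened condition above rejects protocol-relative (`//host/...`) and fully-qualified URLs while still allowing `://` to appear inside a query string or fragment. A small standalone sketch of the same idea (not the Hub's actual helper) for reference:

from urllib.parse import urlparse

def is_local_next_url(next_url):
    """Return True only for same-host, absolute-path next URLs.

    Mirrors the intent of the check above: reject '//other.host/...'
    and absolute URLs, but allow '://' inside the query or fragment.
    """
    return next_url.startswith('/') and not (
        next_url.startswith('//') or urlparse(next_url).netloc
    )

assert is_local_next_url('/has?repo=https://host.git')
assert not is_local_next_url('//other.domain/path')
assert not is_local_next_url('https://other.domain')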
@@ -1110,19 +1235,6 @@ async def test_server_not_running_api_request_legacy_status(app):
    assert r.status_code == 503


async def test_metrics_no_auth(app):
    r = await get_page("metrics", app)
    assert r.status_code == 403


async def test_metrics_auth(app):
    cookies = await app.login_user('river')
    metrics_url = ujoin(public_host(app), app.hub.base_url, 'metrics')
    r = await get_page("metrics", app, cookies=cookies)
    assert r.status_code == 200
    assert r.url == metrics_url


async def test_health_check_request(app):
    r = await get_page('health', app)
    assert r.status_code == 200
@@ -5,9 +5,11 @@ from contextlib import contextmanager
from subprocess import CalledProcessError
from subprocess import check_output
from unittest import mock
from urllib.parse import urlencode
from urllib.parse import urlparse

import pytest
from bs4 import BeautifulSoup

import jupyterhub
from .. import orm
@@ -16,6 +18,7 @@ from .mocking import public_url
from .mocking import StubSingleUserSpawner
from .utils import async_requests
from .utils import AsyncSession
from .utils import get_page


@contextmanager
@@ -196,10 +199,22 @@ def test_singleuser_app_class(JUPYTERHUB_SINGLEUSER_APP):
        import jupyter_server  # noqa
    except ImportError:
        have_server = False
        expect_error = "jupyter_server" in JUPYTERHUB_SINGLEUSER_APP
    else:
        have_server = True
        expect_error = False
    try:
        import notebook.notebookapp  # noqa
    except ImportError:
        have_notebook = False
    else:
        have_notebook = True

    if JUPYTERHUB_SINGLEUSER_APP.startswith("notebook."):
        expect_error = not have_notebook
    elif JUPYTERHUB_SINGLEUSER_APP.startswith("jupyter_server."):
        expect_error = not have_server
    else:
        # not specified, will try both
        expect_error = not (have_server or have_notebook)

    if expect_error:
        ctx = pytest.raises(CalledProcessError)
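Editor's note: the test above exercises the JUPYTERHUB_SINGLEUSER_APP environment variable, which selects whether the single-user server is backed by jupyter_server or the classic notebook package. A hypothetical way to pin it for all spawned servers from the Hub configuration (a sketch, not taken from this diff):

# jupyterhub_config.py -- illustrative sketch
c.Spawner.environment = {
    # pin the single-user server to jupyter_server's ServerApp;
    # "notebook.notebookapp.NotebookApp" would select the classic notebook instead
    "JUPYTERHUB_SINGLEUSER_APP": "jupyter_server.serverapp.ServerApp",
}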
@@ -225,3 +240,22 @@ def test_singleuser_app_class(JUPYTERHUB_SINGLEUSER_APP):
    else:
        assert '--ServerApp.' in out
        assert '--NotebookApp.' not in out


async def test_nbclassic_control_panel(app, user):
    # use StubSingleUserSpawner to launch a single-user app in a thread
    app.spawner_class = StubSingleUserSpawner
    app.tornado_settings['spawner_class'] = StubSingleUserSpawner

    # login, start the server
    await user.spawn()
    cookies = await app.login_user(user.name)
    next_url = url_path_join(user.url, "tree/")
    url = '/?' + urlencode({'next': next_url})
    r = await get_page(url, app, cookies=cookies)
    r.raise_for_status()
    assert urlparse(r.url).path == urlparse(next_url).path
    page = BeautifulSoup(r.text, "html.parser")
    link = page.find("a", id="jupyterhub-control-panel-link")
    assert link, f"Missing jupyterhub-control-panel-link in {page}"
    assert link["href"] == url_path_join(app.base_url, "hub/home")
@@ -81,6 +81,18 @@ async def test_spawner(db, request):
    assert isinstance(status, int)


def test_spawner_from_db(app, user):
    spawner = user.spawners['name']
    user_options = {"test": "value"}
    spawner.orm_spawner.user_options = user_options
    app.db.commit()
    # delete and recreate the spawner from the db
    user.spawners.pop('name')
    new_spawner = user.spawners['name']
    assert new_spawner.orm_spawner.user_options == user_options
    assert new_spawner.user_options == user_options


async def wait_for_spawner(spawner, timeout=10):
    """Wait for an http server to show up

@@ -447,3 +459,80 @@ async def test_spawner_oauth_roles_bad(app, user):
    # raises ValueError if we try to assign a role that doesn't exist
    with pytest.raises(ValueError):
        await spawner.user.spawn()


async def test_spawner_options_from_form(db):
    def options_from_form(form_data):
        return form_data

    spawner = new_spawner(db, options_from_form=options_from_form)
    form_data = {"key": ["value"]}
    result = spawner.run_options_from_form(form_data)
    for key, value in form_data.items():
        assert key in result
        assert result[key] == value


async def test_spawner_options_from_form_with_spawner(db):
    def options_from_form(form_data, spawner):
        return form_data

    spawner = new_spawner(db, options_from_form=options_from_form)
    form_data = {"key": ["value"]}
    result = spawner.run_options_from_form(form_data)
    for key, value in form_data.items():
        assert key in result
        assert result[key] == value


def test_spawner_server(db):
    spawner = new_spawner(db)
    spawner.orm_spawner = None
    orm_spawner = orm.Spawner()
    orm_server = orm.Server(base_url="/1/")
    orm_spawner.server = orm_server
    db.add(orm_spawner)
    db.add(orm_server)
    db.commit()
    # initial: no orm_spawner
    assert spawner.server is None
    # assigning spawner.orm_spawner updates spawner.server
    spawner.orm_spawner = orm_spawner
    assert spawner.server is not None
    assert spawner.server.orm_server is orm_server
    # update orm_spawner.server without direct access on Spawner
    orm_spawner.server = new_server = orm.Server(base_url="/2/")
    db.commit()
    assert spawner.server is not None
    assert spawner.server.orm_server is not orm_server
    assert spawner.server.orm_server is new_server
    # clear orm_server via orm_spawner clears spawner.server
    orm_spawner.server = None
    db.commit()
    assert spawner.server is None
    # assigning spawner.server updates orm_spawner.server
    orm_server = orm.Server(base_url="/3/")
    db.add(orm_server)
    db.commit()
    spawner.server = server = Server(orm_server=orm_server)
    db.commit()
    assert spawner.server is server
    assert spawner.orm_spawner.server is orm_server
    # change orm spawner.server
    orm_server = orm.Server(base_url="/4/")
    db.add(orm_server)
    db.commit()
    spawner.server = server2 = Server(orm_server=orm_server)
    assert spawner.server is server2
    assert spawner.orm_spawner.server is orm_server
    # clear server via spawner.server
    spawner.server = None
    db.commit()
    assert spawner.orm_spawner.server is None

    # test with no underlying orm.Spawner
    # (only relevant for mocking, never true for actual Spawners)
    spawner = Spawner()
    spawner.server = Server.from_url("http://1.2.3.4")
    assert spawner.server is not None
    assert spawner.server.ip == "1.2.3.4"
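Editor's note: the two options_from_form tests above show that the callable may accept just the form data, or the form data plus the spawner instance. A hedged configuration sketch (the option parsing itself is invented for illustration):

# jupyterhub_config.py -- illustrative sketch; the parsing logic is made up
def options_from_form(formdata, spawner=None):
    """Turn raw form data (lists of strings) into user_options.

    The optional second argument corresponds to the variant exercised by
    test_spawner_options_from_form_with_spawner above.
    """
    options = {}
    options["energy"] = formdata.get("energy", [""])[0]
    options["bounds"] = [int(v) for v in formdata.get("bounds", [])]
    return options

c.Spawner.options_from_form = options_from_form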
@@ -1,5 +1,6 @@
import pytest

from .. import orm
from ..user import UserDict
from .utils import add_user

@@ -20,3 +21,35 @@ async def test_userdict_get(db, attr):
    assert userdict.get(key).id == u.id
    # `in` should find it now
    assert key in userdict


@pytest.mark.parametrize(
    "group_names",
    [
        ["isin1", "isin2"],
        ["isin1"],
        ["notin", "isin1"],
        ["new-group", "isin1"],
        [],
    ],
)
def test_sync_groups(app, user, group_names):
    expected = sorted(group_names)
    db = app.db
    db.add(orm.Group(name="notin"))
    in_groups = [orm.Group(name="isin1"), orm.Group(name="isin2")]
    for group in in_groups:
        db.add(group)
    db.commit()
    user.groups = in_groups
    db.commit()
    user.sync_groups(group_names)
    assert not app.db.dirty
    after_groups = sorted(g.name for g in user.groups)
    assert after_groups == expected
    # double-check backref
    for group in db.query(orm.Group):
        if group.name in expected:
            assert user.orm_user in group.users
        else:
            assert user.orm_user not in group.users
@@ -253,6 +253,58 @@ class User:
    def spawner_class(self):
        return self.settings.get('spawner_class', LocalProcessSpawner)

    def get_spawner(self, server_name="", replace_failed=False):
        """Get a spawner by name

        replace_failed governs whether a failed spawner should be replaced
        or returned (default: returned).

        .. versionadded:: 2.2
        """
        spawner = self.spawners[server_name]
        if replace_failed and spawner._failed:
            self.log.debug(f"Discarding failed spawner {spawner._log_name}")
            # remove failed spawner, create a new one
            self.spawners.pop(server_name)
            spawner = self.spawners[server_name]
        return spawner

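Editor's note: a small, hypothetical usage sketch of the distinction get_spawner introduces (caller code invented; `user` is a jupyterhub.user.User as in the tests above):

# illustrative only
spawner = user.get_spawner("")                        # returns the existing spawner, failed or not
spawner = user.get_spawner("", replace_failed=True)   # a failed spawner is discarded and recreated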
    def sync_groups(self, group_names):
        """Synchronize groups with database"""

        current_groups = {g.name for g in self.orm_user.groups}
        new_groups = set(group_names)
        if current_groups == new_groups:
            # no change, nothing to do
            return

        # log group changes
        new_groups = set(group_names).difference(current_groups)
        removed_groups = current_groups.difference(group_names)
        if new_groups:
            self.log.info(f"Adding user {self.name} to group(s): {new_groups}")
        if removed_groups:
            self.log.info(f"Removing user {self.name} from group(s): {removed_groups}")

        if group_names:
            groups = (
                self.db.query(orm.Group).filter(orm.Group.name.in_(group_names)).all()
            )
            existing_groups = {g.name for g in groups}
            for group_name in group_names:
                if group_name not in existing_groups:
                    # create groups that don't exist yet
                    self.log.info(
                        f"Creating new group {group_name} for user {self.name}"
                    )
                    group = orm.Group(name=group_name)
                    self.db.add(group)
                    groups.append(group)
            self.groups = groups
        else:
            self.groups = []
        self.db.commit()

    async def save_auth_state(self, auth_state):
        """Encrypt and store auth_state"""
        if auth_state is None:
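Editor's note: sync_groups is the piece that lets an Authenticator manage group membership, as exercised by test_sync_groups above. A hedged sketch of an authenticator that returns a "groups" list in its auth model (class and group names invented; managed groups must be enabled explicitly, and the exact auth-model keys should be checked against your JupyterHub version):

# jupyterhub_config.py -- illustrative sketch, not a real deployment
from jupyterhub.auth import Authenticator

class GroupManagingAuthenticator(Authenticator):
    async def authenticate(self, handler, data):
        # accept the submitted username and hand back made-up groups;
        # with manage_groups enabled, the Hub syncs these to the database
        return {
            "name": data["username"],
            "groups": ["staff", "everyone"],
        }

c.JupyterHub.authenticator_class = GroupManagingAuthenticator
c.Authenticator.manage_groups = True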
@@ -376,6 +428,7 @@ class User:
            oauth_client_id=client_id,
            cookie_options=self.settings.get('cookie_options', {}),
            trusted_alt_names=trusted_alt_names,
            user_options=orm_spawner.user_options or {},
        )

        if self.settings.get('internal_ssl'):
@@ -591,7 +644,7 @@ class User:
        api_token = self.new_api_token(note=note, roles=['server'])
        db.commit()

        spawner = self.spawners[server_name]
        spawner = self.get_spawner(server_name, replace_failed=True)
        spawner.server = server = Server(orm_server=orm_server)
        assert spawner.orm_spawner.server is orm_server

@@ -759,7 +812,7 @@ class User:
            e.reason = 'timeout'
            self.settings['statsd'].incr('spawner.failure.timeout')
        else:
            self.log.error(
            self.log.exception(
                "Unhandled error starting {user}'s server: {error}".format(
                    user=self.name, error=e
                )
@@ -769,7 +822,7 @@ class User:
            try:
                await self.stop(spawner.name)
            except Exception:
                self.log.error(
                self.log.exception(
                    "Failed to cleanup {user}'s server that failed to start".format(
                        user=self.name
                    ),
@@ -817,7 +870,7 @@ class User:
            self.settings['statsd'].incr('spawner.failure.http_timeout')
        else:
            e.reason = 'error'
            self.log.error(
            self.log.exception(
                "Unhandled error waiting for {user}'s server to show up at {url}: {error}".format(
                    user=self.name, url=server.url, error=e
                )
@@ -826,7 +879,7 @@ class User:
            try:
                await self.stop(spawner.name)
            except Exception:
                self.log.error(
                self.log.exception(
                    "Failed to cleanup {user}'s server that failed to start".format(
                        user=self.name
                    ),
@@ -320,9 +320,11 @@ def admin_only(f):
@auth_decorator
def metrics_authentication(self):
    """Decorator for restricting access to metrics"""
    user = self.current_user
    if user is None and self.authenticate_prometheus:
        raise web.HTTPError(403)
    if not self.authenticate_prometheus:
        return
    scope = 'read:metrics'
    if scope not in self.parsed_scopes:
        raise web.HTTPError(403, f"Access to metrics requires scope '{scope}'")


# Token utilities
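Editor's note: with this check in place, a scraper only needs a token whose owner holds the read:metrics scope. A minimal client-side sketch using the requests library (the URL and token below are placeholders):

import requests

HUB_URL = "http://127.0.0.1:8000"          # placeholder Hub URL
TOKEN = "token-with-read-metrics-scope"    # placeholder API token

r = requests.get(
    f"{HUB_URL}/hub/metrics",
    headers={"Authorization": f"token {TOKEN}"},
)
r.raise_for_status()
print(r.text[:200])  # Prometheus exposition format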
@@ -1,9 +1,13 @@
[tool.black]
skip-string-normalization = true
# target-version should be all supported versions, see
# https://github.com/psf/black/issues/751#issuecomment-473066811
target_version = [
    "py36",
    "py37",
    "py38",
    "py39",
    "py310",
]

[tool.tbump]
@@ -11,7 +15,7 @@ target_version = [
github_url = "https://github.com/jupyterhub/jupyterhub"

[tool.tbump.version]
current = "2.0.2"
current = "2.3.2.dev"

# Example of a semver regexp.
# Make sure this matches current_version before
@@ -3,6 +3,9 @@
# so we have to disable this until pytest 3.11
# minversion = 3.3

# automatically run coroutine tests with asyncio
asyncio_mode = auto

# jupyter_server plugin is incompatible with notebook imports
addopts = -p no:jupyter_server
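Editor's note: with pytest-asyncio's auto mode enabled above, coroutine tests in this suite no longer need an explicit marker. A tiny illustrative example of what that permits:

# test_example.py -- illustrative only
import asyncio

async def test_sleep_runs_without_marker():
    # collected and awaited automatically because asyncio_mode = auto;
    # no @pytest.mark.asyncio decorator is required
    await asyncio.sleep(0)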
File diff suppressed because one or more lines are too long
@@ -15,6 +15,11 @@
{{ custom_html | safe }}
{% elif login_service %}
<div class="service-login">
  <p id='insecure-login-warning' class='hidden'>
  Warning: JupyterHub seems to be served over an unsecured HTTP connection.
  We strongly recommend enabling HTTPS for JupyterHub.
  </p>

  <a role="button" class='btn btn-jupyter btn-lg' href='{{authenticator_login_url}}'>
  Sign in with {{login_service}}
  </a>
@@ -18,8 +18,10 @@
<p>
  {% if failed %}
  The latest attempt to start your server {{ server_name }} has failed.
  {% if failed_message %}
  {{ failed_message }}
  {% if failed_html_message %}
  </p><p>{{ failed_html_message | safe }}</p><p>
  {% elif failed_message %}
  </p><p>{{ failed_message }}</p><p>
  {% endif %}
  Would you like to retry starting it?
  {% else %}
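Editor's note: the template change above lets a spawner surface a rich failure message. As far as I can tell, failed_message and failed_html_message are populated from attributes on the exception raised during spawn, so a heavily hedged sketch of a spawner taking advantage of it might look like the following (the class is invented; verify the attribute names against your JupyterHub version):

# illustrative sketch -- assumes the failure page reads
# 'jupyterhub_message' / 'jupyterhub_html_message' off the exception
# raised in Spawner.start()
from jupyterhub.spawner import Spawner

class QuotaSpawner(Spawner):  # invented example class
    async def start(self):
        e = RuntimeError("Disk quota exceeded")
        e.jupyterhub_message = "Your home directory is full."
        e.jupyterhub_html_message = (
            "Your home directory is full. "
            "<a href='https://example.org/quota'>See the quota docs</a>."
        )
        raise e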