Mirror of https://github.com/jupyterhub/jupyterhub.git, synced 2025-10-08 18:44:10 +00:00
Compare commits
380 Commits
.github/workflows/release.yml (77 lines changed)

@@ -1,15 +1,32 @@
-# Build releases and (on tags) publish to PyPI
+# This is a GitHub workflow defining a set of jobs with a set of steps.
+# ref: https://docs.github.com/en/actions/learn-github-actions/workflow-syntax-for-github-actions
+#
+# Test build release artifacts (PyPI package, Docker images) and publish them on
+# pushed git tags.
+#
 name: Release

-# always build releases (to make sure wheel-building works)
-# but only publish to PyPI on tags
 on:
-  push:
-    branches:
-      - "!dependabot/**"
-    tags:
-      - "*"
   pull_request:
+    paths-ignore:
+      - "docs/**"
+      - "**.md"
+      - "**.rst"
+      - ".github/workflows/*"
+      - "!.github/workflows/release.yml"
+  push:
+    paths-ignore:
+      - "docs/**"
+      - "**.md"
+      - "**.rst"
+      - ".github/workflows/*"
+      - "!.github/workflows/release.yml"
+    branches-ignore:
+      - "dependabot/**"
+      - "pre-commit-ci-update-config"
+    tags:
+      - "**"
+  workflow_dispatch:

 jobs:
   build-release:
@@ -96,7 +113,6 @@ jobs:
       # Setup docker to build for multiple platforms, see:
       # https://github.com/docker/build-push-action/tree/v2.4.0#usage
       # https://github.com/docker/build-push-action/blob/v2.4.0/docs/advanced/multi-platform.md
-
      - name: Set up QEMU (for docker buildx)
        uses: docker/setup-qemu-action@25f0500ff22e406f7191a2a8ba8cda16901ca018 # associated tag: v1.0.2

@@ -120,6 +136,8 @@ jobs:
        run: |
          docker login -u "${{ secrets.DOCKERHUB_USERNAME }}" -p "${{ secrets.DOCKERHUB_TOKEN }}"

+      # image: jupyterhub/jupyterhub
+      #
      # https://github.com/jupyterhub/action-major-minor-tag-calculator
      # If this is a tagged build this will return additional parent tags.
      # E.g. 1.2.3 is expanded to Docker tags
@@ -129,7 +147,7 @@ jobs:
      # If GITHUB_TOKEN isn't available (e.g. in PRs) returns no tags [].
      - name: Get list of jupyterhub tags
        id: jupyterhubtags
-        uses: jupyterhub/action-major-minor-tag-calculator@v1
+        uses: jupyterhub/action-major-minor-tag-calculator@v2
        with:
          githubToken: ${{ secrets.GITHUB_TOKEN }}
          prefix: "${{ env.REGISTRY }}jupyterhub/jupyterhub:"
@@ -137,7 +155,7 @@ jobs:
          branchRegex: ^\w[\w-.]*$

      - name: Build and push jupyterhub
-        uses: docker/build-push-action@e1b7f96249f2e4c8e4ac1519b9608c0d48944a1f # associated tag: v2.4.0
+        uses: docker/build-push-action@e1b7f96249f2e4c8e4ac1519b9608c0d48944a1f
        with:
          context: .
          platforms: linux/amd64,linux/arm64
@@ -146,11 +164,11 @@ jobs:
          # array into a comma separated list of tags
          tags: ${{ join(fromJson(steps.jupyterhubtags.outputs.tags)) }}

-      # jupyterhub-onbuild
+      # image: jupyterhub/jupyterhub-onbuild
+      #
      - name: Get list of jupyterhub-onbuild tags
        id: onbuildtags
-        uses: jupyterhub/action-major-minor-tag-calculator@v1
+        uses: jupyterhub/action-major-minor-tag-calculator@v2
        with:
          githubToken: ${{ secrets.GITHUB_TOKEN }}
          prefix: "${{ env.REGISTRY }}jupyterhub/jupyterhub-onbuild:"
@@ -158,7 +176,7 @@ jobs:
          branchRegex: ^\w[\w-.]*$

      - name: Build and push jupyterhub-onbuild
-        uses: docker/build-push-action@e1b7f96249f2e4c8e4ac1519b9608c0d48944a1f # associated tag: v2.4.0
+        uses: docker/build-push-action@e1b7f96249f2e4c8e4ac1519b9608c0d48944a1f
        with:
          build-args: |
            BASE_IMAGE=${{ fromJson(steps.jupyterhubtags.outputs.tags)[0] }}
@@ -167,11 +185,11 @@ jobs:
          push: true
          tags: ${{ join(fromJson(steps.onbuildtags.outputs.tags)) }}

-      # jupyterhub-demo
+      # image: jupyterhub/jupyterhub-demo
+      #
      - name: Get list of jupyterhub-demo tags
        id: demotags
-        uses: jupyterhub/action-major-minor-tag-calculator@v1
+        uses: jupyterhub/action-major-minor-tag-calculator@v2
        with:
          githubToken: ${{ secrets.GITHUB_TOKEN }}
          prefix: "${{ env.REGISTRY }}jupyterhub/jupyterhub-demo:"
@@ -179,7 +197,7 @@ jobs:
          branchRegex: ^\w[\w-.]*$

      - name: Build and push jupyterhub-demo
-        uses: docker/build-push-action@e1b7f96249f2e4c8e4ac1519b9608c0d48944a1f # associated tag: v2.4.0
+        uses: docker/build-push-action@e1b7f96249f2e4c8e4ac1519b9608c0d48944a1f
        with:
          build-args: |
            BASE_IMAGE=${{ fromJson(steps.onbuildtags.outputs.tags)[0] }}
@@ -190,3 +208,24 @@ jobs:
          platforms: linux/amd64
          push: true
          tags: ${{ join(fromJson(steps.demotags.outputs.tags)) }}
+
+      # image: jupyterhub/singleuser
+      #
+      - name: Get list of jupyterhub/singleuser tags
+        id: singleusertags
+        uses: jupyterhub/action-major-minor-tag-calculator@v2
+        with:
+          githubToken: ${{ secrets.GITHUB_TOKEN }}
+          prefix: "${{ env.REGISTRY }}jupyterhub/singleuser:"
+          defaultTag: "${{ env.REGISTRY }}jupyterhub/singleuser:noref"
+          branchRegex: ^\w[\w-.]*$
+
+      - name: Build and push jupyterhub/singleuser
+        uses: docker/build-push-action@e1b7f96249f2e4c8e4ac1519b9608c0d48944a1f
+        with:
+          build-args: |
+            JUPYTERHUB_VERSION=${{ github.ref_type == 'tag' && github.ref_name || format('git:{0}', github.sha) }}
+          context: singleuser
+          platforms: linux/amd64,linux/arm64
+          push: true
+          tags: ${{ join(fromJson(steps.singleusertags.outputs.tags)) }}
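Note on the tag-calculator and BASE_IMAGE chaining above: the workflow comments say a pushed tag like 1.2.3 is expanded into additional parent Docker tags, the `tags:` input receives them as a comma-separated list via `join(fromJson(...))`, and the first entry is reused as `BASE_IMAGE` for the dependent onbuild/demo images. The sketch below only illustrates that flow under those stated assumptions; it is not the action's real implementation, and the exact tag set (for example whether `latest` is included) may differ.

```python
# Rough illustration (not the action's actual code) of how a git tag such as
# "1.2.3" fans out into the Docker tags consumed by the build steps above.
def expand_tags(version: str, prefix: str = "jupyterhub/jupyterhub:") -> list:
    parts = version.split(".")  # "1.2.3" -> ["1", "2", "3"]
    tags = [prefix + ".".join(parts[: i + 1]) for i in reversed(range(len(parts)))]
    tags.append(prefix + "latest")  # tagged builds also move a floating tag
    return tags

tags = expand_tags("1.2.3")
print(tags)
# ['jupyterhub/jupyterhub:1.2.3', 'jupyterhub/jupyterhub:1.2',
#  'jupyterhub/jupyterhub:1', 'jupyterhub/jupyterhub:latest']
print(",".join(tags))  # roughly what join(fromJson(...)) produces for `tags:`
print(tags[0])         # what fromJson(...)[0] selects as BASE_IMAGE
```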
.github/workflows/support-bot.yml (new file, 31 lines)

@@ -0,0 +1,31 @@
+# https://github.com/dessant/support-requests
+name: "Support Requests"
+
+on:
+  issues:
+    types: [labeled, unlabeled, reopened]
+
+permissions:
+  issues: write
+
+jobs:
+  action:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: dessant/support-requests@v2
+        with:
+          github-token: ${{ github.token }}
+          support-label: "support"
+          issue-comment: |
+            Hi there @{issue-author} :wave:!
+
+            I closed this issue because it was labelled as a support question.
+
+            Please help us organize discussion by posting this on the http://discourse.jupyter.org/ forum.
+
+            Our goal is to sustain a positive experience for both users and developers. We use GitHub issues for specific discussions related to changing a repository's content, and let the forum be where we can more generally help and inspire each other.
+
+            Thank you for being an active member of our community! :heart:
+          close-issue: true
+          lock-issue: false
+          issue-lock-reason: "off-topic"
.github/workflows/test-docs.yml (new file, 64 lines)

@@ -0,0 +1,64 @@
+# This is a GitHub workflow defining a set of jobs with a set of steps.
+# ref: https://docs.github.com/en/actions/learn-github-actions/workflow-syntax-for-github-actions
+#
+# This workflow validates the REST API definition and runs the pytest tests in
+# the docs/ folder. This workflow does not build the documentation. That is
+# instead tested via ReadTheDocs (https://readthedocs.org/projects/jupyterhub/).
+#
+name: Test docs
+
+# The tests defined in docs/ are currently influenced by changes to _version.py
+# and scopes.py.
+on:
+  pull_request:
+    paths:
+      - "docs/**"
+      - "jupyterhub/_version.py"
+      - "jupyterhub/scopes.py"
+      - ".github/workflows/*"
+      - "!.github/workflows/test-docs.yml"
+  push:
+    paths:
+      - "docs/**"
+      - "jupyterhub/_version.py"
+      - "jupyterhub/scopes.py"
+      - ".github/workflows/*"
+      - "!.github/workflows/test-docs.yml"
+    branches-ignore:
+      - "dependabot/**"
+      - "pre-commit-ci-update-config"
+    tags:
+      - "**"
+  workflow_dispatch:
+
+env:
+  # UTF-8 content may be interpreted as ascii and causes errors without this.
+  LANG: C.UTF-8
+  PYTEST_ADDOPTS: "--verbose --color=yes"
+
+jobs:
+  validate-rest-api-definition:
+    runs-on: ubuntu-20.04
+    steps:
+      - uses: actions/checkout@v2
+
+      - name: Validate REST API definition
+        uses: char0n/swagger-editor-validate@182d1a5d26ff5c2f4f452c43bd55e2c7d8064003
+        with:
+          definition-file: docs/source/_static/rest-api.yml
+
+  test-docs:
+    runs-on: ubuntu-20.04
+    steps:
+      - uses: actions/checkout@v2
+      - uses: actions/setup-python@v2
+        with:
+          python-version: "3.9"
+
+      - name: Install requirements
+        run: |
+          pip install -r docs/requirements.txt pytest -e .
+
+      - name: pytest docs/
+        run: |
+          pytest docs/
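Note on the `validate-rest-api-definition` job above: the pinned action runs a full Swagger/OpenAPI validation in CI. The snippet below is only a much weaker local sanity check, shown as an assumption-laden sketch: it assumes PyYAML is installed and that you run it from the repository root, and it only verifies that `docs/source/_static/rest-api.yml` parses as YAML and declares an OpenAPI/Swagger version field.

```python
# Minimal local sanity check for the REST API definition file.
# This is NOT what char0n/swagger-editor-validate does; it only checks that the
# file parses as YAML and carries an "openapi" or "swagger" version field.
import yaml  # provided by the PyYAML package

with open("docs/source/_static/rest-api.yml") as f:
    spec = yaml.safe_load(f)

assert isinstance(spec, dict), "rest-api.yml did not parse to a mapping"
assert "openapi" in spec or "swagger" in spec, "missing openapi/swagger version field"
print("rest-api.yml parsed OK, version field:", spec.get("openapi") or spec.get("swagger"))
```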
.github/workflows/test-jsx.yml (new file, 108 lines)

@@ -0,0 +1,108 @@
+# This is a GitHub workflow defining a set of jobs with a set of steps.
+# ref: https://docs.github.com/en/actions/learn-github-actions/workflow-syntax-for-github-actions
+#
+name: Test jsx (admin-react.js)
+
+on:
+  pull_request:
+    paths:
+      - "jsx/**"
+      - ".github/workflows/test-jsx.yml"
+  push:
+    paths:
+      - "jsx/**"
+      - ".github/workflows/test-jsx.yml"
+    branches-ignore:
+      - "dependabot/**"
+      - "pre-commit-ci-update-config"
+    tags:
+      - "**"
+  workflow_dispatch:
+
+jobs:
+  # The ./jsx folder contains React based source code files that are to compile
+  # to share/jupyterhub/static/js/admin-react.js. The ./jsx folder also has
+  # tests that this job is meant to run with `yarn test`
+  # according to the documentation in jsx/README.md.
+  test-jsx-admin-react:
+    runs-on: ubuntu-20.04
+    timeout-minutes: 5
+
+    steps:
+      - uses: actions/checkout@v2
+      - uses: actions/setup-node@v1
+        with:
+          node-version: "14"
+
+      - name: Install yarn
+        run: |
+          npm install -g yarn
+
+      - name: yarn
+        run: |
+          cd jsx
+          yarn
+
+      - name: yarn test
+        run: |
+          cd jsx
+          yarn test
+
+  # The ./jsx folder contains React based source files that are to compile to
+  # share/jupyterhub/static/js/admin-react.js. This job makes sure that whatever
+  # we have in jsx/src matches the compiled asset that we package and
+  # distribute.
+  #
+  # This job's purpose is to make sure we don't forget to compile changes and to
+  # verify nobody sneaks in a change in the hard to review compiled asset.
+  #
+  # NOTE: In the future we may want to stop version controlling the compiled
+  #       artifact and instead generate it whenever we package JupyterHub. If we
+  #       do this, we are required to setup node and compile the source code
+  #       more often, at the same time we could avoid having this check be made.
+  #
+  compile-jsx-admin-react:
+    runs-on: ubuntu-20.04
+    timeout-minutes: 5
+
+    steps:
+      - uses: actions/checkout@v2
+      - uses: actions/setup-node@v1
+        with:
+          node-version: "14"
+
+      - name: Install yarn
+        run: |
+          npm install -g yarn
+
+      - name: yarn
+        run: |
+          cd jsx
+          yarn
+
+      - name: yarn build
+        run: |
+          cd jsx
+          yarn build
+
+      - name: yarn place
+        run: |
+          cd jsx
+          yarn place
+
+      - name: Verify compiled jsx/src matches version controlled artifact
+        run: |
+          if [[ `git status --porcelain=v1` ]]; then
+            echo "The source code in ./jsx compiles to something different than found in ./share/jupyterhub/static/js/admin-react.js!"
+            echo
+            echo "Please re-compile the source code in ./jsx with the following commands:"
+            echo
+            echo "yarn"
+            echo "yarn build"
+            echo "yarn place"
+            echo
+            echo "See ./jsx/README.md for more details."
+            exit 1
+          else
+            echo "Compilation of jsx/src to share/jupyterhub/static/js/admin-react.js didn't lead to changes."
+          fi
.github/workflows/test.yml (54 lines changed)

@@ -1,25 +1,40 @@
 # This is a GitHub workflow defining a set of jobs with a set of steps.
-# ref: https://docs.github.com/en/free-pro-team@latest/actions/reference/workflow-syntax-for-github-actions
+# ref: https://docs.github.com/en/actions/learn-github-actions/workflow-syntax-for-github-actions
 #
 name: Test

-# Trigger the workflow's on all PRs but only on pushed tags or commits to
-# main/master branch to avoid PRs developed in a GitHub fork's dedicated branch
-# to trigger.
 on:
   pull_request:
+    paths-ignore:
+      - "docs/**"
+      - "**.md"
+      - "**.rst"
+      - ".github/workflows/*"
+      - "!.github/workflows/test.yml"
   push:
+    paths-ignore:
+      - "docs/**"
+      - "**.md"
+      - "**.rst"
+      - ".github/workflows/*"
+      - "!.github/workflows/test.yml"
+    branches-ignore:
+      - "dependabot/**"
+      - "pre-commit-ci-update-config"
+    tags:
+      - "**"
   workflow_dispatch:

 env:
   # UTF-8 content may be interpreted as ascii and causes errors without this.
   LANG: C.UTF-8
+  PYTEST_ADDOPTS: "--verbose --color=yes"

 jobs:
   # Run "pytest jupyterhub/tests" in various configurations
   pytest:
     runs-on: ubuntu-20.04
-    timeout-minutes: 10
+    timeout-minutes: 15

     strategy:
       # Keep running even if one variation of the job fails
@@ -38,9 +53,9 @@ jobs:
      # Tests everything when JupyterHub works against a dedicated mysql or
      # postgresql server.
      #
-      # jupyter_server:
+      # nbclassic:
      #   Tests everything when the user instances are started with
-      #   jupyter_server instead of notebook.
+      #   notebook instead of jupyter_server.
      #
      # ssl:
      #   Tests everything using internal SSL connections instead of
@@ -48,7 +63,7 @@ jobs:
      #
      # main_dependencies:
      #   Tests everything when we use the latest available dependencies
-      #   from: ipytraitlets.
+      #   from: traitlets.
      #
      # NOTE: Since only the value of these parameters are presented in the
      #       GitHub UI when the workflow run, we avoid using true/false as
@@ -56,6 +71,7 @@ jobs:
      include:
        - python: "3.6"
          oldest_dependencies: oldest_dependencies
+          nbclassic: nbclassic
        - python: "3.6"
          subdomain: subdomain
        - python: "3.7"
@@ -65,7 +81,7 @@ jobs:
        - python: "3.8"
          db: postgres
        - python: "3.8"
-          jupyter_server: jupyter_server
+          nbclassic: nbclassic
        - python: "3.9"
          main_dependencies: main_dependencies

@@ -105,7 +121,6 @@ jobs:
        run: |
          npm install
          npm install -g configurable-http-proxy
-          npm install -g yarn
          npm list

      # NOTE: actions/setup-python@v2 make use of a cache within the GitHub base
@@ -130,9 +145,9 @@ jobs:
          if [ "${{ matrix.main_dependencies }}" != "" ]; then
            pip install git+https://github.com/ipython/traitlets#egg=traitlets --force
          fi
-          if [ "${{ matrix.jupyter_server }}" != "" ]; then
-            pip uninstall notebook --yes
-            pip install jupyter_server
+          if [ "${{ matrix.nbclassic }}" != "" ]; then
+            pip uninstall jupyter_server --yes
+            pip install notebook
          fi
          if [ "${{ matrix.db }}" == "mysql" ]; then
            pip install mysql-connector-python
@@ -168,33 +183,32 @@ jobs:
        if: ${{ matrix.db }}
        run: |
          if [ "${{ matrix.db }}" == "mysql" ]; then
+            if [[ -z "$(which mysql)" ]]; then
            sudo apt-get update
            sudo apt-get install -y mysql-client
+            fi
            DB=mysql bash ci/docker-db.sh
            DB=mysql bash ci/init-db.sh
          fi
          if [ "${{ matrix.db }}" == "postgres" ]; then
+            if [[ -z "$(which psql)" ]]; then
            sudo apt-get update
            sudo apt-get install -y postgresql-client
+            fi
            DB=postgres bash ci/docker-db.sh
            DB=postgres bash ci/init-db.sh
          fi

      - name: Run pytest
-        # FIXME: --color=yes explicitly set because:
-        #        https://github.com/actions/runner/issues/241
        run: |
-          pytest -v --maxfail=2 --color=yes --cov=jupyterhub jupyterhub/tests
+          pytest --maxfail=2 --cov=jupyterhub jupyterhub/tests
-      - name: Run yarn jest test
-        run: |
-          cd jsx && yarn && yarn test
      - name: Submit codecov report
        run: |
          codecov

  docker-build:
    runs-on: ubuntu-20.04
-    timeout-minutes: 10
+    timeout-minutes: 20

    steps:
      - uses: actions/checkout@v2
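A side note on the database-setup step above: the added `if [[ -z "$(which mysql)" ]]` and `which psql` guards simply skip the apt-get install when a client binary is already present on the runner. The same idea rendered in Python, purely as an illustration (the workflow itself stays in shell, and the package names below just mirror the ones used above):

```python
# Illustration of the "install the client only if it is missing" guard added above.
# This mirrors `if [[ -z "$(which mysql)" ]]; then ... fi`; it is not used by the workflow.
import shutil
import subprocess

def ensure_client(binary: str, apt_package: str) -> None:
    """Install apt_package only when `binary` is not already on PATH."""
    if shutil.which(binary) is None:
        subprocess.run(["sudo", "apt-get", "update"], check=True)
        subprocess.run(["sudo", "apt-get", "install", "-y", apt_package], check=True)

ensure_client("mysql", "mysql-client")
ensure_client("psql", "postgresql-client")
```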
@@ -1,30 +1,52 @@
+# pre-commit is a tool to perform a predefined set of tasks manually and/or
+# automatically before git commits are made.
+#
+# Config reference: https://pre-commit.com/#pre-commit-configyaml---top-level
+#
+# Common tasks
+#
+# - Run on all files:   pre-commit run --all-files
+# - Register git hooks: pre-commit install --install-hooks
+#
 repos:
+  # Autoformat: Python code, syntax patterns are modernized
   - repo: https://github.com/asottile/pyupgrade
-    rev: v2.26.0
+    rev: v2.32.1
    hooks:
      - id: pyupgrade
        args:
          - --py36-plus

+  # Autoformat: Python code
  - repo: https://github.com/asottile/reorder_python_imports
-    rev: v2.6.0
+    rev: v3.1.0
    hooks:
      - id: reorder-python-imports

+  # Autoformat: Python code
  - repo: https://github.com/psf/black
-    rev: 21.8b0
+    rev: 22.3.0
    hooks:
      - id: black

+  # Autoformat: markdown, yaml, javascript (see the file .prettierignore)
  - repo: https://github.com/pre-commit/mirrors-prettier
-    rev: v2.4.0
+    rev: v2.6.2
    hooks:
      - id: prettier
-  - repo: https://github.com/PyCQA/flake8
-    rev: "3.9.2"
-    hooks:
-      - id: flake8
+
+  # Autoformat and linting, misc. details
  - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v4.0.1
+    rev: v4.2.0
    hooks:
      - id: end-of-file-fixer
+        exclude: share/jupyterhub/static/js/admin-react.js
+      - id: requirements-txt-fixer
      - id: check-case-conflict
      - id: check-executables-have-shebangs
-      - id: requirements-txt-fixer
+
+  # Linting: Python code (see the file .flake8)
+  - repo: https://github.com/PyCQA/flake8
+    rev: "4.0.1"
+    hooks:
+      - id: flake8
@@ -4,10 +4,12 @@ sphinx:
   configuration: docs/source/conf.py

 build:
-  image: latest
+  os: ubuntu-20.04
+  tools:
+    nodejs: "16"
+    python: "3.9"

 python:
-  version: 3.7
   install:
     - method: pip
       path: .
@@ -1,26 +0,0 @@
-# Release checklist
-
-- [ ] Upgrade Docs prior to Release
-
-- [ ] Change log
-- [ ] New features documented
-- [ ] Update the contributor list - thank you page
-
-- [ ] Upgrade and test Reference Deployments
-
-- [ ] Release software
-
-- [ ] Make sure 0 issues in milestone
-- [ ] Follow release process steps
-- [ ] Send builds to PyPI (Warehouse) and Conda Forge
-
-- [ ] Blog post and/or release note
-
-- [ ] Notify users of release
-
-- [ ] Email Jupyter and Jupyter In Education mailing lists
-- [ ] Tweet (optional)
-
-- [ ] Increment the version number for the next release
-
-- [ ] Update roadmap
@@ -56,9 +56,11 @@ Basic principles for operation are:
   servers.

 JupyterHub also provides a
-[REST API](https://petstore3.swagger.io/?url=https://raw.githubusercontent.com/jupyter/jupyterhub/HEAD/docs/rest-api.yml#/default)
+[REST API][]
 for administration of the Hub and its users.

+[rest api]: https://jupyterhub.readthedocs.io/en/latest/reference/rest-api.html
+
 ## Installation

 ### Check prerequisites
@@ -115,8 +117,7 @@ To start the Hub server, run the command:

    jupyterhub

-Visit `https://localhost:8000` in your browser, and sign in with your unix
-PAM credentials.
+Visit `http://localhost:8000` in your browser, and sign in with your system username and password.

_Note_: To allow multiple users to sign in to the server, you will need to
run the `jupyterhub` command as a _privileged user_, such as root.
@@ -239,7 +240,7 @@ You can also talk with us on our JupyterHub [Gitter](https://gitter.im/jupyterhu
- [Reporting Issues](https://github.com/jupyterhub/jupyterhub/issues)
- [JupyterHub tutorial](https://github.com/jupyterhub/jupyterhub-tutorial)
- [Documentation for JupyterHub](https://jupyterhub.readthedocs.io/en/latest/) | [PDF (latest)](https://media.readthedocs.org/pdf/jupyterhub/latest/jupyterhub.pdf) | [PDF (stable)](https://media.readthedocs.org/pdf/jupyterhub/stable/jupyterhub.pdf)
-- [Documentation for JupyterHub's REST API](https://petstore3.swagger.io/?url=https://raw.githubusercontent.com/jupyter/jupyterhub/HEAD/docs/rest-api.yml#/default)
+- [Documentation for JupyterHub's REST API][rest api]
- [Documentation for Project Jupyter](http://jupyter.readthedocs.io/en/latest/index.html) | [PDF](https://media.readthedocs.org/pdf/jupyter/latest/jupyter.pdf)
- [Project Jupyter website](https://jupyter.org)
- [Project Jupyter community](https://jupyter.org/community)
RELEASE.md (new file, 50 lines)

@@ -0,0 +1,50 @@
+# How to make a release
+
+`jupyterhub` is a package [available on
+PyPI](https://pypi.org/project/jupyterhub/) and
+[conda-forge](https://conda-forge.org/).
+These are instructions on how to make a release on PyPI.
+The PyPI release is done automatically by CI when a tag is pushed.
+
+For you to follow along according to these instructions, you need:
+
+- To have push rights to the [jupyterhub GitHub
+  repository](https://github.com/jupyterhub/jupyterhub).
+
+## Steps to make a release
+
+1. Checkout main and make sure it is up to date.
+
+   ```shell
+   ORIGIN=${ORIGIN:-origin} # set to the canonical remote, e.g. 'upstream' if 'origin' is not the official repo
+   git checkout main
+   git fetch $ORIGIN main
+   git reset --hard $ORIGIN/main
+   ```
+
+1. Make sure `docs/source/changelog.md` is up-to-date.
+   [github-activity][] can help with this.
+
+1. Update the version with `tbump`.
+   You can see what will happen without making any changes with `tbump --dry-run ${VERSION}`
+
+   ```shell
+   tbump ${VERSION}
+   ```
+
+   This will tag and publish a release,
+   which will be finished on CI.
+
+1. Reset the version back to dev, e.g. `2.1.0.dev` after releasing `2.0.0`
+
+   ```shell
+   tbump --no-tag ${NEXT_VERSION}.dev
+   ```
+
+1. Following the release to PyPI, an automated PR should arrive to
+   [conda-forge/jupyterhub-feedstock][],
+   check for the tests to succeed on this PR and then merge it to successfully
+   update the package for `conda` on the conda-forge channel.
+
+[github-activity]: https://github.com/choldgraf/github-activity
+[conda-forge/jupyterhub-feedstock]: https://github.com/conda-forge/jupyterhub-feedstock
SECURITY.md (new file, 5 lines)

@@ -0,0 +1,5 @@
+# Reporting a Vulnerability
+
+If you believe you’ve found a security vulnerability in a Jupyter
+project, please report it to security@ipython.org. If you prefer to
+encrypt your security reports, you can use [this PGP public key](https://jupyter-notebook.readthedocs.io/en/stable/_downloads/1d303a645f2505a8fd283826fafc9908/ipython_security.asc).
@@ -7,13 +7,15 @@ codecov
 coverage
 cryptography
 html5lib # needed for beautifulsoup
+jupyterlab >=3
 mock
-notebook
 pre-commit
 pytest>=3.3
-pytest-asyncio
+pytest-asyncio; python_version < "3.7"
+pytest-asyncio>=0.17; python_version >= "3.7"
 pytest-cov
 requests-mock
+tbump
 # blacklist urllib3 releases affected by https://github.com/urllib3/urllib3/issues/1683
 # I *think* this should only affect testing, not production
 urllib3!=1.25.4,!=1.25.5
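A note on the `pytest-asyncio` split above: the `; python_version < "3.7"` suffix is a PEP 508 environment marker, so pip only installs the requirement line whose marker matches the running interpreter. The sketch below is just an illustration of how such a marker evaluates; it assumes the third-party `packaging` library is available (pip vendors the same logic internally).

```python
# Evaluate the environment markers used in the requirements change above.
from packaging.markers import Marker

old_python = Marker('python_version < "3.7"')
new_python = Marker('python_version >= "3.7"')

# On a Python 3.9 interpreter this prints: False True,
# so only pytest-asyncio>=0.17 would be installed there.
print(old_python.evaluate(), new_python.evaluate())
```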
@@ -53,14 +53,6 @@ help:
 clean:
 	rm -rf $(BUILDDIR)/*

-node_modules: package.json
-	npm install && touch node_modules
-
-rest-api: source/_static/rest-api/index.html
-
-source/_static/rest-api/index.html: rest-api.yml node_modules
-	npm run rest-api
-
 metrics: source/reference/metrics.rst

 source/reference/metrics.rst: generate-metrics.py
@@ -71,7 +63,7 @@ scopes: source/rbac/scope-table.md
 source/rbac/scope-table.md: source/rbac/generate-scope-table.py
 	python3 source/rbac/generate-scope-table.py

-html: rest-api metrics scopes
+html: metrics scopes
 	$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
 	@echo
 	@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
@@ -1,14 +0,0 @@
-{
-  "name": "jupyterhub-docs-build",
-  "version": "0.8.0",
-  "description": "build JupyterHub swagger docs",
-  "scripts": {
-    "rest-api": "bootprint openapi ./rest-api.yml source/_static/rest-api"
-  },
-  "author": "",
-  "license": "BSD-3-Clause",
-  "devDependencies": {
-    "bootprint": "^1.0.0",
-    "bootprint-openapi": "^1.0.0"
-  }
-}
@@ -1,12 +1,12 @@
 -r ../requirements.txt

 alabaster_jupyterhub
-# Temporary fix of #3021. Revert back to released autodoc-traits when
-# 0.1.0 released.
-https://github.com/jupyterhub/autodoc-traits/archive/d22282c1c18c6865436e06d8b329c06fe12a07f8.zip
+autodoc-traits
 myst-parser
+pre-commit
 pydata-sphinx-theme
 pytablewriter>=0.56
+ruamel.yaml
 sphinx>=1.7
 sphinx-copybutton
 sphinx-jsonschema
docs/rest-api.yml (1196 lines changed; diff suppressed because it is too large)
@@ -2,3 +2,9 @@
 .navbar-brand {
   height: 4rem !important;
 }
+
+/* hide redundant funky-formatted swagger-ui version */
+
+.swagger-ui .info .title small {
+  display: none !important;
+}
docs/source/_static/rest-api.yml (new file, 1422 lines; diff suppressed because it is too large)
docs/source/admin/log-messages.md (new file, 72 lines)

@@ -0,0 +1,72 @@
+# Interpreting common log messages
+
+When debugging errors and outages, looking at the logs emitted by
+JupyterHub is very helpful. This document tries to document some common
+log messages, and what they mean.
+
+## Failing suspected API request to not-running server
+
+### Example
+
+Your logs might be littered with lines that might look slightly scary
+
+```
+[W 2022-03-10 17:25:19.774 JupyterHub base:1349] Failing suspected API request to not-running server: /hub/user/<user-name>/api/metrics/v1
+```
+
+### Most likely cause
+
+This likely means that the user's server has stopped running but they
+still have a browser tab open. For example, you might have 3 tabs open, and shut
+your server down via one. Or you closed your laptop, your server was
+culled for inactivity, and then you reopened your laptop again! The
+client side code (JupyterLab, Classic Notebook, etc) does not know
+yet that the server is dead, and continues to make some API requests.
+JupyterHub's architecture means that the proxy routes all requests that
+don't go to a running user server to the hub process itself. The hub
+process then explicitly returns a failure response, so the client knows
+that the server is not running anymore. This is used by JupyterLab to
+tell you your server is not running anymore, and offer you the option
+to restart it.
+
+Most commonly, you'll see this in reference to the `/api/metrics/v1`
+URL, used by [jupyter-resource-usage](https://github.com/jupyter-server/jupyter-resource-usage).
+
+### Actions you can take
+
+This log message is benign, and there is usually no action for you to take.
+
+## JupyterHub Singleuser Version mismatch
+
+### Example
+
+```
+jupyterhub version 1.5.0 != jupyterhub-singleuser version 1.3.0. This could cause failure to authenticate and result in redirect loops!
+```
+
+### Cause
+
+JupyterHub requires the `jupyterhub` python package installed inside the image or
+environment the user server starts in. This message indicates that the version of
+the `jupyterhub` package installed inside the user image or environment is not
+the same version as the JupyterHub server itself. This is not necessarily always a
+problem - some version drift is mostly acceptable, and the only two known cases of
+breakage are across the 0.7 and 2.0 version releases. In those cases, issues pop
+up immediately after upgrading your version of JupyterHub, so **always check the JupyterHub
+changelog before upgrading!** The primary problems this _could_ cause are:
+
+1. Infinite redirect loops after the user server starts
+2. Missing expected environment variables in the user server once it starts
+3. Failure for the started user server to authenticate with the JupyterHub server -
+   note that this is _not_ the same as _user authentication_ failing!
+
+However, for the most part, unless you are seeing these specific issues, the log
+message should be counted as a warning to get the `jupyterhub` package versions
+aligned, rather than as an indicator of an existing problem.
+
+### Actions you can take
+
+Upgrade the version of the `jupyterhub` package in your user environment or image
+so that it matches the version of JupyterHub running your JupyterHub server. If you
+are using the [zero-to-jupyterhub](https://z2jh.jupyter.org) helm chart, you can find the appropriate
+version of the `jupyterhub` package to install in your user image [here](https://jupyterhub.github.io/helm-chart/).
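Related to the version-mismatch section above: one way to spot the drift before users hit it is to compare the hub's `jupyterhub` version with the one inside the user image. A rough sketch under stated assumptions: it assumes Docker is available locally, and `your-user-image:latest` is only a placeholder name, not a real image.

```python
# Rough local check that the hub environment and a user image agree on the
# jupyterhub package version. "your-user-image:latest" is a placeholder.
import subprocess

def version_of(cmd: list) -> str:
    return subprocess.run(cmd, capture_output=True, text=True, check=True).stdout.strip()

hub_version = version_of(["jupyterhub", "--version"])
image_version = version_of(
    ["docker", "run", "--rm", "your-user-image:latest",
     "python", "-c", "import jupyterhub; print(jupyterhub.__version__)"]
)

if hub_version != image_version:
    print(f"version drift: hub={hub_version} user-image={image_version}")
else:
    print(f"versions aligned: {hub_version}")
```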
@@ -1,5 +1,3 @@
-.. _admin/upgrading:
-
 ====================
 Upgrading JupyterHub
 ====================
@@ -17,11 +17,6 @@ information on:
 - making an API request programmatically using the requests library
 - learning more about JupyterHub's API

-The same JupyterHub API spec, as found here, is available in an interactive form
-`here (on swagger's petstore) <https://petstore3.swagger.io/?url=https://raw.githubusercontent.com/jupyterhub/jupyterhub/HEAD/docs/rest-api.yml#!/default>`__.
-The `OpenAPI Initiative`_ (fka Swagger™) is a project used to describe
-and document RESTful APIs.
-
 JupyterHub API Reference:

 .. toctree::
File diff suppressed because one or more lines are too long
@@ -21,6 +21,7 @@ extensions = [
     'myst_parser',
 ]

+myst_heading_anchors = 2
 myst_enable_extensions = [
     'colon_fence',
     'deflist',
@@ -130,6 +131,30 @@ html_static_path = ['_static']

 htmlhelp_basename = 'JupyterHubdoc'

+html_theme_options = {
+    "icon_links": [
+        {
+            "name": "GitHub",
+            "url": "https://github.com/jupyterhub/jupyterhub",
+            "icon": "fab fa-github-square",
+        },
+        {
+            "name": "Discourse",
+            "url": "https://discourse.jupyter.org/c/jupyterhub/10",
+            "icon": "fab fa-discourse",
+        },
+    ],
+    "use_edit_page_button": True,
+    "navbar_align": "left",
+}
+
+html_context = {
+    "github_user": "jupyterhub",
+    "github_repo": "jupyterhub",
+    "github_version": "main",
+    "doc_path": "docs",
+}
+
 # -- Options for LaTeX output ---------------------------------------------

 latex_elements = {
@@ -205,7 +230,10 @@ epub_exclude_files = ['search.html']

 # -- Intersphinx ----------------------------------------------------------

-intersphinx_mapping = {'https://docs.python.org/3/': None}
+intersphinx_mapping = {
+    'python': ('https://docs.python.org/3/', None),
+    'tornado': ('https://www.tornadoweb.org/en/stable/', None),
+}

 # -- Read The Docs --------------------------------------------------------

@@ -215,7 +243,7 @@ if on_rtd:
     # build both metrics and rest-api, since RTD doesn't run make
     from subprocess import check_call as sh

-    sh(['make', 'metrics', 'rest-api', 'scopes'], cwd=docs)
+    sh(['make', 'metrics', 'scopes'], cwd=docs)

 # -- Spell checking -------------------------------------------------------

@@ -16,6 +16,10 @@ c.Authenticator.allowed_users = {'mal', 'zoe', 'inara', 'kaylee'}
 Users in the `allowed_users` set are added to the Hub database when the Hub is
 started.

+```{warning}
+If this configuration value is not set, then **all authenticated users will be allowed into your hub**.
+```
+
 ## Configure admins (`admin_users`)

 ```{note}
|
BIN
docs/source/images/binder-404.png
Normal file
BIN
docs/source/images/binder-404.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 160 KiB |
BIN
docs/source/images/binderhub-form.png
Normal file
BIN
docs/source/images/binderhub-form.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 138 KiB |
BIN
docs/source/images/chp-404.png
Normal file
BIN
docs/source/images/chp-404.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 38 KiB |
BIN
docs/source/images/server-not-running.png
Normal file
BIN
docs/source/images/server-not-running.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 66 KiB |
@@ -10,4 +10,5 @@ well as other information relevant to running your own JupyterHub over time.

    troubleshooting
    admin/upgrading
+   admin/log-messages
    changelog
@@ -43,7 +43,7 @@ JupyterHub performs the following functions:
   notebook servers

 For convenient administration of the Hub, its users, and services,
-JupyterHub also provides a `REST API`_.
+JupyterHub also provides a :doc:`REST API <reference/rest-api>`.

 The JupyterHub team and Project Jupyter value our community, and JupyterHub
 follows the Jupyter `Community Guides <https://jupyter.readthedocs.io/en/latest/community/content-community.html>`_.
@@ -155,4 +155,3 @@ Questions? Suggestions?

 .. _JupyterHub: https://github.com/jupyterhub/jupyterhub
 .. _Jupyter notebook: https://jupyter-notebook.readthedocs.io/en/latest/
-.. _REST API: https://petstore3.swagger.io/?url=https://raw.githubusercontent.com/jupyter/jupyterhub/HEAD/docs/rest-api.yml#!/default
@@ -5,8 +5,8 @@
|
|||||||
Before installing JupyterHub, you will need:
|
Before installing JupyterHub, you will need:
|
||||||
|
|
||||||
- a Linux/Unix based system
|
- a Linux/Unix based system
|
||||||
- [Python](https://www.python.org/downloads/) 3.5 or greater. An understanding
|
- [Python](https://www.python.org/downloads/) 3.6 or greater. An understanding
|
||||||
of using [`pip`](https://pip.pypa.io/en/stable/) or
|
of using [`pip`](https://pip.pypa.io) or
|
||||||
[`conda`](https://conda.io/docs/get-started.html) for
|
[`conda`](https://conda.io/docs/get-started.html) for
|
||||||
installing Python packages is helpful.
|
installing Python packages is helpful.
|
||||||
- [nodejs/npm](https://www.npmjs.com/). [Install nodejs/npm](https://docs.npmjs.com/getting-started/installing-node),
|
- [nodejs/npm](https://www.npmjs.com/). [Install nodejs/npm](https://docs.npmjs.com/getting-started/installing-node),
|
||||||
@@ -20,11 +20,11 @@ Before installing JupyterHub, you will need:
For example, install it on Linux (Debian/Ubuntu) using:

```
-sudo apt-get install npm nodejs-legacy
+sudo apt-get install nodejs npm
```

-The `nodejs-legacy` package installs the `node` executable and is currently
-required for npm to work on Debian/Ubuntu.
+[nodesource][] is a great resource to get more recent versions of the nodejs runtime,
+if your system package manager only has an old version of Node.js (e.g. 10 or older).

- A [pluggable authentication module (PAM)](https://en.wikipedia.org/wiki/Pluggable_authentication_module)
  to use the [default Authenticator](./getting-started/authenticators-users-basics.md).
@@ -33,11 +33,17 @@ Before installing JupyterHub, you will need:
- TLS certificate and key for HTTPS communication
- Domain name

+[nodesource]: https://github.com/nodesource/distributions#table-of-contents
+
Before running the single-user notebook servers (which may be on the same
system as the Hub or not), you will need:

-- [Jupyter Notebook](https://jupyter.readthedocs.io/en/latest/install.html)
-  version 4 or greater
+- [JupyterLab][] version 3 or greater,
+  or [Jupyter Notebook][]
+  4 or greater.

+[jupyterlab]: https://jupyterlab.readthedocs.io
+[jupyter notebook]: https://jupyter.readthedocs.io/en/latest/install.html

## Installation

@@ -48,14 +54,14 @@ JupyterHub can be installed with `pip` (and the proxy with `npm`) or `conda`:
```bash
python3 -m pip install jupyterhub
npm install -g configurable-http-proxy
-python3 -m pip install notebook # needed if running the notebook servers locally
+python3 -m pip install jupyterlab notebook # needed if running the notebook servers in the same environment
```

**conda** (one command installs jupyterhub and proxy):

```bash
conda install -c conda-forge jupyterhub # installs jupyterhub and proxy
-conda install notebook # needed if running the notebook servers locally
+conda install jupyterlab notebook # needed if running the notebook servers in the same environment
```

Test your installation. If installed, these commands should return the packages'
@@ -74,7 +80,7 @@ To start the Hub server, run the command:
jupyterhub
```

-Visit `https://localhost:8000` in your browser, and sign in with your unix
+Visit `http://localhost:8000` in your browser, and sign in with your unix
credentials.

To **allow multiple users to sign in** to the Hub server, you must start
@@ -1,14 +1,33 @@
+"""
+This script updates two files with the RBAC scope descriptions found in
+`scopes.py`.
+
+The files are:
+
+1. scope-table.md
+
+   This file is git ignored and referenced by the documentation.
+
+2. rest-api.yml
+
+   This file is JupyterHub's REST API schema. Both a version and the RBAC
+   scopes descriptions are updated in it.
+"""
import os
from collections import defaultdict
from pathlib import Path
+from subprocess import run

from pytablewriter import MarkdownTableWriter
from ruamel.yaml import YAML

+from jupyterhub import __version__
from jupyterhub.scopes import scope_definitions

HERE = os.path.abspath(os.path.dirname(__file__))
-PARENT = Path(HERE).parent.parent.absolute()
+DOCS = Path(HERE).parent.parent.absolute()
+REST_API_YAML = DOCS.joinpath("source", "_static", "rest-api.yml")
+SCOPE_TABLE_MD = Path(HERE).joinpath("scope-table.md")


class ScopeTableGenerator:
@@ -80,8 +99,9 @@ class ScopeTableGenerator:
        return table_rows

    def write_table(self):
-        """Generates the scope table in markdown format and writes it into `scope-table.md`"""
-        filename = f"{HERE}/scope-table.md"
+        """Generates the RBAC scopes reference documentation as a markdown table
+        and writes it to the .gitignored `scope-table.md`."""
+        filename = SCOPE_TABLE_MD
        table_name = ""
        headers = ["Scope", "Grants permission to:"]
        values = self._parse_scopes()
@@ -97,23 +117,38 @@ class ScopeTableGenerator:
        )

    def write_api(self):
-        """Generates the API description in markdown format and writes it into `rest-api.yml`"""
-        filename = f"{PARENT}/rest-api.yml"
-        yaml = YAML(typ='rt')
+        """Loads `rest-api.yml` and writes it back with a dynamically set
+        JupyterHub version field and list of RBAC scopes descriptions from
+        `scopes.py`."""
+        filename = REST_API_YAML
+
+        yaml = YAML(typ="rt")
        yaml.preserve_quotes = True
+        yaml.indent(mapping=2, offset=2, sequence=4)
+
        scope_dict = {}
-        with open(filename, 'r+') as f:
+        with open(filename) as f:
            content = yaml.load(f.read())
-            f.seek(0)
+
+        content["info"]["version"] = __version__
        for scope in self.scopes:
            description = self.scopes[scope]['description']
            doc_description = self.scopes[scope].get('doc_description', '')
            if doc_description:
                description = doc_description
            scope_dict[scope] = description
-        content['securityDefinitions']['oauth2']['scopes'] = scope_dict
+        content['components']['securitySchemes']['oauth2']['flows'][
+            'authorizationCode'
+        ]['scopes'] = scope_dict
+
+        with open(filename, 'w') as f:
            yaml.dump(content, f)
-            f.truncate()
+
+        run(
+            ['pre-commit', 'run', 'prettier', '--files', filename],
+            cwd=HERE,
+            check=False,
+        )


def main():
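The rewritten `write_api` above leans on ruamel.yaml's round-trip mode so that updating two fields does not reformat the rest of `rest-api.yml`. A minimal sketch of that pattern, under the assumption of a generic YAML file (`example.yml` and its keys are placeholders):

```python
from ruamel.yaml import YAML

yaml = YAML(typ="rt")  # round-trip mode: preserves comments and ordering
yaml.preserve_quotes = True  # keep the original quoting style
yaml.indent(mapping=2, offset=2, sequence=4)

path = "example.yml"  # placeholder file for illustration

with open(path) as f:
    content = yaml.load(f)  # loads into a structure that remembers formatting

content["info"]["version"] = "2.0.0"  # edit one nested field in place

with open(path, "w") as f:
    yaml.dump(content, f)  # written back with the original layout intact
```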
@@ -7,7 +7,7 @@ JupyterHub provides four roles that are available by default:
```{admonition} **Default roles**
- `user` role provides a {ref}`default user scope <default-user-scope-target>` `self` that grants access to the user's own resources.
- `admin` role contains all available scopes and grants full rights to all actions. This role **cannot be edited**.
-- `token` role provides a {ref}`default token scope <default-token-scope-target>` `all` that resolves to the same permissions as the owner of the token has.
+- `token` role provides a {ref}`default token scope <default-token-scope-target>` `inherit` that resolves to the same permissions as the owner of the token has.
- `server` role allows for posting activity of "itself" only.

**These roles cannot be deleted.**
@@ -123,13 +123,13 @@ has,
define the `server` role.

To restore the JupyterHub 1.x behavior of servers being able to do anything their owners can do,
-use the scope `all`:
+use the scope `inherit` (for 'inheriting' the owner's permissions):

```python
c.JupyterHub.load_roles = [
    {
        'name': 'server',
-        'scopes': ['all'],
+        'scopes': ['inherit'],
    }
]
```
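For contrast with the catch-all `inherit` scope, roles can also grant a narrow, explicit set of scopes. A hedged sketch of such a custom role (the role name, scope selection, and user list are illustrative, not part of this diff):

```python
# jupyterhub_config.py -- illustrative sketch only
c.JupyterHub.load_roles = [
    {
        'name': 'server',
        'scopes': ['inherit'],  # restore 1.x behavior, as in the hunk above
    },
    {
        'name': 'group-reader',  # hypothetical custom role
        'description': 'Read-only access to user and group listings',
        'scopes': ['read:users', 'read:groups'],
        'users': ['mal'],  # assign the role to specific users
    },
]
```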
@@ -38,7 +38,7 @@ By adding a scope to an existing role, all role bearers will gain the associated
Metascopes do not follow the general scope syntax. Instead, a metascope resolves to a set of scopes, which can refer to different resources, based on their owning entity. In JupyterHub, there are currently two metascopes:

1. default user scope `self`, and
-2. default token scope `all`.
+2. default token scope `inherit`.

(default-user-scope-target)=

@@ -57,11 +57,11 @@ The `self` scope is only valid for user entities. In other cases (e.g., for serv

### Default token scope

-The token metascope `all` covers the same scopes as the token owner's scopes during requests. For example, if a token owner has roles containing the scopes `read:groups` and `read:users`, the `all` scope resolves to the set of scopes `{read:groups, read:users}`.
+The token metascope `inherit` causes the token to have the same permissions as the token's owner. For example, if a token owner has roles containing the scopes `read:groups` and `read:users`, the `inherit` scope resolves to the set of scopes `{read:groups, read:users}`.

-If the token owner has default `user` role, the `all` scope resolves to `self`, which will subsequently be expanded to include all the user-specific scopes (or empty set in the case of services).
+If the token owner has default `user` role, the `inherit` scope resolves to `self`, which will subsequently be expanded to include all the user-specific scopes (or empty set in the case of services).

-If the token owner is a member of any group with roles, the group scopes will also be included in resolving the `all` scope.
+If the token owner is a member of any group with roles, the group scopes will also be included in resolving the `inherit` scope.

(horizontal-filtering-target)=

@@ -49,6 +49,6 @@ API tokens can also be issued to users via API ([_/hub/token_](../reference/urls

### With RBAC

-The RBAC framework allows for granting tokens different levels of permissions via scopes attached to roles. The 'only identify' purpose of the separate OAuth tokens is no longer required. API tokens can be used used for every action, including the login and authentication, for which an API token with no role (i.e., no scope in {ref}`available-scopes-target`) is used.
+The RBAC framework allows for granting tokens different levels of permissions via scopes attached to roles. The 'only identify' purpose of the separate OAuth tokens is no longer required. API tokens can be used for every action, including the login and authentication, for which an API token with no role (i.e., no scope in {ref}`available-scopes-target`) is used.

OAuth tokens are therefore dropped from the Hub upgraded with the RBAC framework.
docs/source/reference/api-only.md (new file, 128 lines)
@@ -0,0 +1,128 @@
(api-only)=

# Deploying JupyterHub in "API only mode"

As a service for deploying and managing Jupyter servers for users, JupyterHub
exposes this functionality _primarily_ via a [REST API](rest).
For convenience, JupyterHub also ships with a _basic_ web UI built using that REST API.
The basic web UI enables users to click a button to quickly start and stop their servers,
and it lets admins perform some basic user and server management tasks.

The REST API has always provided additional functionality beyond what is available in the basic web UI.
Similarly, we avoid implementing UI functionality that is not also available via the API.
With JupyterHub 2.0, the basic web UI will **always** be composed using the REST API.
In other words, no UI pages should rely on information not available via the REST API.
Previously, some admin UI functionality could only be achieved via admin pages,
such as paginated requests.

## Limited UI customization via templates

The JupyterHub UI is customizable via extensible HTML [templates](templates),
but this has some limited scope to what can be customized.
Adding some content and messages to existing pages is well supported,
but changing the page flow and what pages are available are beyond the scope of what is customizable.

## Rich UI customization with REST API based apps

Increasingly, JupyterHub is used purely as an API for managing Jupyter servers
for other Jupyter-based applications that might want to present a different user experience.
If you want a fully customized user experience,
you can now disable the Hub UI and use your own pages together with the JupyterHub REST API
to build your own web application to serve your users,
relying on the Hub only as an API for managing users and servers.

One example of such an application is [BinderHub][], which powers https://mybinder.org,
and motivates many of these changes.

BinderHub is distinct from a traditional JupyterHub deployment
because it uses temporary users created for each launch.
Instead of presenting a login page,
users are presented with a form to specify what environment they would like to launch:



When a launch is requested:

1. an image is built, if necessary
2. a temporary user is created,
3. a server is launched for that user, and
4. when running, users are redirected to an already running server with an auth token in the URL
5. after the session is over, the user is deleted

This means that a lot of JupyterHub's UI flow doesn't make sense:

- there is no way for users to login
- the human user doesn't map onto a JupyterHub `User` in a meaningful way
- when a server isn't running, there isn't a 'restart your server' action available because the user has been deleted
- users do not have any access to any Hub functionality, so presenting pages for those features would be confusing

BinderHub is one of the motivating use cases for JupyterHub supporting being used _only_ via its API.
We'll use BinderHub here as an example of various configuration options.

[binderhub]: https://binderhub.readthedocs.io

## Disabling Hub UI

`c.JupyterHub.hub_routespec` is a configuration option to specify which URL prefix should be routed to the Hub.
The default is `/` which means that the Hub will receive all requests not already specified to be routed somewhere else.

There are three values that are most logical for `hub_routespec`:

- `/` - this is the default, and used in most deployments.
  It is also the only option prior to JupyterHub 1.4.
- `/hub/` - this serves only Hub pages, both UI and API
- `/hub/api` - this serves _only the Hub API_, so all Hub UI is disabled,
  aside from the OAuth confirmation page, if used.

If you choose a hub routespec other than `/`,
the main JupyterHub feature you will lose is the automatic handling of requests for `/user/:username`
when the requested server is not running.

JupyterHub's handling of this request shows this page,
telling you that the server is not running,
with a button to launch it again:



If you set `hub_routespec` to something other than `/`,
it is likely that you also want to register another destination for `/` to handle requests to not-running servers.
If you don't, you will see a default 404 page from the proxy:



For mybinder.org, the default "start my server" page doesn't make sense,
because when a server is gone, there is no restart action.
Instead, we provide hints about how to get back to a link to start a _new_ server:



To achieve this, mybinder.org registers a route for `/` that goes to a custom endpoint
that runs nginx and only serves this static HTML error page.
This is set with

```python
c.Proxy.extra_routes = {
    "/": "http://custom-404-entpoint/",
}
```

You may want to use an alternate behavior, such as redirecting to a landing page,
or taking some other action based on the requested page.

If you use `c.JupyterHub.hub_routespec = "/hub/"`,
then all the Hub pages will be available,
and only this default-page-404 issue will come up.

If you use `c.JupyterHub.hub_routespec = "/hub/api/"`,
then only the Hub _API_ will be available,
and all UI will be up to you.
mybinder.org takes this last option,
because none of the Hub UI pages really make sense.
Binder users don't have any reason to know or care that JupyterHub happens
to be an implementation detail of how their environment is managed.
Seeing Hub error pages and messages in that situation is more likely to be confusing than helpful.

:::{versionadded} 1.4

`c.JupyterHub.hub_routespec` and `c.Proxy.extra_routes` are new in JupyterHub 1.4.
:::
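Pulling the options from this new page together, an "API only" deployment boils down to a couple of configuration lines. A hedged sketch (the 404 endpoint URL is the same placeholder used above):

```python
# jupyterhub_config.py -- sketch of an API-only deployment, based on the options above
c = get_config()  # noqa

# Route only the Hub API to the Hub, disabling the Hub-served UI pages
c.JupyterHub.hub_routespec = "/hub/api"

# Send everything else (including requests for not-running servers)
# to a custom page served elsewhere; placeholder URL as above
c.Proxy.extra_routes = {
    "/": "http://custom-404-entpoint/",
}
```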
@@ -1,6 +1,6 @@
# Authenticators

-The [Authenticator][] is the mechanism for authorizing users to use the
+The {class}`.Authenticator` is the mechanism for authorizing users to use the
Hub and single user notebook servers.

## The default PAM Authenticator
@@ -137,8 +137,8 @@ via other mechanisms. One such example is using [GitHub OAuth][].

Because the username is passed from the Authenticator to the Spawner,
a custom Authenticator and Spawner are often used together.
-For example, the Authenticator methods, [pre_spawn_start(user, spawner)][]
-and [post_spawn_stop(user, spawner)][], are hooks that can be used to do
+For example, the Authenticator methods, {meth}`.Authenticator.pre_spawn_start`
+and {meth}`.Authenticator.post_spawn_stop`, are hooks that can be used to do
auth-related startup (e.g. opening PAM sessions) and cleanup
(e.g. closing PAM sessions).

@@ -223,7 +223,7 @@ If there are multiple keys present, the **first** key is always used to persist

Typically, if `auth_state` is persisted it is desirable to affect the Spawner environment in some way.
This may mean defining environment variables, placing certificates in the user's home directory, etc.
-The `Authenticator.pre_spawn_start` method can be used to pass information from authenticator state
+The {meth}`Authenticator.pre_spawn_start` method can be used to pass information from authenticator state
to Spawner environment:

```python
@@ -247,10 +247,42 @@ class MyAuthenticator(Authenticator):
        spawner.environment['UPSTREAM_TOKEN'] = auth_state['upstream_token']
```

+(authenticator-groups)=
+
+## Authenticator-managed group membership
+
+:::{versionadded} 2.2
+:::
+
+Some identity providers may have their own concept of group membership that you would like to preserve in JupyterHub.
+This is now possible with `Authenticator.manage_groups`.
+
+You can set the config:
+
+```python
+c.Authenticator.manage_groups = True
+```
+
+to enable this behavior.
+The default is False for Authenticators that ship with JupyterHub,
+but may be True for custom Authenticators.
+Check your Authenticator's documentation for manage_groups support.
+
+If True, {meth}`.Authenticator.authenticate` and {meth}`.Authenticator.refresh_user` may include a field `groups`
+which is a list of group names the user should be a member of:
+
+- Membership will be added for any group in the list
+- Membership in any groups not in the list will be revoked
+- Any groups not already present in the database will be created
+- If `None` is returned, no changes are made to the user's group membership
+
+If authenticator-managed groups are enabled,
+all group-management via the API is disabled.
+
## pre_spawn_start and post_spawn_stop hooks

-Authenticators uses two hooks, [pre_spawn_start(user, spawner)][] and
-[post_spawn_stop(user, spawner)][] to add pass additional state information
+Authenticators use two hooks, {meth}`.Authenticator.pre_spawn_start` and
+{meth}`.Authenticator.post_spawn_stop(user, spawner)` to pass additional state information
between the authenticator and a spawner. These hooks are typically used for auth-related
startup, i.e. opening a PAM session, and auth-related cleanup, i.e. closing a
PAM session.
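To make the managed-groups behavior described above concrete, here is a hedged sketch of a custom Authenticator whose `authenticate` returns a `groups` field (the lookup helpers and the group source are hypothetical, not part of JupyterHub):

```python
from jupyterhub.auth import Authenticator


class GroupSyncAuthenticator(Authenticator):
    """Illustrative sketch: report group membership from an external identity provider."""

    # enable authenticator-managed groups, as described above
    manage_groups = True

    async def authenticate(self, handler, data):
        username = data["username"]
        if not self._verify_password(username, data["password"]):  # hypothetical helper
            return None
        return {
            "name": username,
            # JupyterHub adds/revokes memberships to match this list,
            # creating any groups that do not exist yet
            "groups": self._lookup_groups(username),  # hypothetical helper
        }
```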
@@ -259,10 +291,7 @@ PAM session.

Beginning with version 0.8, JupyterHub is an OAuth provider.

-[authenticator]: https://github.com/jupyterhub/jupyterhub/blob/HEAD/jupyterhub/auth.py
[pam]: https://en.wikipedia.org/wiki/Pluggable_authentication_module
[oauth]: https://en.wikipedia.org/wiki/OAuth
[github oauth]: https://developer.github.com/v3/oauth/
[oauthenticator]: https://github.com/jupyterhub/oauthenticator
-[pre_spawn_start(user, spawner)]: https://jupyterhub.readthedocs.io/en/latest/api/auth.html#jupyterhub.auth.Authenticator.pre_spawn_start
-[post_spawn_stop(user, spawner)]: https://jupyterhub.readthedocs.io/en/latest/api/auth.html#jupyterhub.auth.Authenticator.post_spawn_stop
@@ -165,7 +165,7 @@ As with nginx above, you can use [Apache](https://httpd.apache.org) as the rever
First, we will need to enable the apache modules that we are going to need:

```bash
-a2enmod ssl rewrite proxy proxy_http proxy_wstunnel
+a2enmod ssl rewrite proxy headers proxy_http proxy_wstunnel
```

Our Apache configuration is equivalent to the nginx configuration above:
@@ -188,13 +188,24 @@ Listen 443

ServerName HUB.DOMAIN.TLD

+# enable HTTP/2, if available
+Protocols h2 http/1.1
+
+# HTTP Strict Transport Security (mod_headers is required) (63072000 seconds)
+Header always set Strict-Transport-Security "max-age=63072000"
+
# configure SSL
SSLEngine on
SSLCertificateFile /etc/letsencrypt/live/HUB.DOMAIN.TLD/fullchain.pem
SSLCertificateKeyFile /etc/letsencrypt/live/HUB.DOMAIN.TLD/privkey.pem
-SSLProtocol All -SSLv2 -SSLv3
SSLOpenSSLConfCmd DHParameters /etc/ssl/certs/dhparam.pem
-SSLCipherSuite EECDH+AESGCM:EDH+AESGCM:AES256+EECDH:AES256+EDH
+# intermediate configuration from ssl-config.mozilla.org (2022-03-03)
+# Please note, that this configuration might be out-dated - please update it accordingly using https://ssl-config.mozilla.org/
+SSLProtocol all -SSLv3 -TLSv1 -TLSv1.1
+SSLCipherSuite ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:DHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384
+SSLHonorCipherOrder off
+SSLSessionTickets off

# Use RewriteEngine to handle websocket connection upgrades
RewriteEngine On
@@ -208,6 +219,7 @@ Listen 443
# proxy to JupyterHub
ProxyPass http://127.0.0.1:8000/
ProxyPassReverse http://127.0.0.1:8000/
+RequestHeader set "X-Forwarded-Proto" expr=%{REQUEST_SCHEME}
</Location>
</VirtualHost>
```
@@ -219,7 +231,7 @@ In case of the need to run the jupyterhub under /jhub/ or other location please
httpd.conf amendments:

```bash
-RewriteRule /jhub/(.*) ws://127.0.0.1:8000/jhub/$1 [NE.P,L]
+RewriteRule /jhub/(.*) ws://127.0.0.1:8000/jhub/$1 [NE,P,L]
RewriteRule /jhub/(.*) http://127.0.0.1:8000/jhub/$1 [NE,P,L]

ProxyPass /jhub/ http://127.0.0.1:8000/jhub/
@@ -76,13 +76,26 @@ c.InteractiveShellApp.extensions.append("cython")

### Example: Enable a Jupyter notebook configuration setting for all users

+:::{note}
+These examples configure the Jupyter ServerApp,
+which is used by JupyterLab, the default in JupyterHub 2.0.
+
+If you are using the classic Jupyter Notebook server,
+the same things should work,
+with the following substitutions:
+
+- Where you see `jupyter_server_config`, use `jupyter_notebook_config`
+- Where you see `ServerApp`, use `NotebookApp`
+
+:::
+
To enable Jupyter notebook's internal idle-shutdown behavior (requires
-notebook ≥ 5.4), set the following in the `/etc/jupyter/jupyter_notebook_config.py`
+notebook ≥ 5.4), set the following in the `/etc/jupyter/jupyter_server_config.py`
file:

```python
# shutdown the server after no activity for an hour
-c.NotebookApp.shutdown_no_activity_timeout = 60 * 60
+c.ServerApp.shutdown_no_activity_timeout = 60 * 60
# shutdown kernels after no activity for 20 minutes
c.MappingKernelManager.cull_idle_timeout = 20 * 60
# check for idle kernels every two minutes
@@ -112,8 +125,8 @@ Assuming I have a Python 2 and Python 3 environment that I want to make
sure are available, I can install their specs system-wide (in /usr/local) with:

```bash
-/path/to/python3 -m IPython kernel install --prefix=/usr/local
-/path/to/python2 -m IPython kernel install --prefix=/usr/local
+/path/to/python3 -m ipykernel install --prefix=/usr/local
+/path/to/python2 -m ipykernel install --prefix=/usr/local
```

## Multi-user hosts vs. Containers
@@ -176,12 +189,40 @@ The number of named servers per user can be limited by setting
c.JupyterHub.named_server_limit_per_user = 5
```

-## Switching to Jupyter Server
+(classic-notebook-ui)=

-[Jupyter Server](https://jupyter-server.readthedocs.io/en/latest/) is a new Tornado Server backend for Jupyter web applications (e.g. JupyterLab 3.0 uses this package as its default backend).
+## Switching back to classic notebook

-By default, the single-user notebook server uses the (old) `NotebookApp` from the [notebook](https://github.com/jupyter/notebook) package. You can switch to using Jupyter Server's `ServerApp` backend (this will likely become the default in future releases) by setting the `JUPYTERHUB_SINGLEUSER_APP` environment variable to:
+By default the single-user server launches JupyterLab,
+which is based on [Jupyter Server][].
+This is the default server when running JupyterHub ≥ 2.0.
+You can switch to using the legacy Jupyter Notebook server by setting the `JUPYTERHUB_SINGLEUSER_APP` environment variable
+(in the single-user environment) to:
+
+```bash
+export JUPYTERHUB_SINGLEUSER_APP='notebook.notebookapp.NotebookApp'
+```
+
+[jupyter server]: https://jupyter-server.readthedocs.io
+[jupyter notebook]: https://jupyter-notebook.readthedocs.io
+
+:::{versionchanged} 2.0
+JupyterLab is now the default singleuser UI, if available,
+which is based on the [Jupyter Server][],
+no longer the legacy [Jupyter Notebook][] server.
+JupyterHub prior to 2.0 launched the legacy notebook server (`jupyter notebook`),
+and Jupyter server could be selected by specifying
+
+```python
+# jupyterhub_config.py
+c.Spawner.cmd = ["jupyter-labhub"]
+```
+
+or for an otherwise customized Jupyter Server app,
+set the environment variable:
+
```bash
export JUPYTERHUB_SINGLEUSER_APP='jupyter_server.serverapp.ServerApp'
```

+:::
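If you would rather keep this setting in `jupyterhub_config.py` than export it in every single-user environment, one hedged option (assuming your Spawner passes `Spawner.environment` through to the single-user server unchanged, which the standard Spawners do) is:

```python
# jupyterhub_config.py -- sketch; equivalent to the export shown above
c.Spawner.environment = {
    # launch the classic Notebook server instead of the JupyterLab / Jupyter Server default
    "JUPYTERHUB_SINGLEUSER_APP": "notebook.notebookapp.NotebookApp",
}
```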
@@ -16,10 +16,12 @@ what happens under-the-hood when you deploy and configure your JupyterHub.
proxy
separate-proxy
rest
+rest-api
server-api
monitoring
database
templates
+api-only
../events/index
config-user-env
config-examples
docs/source/reference/rest-api.md (new file, 27 lines)
@@ -0,0 +1,27 @@
# JupyterHub REST API

Below is an interactive view of JupyterHub's OpenAPI specification.

<!-- client-rendered openapi UI copied from FastAPI -->

<link type="text/css" rel="stylesheet" href="https://cdn.jsdelivr.net/npm/swagger-ui-dist@3/swagger-ui.css">
<script src="https://cdn.jsdelivr.net/npm/swagger-ui-dist@4.1/swagger-ui-bundle.js"></script>
<!-- `SwaggerUIBundle` is now available on the page -->

<!-- render the ui here -->
<div id="openapi-ui"></div>

<script>
const ui = SwaggerUIBundle({
    url: '../_static/rest-api.yml',
    dom_id: '#openapi-ui',
    presets: [
      SwaggerUIBundle.presets.apis,
      SwaggerUIBundle.SwaggerUIStandalonePreset
    ],
    layout: "BaseLayout",
    deepLinking: true,
    showExtensions: true,
    showCommonExtensions: true,
});
</script>
@@ -1,14 +0,0 @@
:orphan:

===================
JupyterHub REST API
===================

.. this doc exists as a resolvable link target
.. which _static files are not

.. meta::
    :http-equiv=refresh: 0;url=../_static/rest-api/index.html

The rest API docs are `here <../_static/rest-api/index.html>`_
if you are not redirected automatically.
@@ -1,3 +1,5 @@
+(rest-api)=
+
# Using JupyterHub's REST API

This section will give you information on:
@@ -111,7 +113,6 @@ c.JupyterHub.load_roles = [
        "scopes": [
            # specify the permissions the token should have
            "admin:users",
-            "admin:services",
        ],
        "services": [
            # assign the service the above permissions
@@ -302,12 +303,8 @@ or kubernetes pods.

## Learn more about the API

-You can see the full [JupyterHub REST API][] for details. This REST API Spec can
-be viewed in a more [interactive style on swagger's petstore][].
-Both resources contain the same information and differ only in its display.
-Note: The Swagger specification is being renamed the [OpenAPI Initiative][].
+You can see the full [JupyterHub REST API][] for details.

-[interactive style on swagger's petstore]: https://petstore3.swagger.io/?url=https://raw.githubusercontent.com/jupyterhub/jupyterhub/HEAD/docs/rest-api.yml#!/default
[openapi initiative]: https://www.openapis.org/
[jupyterhub rest api]: ./rest-api
[jupyter notebook rest api]: https://petstore3.swagger.io/?url=https://raw.githubusercontent.com/jupyter/notebook/HEAD/notebook/services/api/api.yaml
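As a quick illustration of driving the REST API described above from code, here is a hedged sketch using the `requests` library (the Hub URL and the token environment variable are assumptions for the example):

```python
import os

import requests

# Assumptions for this sketch: the Hub runs locally and an API token
# (e.g. issued from the /hub/token page) is provided via JUPYTERHUB_API_TOKEN.
hub_api = "http://127.0.0.1:8000/hub/api"
token = os.environ["JUPYTERHUB_API_TOKEN"]

# List users visible to this token (requires an appropriate read scope/role)
r = requests.get(
    f"{hub_api}/users",
    headers={"Authorization": f"token {token}"},
)
r.raise_for_status()
for user in r.json():
    print(user["name"], list(user.get("servers", {})))
```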
|
@@ -1,17 +1,5 @@
|
|||||||
# Services
|
# Services
|
||||||
|
|
||||||
With version 0.7, JupyterHub adds support for **Services**.
|
|
||||||
|
|
||||||
This section provides the following information about Services:
|
|
||||||
|
|
||||||
- [Definition of a Service](#definition-of-a-service)
|
|
||||||
- [Properties of a Service](#properties-of-a-service)
|
|
||||||
- [Hub-Managed Services](#hub-managed-services)
|
|
||||||
- [Launching a Hub-Managed Service](#launching-a-hub-managed-service)
|
|
||||||
- [Externally-Managed Services](#externally-managed-services)
|
|
||||||
- [Writing your own Services](#writing-your-own-services)
|
|
||||||
- [Hub Authentication and Services](#hub-authentication-and-services)
|
|
||||||
|
|
||||||
## Definition of a Service
|
## Definition of a Service
|
||||||
|
|
||||||
When working with JupyterHub, a **Service** is defined as a process that interacts
|
When working with JupyterHub, a **Service** is defined as a process that interacts
|
||||||
@@ -95,6 +83,7 @@ c.JupyterHub.load_roles = [
            # 'admin:users' # needed if culling idle users as well
        ]
    }
+]

c.JupyterHub.services = [
    {
@@ -115,6 +104,8 @@ parameters, which describe the environment needed to start the Service process:

The Hub will pass the following environment variables to launch the Service:

+(service-env)=
+
```bash
JUPYTERHUB_SERVICE_NAME: The name of the service
JUPYTERHUB_API_TOKEN: API token assigned to the service
@@ -196,25 +187,45 @@ extra slash you might get unexpected behavior. For example if your service has a

## Hub Authentication and Services

-JupyterHub 0.7 introduces some utilities for using the Hub's authentication
-mechanism to govern access to your service. When a user logs into JupyterHub,
-the Hub sets a **cookie (`jupyterhub-services`)**. The service can use this
-cookie to authenticate requests.
+JupyterHub provides some utilities for using the Hub's authentication
+mechanism to govern access to your service.

-JupyterHub ships with a reference implementation of Hub authentication that
+Requests to all JupyterHub services are made with OAuth tokens.
+These can either be requests with a token in the `Authorization` header,
+or url parameter `?token=...`,
+or browser requests which must complete the OAuth authorization code flow,
+which results in a token that should be persisted for future requests
+(persistence is up to the service,
+but an encrypted cookie confined to the service path is appropriate,
+and provided by default).
+
+:::{versionchanged} 2.0
+The shared `jupyterhub-services` cookie is removed.
+OAuth must be used to authenticate browser requests with services.
+:::
+
+JupyterHub includes a reference implementation of Hub authentication that
can be used by services. You may go beyond this reference implementation and
create custom hub-authenticating clients and services. We describe the process
below.

-The reference, or base, implementation is the [`HubAuth`][hubauth] class,
-which implements the requests to the Hub.
+The reference, or base, implementation is the {class}`.HubAuth` class,
+which implements the API requests to the Hub that resolve a token to a User model.
+
+There are two levels of authentication with the Hub:
+
+- {class}`.HubAuth` - the most basic authentication,
+  for services that should only accept API requests authorized with a token.
+
+- {class}`.HubOAuth` - For services that should use oauth to authenticate with the Hub.
+  This should be used for any service that serves pages that should be visited with a browser.
+
To use HubAuth, you must set the `.api_token`, either programmatically when constructing the class,
or via the `JUPYTERHUB_API_TOKEN` environment variable.

Most of the logic for authentication implementation is found in the
-[`HubAuth.user_for_token`][hubauth.user_for_token]
-methods, which makes a request of the Hub, and returns:
+{meth}`.HubAuth.user_for_token` method,
+which makes a request of the Hub, and returns:

- None, if no user could be identified, or
- a dict of the following form:
@@ -235,6 +246,19 @@ action.
HubAuth also caches the Hub's response for a number of seconds,
configurable by the `cookie_cache_max_age` setting (default: five minutes).

+If your service would like to make further requests _on behalf of users_,
+it should use the token issued by this OAuth process.
+If you are using tornado,
+you can access the token authenticating the current request with {meth}`.HubAuth.get_token`.
+
+:::{versionchanged} 2.2
+
+{meth}`.HubAuth.get_token` adds support for retrieving
+tokens stored in tornado cookies after completion of OAuth.
+Previously, it only retrieved tokens from URL parameters or the Authorization header.
+Passing `get_token(handler, in_cookie=False)` preserves this behavior.
+:::
+
### Flask Example

For example, you have a Flask service that returns information about a user.
@@ -250,18 +274,17 @@ for more details.
### Authenticating tornado services with JupyterHub

Since most Jupyter services are written with tornado,
-we include a mixin class, [`HubAuthenticated`][hubauthenticated],
+we include a mixin class, [`HubOAuthenticated`][huboauthenticated],
for quickly authenticating your own tornado services with JupyterHub.

-Tornado's `@web.authenticated` method calls a Handler's `.get_current_user`
-method to identify the user. Mixing in `HubAuthenticated` defines
-`get_current_user` to use HubAuth. If you want to configure the HubAuth
-instance beyond the default, you'll want to define an `initialize` method,
+Tornado's {py:func}`~.tornado.web.authenticated` decorator calls a Handler's {py:meth}`~.tornado.web.RequestHandler.get_current_user`
+method to identify the user. Mixing in {class}`.HubAuthenticated` defines
+{meth}`~.HubAuthenticated.get_current_user` to use HubAuth. If you want to configure the HubAuth
+instance beyond the default, you'll want to define an {py:meth}`~.tornado.web.RequestHandler.initialize` method,
such as:

```python
-class MyHandler(HubAuthenticated, web.RequestHandler):
+class MyHandler(HubOAuthenticated, web.RequestHandler):
-    hub_users = {'inara', 'mal'}

    def initialize(self, hub_auth):
        self.hub_auth = hub_auth
@@ -271,14 +294,21 @@ class MyHandler(HubAuthenticated, web.RequestHandler):
        ...
```

-The HubAuth will automatically load the desired configuration from the Service
-environment variables.
+The HubAuth class will automatically load the desired configuration from the Service
+[environment variables](service-env).

-If you want to limit user access, you can specify allowed users through either the
-`.hub_users` attribute or `.hub_groups`. These are sets that check against the
-username and user group list, respectively. If a user matches neither the user
-list nor the group list, they will not be allowed access. If both are left
-undefined, then any user will be allowed.
+:::{versionchanged} 2.0
+Access scopes are used to govern access to services.
+Prior to 2.0,
+sets of users and groups could be used to grant access
+by defining `.hub_groups` or `.hub_users` on the authenticated handler.
+These are ignored if the 2.0 `.hub_scopes` is defined.
+:::
+
+:::{seealso}
+{meth}`.HubAuth.check_scopes`
+:::
+
### Implementing your own Authentication with JupyterHub

@@ -354,9 +384,6 @@ section on securing the notebook viewer.

[requests]: http://docs.python-requests.org/en/master/
[services_auth]: ../api/services.auth.html
-[huboauth]: ../api/services.auth.html#jupyterhub.services.auth.HubOAuth
-[hubauth.user_for_token]: ../api/services.auth.html#jupyterhub.services.auth.HubAuth.user_for_token
-[hubauthenticated]: ../api/services.auth.html#jupyterhub.services.auth.HubAuthenticated
[nbviewer example]: https://github.com/jupyter/nbviewer#securing-the-notebook-viewer
[fastapi example]: https://github.com/jupyterhub/jupyterhub/tree/HEAD/examples/service-fastapi
[fastapi]: https://fastapi.tiangolo.com
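Tying the service-authentication changes above together, here is a hedged sketch of resolving a request token with {class}`.HubAuth` directly (the surrounding web framework is omitted; `JUPYTERHUB_API_TOKEN` is the variable the Hub passes to managed services):

```python
import os

from jupyterhub.services.auth import HubAuth

# The Hub passes JUPYTERHUB_API_TOKEN (among other variables) to managed services
auth = HubAuth(api_token=os.environ["JUPYTERHUB_API_TOKEN"])


def identify(request_token):
    """Resolve a token from an Authorization header or ?token=... parameter."""
    user_model = auth.user_for_token(request_token)
    if user_model is None:
        return None  # no user could be identified; reject the request
    # otherwise a dict like {'name': ..., 'kind': 'user', ...} as described above
    return user_model
```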
@@ -108,6 +108,16 @@ class MySpawner(Spawner):
        return url
```

+#### Exception handling
+
+When `Spawner.start` raises an Exception, a message can be passed on to the user via a `.jupyterhub_html_message` or `.jupyterhub_message` attribute on the exception.
+
+When the Exception has a `.jupyterhub_html_message` attribute, it will be rendered as HTML to the user.
+
+Alternatively `.jupyterhub_message` is rendered as unformatted text.
+
+If neither attribute is present, the Exception will be shown to the user as unformatted text.
+
### Spawner.poll

`Spawner.poll` should check if the spawner is still running.
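A hedged sketch of the exception handling described in the hunk above, inside a custom Spawner's `start` (the Spawner subclass and its quota check are illustrative):

```python
from jupyterhub.spawner import Spawner


class QuotaCheckingSpawner(Spawner):
    """Illustrative sketch only."""

    async def start(self):
        if not await self._quota_available():  # hypothetical helper
            e = RuntimeError("quota exceeded")
            # rendered as HTML on the error page shown to the user
            e.jupyterhub_html_message = (
                "Your server could not start: <b>compute quota exceeded</b>."
            )
            raise e
        # ... otherwise start the single-user server and return its (ip, port) or URL
```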
@@ -275,7 +275,7 @@ where `ssl_cert` is example-chained.crt and ssl_key to your private key.

Then restart JupyterHub.

-See also [JupyterHub SSL encryption](./getting-started/security-basics.html#ssl-encryption).
+See also {ref}`ssl-encryption`.

### Install JupyterHub without a network connection

docs/test_docs.py (new file, 46 lines)
@@ -0,0 +1,46 @@
import sys
from pathlib import Path
from subprocess import run

from ruamel.yaml import YAML

yaml = YAML(typ="safe")

here = Path(__file__).absolute().parent
root = here.parent


def test_rest_api_version_is_updated():
    """Checks that the version in JupyterHub's REST API definition file
    (rest-api.yml) is matching the JupyterHub version."""
    version_py = root.joinpath("jupyterhub", "_version.py")
    rest_api_yaml = root.joinpath("docs", "source", "_static", "rest-api.yml")
    ns = {}
    with version_py.open() as f:
        exec(f.read(), {}, ns)
    jupyterhub_version = ns["__version__"]

    with rest_api_yaml.open() as f:
        rest_api = yaml.load(f)
    rest_api_version = rest_api["info"]["version"]

    assert jupyterhub_version == rest_api_version


def test_rest_api_rbac_scope_descriptions_are_updated():
    """Checks that the RBAC scope descriptions in JupyterHub's REST API
    definition file (rest-api.yml), as updated by generate-scope-table.py,
    matches what is committed."""
    run([sys.executable, "source/rbac/generate-scope-table.py"], cwd=here, check=True)
    run(
        [
            "git",
            "--no-pager",
            "diff",
            "--color=always",
            "--exit-code",
            str(here.joinpath("source", "_static", "rest-api.yml")),
        ],
        cwd=here,
        check=True,
    )
examples/azuread-with-group-management/jupyterhub_config.py (new file, 30 lines)
@@ -0,0 +1,30 @@
"""sample jupyterhub config file for testing

configures jupyterhub with dummyauthenticator and simplespawner
to enable testing without administrative privileges.
"""

c = get_config()  # noqa
c.Application.log_level = 'DEBUG'

from oauthenticator.azuread import AzureAdOAuthenticator
import os

c.JupyterHub.authenticator_class = AzureAdOAuthenticator

c.AzureAdOAuthenticator.client_id = os.getenv("AAD_CLIENT_ID")
c.AzureAdOAuthenticator.client_secret = os.getenv("AAD_CLIENT_SECRET")
c.AzureAdOAuthenticator.oauth_callback_url = os.getenv("AAD_CALLBACK_URL")
c.AzureAdOAuthenticator.tenant_id = os.getenv("AAD_TENANT_ID")
c.AzureAdOAuthenticator.username_claim = "email"
c.AzureAdOAuthenticator.authorize_url = os.getenv("AAD_AUTHORIZE_URL")
c.AzureAdOAuthenticator.token_url = os.getenv("AAD_TOKEN_URL")
c.Authenticator.manage_groups = True
c.Authenticator.refresh_pre_spawn = True

# Optionally set a global password that all users must use
# c.DummyAuthenticator.password = "your_password"

from jupyterhub.spawner import SimpleLocalProcessSpawner

c.JupyterHub.spawner_class = SimpleLocalProcessSpawner
examples/azuread-with-group-management/requirements.txt (new file, 2 lines)
@@ -0,0 +1,2 @@
oauthenticator
pyjwt
@@ -29,7 +29,7 @@ def get_token():
    token_file = here.joinpath("service-token")
    log.info(f"Loading token from {token_file}")
    with token_file.open("r") as f:
-        token = f.read()
+        token = f.read().strip()
    return token

@@ -8,59 +8,72 @@ There is an implementation each of api-token-based `HubAuthenticated` and OAuth-

 1. Launch JupyterHub and the `whoami` services with

-       jupyterhub --ip=127.0.0.1
+       jupyterhub

 2. Visit http://127.0.0.1:8000/services/whoami-oauth

-After logging in with your local-system credentials, you should see a JSON dump of your user info:
+After logging in with any username and password, you should see a JSON dump of your user info:

 ```json
 {
   "admin": false,
-  "last_activity": "2016-05-27T14:05:18.016372",
+  "groups": [],
+  "kind": "user",
   "name": "queequeg",
-  "pending": null,
-  "server": "/user/queequeg"
+  "scopes": ["access:services!service=whoami-oauth"],
+  "session_id": "5a2164273a7346728873bcc2e3c26415"
 }
 ```

+What is contained in the model will depend on the permissions
+requested in the `oauth_roles` configuration of the service `whoami-oauth` service.
+The default is the minimum required for identification and access to the service,
+which will provide the username and current scopes.
+
 The `whoami-api` service powered by the base `HubAuthenticated` class only supports token-authenticated API requests,
-not browser visits, because it does not implement OAuth. Visit it by requesting an api token from the tokens page,
+not browser visits, because it does not implement OAuth. Visit it by requesting an api token from the tokens page (`/hub/token`),
 and making a direct request:

 ```bash
-$ curl -H "Authorization: token 8630bbd8ef064c48b22c7f122f0cd8ad" http://127.0.0.1:8000/services/whoami-api/ | jq .
+token="d584cbc5bba2430fb153aadb305029b4"
+curl -H "Authorization: token $token" http://127.0.0.1:8000/services/whoami-api/ | jq .
+```
+
+```json
 {
   "admin": false,
-  "created": "2021-05-21T09:47:41.299400Z",
+  "created": "2021-12-20T09:49:37.258427Z",
   "groups": [],
   "kind": "user",
-  "last_activity": "2021-05-21T09:49:08.290745Z",
-  "name": "test",
+  "last_activity": "2021-12-20T10:07:31.298056Z",
+  "name": "queequeg",
   "pending": null,
-  "roles": [
-    "user"
-  ],
+  "roles": ["user"],
   "scopes": [
+    "access:servers!user=queequeg",
     "access:services",
-    "access:servers!user=test",
-    "read:users!user=test",
-    "read:users:activity!user=test",
-    "read:users:groups!user=test",
-    "read:users:name!user=test",
-    "read:servers!user=test",
-    "read:tokens!user=test",
-    "users!user=test",
-    "users:activity!user=test",
-    "users:groups!user=test",
-    "users:name!user=test",
-    "servers!user=test",
-    "tokens!user=test"
+    "delete:servers!user=queequeg",
+    "read:servers!user=queequeg",
+    "read:tokens!user=queequeg",
+    "read:users!user=queequeg",
+    "read:users:activity!user=queequeg",
+    "read:users:groups!user=queequeg",
+    "read:users:name!user=queequeg",
+    "servers!user=queequeg",
+    "tokens!user=queequeg",
+    "users:activity!user=queequeg"
   ],
-  "server": null
+  "server": null,
+  "servers": {},
+  "session_id": null
 }
 ```

+The above is a more complete user model than the `whoami-oauth` example, because
+the token was issued with the default `token` role,
+which has the `inherit` metascope,
+meaning the token has access to everything the tokens owner has access to.
+
 This relies on the Hub starting the whoami services, via config (see [jupyterhub_config.py](./jupyterhub_config.py)).

 To govern access to the services, create **roles** with the scope `access:services!service=$service-name`,
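For reference, a minimal sketch of such a role definition, assuming JupyterHub 2.x `load_roles` configuration; the role and group names here are illustrative assumptions:

```python
# jupyterhub_config.py -- grant one group access to the whoami-oauth service
c.JupyterHub.load_roles = [
    {
        "name": "whoami-users",  # assumed role name
        "scopes": ["access:services!service=whoami-oauth"],
        "groups": ["whoami"],  # assumed group that should get access
    }
]
```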
@@ -10,7 +10,15 @@ c.JupyterHub.services = [
         'name': 'whoami-oauth',
         'url': 'http://127.0.0.1:10102',
         'command': [sys.executable, './whoami-oauth.py'],
-        'oauth_roles': ['user'],
+        # the default oauth roles is minimal,
+        # only requesting access to the service,
+        # and identification by name,
+        # nothing more.
+        # Specifying 'oauth_roles' as a list of role names
+        # allows requesting more information about users,
+        # or the ability to take actions on users' behalf, as required.
+        # The default 'token' role has the full permissions of its owner:
+        # 'oauth_roles': ['token'],
     },
 ]
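A minimal sketch of a service entry that requests an explicit role rather than the minimal default, under the same JupyterHub 2.x configuration; the role name is an assumption and would need a matching `load_roles` definition:

```python
import sys

c.JupyterHub.services = [
    {
        "name": "whoami-oauth",
        "url": "http://127.0.0.1:10102",
        "command": [sys.executable, "./whoami-oauth.py"],
        # request a named role instead of the minimal default scopes
        "oauth_roles": ["whoami-users"],  # assumed role name
    },
]
```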
@@ -5,12 +5,12 @@ object-assign
 */

 /*!
-  Copyright (c) 2017 Jed Watson.
+  Copyright (c) 2018 Jed Watson.
   Licensed under the MIT License (MIT), see
   http://jedwatson.github.io/classnames
 */

-/** @license React v0.20.1
+/** @license React v0.20.2
  * scheduler.production.min.js
  *
  * Copyright (c) Facebook, Inc. and its affiliates.
@@ -28,7 +28,7 @@ object-assign
  * LICENSE file in the root directory of this source tree.
  */

-/** @license React v17.0.1
+/** @license React v17.0.2
  * react-dom.production.min.js
  *
  * Copyright (c) Facebook, Inc. and its affiliates.
@@ -37,7 +37,16 @@ object-assign
  * LICENSE file in the root directory of this source tree.
  */

-/** @license React v17.0.1
+/** @license React v17.0.2
+ * react-jsx-runtime.production.min.js
+ *
+ * Copyright (c) Facebook, Inc. and its affiliates.
+ *
+ * This source code is licensed under the MIT license found in the
+ * LICENSE file in the root directory of this source tree.
+ */
+
+/** @license React v17.0.2
  * react.production.min.js
  *
  * Copyright (c) Facebook, Inc. and its affiliates.
@@ -31,30 +31,36 @@
     "@babel/core": "^7.12.3",
     "@babel/preset-env": "^7.12.11",
     "@babel/preset-react": "^7.12.10",
+    "@testing-library/jest-dom": "^5.15.1",
+    "@testing-library/react": "^12.1.2",
+    "@testing-library/user-event": "^13.5.0",
     "babel-loader": "^8.2.1",
     "bootstrap": "^4.5.3",
     "css-loader": "^5.0.1",
     "eslint-plugin-unused-imports": "^1.1.1",
     "file-loader": "^6.2.0",
     "history": "^5.0.0",
+    "lodash.debounce": "^4.0.8",
     "prop-types": "^15.7.2",
     "react": "^17.0.1",
-    "react-bootstrap": "^1.4.0",
+    "react-bootstrap": "^2.1.1",
     "react-dom": "^17.0.1",
     "react-icons": "^4.1.0",
     "react-multi-select-component": "^3.0.7",
+    "react-object-table-viewer": "^1.0.7",
    "react-redux": "^7.2.2",
     "react-router": "^5.2.0",
     "react-router-dom": "^5.2.0",
     "recompose": "^0.30.0",
     "redux": "^4.0.5",
+    "regenerator-runtime": "^0.13.9",
     "style-loader": "^2.0.0",
     "webpack": "^5.6.0",
     "webpack-cli": "^3.3.4",
     "webpack-dev-server": "^3.11.0"
   },
   "devDependencies": {
-    "@wojtekmaj/enzyme-adapter-react-17": "^0.4.1",
+    "@wojtekmaj/enzyme-adapter-react-17": "^0.6.5",
     "babel-jest": "^26.6.3",
     "enzyme": "^3.11.0",
     "eslint": "^7.18.0",
@@ -62,6 +68,7 @@
     "eslint-plugin-react": "^7.22.0",
     "identity-obj-proxy": "^3.0.0",
     "jest": "^26.6.3",
-    "prettier": "^2.2.1"
+    "prettier": "^2.2.1",
+    "sinon": "^13.0.1"
   }
 }
@@ -1,6 +1,7 @@
 export const initialState = {
   user_data: undefined,
   user_page: 0,
+  name_filter: "",
   groups_data: undefined,
   groups_page: 0,
   limit: window.api_page_limit,
@@ -13,6 +14,7 @@ export const reducers = (state = initialState, action) => {
       return Object.assign({}, state, {
         user_page: action.value.page,
         user_data: action.value.data,
+        name_filter: action.value.name_filter || "",
       });

     // Updates the client group model data and stores the page
@@ -25,11 +25,20 @@ const AddUser = (props) => {

   return (
     <>
-      <div className="container">
+      <div className="container" data-testid="container">
         {errorAlert != null ? (
           <div className="row">
             <div className="col-md-10 col-md-offset-1 col-lg-8 col-lg-offset-2">
-              <div className="alert alert-danger">{errorAlert}</div>
+              <div className="alert alert-danger">
+                {errorAlert}
+                <button
+                  type="button"
+                  className="close"
+                  onClick={() => setErrorAlert(null)}
+                >
+                  <span>×</span>
+                </button>
+              </div>
             </div>
           </div>
         ) : (
@@ -49,6 +58,7 @@ const AddUser = (props) => {
                   id="add-user-textarea"
                   rows="3"
                   placeholder="usernames separated by line"
+                  data-testid="user-textarea"
                   onBlur={(e) => {
                     let split_users = e.target.value.split("\n");
                     setUsers(split_users);
@@ -57,10 +67,11 @@ const AddUser = (props) => {
                 <br></br>
                 <input
                   className="form-check-input"
+                  data-testid="check"
                   type="checkbox"
-                  value=""
                   id="admin-check"
-                  onChange={(e) => setAdmin(e.target.checked)}
+                  checked={admin}
+                  onChange={() => setAdmin(!admin)}
                 />
                 <span> </span>
                 <label className="form-check-label">Admin</label>
@@ -74,6 +85,7 @@ const AddUser = (props) => {
                 <span> </span>
                 <button
                   id="submit"
+                  data-testid="submit"
                   className="btn btn-primary"
                   onClick={() => {
                     let filtered_users = users.filter(
@@ -92,14 +104,16 @@ const AddUser = (props) => {
                           ? updateUsers(0, limit)
                               .then((data) => dispatchPageChange(data, 0))
                               .then(() => history.push("/"))
-                              .catch((err) => console.log(err))
+                              .catch(() =>
+                                setErrorAlert(`Failed to update users.`)
+                              )
                           : setErrorAlert(
-                              `[${data.status}] Failed to create user. ${
+                              `Failed to create user. ${
                                 data.status == 409 ? "User already exists." : ""
                               }`
                             )
                       )
-                      .catch((err) => console.log(err));
+                      .catch(() => setErrorAlert(`Failed to create user.`));
                   }}
                 >
                   Add Users
@@ -1,12 +1,15 @@
 import React from "react";
-import Enzyme, { mount } from "enzyme";
-import AddUser from "./AddUser";
-import Adapter from "@wojtekmaj/enzyme-adapter-react-17";
+import "@testing-library/jest-dom";
+import { act } from "react-dom/test-utils";
+import { render, screen, fireEvent } from "@testing-library/react";
+import userEvent from "@testing-library/user-event";
 import { Provider, useDispatch, useSelector } from "react-redux";
 import { createStore } from "redux";
 import { HashRouter } from "react-router-dom";
+// eslint-disable-next-line
+import regeneratorRuntime from "regenerator-runtime";

-Enzyme.configure({ adapter: new Adapter() });
+import AddUser from "./AddUser";

 jest.mock("react-redux", () => ({
   ...jest.requireActual("react-redux"),
@@ -14,19 +17,19 @@ jest.mock("react-redux", () => ({
   useSelector: jest.fn(),
 }));

 describe("AddUser Component: ", () => {
-  var mockAsync = () =>
-    jest
-      .fn()
-      .mockImplementation(() => Promise.resolve({ key: "value", status: 200 }));
+  var mockAsync = (result) =>
+    jest.fn().mockImplementation(() => Promise.resolve(result));

-  var addUserJsx = (callbackSpy) => (
+  var mockAsyncRejection = () =>
+    jest.fn().mockImplementation(() => Promise.reject());
+
+  var addUserJsx = (spy, spy2, spy3) => (
     <Provider store={createStore(() => {}, {})}>
       <HashRouter>
         <AddUser
-          addUsers={callbackSpy}
-          failRegexEvent={callbackSpy}
-          updateUsers={callbackSpy}
+          addUsers={spy}
+          failRegexEvent={spy2 || spy}
+          updateUsers={spy3 || spy2 || spy}
           history={{ push: () => {} }}
         />
       </HashRouter>
@@ -50,28 +53,87 @@ describe("AddUser Component: ", () => {
     useDispatch.mockClear();
   });

-  it("Renders", () => {
-    let component = mount(addUserJsx(mockAsync()));
-    expect(component.find(".container").length).toBe(1);
+  test("Renders", async () => {
+    await act(async () => {
+      render(addUserJsx());
+    });
+    expect(screen.getByTestId("container")).toBeVisible();
   });

-  it("Removes users when they fail Regex", () => {
-    let callbackSpy = mockAsync(),
-      component = mount(addUserJsx(callbackSpy)),
-      textarea = component.find("textarea").first();
-    textarea.simulate("blur", { target: { value: "foo\nbar\n!!*&*" } });
-    let submit = component.find("#submit");
-    submit.simulate("click");
+  test("Removes users when they fail Regex", async () => {
+    let callbackSpy = mockAsync();
+
+    await act(async () => {
+      render(addUserJsx(callbackSpy));
+    });
+
+    let textarea = screen.getByTestId("user-textarea");
+    let submit = screen.getByTestId("submit");
+
+    fireEvent.blur(textarea, { target: { value: "foo\nbar\n!!*&*" } });
+    await act(async () => {
+      fireEvent.click(submit);
+    });
+
     expect(callbackSpy).toHaveBeenCalledWith(["foo", "bar"], false);
   });

-  it("Correctly submits admin", () => {
-    let callbackSpy = mockAsync(),
-      component = mount(addUserJsx(callbackSpy)),
-      input = component.find("input").first();
-    input.simulate("change", { target: { checked: true } });
-    let submit = component.find("#submit");
-    submit.simulate("click");
-    expect(callbackSpy).toHaveBeenCalledWith([], true);
+  test("Correctly submits admin", async () => {
+    let callbackSpy = mockAsync();
+
+    await act(async () => {
+      render(addUserJsx(callbackSpy));
+    });
+
+    let textarea = screen.getByTestId("user-textarea");
+    let submit = screen.getByTestId("submit");
+    let check = screen.getByTestId("check");
+
+    userEvent.click(check);
+    fireEvent.blur(textarea, { target: { value: "foo" } });
+    await act(async () => {
+      fireEvent.click(submit);
+    });
+
+    expect(callbackSpy).toHaveBeenCalledWith(["foo"], true);
+  });
+
+  test("Shows a UI error dialogue when user creation fails", async () => {
+    let callbackSpy = mockAsyncRejection();
+
+    await act(async () => {
+      render(addUserJsx(callbackSpy));
+    });
+
+    let submit = screen.getByTestId("submit");
+
+    await act(async () => {
+      fireEvent.click(submit);
+    });
+
+    let errorDialog = screen.getByText("Failed to create user.");
+
+    expect(errorDialog).toBeVisible();
+    expect(callbackSpy).toHaveBeenCalled();
+  });
+
+  test("Shows a more specific UI error dialogue when user creation returns an improper status code", async () => {
+    let callbackSpy = mockAsync({ status: 409 });
+
+    await act(async () => {
+      render(addUserJsx(callbackSpy));
+    });
+
+    let submit = screen.getByTestId("submit");
+
+    await act(async () => {
+      fireEvent.click(submit);
+    });
+
+    let errorDialog = screen.getByText(
+      "Failed to create user. User already exists."
+    );
+
+    expect(errorDialog).toBeVisible();
+    expect(callbackSpy).toHaveBeenCalled();
   });
 });
@@ -24,11 +24,20 @@ const CreateGroup = (props) => {

   return (
     <>
-      <div className="container">
+      <div className="container" data-testid="container">
        {errorAlert != null ? (
          <div className="row">
            <div className="col-md-10 col-md-offset-1 col-lg-8 col-lg-offset-2">
-              <div className="alert alert-danger">{errorAlert}</div>
+              <div className="alert alert-danger">
+                {errorAlert}
+                <button
+                  type="button"
+                  className="close"
+                  onClick={() => setErrorAlert(null)}
+                >
+                  <span>×</span>
+                </button>
+              </div>
            </div>
          </div>
        ) : (
@@ -44,6 +53,7 @@ const CreateGroup = (props) => {
              <div className="input-group">
                <input
                  className="group-name-input"
+                  data-testid="group-input"
                  type="text"
                  id="group-name"
                  value={groupName}
@@ -61,6 +71,7 @@ const CreateGroup = (props) => {
              <span> </span>
              <button
                id="submit"
+                data-testid="submit"
                className="btn btn-primary"
                onClick={() => {
                  createGroup(groupName)
@@ -69,16 +80,18 @@ const CreateGroup = (props) => {
                      ? updateGroups(0, limit)
                          .then((data) => dispatchPageUpdate(data, 0))
                          .then(() => history.push("/groups"))
-                          .catch((err) => console.log(err))
+                          .catch(() =>
+                            setErrorAlert(`Could not update groups list.`)
+                          )
                      : setErrorAlert(
-                          `[${data.status}] Failed to create group. ${
+                          `Failed to create group. ${
                            data.status == 409
                              ? "Group already exists."
                              : ""
                          }`
                        );
                  })
-                  .catch((err) => console.log(err));
+                  .catch(() => setErrorAlert(`Failed to create group.`));
                }}
              >
                Create
@@ -1,13 +1,14 @@
 import React from "react";
-import Enzyme, { mount } from "enzyme";
-import CreateGroup from "./CreateGroup";
-import Adapter from "@wojtekmaj/enzyme-adapter-react-17";
+import "@testing-library/jest-dom";
+import { act } from "react-dom/test-utils";
+import { render, screen, fireEvent } from "@testing-library/react";
+import userEvent from "@testing-library/user-event";
 import { Provider, useDispatch, useSelector } from "react-redux";
 import { createStore } from "redux";
 import { HashRouter } from "react-router-dom";
-import regeneratorRuntime from "regenerator-runtime"; // eslint-disable-line
+// eslint-disable-next-line
+import regeneratorRuntime from "regenerator-runtime";

-Enzyme.configure({ adapter: new Adapter() });
+import CreateGroup from "./CreateGroup";

 jest.mock("react-redux", () => ({
   ...jest.requireActual("react-redux"),
@@ -15,10 +16,12 @@ jest.mock("react-redux", () => ({
   useSelector: jest.fn(),
 }));

 describe("CreateGroup Component: ", () => {
   var mockAsync = (result) =>
     jest.fn().mockImplementation(() => Promise.resolve(result));

+  var mockAsyncRejection = () =>
+    jest.fn().mockImplementation(() => Promise.reject());
+
   var createGroupJsx = (callbackSpy) => (
     <Provider store={createStore(() => {}, {})}>
       <HashRouter>
@@ -48,19 +51,65 @@ describe("CreateGroup Component: ", () => {
     useDispatch.mockClear();
   });

-  it("Renders", () => {
-    let component = mount(createGroupJsx());
-    expect(component.find(".container").length).toBe(1);
+  test("Renders", async () => {
+    await act(async () => {
+      render(createGroupJsx());
+    });
+    expect(screen.getByTestId("container")).toBeVisible();
   });

-  it("Calls createGroup on submit", () => {
-    let callbackSpy = mockAsync({ status: 200 }),
-      component = mount(createGroupJsx(callbackSpy)),
-      input = component.find("input").first(),
-      submit = component.find("#submit").first();
-    input.simulate("change", { target: { value: "" } });
-    submit.simulate("click");
-    expect(callbackSpy).toHaveBeenNthCalledWith(1, "");
-    expect(component.find(".alert.alert-danger").length).toBe(0);
+  test("Calls createGroup on submit", async () => {
+    let callbackSpy = mockAsync({ status: 200 });
+
+    await act(async () => {
+      render(createGroupJsx(callbackSpy));
+    });
+
+    let input = screen.getByTestId("group-input");
+    let submit = screen.getByTestId("submit");
+
+    userEvent.type(input, "groupname");
+    await act(async () => fireEvent.click(submit));
+
+    expect(callbackSpy).toHaveBeenNthCalledWith(1, "groupname");
+  });
+
+  test("Shows a UI error dialogue when group creation fails", async () => {
+    let callbackSpy = mockAsyncRejection();
+
+    await act(async () => {
+      render(createGroupJsx(callbackSpy));
+    });
+
+    let submit = screen.getByTestId("submit");
+
+    await act(async () => {
+      fireEvent.click(submit);
+    });
+
+    let errorDialog = screen.getByText("Failed to create group.");
+
+    expect(errorDialog).toBeVisible();
+    expect(callbackSpy).toHaveBeenCalled();
+  });
+
+  test("Shows a more specific UI error dialogue when user creation returns an improper status code", async () => {
+    let callbackSpy = mockAsync({ status: 409 });
+
+    await act(async () => {
+      render(createGroupJsx(callbackSpy));
+    });
+
+    let submit = screen.getByTestId("submit");
+
+    await act(async () => {
+      fireEvent.click(submit);
+    });
+
+    let errorDialog = screen.getByText(
+      "Failed to create group. Group already exists."
+    );
+
+    expect(errorDialog).toBeVisible();
+    expect(callbackSpy).toHaveBeenCalled();
   });
 });
@@ -19,14 +19,7 @@ const EditUser = (props) => {
     });
   };

-  var {
-    editUser,
-    deleteUser,
-    failRegexEvent,
-    noChangeEvent,
-    updateUsers,
-    history,
-  } = props;
+  var { editUser, deleteUser, noChangeEvent, updateUsers, history } = props;

   if (props.location.state == undefined) {
     props.history.push("/");
@@ -40,11 +33,20 @@ const EditUser = (props) => {

   return (
     <>
-      <div className="container">
+      <div className="container" data-testid="container">
         {errorAlert != null ? (
           <div className="row">
             <div className="col-md-10 col-md-offset-1 col-lg-8 col-lg-offset-2">
-              <div className="alert alert-danger">{errorAlert}</div>
+              <div className="alert alert-danger">
+                {errorAlert}
+                <button
+                  type="button"
+                  className="close"
+                  onClick={() => setErrorAlert(null)}
+                >
+                  <span>×</span>
+                </button>
+              </div>
             </div>
           </div>
         ) : (
@@ -61,6 +63,7 @@ const EditUser = (props) => {
              <div className="form-group">
                <textarea
                  className="form-control"
+                  data-testid="edit-username-input"
                  id="exampleFormControlTextarea1"
                  rows="3"
                  placeholder="updated username"
@@ -81,20 +84,26 @@ const EditUser = (props) => {
              <br></br>
              <button
                id="delete-user"
+                data-testid="delete-user"
                className="btn btn-danger btn-sm"
-                onClick={() => {
+                onClick={(e) => {
+                  e.preventDefault();
                  deleteUser(username)
                    .then((data) => {
                      data.status < 300
                        ? updateUsers(0, limit)
                            .then((data) => dispatchPageChange(data, 0))
                            .then(() => history.push("/"))
-                            .catch((err) => console.log(err))
-                        : setErrorAlert(
-                            `[${data.status}] Failed to edit user.`
-                          );
+                            .catch(() =>
+                              setErrorAlert(`Could not update users list.`)
+                            )
+                        : setErrorAlert(`Failed to edit user.`);
                    })
-                    .catch((err) => console.log(err));
+                    .catch(() => {
+                      setErrorAlert(`Failed to edit user.`);
+                    });
                }}
              >
                Delete user
@@ -109,8 +118,10 @@ const EditUser = (props) => {
              <span> </span>
              <button
                id="submit"
+                data-testid="submit"
                className="btn btn-primary"
-                onClick={() => {
+                onClick={(e) => {
+                  e.preventDefault();
                  if (updatedUsername == "" && admin == has_admin) {
                    noChangeEvent();
                    return;
@@ -129,17 +140,20 @@ const EditUser = (props) => {
                        ? updateUsers(0, limit)
                            .then((data) => dispatchPageChange(data, 0))
                            .then(() => history.push("/"))
-                            .catch((err) => console.log(err))
-                        : setErrorAlert(
-                            `[${data.status}] Failed to edit user.`
-                          );
+                            .catch(() =>
+                              setErrorAlert(`Could not update users list.`)
+                            )
+                        : setErrorAlert(`Failed to edit user.`);
                      })
-                      .catch((err) => {
-                        console.log(err);
+                      .catch(() => {
+                        setErrorAlert(`Failed to edit user.`);
                      });
                  } else {
-                    setUpdatedUsername("");
-                    failRegexEvent();
+                    setErrorAlert(
+                      `Failed to edit user. Make sure the username does not contain special characters.`
+                    );
                  }
                } else {
                  editUser(username, username, admin)
@@ -148,13 +162,13 @@ const EditUser = (props) => {
                      ? updateUsers(0, limit)
                          .then((data) => dispatchPageChange(data, 0))
                          .then(() => history.push("/"))
-                          .catch((err) => console.log(err))
-                      : setErrorAlert(
-                          `[${data.status}] Failed to edit user.`
-                        );
+                          .catch(() =>
+                            setErrorAlert(`Could not update users list.`)
+                          )
+                      : setErrorAlert(`Failed to edit user.`);
                    })
-                    .catch((err) => {
-                      console.log(err);
+                    .catch(() => {
+                      setErrorAlert(`Failed to edit user.`);
                    });
                }
              }}
@@ -1,12 +1,14 @@
 import React from "react";
-import Enzyme, { mount } from "enzyme";
-import EditUser from "./EditUser";
-import Adapter from "@wojtekmaj/enzyme-adapter-react-17";
+import "@testing-library/jest-dom";
+import { act } from "react-dom/test-utils";
+import { render, screen, fireEvent } from "@testing-library/react";
 import { Provider, useDispatch, useSelector } from "react-redux";
 import { createStore } from "redux";
 import { HashRouter } from "react-router-dom";
+// eslint-disable-next-line
+import regeneratorRuntime from "regenerator-runtime";

-Enzyme.configure({ adapter: new Adapter() });
+import EditUser from "./EditUser";

 jest.mock("react-redux", () => ({
   ...jest.requireActual("react-redux"),
@@ -14,20 +16,17 @@ jest.mock("react-redux", () => ({
   useSelector: jest.fn(),
 }));

 describe("EditUser Component: ", () => {
-  var mockAsync = () =>
-    jest
-      .fn()
-      .mockImplementation(() => Promise.resolve({ key: "value", status: 200 }));
-  var mockSync = () => jest.fn();
+  var mockAsync = (data) =>
+    jest.fn().mockImplementation(() => Promise.resolve(data));
+
+  var mockAsyncRejection = () =>
+    jest.fn().mockImplementation(() => Promise.reject());

   var editUserJsx = (callbackSpy, empty) => (
     <Provider store={createStore(() => {}, {})}>
       <HashRouter>
         <EditUser
-          location={
-            empty ? {} : { state: { username: "foo", has_admin: false } }
-          }
+          location={empty ? {} : { state: { username: "foo", has_admin: false } }}
           deleteUser={callbackSpy}
           editUser={callbackSpy}
           updateUsers={callbackSpy}
@@ -56,25 +55,85 @@ describe("EditUser Component: ", () => {
     useDispatch.mockClear();
   });

-  it("Calls the delete user function when the button is pressed", () => {
-    let callbackSpy = mockAsync(),
-      component = mount(editUserJsx(callbackSpy)),
-      deleteUser = component.find("#delete-user");
-    deleteUser.simulate("click");
+  test("Renders", async () => {
+    let callbackSpy = mockAsync({ key: "value", status: 200 });
+
+    await act(async () => {
+      render(editUserJsx(callbackSpy));
+    });
+
+    expect(screen.getByTestId("container")).toBeVisible();
+  });
+
+  test("Calls the delete user function when the button is pressed", async () => {
+    let callbackSpy = mockAsync({ key: "value", status: 200 });
+
+    await act(async () => {
+      render(editUserJsx(callbackSpy));
+    });
+
+    let deleteUser = screen.getByTestId("delete-user");
+
+    await act(async () => {
+      fireEvent.click(deleteUser);
+    });
+
     expect(callbackSpy).toHaveBeenCalled();
   });

-  it("Submits the edits when the button is pressed", () => {
-    let callbackSpy = mockSync(),
-      component = mount(editUserJsx(callbackSpy)),
-      submit = component.find("#submit");
-    submit.simulate("click");
+  test("Submits the edits when the button is pressed", async () => {
+    let callbackSpy = mockAsync({ key: "value", status: 200 });
+
+    await act(async () => {
+      render(editUserJsx(callbackSpy));
+    });
+
+    let submit = screen.getByTestId("submit");
+    await act(async () => {
+      fireEvent.click(submit);
+    });
+
     expect(callbackSpy).toHaveBeenCalled();
   });

-  it("Doesn't render when no data is provided", () => {
-    let callbackSpy = mockSync(),
-      component = mount(editUserJsx(callbackSpy, true));
-    expect(component.find(".container").length).toBe(0);
+  test("Shows a UI error dialogue when user edit fails", async () => {
+    let callbackSpy = mockAsyncRejection();
+
+    await act(async () => {
+      render(editUserJsx(callbackSpy));
+    });
+
+    let submit = screen.getByTestId("submit");
+    let usernameInput = screen.getByTestId("edit-username-input");
+
+    fireEvent.blur(usernameInput, { target: { value: "whatever" } });
+    await act(async () => {
+      fireEvent.click(submit);
+    });
+
+    let errorDialog = screen.getByText("Failed to edit user.");
+
+    expect(errorDialog).toBeVisible();
+    expect(callbackSpy).toHaveBeenCalled();
+  });
+
+  test("Shows a UI error dialogue when user edit returns an improper status code", async () => {
+    let callbackSpy = mockAsync({ status: 409 });
+
+    await act(async () => {
+      render(editUserJsx(callbackSpy));
+    });
+
+    let submit = screen.getByTestId("submit");
+    let usernameInput = screen.getByTestId("edit-username-input");
+
+    fireEvent.blur(usernameInput, { target: { value: "whatever" } });
+    await act(async () => {
+      fireEvent.click(submit);
+    });
+
+    let errorDialog = screen.getByText("Failed to edit user.");
+
+    expect(errorDialog).toBeVisible();
+    expect(callbackSpy).toHaveBeenCalled();
   });
 });
@@ -7,6 +7,7 @@ import GroupSelect from "../GroupSelect/GroupSelect";
 const GroupEdit = (props) => {
   var [selected, setSelected] = useState([]),
     [changed, setChanged] = useState(false),
+    [errorAlert, setErrorAlert] = useState(null),
     limit = useSelector((state) => state.limit);

   var dispatch = useDispatch();
@@ -41,7 +42,25 @@ const GroupEdit = (props) => {
   if (!group_data) return <div></div>;

   return (
-    <div className="container">
+    <div className="container" data-testid="container">
+      {errorAlert != null ? (
+        <div className="row">
+          <div className="col-md-10 col-md-offset-1 col-lg-8 col-lg-offset-2">
+            <div className="alert alert-danger">
+              {errorAlert}
+              <button
+                type="button"
+                className="close"
+                onClick={() => setErrorAlert(null)}
+              >
+                <span>×</span>
+              </button>
+            </div>
+          </div>
+        </div>
+      ) : (
+        <></>
+      )}
       <div className="row">
         <div className="col-md-10 col-md-offset-1 col-lg-8 col-lg-offset-2">
           <h3>Editing Group {group_data.name}</h3>
@@ -65,6 +84,7 @@ const GroupEdit = (props) => {
          <span> </span>
          <button
            id="submit"
+            data-testid="submit"
            className="btn btn-primary"
            onClick={() => {
              // check for changes
@@ -89,29 +109,43 @@ const GroupEdit = (props) => {
              );

              Promise.all(promiseQueue)
-                .then(() => {
-                  updateGroups(0, limit)
-                    .then((data) => dispatchPageUpdate(data, 0))
-                    .then(() => history.push("/groups"));
+                .then((data) => {
+                  // ensure status of all requests are < 300
+                  let allPassed =
+                    data.map((e) => e.status).filter((e) => e >= 300).length ==
+                    0;
+
+                  allPassed
+                    ? updateGroups(0, limit)
+                        .then((data) => dispatchPageUpdate(data, 0))
+                        .then(() => history.push("/groups"))
+                    : setErrorAlert(`Failed to edit group.`);
                })
-                .catch((err) => console.log(err));
+                .catch(() => {
+                  console.log("outer");
+                  setErrorAlert(`Failed to edit group.`);
+                });
            }}
          >
            Apply
          </button>
          <button
            id="delete-group"
+            data-testid="delete-group"
            className="btn btn-danger"
            style={{ float: "right" }}
            onClick={() => {
              var groupName = group_data.name;
              deleteGroup(groupName)
-                .then(() => {
-                  updateGroups(0, limit)
-                    .then((data) => dispatchPageUpdate(data, 0))
-                    .then(() => history.push("/groups"));
+                // TODO add error if res not ok
+                .then((data) => {
+                  data.status < 300
+                    ? updateGroups(0, limit)
+                        .then((data) => dispatchPageUpdate(data, 0))
+                        .then(() => history.push("/groups"))
+                    : setErrorAlert(`Failed to delete group.`);
                })
-                .catch((err) => console.log(err));
+                .catch(() => setErrorAlert(`Failed to delete group.`));
            }}
          >
            Delete Group
@@ -1,22 +1,26 @@
 import React from "react";
-import Enzyme, { mount } from "enzyme";
-import GroupEdit from "./GroupEdit";
-import Adapter from "@wojtekmaj/enzyme-adapter-react-17";
+import "@testing-library/jest-dom";
+import { act } from "react-dom/test-utils";
+import { render, screen, fireEvent } from "@testing-library/react";
+import userEvent from "@testing-library/user-event";
 import { Provider, useSelector } from "react-redux";
 import { createStore } from "redux";
 import { HashRouter } from "react-router-dom";
-import { act } from "react-dom/test-utils";
-import regeneratorRuntime from "regenerator-runtime"; // eslint-disable-line
+// eslint-disable-next-line
+import regeneratorRuntime from "regenerator-runtime";

-Enzyme.configure({ adapter: new Adapter() });
+import GroupEdit from "./GroupEdit";

 jest.mock("react-redux", () => ({
   ...jest.requireActual("react-redux"),
   useSelector: jest.fn(),
 }));

 describe("GroupEdit Component: ", () => {
-  var mockAsync = () => jest.fn().mockImplementation(() => Promise.resolve());
+  var mockAsync = (data) =>
+    jest.fn().mockImplementation(() => Promise.resolve(data));
+
+  var mockAsyncRejection = () =>
+    jest.fn().mockImplementation(() => Promise.reject());

   var okPacket = new Promise((resolve) => resolve(true));

@@ -55,46 +59,170 @@ describe("GroupEdit Component: ", () => {
     useSelector.mockClear();
   });

-  it("Adds user from input to user selectables on button click", async () => {
-    let callbackSpy = mockAsync(),
-      component = mount(groupEditJsx(callbackSpy)),
-      input = component.find("#username-input"),
-      validateUser = component.find("#validate-user"),
-      submit = component.find("#submit");
-
-    input.simulate("change", { target: { value: "bar" } });
-    validateUser.simulate("click");
-    await act(() => okPacket);
-    submit.simulate("click");
+  test("Renders", async () => {
+    let callbackSpy = mockAsync();
+
+    await act(async () => {
+      render(groupEditJsx(callbackSpy));
+    });
+
+    expect(screen.getByTestId("container")).toBeVisible();
+  });
+
+  test("Adds user from input to user selectables on button click", async () => {
+    let callbackSpy = mockAsync();
+
+    await act(async () => {
+      render(groupEditJsx(callbackSpy));
+    });
+
+    let input = screen.getByTestId("username-input");
+    let validateUser = screen.getByTestId("validate-user");
+    let submit = screen.getByTestId("submit");
+
+    userEvent.type(input, "bar");
+    fireEvent.click(validateUser);
+    await act(async () => okPacket);
+
+    await act(async () => {
+      fireEvent.click(submit);
+    });
+
     expect(callbackSpy).toHaveBeenNthCalledWith(1, ["bar"], "group");
   });

-  it("Removes a user recently added from input from the selectables list", () => {
-    let callbackSpy = mockAsync(),
-      component = mount(groupEditJsx(callbackSpy)),
-      unsubmittedUser = component.find(".item.selected").last();
-    unsubmittedUser.simulate("click");
-    expect(component.find(".item").length).toBe(1);
-  });
-
-  it("Grays out a user, already in the group, when unselected and calls deleteUser on submit", () => {
-    let callbackSpy = mockAsync(),
-      component = mount(groupEditJsx(callbackSpy)),
-      groupUser = component.find(".item.selected").first();
-    groupUser.simulate("click");
-    expect(component.find(".item.unselected").length).toBe(1);
-    expect(component.find(".item").length).toBe(1);
+  test("Removes a user recently added from input from the selectables list", async () => {
+    let callbackSpy = mockAsync();
+
+    await act(async () => {
+      render(groupEditJsx(callbackSpy));
+    });
+
+    let selectedUser = screen.getByText("foo");
+    fireEvent.click(selectedUser);
+
+    let unselectedUser = screen.getByText("foo");
+    expect(unselectedUser.className).toBe("item unselected");
+  });
+
+  test("Grays out a user, already in the group, when unselected and calls deleteUser on submit", async () => {
+    let callbackSpy = mockAsync();
+
+    await act(async () => {
+      render(groupEditJsx(callbackSpy));
+    });
+
+    let submit = screen.getByTestId("submit");
+
+    let groupUser = screen.getByText("foo");
+    fireEvent.click(groupUser);
+
+    let unselectedUser = screen.getByText("foo");
+    expect(unselectedUser.className).toBe("item unselected");

     // test deleteUser call
-    let submit = component.find("#submit");
-    submit.simulate("click");
+    await act(async () => {
+      fireEvent.click(submit);
+    });
+
     expect(callbackSpy).toHaveBeenNthCalledWith(1, ["foo"], "group");
   });

-  it("Calls deleteGroup on button click", () => {
-    let callbackSpy = mockAsync(),
-      component = mount(groupEditJsx(callbackSpy)),
-      deleteGroup = component.find("#delete-group").first();
-    deleteGroup.simulate("click");
+  test("Calls deleteGroup on button click", async () => {
+    let callbackSpy = mockAsync();
+
+    await act(async () => {
+      render(groupEditJsx(callbackSpy));
+    });
+
+    let deleteGroup = screen.getByTestId("delete-group");
+
+    await act(async () => {
+      fireEvent.click(deleteGroup);
+    });
+
     expect(callbackSpy).toHaveBeenNthCalledWith(1, "group");
   });
+
+  test("Shows a UI error dialogue when group edit fails", async () => {
+    let callbackSpy = mockAsyncRejection();
+
+    await act(async () => {
+      render(groupEditJsx(callbackSpy));
+    });
+
+    let groupUser = screen.getByText("foo");
+    fireEvent.click(groupUser);
+
+    let submit = screen.getByTestId("submit");
+
+    await act(async () => {
+      fireEvent.click(submit);
+    });
+
+    let errorDialog = screen.getByText("Failed to edit group.");
+
+    expect(errorDialog).toBeVisible();
+    expect(callbackSpy).toHaveBeenCalled();
+  });
+
+  test("Shows a UI error dialogue when group edit returns an improper status code", async () => {
+    let callbackSpy = mockAsync({ status: 403 });
+
+    await act(async () => {
+      render(groupEditJsx(callbackSpy));
+    });
+
+    let groupUser = screen.getByText("foo");
+    fireEvent.click(groupUser);
+
+    let submit = screen.getByTestId("submit");
+
+    await act(async () => {
+      fireEvent.click(submit);
+    });
+
+    let errorDialog = screen.getByText("Failed to edit group.");
+
+    expect(errorDialog).toBeVisible();
+    expect(callbackSpy).toHaveBeenCalled();
+  });
+
+  test("Shows a UI error dialogue when group delete fails", async () => {
+    let callbackSpy = mockAsyncRejection();
+
+    await act(async () => {
+      render(groupEditJsx(callbackSpy));
+    });
+
+    let deleteGroup = screen.getByTestId("delete-group");
+
+    await act(async () => {
+      fireEvent.click(deleteGroup);
+    });
+
+    let errorDialog = screen.getByText("Failed to delete group.");
+
+    expect(errorDialog).toBeVisible();
+    expect(callbackSpy).toHaveBeenCalled();
+  });
+
+  test("Shows a UI error dialogue when group delete returns an improper status code", async () => {
+    let callbackSpy = mockAsync({ status: 403 });
+
+    await act(async () => {
+      render(groupEditJsx(callbackSpy));
+    });
+
+    let deleteGroup = screen.getByTestId("delete-group");
+
+    await act(async () => {
+      fireEvent.click(deleteGroup);
+    });
+
+    let errorDialog = screen.getByText("Failed to delete group.");
+
+    expect(errorDialog).toBeVisible();
+    expect(callbackSpy).toHaveBeenCalled();
+  });
 });
@@ -24,6 +24,7 @@ const GroupSelect = (props) => {
      <div className="input-group">
        <input
          id="username-input"
+          data-testid="username-input"
          type="text"
          className="form-control"
          placeholder="Add by username"
@@ -35,6 +36,7 @@ const GroupSelect = (props) => {
        <span className="input-group-btn">
          <button
            id="validate-user"
+            data-testid="validate-user"
            className="btn btn-default"
            type="button"
            onClick={() => {
@@ -19,7 +19,7 @@ const Groups = (props) => {
   var { updateGroups, history } = props;

   if (!groups_data || !user_data) {
-    return <div></div>;
+    return <div data-testid="no-show"></div>;
   }

   const dispatchPageChange = (data, page) => {
@@ -39,7 +39,7 @@ const Groups = (props) => {
   }

   return (
-    <div className="container">
+    <div className="container" data-testid="container">
       <div className="row">
         <div className="col-md-12 col-lg-10 col-lg-offset-1">
           <div className="panel panel-default">
|
|||||||
import React from "react";
|
import React from "react";
|
||||||
import Enzyme, { mount } from "enzyme";
|
import "@testing-library/jest-dom";
|
||||||
import Groups from "./Groups";
|
import { act } from "react-dom/test-utils";
|
||||||
import Adapter from "@wojtekmaj/enzyme-adapter-react-17";
|
import { render, screen } from "@testing-library/react";
|
||||||
import { Provider, useDispatch, useSelector } from "react-redux";
|
import { Provider, useDispatch, useSelector } from "react-redux";
|
||||||
import { createStore } from "redux";
|
import { createStore } from "redux";
|
||||||
import { HashRouter } from "react-router-dom";
|
import { HashRouter } from "react-router-dom";
|
||||||
|
// eslint-disable-next-line
|
||||||
|
import regeneratorRuntime from "regenerator-runtime";
|
||||||
|
|
||||||
Enzyme.configure({ adapter: new Adapter() });
|
import Groups from "./Groups";
|
||||||
|
|
||||||
jest.mock("react-redux", () => ({
|
jest.mock("react-redux", () => ({
|
||||||
...jest.requireActual("react-redux"),
|
...jest.requireActual("react-redux"),
|
||||||
@@ -14,7 +16,6 @@ jest.mock("react-redux", () => ({
|
|||||||
useDispatch: jest.fn(),
|
useDispatch: jest.fn(),
|
||||||
}));
|
}));
|
||||||
|
|
||||||
describe("Groups Component: ", () => {
|
|
||||||
var mockAsync = () =>
|
var mockAsync = () =>
|
||||||
jest.fn().mockImplementation(() => Promise.resolve({ key: "value" }));
|
jest.fn().mockImplementation(() => Promise.resolve({ key: "value" }));
|
||||||
|
|
||||||
@@ -33,6 +34,7 @@ describe("Groups Component: ", () => {
|
|||||||
groups_data: JSON.parse(
|
groups_data: JSON.parse(
|
||||||
'[{"kind":"group","name":"testgroup","users":[]}, {"kind":"group","name":"testgroup2","users":["foo", "bar"]}]'
|
'[{"kind":"group","name":"testgroup","users":[]}, {"kind":"group","name":"testgroup2","users":["foo", "bar"]}]'
|
||||||
),
|
),
|
||||||
|
limit: 10,
|
||||||
});
|
});
|
||||||
|
|
||||||
beforeEach(() => {
|
beforeEach(() => {
|
||||||
@@ -48,18 +50,41 @@ describe("Groups Component: ", () => {
|
|||||||
useSelector.mockClear();
|
useSelector.mockClear();
|
||||||
});
|
});
|
||||||
|
|
||||||
it("Renders groups_data prop into links", () => {
|
test("Renders", async () => {
|
||||||
let callbackSpy = mockAsync(),
|
let callbackSpy = mockAsync();
|
||||||
component = mount(groupsJsx(callbackSpy)),
|
|
||||||
links = component.find("li");
|
await act(async () => {
|
||||||
expect(links.length).toBe(2);
|
render(groupsJsx(callbackSpy));
|
||||||
});
|
});
|
||||||
|
|
||||||
it("Renders nothing if required data is not available", () => {
|
expect(screen.getByTestId("container")).toBeVisible();
|
||||||
|
});
|
||||||
|
|
||||||
|
test("Renders groups_data prop into links", async () => {
|
||||||
|
let callbackSpy = mockAsync();
|
||||||
|
|
||||||
|
await act(async () => {
|
||||||
|
render(groupsJsx(callbackSpy));
|
||||||
|
});
|
||||||
|
|
||||||
|
let testgroup = screen.getByText("testgroup");
|
||||||
|
let testgroup2 = screen.getByText("testgroup2");
|
||||||
|
|
||||||
|
expect(testgroup).toBeVisible();
|
||||||
|
expect(testgroup2).toBeVisible();
|
||||||
|
});
|
||||||
|
|
||||||
|
test("Renders nothing if required data is not available", async () => {
|
||||||
useSelector.mockImplementation((callback) => {
|
useSelector.mockImplementation((callback) => {
|
||||||
return callback({});
|
return callback({});
|
||||||
});
|
});
|
||||||
let component = mount(groupsJsx());
|
|
||||||
expect(component.html()).toBe("<div></div>");
|
let callbackSpy = mockAsync();
|
||||||
|
|
||||||
|
await act(async () => {
|
||||||
|
render(groupsJsx(callbackSpy));
|
||||||
});
|
});
|
||||||
|
|
||||||
|
let noShow = screen.getByTestId("no-show");
|
||||||
|
expect(noShow).toBeVisible();
|
||||||
});
|
});
|
||||||
|
@@ -1,8 +1,19 @@
 import React, { useState } from "react";
+import regeneratorRuntime from "regenerator-runtime";
 import { useSelector, useDispatch } from "react-redux";
 import PropTypes from "prop-types";

-import { Button } from "react-bootstrap";
+import {
+  Button,
+  Col,
+  Row,
+  FormControl,
+  Card,
+  CardGroup,
+  Collapse,
+} from "react-bootstrap";
+import ReactObjectTableViewer from "react-object-table-viewer";

 import { Link } from "react-router-dom";
 import { FaSort, FaSortUp, FaSortDown } from "react-icons/fa";

@@ -10,7 +21,16 @@ import "./server-dashboard.css";
 import { timeSince } from "../../util/timeSince";
 import PaginationFooter from "../PaginationFooter/PaginationFooter";

+const AccessServerButton = ({ url }) => (
+  <a href={url || ""}>
+    <button className="btn btn-primary btn-xs" style={{ marginRight: 20 }}>
+      Access Server
+    </button>
+  </a>
+);
+
 const ServerDashboard = (props) => {
+  let base_url = window.base_url;
   // sort methods
   var usernameDesc = (e) => e.sort((a, b) => (a.name > b.name ? 1 : -1)),
     usernameAsc = (e) => e.sort((a, b) => (a.name < b.name ? 1 : -1)),
@@ -27,15 +47,19 @@ const ServerDashboard = (props) => {
     runningAsc = (e) => e.sort((a) => (a.server == null ? -1 : 1)),
     runningDesc = (e) => e.sort((a) => (a.server == null ? 1 : -1));

+  var [errorAlert, setErrorAlert] = useState(null);
   var [sortMethod, setSortMethod] = useState(null);
+  var [disabledButtons, setDisabledButtons] = useState({});
+  const [collapseStates, setCollapseStates] = useState({});

   var user_data = useSelector((state) => state.user_data),
     user_page = useSelector((state) => state.user_page),
     limit = useSelector((state) => state.limit),
+    name_filter = useSelector((state) => state.name_filter),
     page = parseInt(new URLSearchParams(props.location.search).get("page"));

   page = isNaN(page) ? 0 : page;
-  var slice = [page * limit, limit];
+  var slice = [page * limit, limit, name_filter];

   const dispatch = useDispatch();

@@ -49,35 +73,304 @@ const ServerDashboard = (props) => {
     history,
   } = props;

-  var dispatchPageUpdate = (data, page) => {
+  var dispatchPageUpdate = (data, page, name_filter) => {
     dispatch({
       type: "USER_PAGE",
       value: {
         data: data,
         page: page,
+        name_filter: name_filter,
       },
     });
   };

   if (!user_data) {
-    return <div></div>;
+    return <div data-testid="no-show"></div>;
   }

   if (page != user_page) {
-    updateUsers(...slice).then((data) => dispatchPageUpdate(data, page));
+    updateUsers(...slice).then((data) =>
+      dispatchPageUpdate(data, page, name_filter)
+    );
   }

+  var debounce = require("lodash.debounce");
+  const handleSearch = debounce(async (event) => {
+    // setNameFilter(event.target.value);
+    updateUsers(page * limit, limit, event.target.value).then((data) =>
+      dispatchPageUpdate(data, page, name_filter)
+    );
+  }, 300);
+
   if (sortMethod != null) {
     user_data = sortMethod(user_data);
   }

+  const StopServerButton = ({ serverName, userName }) => {
+    var [isDisabled, setIsDisabled] = useState(false);
    return (
-    <div className="container">
-      <div className="manage-groups" style={{ float: "right", margin: "20px" }}>
-        <Link to="/groups">{"> Manage Groups"}</Link>
+      <button
+        className="btn btn-danger btn-xs stop-button"
+        disabled={isDisabled}
+        onClick={() => {
+          setIsDisabled(true);
+          stopServer(userName, serverName)
+            .then((res) => {
+              if (res.status < 300) {
+                updateUsers(...slice)
+                  .then((data) => {
+                    dispatchPageUpdate(data, page, name_filter);
+                  })
+                  .catch(() => {
+                    setIsDisabled(false);
+                    setErrorAlert(`Failed to update users list.`);
+                  });
+              } else {
+                setErrorAlert(`Failed to stop server.`);
+                setIsDisabled(false);
+              }
+              return res;
+            })
+            .catch(() => {
+              setErrorAlert(`Failed to stop server.`);
+              setIsDisabled(false);
+            });
+        }}
+      >
+        Stop Server
+      </button>
+    );
+  };

+  const StartServerButton = ({ serverName, userName }) => {
+    var [isDisabled, setIsDisabled] = useState(false);
+    return (
+      <button
+        className="btn btn-success btn-xs start-button"
+        disabled={isDisabled}
+        onClick={() => {
+          setIsDisabled(true);
+          startServer(userName, serverName)
+            .then((res) => {
+              if (res.status < 300) {
+                updateUsers(...slice)
+                  .then((data) => {
+                    dispatchPageUpdate(data, page, name_filter);
+                  })
+                  .catch(() => {
+                    setErrorAlert(`Failed to update users list.`);
+                    setIsDisabled(false);
+                  });
+              } else {
+                setErrorAlert(`Failed to start server.`);
+                setIsDisabled(false);
+              }
+              return res;
+            })
+            .catch(() => {
+              setErrorAlert(`Failed to start server.`);
+              setIsDisabled(false);
+            });
+        }}
+      >
+        Start Server
+      </button>
+    );
+  };

+  const EditUserCell = ({ user }) => {
+    return (
+      <td>
+        <button
+          className="btn btn-primary btn-xs"
+          style={{ marginRight: 20 }}
+          onClick={() =>
+            history.push({
+              pathname: "/edit-user",
+              state: {
+                username: user.name,
+                has_admin: user.admin,
+              },
+            })
+          }
+        >
+          Edit User
+        </button>
+      </td>
+    );
+  };

+  const serverRow = (user, server) => {
+    const { servers, ...userNoServers } = user;
+    const serverNameDash = server.name ? `-${server.name}` : "";
+    const userServerName = user.name + serverNameDash;
+    const open = collapseStates[userServerName] || false;
+    return [
+      <tr key={`${userServerName}-row`} className="user-row">
+        <td data-testid="user-row-name">
+          <span>
+            <Button
+              onClick={() =>
+                setCollapseStates({
+                  ...collapseStates,
+                  [userServerName]: !open,
+                })
+              }
+              aria-controls={`${userServerName}-collapse`}
+              aria-expanded={open}
+              data-testid={`${userServerName}-collapse-button`}
+              variant={open ? "secondary" : "primary"}
+              size="sm"
+            >
+              <span className="caret"></span>
+            </Button>{" "}
+          </span>
+          <span data-testid={`user-name-div-${userServerName}`}>
+            {user.name}
+          </span>
+        </td>
+        <td data-testid="user-row-admin">{user.admin ? "admin" : ""}</td>

+        <td data-testid="user-row-server">
+          {server.name ? (
+            <p className="text-secondary">{server.name}</p>
+          ) : (
+            <p style={{ color: "lightgrey" }}>[MAIN]</p>
+          )}
+        </td>
+        <td data-testid="user-row-last-activity">
+          {server.last_activity ? timeSince(server.last_activity) : "Never"}
+        </td>
+        <td data-testid="user-row-server-activity">
+          {server.started ? (
+            // Stop Single-user server
+            <>
+              <StopServerButton serverName={server.name} userName={user.name} />
+              <AccessServerButton url={server.url} />
+            </>
+          ) : (
+            // Start Single-user server
+            <>
+              <StartServerButton
+                serverName={server.name}
+                userName={user.name}
+                style={{ marginRight: 20 }}
+              />
+              <a
+                href={`${base_url}spawn/${user.name}${
+                  server.name && "/" + server.name
+                }`}
+              >
+                <button
+                  className="btn btn-secondary btn-xs"
+                  style={{ marginRight: 20 }}
+                >
+                  Spawn Page
+                </button>
+              </a>
+            </>
+          )}
+        </td>
+        <EditUserCell user={user} />
+      </tr>,
+      <tr>
+        <td
+          colSpan={6}
+          style={{ padding: 0 }}
+          data-testid={`${userServerName}-td`}
+        >
+          <Collapse in={open} data-testid={`${userServerName}-collapse`}>
+            <CardGroup
+              id={`${userServerName}-card-group`}
+              style={{ width: "100%", margin: "0 auto", float: "none" }}
+            >
+              <Card style={{ width: "100%", padding: 3, margin: "0 auto" }}>
+                <Card.Title>User</Card.Title>
+                <ReactObjectTableViewer
+                  className="table-striped table-bordered admin-table-head"
+                  style={{
+                    padding: "3px 6px",
+                    margin: "auto",
+                  }}
+                  keyStyle={{
+                    padding: "4px",
+                  }}
+                  valueStyle={{
+                    padding: "4px",
+                  }}
+                  data={userNoServers}
+                />
+              </Card>
+              <Card style={{ width: "100%", padding: 3, margin: "0 auto" }}>
+                <Card.Title>Server</Card.Title>
+                <ReactObjectTableViewer
+                  className="table-striped table-bordered admin-table-head"
+                  style={{
+                    padding: "3px 6px",
+                    margin: "auto",
+                  }}
+                  keyStyle={{
+                    padding: "4px",
+                  }}
+                  valueStyle={{
+                    padding: "4px",
+                  }}
+                  data={server}
+                />
+              </Card>
+            </CardGroup>
+          </Collapse>
+        </td>
+      </tr>,
+    ];
+  };

+  let servers = user_data.flatMap((user) => {
+    let userServers = Object.values({
+      "": user.server || {},
+      ...(user.servers || {}),
+    });
+    return userServers.map((server) => [user, server]);
+  });

+  return (
+    <div className="container" data-testid="container">
+      {errorAlert != null ? (
+        <div className="row">
+          <div className="col-md-10 col-md-offset-1 col-lg-8 col-lg-offset-2">
+            <div className="alert alert-danger">
+              {errorAlert}
+              <button
+                type="button"
+                className="close"
+                onClick={() => setErrorAlert(null)}
+              >
+                <span>×</span>
+              </button>
             </div>
+          </div>
+        </div>
+      ) : (
+        <></>
+      )}
       <div className="server-dashboard-container">
-        <table className="table table-striped table-bordered table-hover">
+        <Row>
+          <Col md={4}>
+            <FormControl
+              type="text"
+              name="user_search"
+              placeholder="Search users"
+              aria-label="user-search"
+              defaultValue={name_filter}
+              onChange={handleSearch}
+            />
+          </Col>

+          <Col md="auto" style={{ float: "right", margin: 15 }}>
+            <Link to="/groups">{"> Manage Groups"}</Link>
+          </Col>
+        </Row>
+        <table className="table table-bordered table-hover">
         <thead className="admin-table-head">
           <tr>
             <th id="user-header">
@@ -85,6 +378,7 @@ const ServerDashboard = (props) => {
               <SortHandler
                 sorts={{ asc: usernameAsc, desc: usernameDesc }}
                 callback={(method) => setSortMethod(() => method)}
+                testid="user-sort"
               />
             </th>
             <th id="admin-header">
@@ -92,6 +386,15 @@ const ServerDashboard = (props) => {
               <SortHandler
                 sorts={{ asc: adminAsc, desc: adminDesc }}
                 callback={(method) => setSortMethod(() => method)}
+                testid="admin-sort"
+              />
+            </th>
+            <th id="server-header">
+              Server{" "}
+              <SortHandler
+                sorts={{ asc: usernameAsc, desc: usernameDesc }}
+                callback={(method) => setSortMethod(() => method)}
+                testid="server-sort"
               />
             </th>
             <th id="last-activity-header">
@@ -99,6 +402,7 @@ const ServerDashboard = (props) => {
               <SortHandler
                 sorts={{ asc: dateAsc, desc: dateDesc }}
                 callback={(method) => setSortMethod(() => method)}
+                testid="last-activity-sort"
               />
             </th>
             <th id="running-status-header">
@@ -106,6 +410,7 @@ const ServerDashboard = (props) => {
               <SortHandler
                 sorts={{ asc: runningAsc, desc: runningDesc }}
                 callback={(method) => setSortMethod(() => method)}
+                testid="running-status-sort"
               />
             </th>
             <th id="actions-header">Actions</th>
@@ -125,17 +430,33 @@ const ServerDashboard = (props) => {
             <Button
               variant="primary"
               className="start-all"
+              data-testid="start-all"
               onClick={() => {
                 Promise.all(startAll(user_data.map((e) => e.name)))
                   .then((res) => {
-                    updateUsers(...slice)
-                      .then((data) => {
-                        dispatchPageUpdate(data, page);
-                      })
-                      .catch((err) => console.log(err));
+                    let failedServers = res.filter((e) => !e.ok);
+                    if (failedServers.length > 0) {
+                      setErrorAlert(
+                        `Failed to start ${failedServers.length} ${
+                          failedServers.length > 1 ? "servers" : "server"
+                        }. ${
+                          failedServers.length > 1 ? "Are they " : "Is it "
+                        } already running?`
+                      );
+                    }
                     return res;
                   })
-                  .catch((err) => console.log(err));
+                  .then((res) => {
+                    updateUsers(...slice)
+                      .then((data) => {
+                        dispatchPageUpdate(data, page, name_filter);
+                      })
+                      .catch(() =>
+                        setErrorAlert(`Failed to update users list.`)
+                      );
+                    return res;
+                  })
+                  .catch(() => setErrorAlert(`Failed to start servers.`));
               }}
             >
               Start All
@@ -145,17 +466,33 @@ const ServerDashboard = (props) => {
             <Button
               variant="danger"
               className="stop-all"
+              data-testid="stop-all"
               onClick={() => {
                 Promise.all(stopAll(user_data.map((e) => e.name)))
                   .then((res) => {
-                    updateUsers(...slice)
-                      .then((data) => {
-                        dispatchPageUpdate(data, page);
-                      })
-                      .catch((err) => console.log(err));
+                    let failedServers = res.filter((e) => !e.ok);
+                    if (failedServers.length > 0) {
+                      setErrorAlert(
+                        `Failed to stop ${failedServers.length} ${
+                          failedServers.length > 1 ? "servers" : "server"
+                        }. ${
+                          failedServers.length > 1 ? "Are they " : "Is it "
+                        } already stopped?`
+                      );
+                    }
                     return res;
                   })
-                  .catch((err) => console.log(err));
+                  .then((res) => {
+                    updateUsers(...slice)
+                      .then((data) => {
+                        dispatchPageUpdate(data, page, name_filter);
+                      })
+                      .catch(() =>
+                        setErrorAlert(`Failed to update users list.`)
+                      );
+                    return res;
+                  })
+                  .catch(() => setErrorAlert(`Failed to stop servers.`));
               }}
             >
               Stop All
@@ -172,70 +509,7 @@ const ServerDashboard = (props) => {
             </Button>
           </td>
         </tr>
-        {user_data.map((e, i) => (
-          <tr key={i + "row"} className="user-row">
-            <td>{e.name}</td>
-            <td>{e.admin ? "admin" : ""}</td>
-            <td>
-              {e.last_activity ? timeSince(e.last_activity) : "Never"}
-            </td>
-            <td>
-              {e.server != null ? (
-                // Stop Single-user server
-                <button
-                  className="btn btn-danger btn-xs stop-button"
-                  onClick={() =>
-                    stopServer(e.name)
-                      .then((res) => {
-                        updateUsers(...slice).then((data) => {
-                          dispatchPageUpdate(data, page);
-                        });
-                        return res;
-                      })
-                      .catch((err) => console.log(err))
-                  }
-                >
-                  Stop Server
-                </button>
-              ) : (
-                // Start Single-user server
-                <button
-                  className="btn btn-primary btn-xs start-button"
-                  onClick={() =>
-                    startServer(e.name)
-                      .then((res) => {
-                        updateUsers(...slice).then((data) => {
-                          dispatchPageUpdate(data, page);
-                        });
-                        return res;
-                      })
-                      .catch((err) => console.log(err))
-                  }
-                >
-                  Start Server
-                </button>
-              )}
-            </td>
-            <td>
-              {/* Edit User */}
-              <button
-                className="btn btn-primary btn-xs"
-                style={{ marginRight: 20 }}
-                onClick={() =>
-                  history.push({
-                    pathname: "/edit-user",
-                    state: {
-                      username: e.name,
-                      has_admin: e.admin,
-                    },
-                  })
-                }
-              >
-                edit user
-              </button>
-            </td>
-          </tr>
-        ))}
+        {servers.flatMap(([user, server]) => serverRow(user, server))}
         </tbody>
       </table>
       <PaginationFooter
@@ -269,13 +543,14 @@ ServerDashboard.propTypes = {
 };

 const SortHandler = (props) => {
-  var { sorts, callback } = props;
+  var { sorts, callback, testid } = props;

   var [direction, setDirection] = useState(undefined);

   return (
     <div
       className="sort-icon"
+      data-testid={testid}
       onClick={() => {
         if (!direction) {
           callback(sorts.desc);
@@ -303,6 +578,7 @@ const SortHandler = (props) => {
 SortHandler.propTypes = {
   sorts: PropTypes.object,
   callback: PropTypes.func,
+  testid: PropTypes.string,
 };

 export default ServerDashboard;
@@ -1,42 +1,46 @@
 import React from "react";
-import Enzyme, { mount } from "enzyme";
-import ServerDashboard from "./ServerDashboard";
-import Adapter from "@wojtekmaj/enzyme-adapter-react-17";
+import "@testing-library/jest-dom";
+import { act } from "react-dom/test-utils";
+import userEvent from "@testing-library/user-event";
+import { render, screen, fireEvent } from "@testing-library/react";
 import { HashRouter, Switch } from "react-router-dom";
 import { Provider, useSelector } from "react-redux";
 import { createStore } from "redux";
+// eslint-disable-next-line
+import regeneratorRuntime from "regenerator-runtime";

-Enzyme.configure({ adapter: new Adapter() });
+import ServerDashboard from "./ServerDashboard";
+import * as sinon from "sinon";

+let clock;

 jest.mock("react-redux", () => ({
   ...jest.requireActual("react-redux"),
   useSelector: jest.fn(),
 }));

 describe("ServerDashboard Component: ", () => {
-  var serverDashboardJsx = (callbackSpy) => (
+  var serverDashboardJsx = (spy) => (
     <Provider store={createStore(() => {}, {})}>
       <HashRouter>
         <Switch>
           <ServerDashboard
-            updateUsers={callbackSpy}
-            shutdownHub={callbackSpy}
-            startServer={callbackSpy}
-            stopServer={callbackSpy}
-            startAll={callbackSpy}
-            stopAll={callbackSpy}
+            updateUsers={spy}
+            shutdownHub={spy}
+            startServer={spy}
+            stopServer={spy}
+            startAll={spy}
+            stopAll={spy}
           />
         </Switch>
       </HashRouter>
     </Provider>
   );

-  var mockAsync = () =>
-    jest
-      .fn()
-      .mockImplementation(() =>
-        Promise.resolve({ json: () => Promise.resolve({ k: "v" }) })
-      );
+  var mockAsync = (data) =>
+    jest.fn().mockImplementation(() => Promise.resolve(data ? data : { k: "v" }));
+
+  var mockAsyncRejection = () =>
+    jest.fn().mockImplementation(() => Promise.reject());

   var mockAppState = () => ({
     user_data: JSON.parse(
@@ -45,6 +49,7 @@ describe("ServerDashboard Component: ", () => {
   });

   beforeEach(() => {
+    clock = sinon.useFakeTimers();
     useSelector.mockImplementation((callback) => {
       return callback(mockAppState());
     });
@@ -52,110 +57,467 @@ describe("ServerDashboard Component: ", () => {

   afterEach(() => {
     useSelector.mockClear();
+    clock.restore();
   });

-  it("Renders users from props.user_data into table", () => {
-    let component = mount(serverDashboardJsx(mockAsync())),
-      userRows = component.find(".user-row");
-    expect(userRows.length).toBe(2);
+  test("Renders", async () => {
+    let callbackSpy = mockAsync();
+
+    await act(async () => {
+      render(serverDashboardJsx(callbackSpy));
    });

-  it("Renders correctly the status of a single-user server", () => {
-    let component = mount(serverDashboardJsx(mockAsync())),
-      userRows = component.find(".user-row");
-    // Renders .stop-button when server is started
-    // Should be 1 since user foo is started
-    expect(userRows.at(0).find(".stop-button").length).toBe(1);
-    // Renders .start-button when server is stopped
-    // Should be 1 since user bar is stopped
-    expect(userRows.at(1).find(".start-button").length).toBe(1);
+    expect(screen.getByTestId("container")).toBeVisible();
+  });
+
+  test("Renders users from props.user_data into table", async () => {
+    let callbackSpy = mockAsync();
+
+    await act(async () => {
+      render(serverDashboardJsx(callbackSpy));
+    });

+    let foo = screen.getByTestId("user-name-div-foo");
+    let bar = screen.getByTestId("user-name-div-bar");

+    expect(foo).toBeVisible();
+    expect(bar).toBeVisible();
+  });

+  test("Renders correctly the status of a single-user server", async () => {
+    let callbackSpy = mockAsync();

+    await act(async () => {
+      render(serverDashboardJsx(callbackSpy));
+    });

+    let start = screen.getByText("Start Server");
+    let stop = screen.getByText("Stop Server");

+    expect(start).toBeVisible();
+    expect(stop).toBeVisible();
+  });

+  test("Invokes the startServer event on button click", async () => {
+    let callbackSpy = mockAsync();

+    await act(async () => {
+      render(serverDashboardJsx(callbackSpy));
+    });

+    let start = screen.getByText("Start Server");

+    await act(async () => {
+      fireEvent.click(start);
    });

-  it("Invokes the startServer event on button click", () => {
-    let callbackSpy = mockAsync(),
-      component = mount(serverDashboardJsx(callbackSpy)),
-      startBtn = component.find(".start-button");
-    startBtn.simulate("click");
    expect(callbackSpy).toHaveBeenCalled();
  });

-  it("Invokes the stopServer event on button click", () => {
-    let callbackSpy = mockAsync(),
-      component = mount(serverDashboardJsx(callbackSpy)),
-      stopBtn = component.find(".stop-button");
-    stopBtn.simulate("click");
+  test("Invokes the stopServer event on button click", async () => {
+    let callbackSpy = mockAsync();
+
+    await act(async () => {
+      render(serverDashboardJsx(callbackSpy));
+    });

+    let stop = screen.getByText("Stop Server");

+    await act(async () => {
+      fireEvent.click(stop);
+    });

    expect(callbackSpy).toHaveBeenCalled();
  });

-  it("Invokes the shutdownHub event on button click", () => {
-    let callbackSpy = mockAsync(),
-      component = mount(serverDashboardJsx(callbackSpy)),
-      shutdownBtn = component.find("#shutdown-button").first();
-    shutdownBtn.simulate("click");
+  test("Invokes the shutdownHub event on button click", async () => {
+    let callbackSpy = mockAsync();
+
+    await act(async () => {
+      render(serverDashboardJsx(callbackSpy));
+    });

+    let shutdown = screen.getByText("Shutdown Hub");

+    await act(async () => {
+      fireEvent.click(shutdown);
+    });

    expect(callbackSpy).toHaveBeenCalled();
  });

-  it("Sorts according to username", () => {
-    let component = mount(serverDashboardJsx(mockAsync())).find(
-      "ServerDashboard"
-    ),
-      handler = component.find("SortHandler").first();
-    handler.simulate("click");
-    let first = component.find(".user-row").first();
-    expect(first.html().includes("bar")).toBe(true);
-    handler.simulate("click");
-    first = component.find(".user-row").first();
-    expect(first.html().includes("foo")).toBe(true);
+  test("Sorts according to username", async () => {
+    let callbackSpy = mockAsync();
+
+    await act(async () => {
+      render(serverDashboardJsx(callbackSpy));
    });

-  it("Sorts according to admin", () => {
-    let component = mount(serverDashboardJsx(mockAsync())).find(
-      "ServerDashboard"
-    ),
-      handler = component.find("SortHandler").at(1);
-    handler.simulate("click");
-    let first = component.find(".user-row").first();
-    expect(first.html().includes("admin")).toBe(true);
-    handler.simulate("click");
-    first = component.find(".user-row").first();
-    expect(first.html().includes("admin")).toBe(false);
+    let handler = screen.getByTestId("user-sort");
+    fireEvent.click(handler);
+
+    let first = screen.getAllByTestId("user-row-name")[0];
+    expect(first.textContent).toContain("bar");
+
+    fireEvent.click(handler);
+
+    first = screen.getAllByTestId("user-row-name")[0];
+    expect(first.textContent).toContain("foo");
  });

-  it("Sorts according to last activity", () => {
-    let component = mount(serverDashboardJsx(mockAsync())).find(
-      "ServerDashboard"
-    ),
-      handler = component.find("SortHandler").at(2);
-    handler.simulate("click");
-    let first = component.find(".user-row").first();
-    // foo used most recently
-    expect(first.html().includes("foo")).toBe(true);
-    handler.simulate("click");
-    first = component.find(".user-row").first();
-    // invert sort - bar used least recently
-    expect(first.html().includes("bar")).toBe(true);
+  test("Sorts according to admin", async () => {
+    let callbackSpy = mockAsync();
+
+    await act(async () => {
+      render(serverDashboardJsx(callbackSpy));
    });

-  it("Sorts according to server status (running/not running)", () => {
-    let component = mount(serverDashboardJsx(mockAsync())).find(
-      "ServerDashboard"
-    ),
-      handler = component.find("SortHandler").at(3);
-    handler.simulate("click");
-    let first = component.find(".user-row").first();
-    // foo running
-    expect(first.html().includes("foo")).toBe(true);
-    handler.simulate("click");
-    first = component.find(".user-row").first();
-    // invert sort - bar not running
-    expect(first.html().includes("bar")).toBe(true);
+    let handler = screen.getByTestId("admin-sort");
+    fireEvent.click(handler);
+
+    let first = screen.getAllByTestId("user-row-admin")[0];
+    expect(first.textContent).toBe("admin");
+
+    fireEvent.click(handler);
+
+    first = screen.getAllByTestId("user-row-admin")[0];
+    expect(first.textContent).toBe("");
  });

-  it("Renders nothing if required data is not available", () => {
+  test("Sorts according to last activity", async () => {
+    let callbackSpy = mockAsync();

+    await act(async () => {
+      render(serverDashboardJsx(callbackSpy));
+    });

+    let handler = screen.getByTestId("last-activity-sort");
+    fireEvent.click(handler);

+    let first = screen.getAllByTestId("user-row-name")[0];
+    expect(first.textContent).toContain("foo");

+    fireEvent.click(handler);

+    first = screen.getAllByTestId("user-row-name")[0];
+    expect(first.textContent).toContain("bar");
+  });

+  test("Sorts according to server status (running/not running)", async () => {
+    let callbackSpy = mockAsync();

+    await act(async () => {
+      render(serverDashboardJsx(callbackSpy));
+    });

+    let handler = screen.getByTestId("running-status-sort");
+    fireEvent.click(handler);

+    let first = screen.getAllByTestId("user-row-name")[0];
+    expect(first.textContent).toContain("foo");

+    fireEvent.click(handler);

+    first = screen.getAllByTestId("user-row-name")[0];
+    expect(first.textContent).toContain("bar");
+  });

+  test("Shows server details with button click", async () => {
+    let callbackSpy = mockAsync();

+    await act(async () => {
+      render(serverDashboardJsx(callbackSpy));
+    });
+    let button = screen.getByTestId("foo-collapse-button");
+    let collapse = screen.getByTestId("foo-collapse");
+    let collapseBar = screen.getByTestId("bar-collapse");

+    // expect().toBeVisible does not work here with collapse.
+    expect(collapse).toHaveClass("collapse");
+    expect(collapse).not.toHaveClass("show");
+    expect(collapseBar).not.toHaveClass("show");

+    await act(async () => {
+      fireEvent.click(button);
+    });
+    clock.tick(400);

+    expect(collapse).toHaveClass("collapse show");
+    expect(collapseBar).not.toHaveClass("show");

+    await act(async () => {
+      fireEvent.click(button);
+    });
+    clock.tick(400);

+    expect(collapse).toHaveClass("collapse");
+    expect(collapse).not.toHaveClass("show");
+    expect(collapseBar).not.toHaveClass("show");

+    await act(async () => {
+      fireEvent.click(button);
+    });
+    clock.tick(400);

+    expect(collapse).toHaveClass("collapse show");
+    expect(collapseBar).not.toHaveClass("show");
+  });

+  test("Renders nothing if required data is not available", async () => {
     useSelector.mockImplementation((callback) => {
       return callback({});
     });
-    let component = mount(serverDashboardJsx(jest.fn()));
-    expect(component.html()).toBe("<div></div>");
+
+    let callbackSpy = mockAsync();
+
+    await act(async () => {
+      render(serverDashboardJsx(callbackSpy));
    });

+    let noShow = screen.getByTestId("no-show");

+    expect(noShow).toBeVisible();
+  });

+  test("Shows a UI error dialogue when start all servers fails", async () => {
+    let spy = mockAsync();
+    let rejectSpy = mockAsyncRejection;

+    await act(async () => {
+      render(
+        <Provider store={createStore(() => {}, {})}>
+          <HashRouter>
+            <Switch>
+              <ServerDashboard
+                updateUsers={spy}
+                shutdownHub={spy}
+                startServer={spy}
+                stopServer={spy}
+                startAll={rejectSpy}
+                stopAll={spy}
+              />
+            </Switch>
+          </HashRouter>
+        </Provider>
+      );
+    });

+    let startAll = screen.getByTestId("start-all");

+    await act(async () => {
+      fireEvent.click(startAll);
+    });

+    let errorDialog = screen.getByText("Failed to start servers.");

+    expect(errorDialog).toBeVisible();
+  });

+  test("Shows a UI error dialogue when stop all servers fails", async () => {
+    let spy = mockAsync();
+    let rejectSpy = mockAsyncRejection;

+    await act(async () => {
+      render(
+        <Provider store={createStore(() => {}, {})}>
+          <HashRouter>
+            <Switch>
+              <ServerDashboard
+                updateUsers={spy}
+                shutdownHub={spy}
+                startServer={spy}
+                stopServer={spy}
+                startAll={spy}
+                stopAll={rejectSpy}
+              />
+            </Switch>
+          </HashRouter>
+        </Provider>
+      );
+    });

+    let stopAll = screen.getByTestId("stop-all");

+    await act(async () => {
+      fireEvent.click(stopAll);
+    });

+    let errorDialog = screen.getByText("Failed to stop servers.");

+    expect(errorDialog).toBeVisible();
+  });

+  test("Shows a UI error dialogue when start user server fails", async () => {
+    let spy = mockAsync();
+    let rejectSpy = mockAsyncRejection();

+    await act(async () => {
+      render(
+        <Provider store={createStore(() => {}, {})}>
+          <HashRouter>
+            <Switch>
+              <ServerDashboard
+                updateUsers={spy}
+                shutdownHub={spy}
+                startServer={rejectSpy}
+                stopServer={spy}
+                startAll={spy}
+                stopAll={spy}
+              />
+            </Switch>
+          </HashRouter>
+        </Provider>
+      );
+    });

+    let start = screen.getByText("Start Server");

+    await act(async () => {
+      fireEvent.click(start);
+    });

+    let errorDialog = screen.getByText("Failed to start server.");

+    expect(errorDialog).toBeVisible();
+  });

+  test("Shows a UI error dialogue when start user server returns an improper status code", async () => {
+    let spy = mockAsync();
+    let rejectSpy = mockAsync({ status: 403 });

+    await act(async () => {
+      render(
+        <Provider store={createStore(() => {}, {})}>
+          <HashRouter>
+            <Switch>
+              <ServerDashboard
+                updateUsers={spy}
+                shutdownHub={spy}
+                startServer={rejectSpy}
+                stopServer={spy}
+                startAll={spy}
+                stopAll={spy}
+              />
+            </Switch>
+          </HashRouter>
+        </Provider>
+      );
+    });

+    let start = screen.getByText("Start Server");

+    await act(async () => {
+      fireEvent.click(start);
+    });

+    let errorDialog = screen.getByText("Failed to start server.");

+    expect(errorDialog).toBeVisible();
+  });

+  test("Shows a UI error dialogue when stop user servers fails", async () => {
+    let spy = mockAsync();
+    let rejectSpy = mockAsyncRejection();

+    await act(async () => {
+      render(
+        <Provider store={createStore(() => {}, {})}>
+          <HashRouter>
+            <Switch>
+              <ServerDashboard
+                updateUsers={spy}
+                shutdownHub={spy}
+                startServer={spy}
+                stopServer={rejectSpy}
+                startAll={spy}
+                stopAll={spy}
+              />
+            </Switch>
+          </HashRouter>
+        </Provider>
+      );
+    });

+    let stop = screen.getByText("Stop Server");

+    await act(async () => {
+      fireEvent.click(stop);
+    });

+    let errorDialog = screen.getByText("Failed to stop server.");

+    expect(errorDialog).toBeVisible();
+  });

+  test("Shows a UI error dialogue when stop user server returns an improper status code", async () => {
+    let spy = mockAsync();
+    let rejectSpy = mockAsync({ status: 403 });

+    await act(async () => {
+      render(
+        <Provider store={createStore(() => {}, {})}>
+          <HashRouter>
+            <Switch>
+              <ServerDashboard
+                updateUsers={spy}
+                shutdownHub={spy}
+                startServer={spy}
+                stopServer={rejectSpy}
+                startAll={spy}
+                stopAll={spy}
+              />
+            </Switch>
+          </HashRouter>
+        </Provider>
+      );
+    });

+    let stop = screen.getByText("Stop Server");

+    await act(async () => {
+      fireEvent.click(stop);
+    });

+    let errorDialog = screen.getByText("Failed to stop server.");

+    expect(errorDialog).toBeVisible();
+  });

+  test("Search for user calls updateUsers with name filter", async () => {
+    let spy = mockAsync();
+    let mockUpdateUsers = jest.fn((offset, limit, name_filter) => {
+      return Promise.resolve([]);
+    });
+    await act(async () => {
+      render(
+        <Provider store={createStore(() => {}, {})}>
+          <HashRouter>
+            <Switch>
+              <ServerDashboard
+                updateUsers={mockUpdateUsers}
+                shutdownHub={spy}
+                startServer={spy}
+                stopServer={spy}
+                startAll={spy}
+                stopAll={spy}
+              />
+            </Switch>
+          </HashRouter>
+        </Provider>
+      );
+    });

+    let search = screen.getByLabelText("user-search");

+    userEvent.type(search, "a");
+    expect(search.value).toEqual("a");
+    clock.tick(400);
+    expect(mockUpdateUsers.mock.calls[1][2]).toEqual("a");
+    expect(mockUpdateUsers.mock.calls).toHaveLength(2);

+    userEvent.type(search, "b");
+    expect(search.value).toEqual("ab");
+    clock.tick(400);
+    expect(mockUpdateUsers.mock.calls[2][2]).toEqual("ab");
+    expect(mockUpdateUsers.mock.calls).toHaveLength(3);
  });
@@ -1,5 +1,7 @@
 export const jhapiRequest = (endpoint, method, data) => {
-  return fetch("/hub/api" + endpoint, {
+  let base_url = window.base_url,
+    api_url = `${base_url}hub/api`;
+  return fetch(api_url + endpoint, {
     method: method,
     json: true,
     headers: {
@@ -2,17 +2,20 @@ import { withProps } from "recompose";
 import { jhapiRequest } from "./jhapiUtil";

 const withAPI = withProps(() => ({
-  updateUsers: (offset, limit) =>
-    jhapiRequest(`/users?offset=${offset}&limit=${limit}`, "GET").then((data) =>
-      data.json()
-    ),
+  updateUsers: (offset, limit, name_filter) =>
+    jhapiRequest(
+      `/users?offset=${offset}&limit=${limit}&name_filter=${name_filter || ""}`,
+      "GET"
+    ).then((data) => data.json()),
   updateGroups: (offset, limit) =>
     jhapiRequest(`/groups?offset=${offset}&limit=${limit}`, "GET").then(
       (data) => data.json()
     ),
   shutdownHub: () => jhapiRequest("/shutdown", "POST"),
-  startServer: (name) => jhapiRequest("/users/" + name + "/server", "POST"),
-  stopServer: (name) => jhapiRequest("/users/" + name + "/server", "DELETE"),
+  startServer: (name, serverName = "") =>
+    jhapiRequest("/users/" + name + "/servers/" + (serverName || ""), "POST"),
+  stopServer: (name, serverName = "") =>
+    jhapiRequest("/users/" + name + "/servers/" + (serverName || ""), "DELETE"),
   startAll: (names) =>
     names.map((e) => jhapiRequest("/users/" + e + "/server", "POST")),
   stopAll: (names) =>
@@ -36,13 +39,14 @@ const withAPI = withProps(() => ({
     jhapiRequest("/users/" + username, "GET")
       .then((data) => data.status)
       .then((data) => (data > 200 ? false : true)),
-  failRegexEvent: () =>
-    alert(
-      "Cannot change username - either contains special characters or is too short."
-    ),
-  noChangeEvent: () => {
-    returns;
+  // Temporarily Unused
+  failRegexEvent: () => {
+    return null;
   },
+  noChangeEvent: () => {
+    return null;
+  },
+  //
   refreshGroupsData: () =>
     jhapiRequest("/groups", "GET").then((data) => data.json()),
   refreshUserData: () =>
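
The updateUsers, startServer, and stopServer changes above mean the admin UI now queries the paginated user list with a name_filter parameter and talks to the per-server (named-server) endpoints. As a rough, non-authoritative sketch of the same calls made directly against the Hub REST API, with a placeholder hub address and token rather than anything taken from this diff:

# Illustrative sketch only: the hub URL and token are placeholder assumptions.
import requests

HUB_API = "http://127.0.0.1:8081/hub/api"  # assumed default hub API address
HEADERS = {"Authorization": "token REPLACE_WITH_ADMIN_TOKEN"}

# start the default server for user "foo" (empty server name)
requests.post(f"{HUB_API}/users/foo/servers/", headers=HEADERS)

# stop a named server "gpu" for user "foo"
requests.delete(f"{HUB_API}/users/foo/servers/gpu", headers=HEADERS)

# list users the way the new search box does, with a name filter
r = requests.get(
    f"{HUB_API}/users",
    params={"offset": 0, "limit": 10, "name_filter": "fo"},
    headers=HEADERS,
)
print(r.json())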
jsx/yarn.lock: 5011 changed lines (diff suppressed because it is too large)
@@ -1,14 +1,8 @@
 """JupyterHub version info"""
 # Copyright (c) Jupyter Development Team.
 # Distributed under the terms of the Modified BSD License.
-version_info = (
-    2,
-    0,
-    0,
-    "b1", # release (b1, rc1, or "" for final or dev)
-    # "dev", # dev or nothing for beta/rc/stable releases
-)
+# version_info updated by running `tbump`
+version_info = (2, 3, 0, "", "")

 # pep 440 version: no dot before beta/rc, but before .dev
 # 0.1.0rc1
@@ -16,7 +10,9 @@ version_info = (
 # 0.1.0b1.dev
 # 0.1.0.dev

-__version__ = ".".join(map(str, version_info[:3])) + ".".join(version_info[3:])
+__version__ = ".".join(map(str, version_info[:3])) + ".".join(version_info[3:]).rstrip(
+    "."
+)

 # Singleton flag to only log the major/minor mismatch warning once per mismatch combo.
 _version_mismatch_warning_logged = {}
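
For reference, a small standalone check (not part of the repository) of how the flattened version_info tuple and the added rstrip(".") produce the PEP 440 strings described in the comments above:

# Quick sanity check of the __version__ construction shown in the diff.
def render_version(version_info):
    return ".".join(map(str, version_info[:3])) + ".".join(version_info[3:]).rstrip(".")

print(render_version((2, 3, 0, "", "")))     # 2.3.0   (final release)
print(render_version((2, 0, 0, "b1", "")))   # 2.0.0b1 (beta)
print(render_version((2, 3, 0, "", "dev")))  # 2.3.0.dev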
@@ -55,8 +55,15 @@ def run_migrations_offline():
     script output.

     """
+    connectable = config.attributes.get('connection', None)
+
+    if connectable is None:
         url = config.get_main_option("sqlalchemy.url")
         context.configure(url=url, target_metadata=target_metadata, literal_binds=True)
+    else:
+        context.configure(
+            connection=connectable, target_metadata=target_metadata, literal_binds=True
+        )

     with context.begin_transaction():
         context.run_migrations()
@@ -69,6 +76,9 @@ def run_migrations_online():
     and associate a connection with the context.

     """
+    connectable = config.attributes.get('connection', None)
+
+    if connectable is None:
         connectable = engine_from_config(
             config.get_section(config.config_ini_section),
             prefix='sqlalchemy.',
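
The config.attributes.get('connection', None) hook added above lets a caller hand an already-open SQLAlchemy connection to Alembic instead of having env.py create its own engine. A minimal sketch of how that hook is typically driven from the calling side (the alembic.ini path and database URL are placeholder assumptions):

# Sketch: run migrations over an existing connection via config.attributes.
from alembic import command
from alembic.config import Config
from sqlalchemy import create_engine

engine = create_engine("sqlite:///jupyterhub.sqlite")  # placeholder URL
cfg = Config("alembic.ini")  # placeholder path

with engine.begin() as connection:
    # env.py picks this up through config.attributes.get('connection', None)
    cfg.attributes["connection"] = connection
    command.upgrade(cfg, "head")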
@@ -16,6 +16,7 @@ from tornado import web
 from .. import orm
 from .. import roles
 from .. import scopes
+from ..utils import get_browser_protocol
 from ..utils import token_authenticated
 from .base import APIHandler
 from .base import BaseHandler
@@ -115,7 +116,10 @@ class OAuthHandler:
         # make absolute local redirects full URLs
         # to satisfy oauthlib's absolute URI requirement
         redirect_uri = (
-            self.request.protocol + "://" + self.request.headers['Host'] + redirect_uri
+            get_browser_protocol(self.request)
+            + "://"
+            + self.request.host
+            + redirect_uri
         )
         parsed_url = urlparse(uri)
         query_list = parse_qsl(parsed_url.query, keep_blank_values=True)
@@ -308,12 +312,14 @@ class OAuthAuthorizeHandler(OAuthHandler, BaseHandler):
                     "filter": "",
                 }
             ]
-        elif 'all' in raw_scopes:
-            raw_scopes = ['all']
+        elif 'inherit' in raw_scopes:
+            raw_scopes = ['inherit']
             scope_descriptions = [
                 {
-                    "scope": "all",
-                    "description": scopes.scope_definitions['all']['description'],
+                    "scope": "inherit",
+                    "description": scopes.scope_definitions['inherit'][
+                        'description'
+                    ],
                     "filter": "",
                 }
             ]
@@ -14,6 +14,7 @@ from tornado import web

 from .. import orm
 from ..handlers import BaseHandler
+from ..utils import get_browser_protocol
 from ..utils import isoformat
 from ..utils import url_path_join

@@ -31,6 +32,9 @@ class APIHandler(BaseHandler):
     - methods for REST API models
     """

+    # accept token-based authentication for API requests
+    _accept_token_auth = True
+
     @property
     def content_security_policy(self):
         return '; '.join([super().content_security_policy, "default-src 'none'"])
@@ -55,7 +59,10 @@ class APIHandler(BaseHandler):

         - allow unspecified host/referer (e.g. scripts)
         """
-        host = self.request.headers.get("Host")
+        host_header = self.app.forwarded_host_header or "Host"
+        host = self.request.headers.get(host_header)
+        if host and "," in host:
+            host = host.split(",", 1)[0].strip()
         referer = self.request.headers.get("Referer")

         # If no header is provided, assume it comes from a script/curl.
@@ -67,13 +74,25 @@ class APIHandler(BaseHandler):
             self.log.warning("Blocking API request with no referer")
             return False

-        host_path = url_path_join(host, self.hub.base_url)
-        referer_path = referer.split('://', 1)[-1]
-        if not (referer_path + '/').startswith(host_path):
+        proto = get_browser_protocol(self.request)
+        full_host = f"{proto}://{host}{self.hub.base_url}"
+        host_url = urlparse(full_host)
+        referer_url = urlparse(referer)
+        # resolve default ports for http[s]
+        referer_port = referer_url.port or (
+            443 if referer_url.scheme == 'https' else 80
+        )
+        host_port = host_url.port or (443 if host_url.scheme == 'https' else 80)
+        if (
+            referer_url.scheme != host_url.scheme
+            or referer_url.hostname != host_url.hostname
+            or referer_port != host_port
+            or not (referer_url.path + "/").startswith(host_url.path)
+        ):
             self.log.warning(
-                "Blocking Cross Origin API request. Referer: %s, Host: %s",
-                referer,
-                host_path,
+                f"Blocking Cross Origin API request. Referer: {referer},"
+                f" {host_header}: {host}, Host URL: {full_host}",
             )
             return False
         return True
@@ -210,6 +229,7 @@ class APIHandler(BaseHandler):
             'last_activity': isoformat(token.last_activity),
             'expires_at': isoformat(token.expires_at),
             'note': token.note,
+            'session_id': token.session_id,
             'oauth_client': token.oauth_client.description
             or token.oauth_client.identifier,
         }
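
The rewritten referer check above compares scheme, hostname, resolved port, and path prefix rather than doing a plain string-prefix match on host plus path. A standalone sketch of that comparison, with the handler context (request object, hub.base_url, logging) stubbed out:

# Standalone approximation of the cross-origin check; not the handler itself.
from urllib.parse import urlparse

def same_origin(referer, proto, host, base_url="/hub/"):
    host_url = urlparse(f"{proto}://{host}{base_url}")
    referer_url = urlparse(referer)
    # resolve default ports for http[s]
    referer_port = referer_url.port or (443 if referer_url.scheme == "https" else 80)
    host_port = host_url.port or (443 if host_url.scheme == "https" else 80)
    return (
        referer_url.scheme == host_url.scheme
        and referer_url.hostname == host_url.hostname
        and referer_port == host_port
        and (referer_url.path + "/").startswith(host_url.path)
    )

print(same_origin("https://hub.example.org/hub/admin", "https", "hub.example.org"))  # True
print(same_origin("http://attacker.example/hub/", "https", "hub.example.org"))       # False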
@@ -33,6 +33,11 @@ class _GroupAPIHandler(APIHandler):
             raise web.HTTPError(404, "No such group: %s", group_name)
         return group

+    def check_authenticator_managed_groups(self):
+        """Raise error on group-management APIs if Authenticator is managing groups"""
+        if self.authenticator.manage_groups:
+            raise web.HTTPError(400, "Group management via API is disabled")
+

 class GroupListAPIHandler(_GroupAPIHandler):
     @needs_scope('list:groups')
@@ -45,7 +50,7 @@ class GroupListAPIHandler(_GroupAPIHandler):
                 # the only valid filter is group=...
                 # don't expand invalid !server=x to all groups!
                 self.log.warning(
-                    "Invalid filter on list:group for {self.current_user}: {sub_scope}"
+                    f"Invalid filter on list:group for {self.current_user}: {sub_scope}"
                 )
                 raise web.HTTPError(403)
             query = query.filter(orm.Group.name.in_(sub_scope['group']))
@@ -68,6 +73,9 @@ class GroupListAPIHandler(_GroupAPIHandler):
     @needs_scope('admin:groups')
     async def post(self):
         """POST creates Multiple groups"""
+
+        self.check_authenticator_managed_groups()
+
         model = self.get_json_body()
         if not model or not isinstance(model, dict) or not model.get('groups'):
             raise web.HTTPError(400, "Must specify at least one group to create")
@@ -106,6 +114,7 @@ class GroupAPIHandler(_GroupAPIHandler):
     @needs_scope('admin:groups')
     async def post(self, group_name):
         """POST creates a group by name"""
+        self.check_authenticator_managed_groups()
         model = self.get_json_body()
         if model is None:
             model = {}
@@ -129,9 +138,10 @@ class GroupAPIHandler(_GroupAPIHandler):
         self.write(json.dumps(self.group_model(group)))
         self.set_status(201)

-    @needs_scope('admin:groups')
+    @needs_scope('delete:groups')
     def delete(self, group_name):
         """Delete a group by name"""
+        self.check_authenticator_managed_groups()
         group = self.find_group(group_name)
         self.log.info("Deleting group %s", group_name)
         self.db.delete(group)
@@ -145,6 +155,7 @@ class GroupUsersAPIHandler(_GroupAPIHandler):
     @needs_scope('groups')
     def post(self, group_name):
         """POST adds users to a group"""
+        self.check_authenticator_managed_groups()
         group = self.find_group(group_name)
         data = self.get_json_body()
         self._check_group_model(data)
@@ -163,6 +174,7 @@ class GroupUsersAPIHandler(_GroupAPIHandler):
     @needs_scope('groups')
     async def delete(self, group_name):
         """DELETE removes users from a group"""
+        self.check_authenticator_managed_groups()
         group = self.find_group(group_name)
         data = self.get_json_body()
         self._check_group_model(data)
@@ -47,9 +47,8 @@ class ShutdownAPIHandler(APIHandler):
         self.set_status(202)
         self.finish(json.dumps({"message": "Shutting down Hub"}))
 
-        # stop the eventloop, which will trigger cleanup
-        loop = IOLoop.current()
-        loop.add_callback(loop.stop)
+        # instruct the app to stop, which will trigger cleanup
+        app.stop()
 
 
 class RootAPIHandler(APIHandler):
@@ -58,6 +58,14 @@ class SelfAPIHandler(APIHandler):
 
         model = get_model(user)
 
+        # add session_id associated with token
+        # added in 2.0
+        token = self.get_token()
+        if token:
+            model["session_id"] = token.session_id
+        else:
+            model["session_id"] = None
+
         # add scopes to identify model,
         # but not the scopes we added to ensure we could read our own model
         model["scopes"] = sorted(self.expanded_scopes.difference(_added_scopes))
@@ -76,6 +84,7 @@ class UserListAPIHandler(APIHandler):
     @needs_scope('list:users')
     def get(self):
         state_filter = self.get_argument("state", None)
+        name_filter = self.get_argument("name_filter", None)
         offset, limit = self.get_api_pagination()
 
         # post_filter
@@ -122,7 +131,7 @@ class UserListAPIHandler(APIHandler):
             if not set(sub_scope).issubset({'group', 'user'}):
                 # don't expand invalid !server=x filter to all users!
                 self.log.warning(
-                    "Invalid filter on list:user for {self.current_user}: {sub_scope}"
+                    f"Invalid filter on list:user for {self.current_user}: {sub_scope}"
                 )
                 raise web.HTTPError(403)
             filters = []
@@ -140,6 +149,9 @@ class UserListAPIHandler(APIHandler):
             else:
                 query = query.filter(or_(*filters))
 
+        if name_filter:
+            query = query.filter(orm.User.name.ilike(f'%{name_filter}%'))
+
         full_query = query
         query = query.order_by(orm.User.id.asc()).offset(offset).limit(limit)
 
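The `name_filter` argument added above narrows `GET /hub/api/users` with a case-insensitive substring match on the user name, alongside the existing offset/limit pagination. A hypothetical client call (the URL and token are placeholders, not part of the diff):

    import requests

    hub_api = "http://127.0.0.1:8081/hub/api"      # placeholder Hub API URL
    token = "<api-token-with-list-users-scope>"    # placeholder token

    # name_filter maps to orm.User.name.ilike('%ana%') on the Hub side
    r = requests.get(
        f"{hub_api}/users",
        params={"name_filter": "ana", "offset": 0, "limit": 50},
        headers={"Authorization": f"token {token}"},
    )
    r.raise_for_status()
    print(r.json())
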
@@ -266,7 +278,7 @@ class UserAPIHandler(APIHandler):
         self.write(json.dumps(self.user_model(user)))
         self.set_status(201)
 
-    @needs_scope('admin:users')
+    @needs_scope('delete:users')
     async def delete(self, user_name):
         user = self.find_user(user_name)
         if user is None:
@@ -397,9 +409,11 @@ class UserTokenListAPIHandler(APIHandler):
         token_roles = body.get('roles')
         try:
             api_token = user.new_api_token(
-                note=note, expires_in=body.get('expires_in', None), roles=token_roles
+                note=note,
+                expires_in=body.get('expires_in', None),
+                roles=token_roles,
             )
-        except NameError:
+        except KeyError:
             raise web.HTTPError(404, "Requested roles %r not found" % token_roles)
         except ValueError:
             raise web.HTTPError(
@@ -421,6 +435,7 @@ class UserTokenListAPIHandler(APIHandler):
         token_model = self.token_model(orm.APIToken.find(self.db, api_token))
         token_model['token'] = api_token
         self.write(json.dumps(token_model))
+        self.set_status(201)
 
 
 class UserTokenAPIHandler(APIHandler):
@@ -483,6 +498,11 @@ class UserServerAPIHandler(APIHandler):
     @needs_scope('servers')
     async def post(self, user_name, server_name=''):
         user = self.find_user(user_name)
+        if user is None:
+            # this can be reached if a token has `servers`
+            # permission on *all* users
+            raise web.HTTPError(404)
+
         if server_name:
             if not self.allow_named_servers:
                 raise web.HTTPError(400, "Named servers are not enabled.")
@@ -499,7 +519,7 @@ class UserServerAPIHandler(APIHandler):
                     user_name, self.named_server_limit_per_user
                 ),
             )
-        spawner = user.spawners[server_name]
+        spawner = user.get_spawner(server_name, replace_failed=True)
         pending = spawner.pending
         if pending == 'spawn':
             self.set_header('Content-Type', 'text/plain')
@@ -525,7 +545,7 @@ class UserServerAPIHandler(APIHandler):
             self.set_header('Content-Type', 'text/plain')
             self.set_status(status)
 
-    @needs_scope('servers')
+    @needs_scope('delete:servers')
     async def delete(self, user_name, server_name=''):
         user = self.find_user(user_name)
         options = self.get_json_body()
@@ -698,7 +718,12 @@ class SpawnProgressAPIHandler(APIHandler):
             # check if spawner has just failed
             f = spawn_future
             if f and f.done() and f.exception():
-                failed_event['message'] = "Spawn failed: %s" % f.exception()
+                exc = f.exception()
+                message = getattr(exc, "jupyterhub_message", str(exc))
+                failed_event['message'] = f"Spawn failed: {message}"
+                html_message = getattr(exc, "jupyterhub_html_message", "")
+                if html_message:
+                    failed_event['html_message'] = html_message
                 await self.send_event(failed_event)
                 return
             else:
@@ -731,7 +756,12 @@ class SpawnProgressAPIHandler(APIHandler):
                 # what happened? Maybe spawn failed?
                 f = spawn_future
                 if f and f.done() and f.exception():
-                    failed_event['message'] = "Spawn failed: %s" % f.exception()
+                    exc = f.exception()
+                    message = getattr(exc, "jupyterhub_message", str(exc))
+                    failed_event['message'] = f"Spawn failed: {message}"
+                    html_message = getattr(exc, "jupyterhub_html_message", "")
+                    if html_message:
+                        failed_event['html_message'] = html_message
                 else:
                     self.log.warning(
                         "Server %s didn't start for unknown reason", spawner._log_name
@@ -90,6 +90,7 @@ from .log import CoroutineLogFormatter, log_request
 from .proxy import Proxy, ConfigurableHTTPProxy
 from .traitlets import URLPrefix, Command, EntryPointType, Callable
 from .utils import (
+    AnyTimeoutError,
     catch_db_error,
     maybe_future,
     url_path_join,
@@ -790,6 +791,16 @@ class JupyterHub(Application):
             self.proxy_api_ip or '127.0.0.1', self.proxy_api_port or self.port + 1
         )
 
+    forwarded_host_header = Unicode(
+        '',
+        help="""Alternate header to use as the Host (e.g., X-Forwarded-Host)
+        when determining whether a request is cross-origin
+
+        This may be useful when JupyterHub is running behind a proxy that rewrites
+        the Host header.
+        """,
+    ).tag(config=True)
+
     hub_port = Integer(
         8081,
         help="""The internal port for the Hub process.
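The new `forwarded_host_header` trait lets the cross-origin check compare against a proxy-supplied header instead of `Host`. A minimal `jupyterhub_config.py` sketch, assuming the proxy in front of the Hub sets `X-Forwarded-Host`:

    # jupyterhub_config.py
    c.JupyterHub.forwarded_host_header = "X-Forwarded-Host"
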
@@ -1518,6 +1529,25 @@ class JupyterHub(Application):
         """,
     ).tag(config=True)
 
+    use_legacy_stopped_server_status_code = Bool(
+        False,
+        help="""
+        Return 503 rather than 424 when request comes in for a non-running server.
+
+        Prior to JupyterHub 2.0, we returned a 503 when any request came in for
+        a user server that was currently not running. By default, JupyterHub 2.0
+        will return a 424 - this makes operational metric dashboards more useful.
+
+        JupyterLab < 3.2 expected the 503 to know if the user server is no longer
+        running, and prompted the user to start their server. Set this config to
+        true to retain the old behavior, so JupyterLab < 3.2 can continue to show
+        the appropriate UI when the user server is stopped.
+
+        This option will be removed in a future release.
+        """,
+        config=True,
+    )
+
     def init_handlers(self):
         h = []
         # load handlers from the authenticator
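As the help text explains, requests for a stopped server now answer 424 instead of 503; deployments still serving JupyterLab older than 3.2 can opt back into the old code. A one-line `jupyterhub_config.py` sketch:

    # jupyterhub_config.py -- only while JupyterLab < 3.2 clients are in use
    c.JupyterHub.use_legacy_stopped_server_status_code = True
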
@@ -1873,6 +1903,7 @@ class JupyterHub(Application):
             user = orm.User.find(db, name)
             if user is None:
                 user = orm.User(name=name, admin=True)
+                roles.assign_default_roles(self.db, entity=user)
                 new_users.append(user)
                 db.add(user)
             else:
@@ -1963,12 +1994,16 @@ class JupyterHub(Application):
             self.log.info(f"Creating user {username}")
             user = orm.User(name=username)
             self.db.add(user)
+            roles.assign_default_roles(self.db, entity=user)
             self.db.commit()
         return user
 
     async def init_groups(self):
         """Load predefined groups into the database"""
         db = self.db
+
+        if self.authenticator.manage_groups and self.load_groups:
+            raise ValueError("Group management has been offloaded to the authenticator")
         for name, usernames in self.load_groups.items():
             group = orm.Group.find(db, name)
             if group is None:
@@ -1984,14 +2019,25 @@ class JupyterHub(Application):
 
     async def init_role_creation(self):
         """Load default and predefined roles into the database"""
-        self.log.debug('Loading default roles to database')
+        self.log.debug('Loading roles into database')
         default_roles = roles.get_default_roles()
         config_role_names = [r['name'] for r in self.load_roles]
 
-        init_roles = default_roles
+        default_roles_dict = {role["name"]: role for role in default_roles}
+        init_roles = []
         roles_with_new_permissions = []
         for role_spec in self.load_roles:
             role_name = role_spec['name']
+            if role_name in default_roles_dict:
+                self.log.debug(f"Overriding default role {role_name}")
+                # merge custom role spec with default role spec when overriding
+                # so the new role can be partially defined
+                default_role_spec = default_roles_dict.pop(role_name)
+                merged_role_spec = {}
+                merged_role_spec.update(default_role_spec)
+                merged_role_spec.update(role_spec)
+                role_spec = merged_role_spec
+
             # Check for duplicates
             if config_role_names.count(role_name) > 1:
                 raise ValueError(
@@ -2002,10 +2048,13 @@ class JupyterHub(Application):
             old_role = orm.Role.find(self.db, name=role_name)
             if old_role:
                 if not set(role_spec['scopes']).issubset(old_role.scopes):
-                    app_log.warning(
+                    self.log.warning(
                         "Role %s has obtained extra permissions" % role_name
                     )
                     roles_with_new_permissions.append(role_name)
+
+        # make sure we load any default roles not overridden
+        init_roles = list(default_roles_dict.values()) + init_roles
         if roles_with_new_permissions:
             unauthorized_oauth_tokens = (
                 self.db.query(orm.APIToken)
@@ -2017,7 +2066,7 @@ class JupyterHub(Application):
                 .filter(orm.APIToken.client_id != 'jupyterhub')
             )
             for token in unauthorized_oauth_tokens:
-                app_log.warning(
+                self.log.warning(
                     "Deleting OAuth token %s; one of its roles obtained new permissions that were not authorized by user"
                     % token
                 )
@@ -2025,14 +2074,19 @@ class JupyterHub(Application):
         self.db.commit()
 
         init_role_names = [r['name'] for r in init_roles]
-        if not orm.Role.find(self.db, name='admin'):
+        if (
+            self.db.query(orm.Role).first() is None
+            and self.db.query(orm.User).first() is not None
+        ):
+            # apply rbac-upgrade default role assignment if there are users in the db,
+            # but not any roles
             self._rbac_upgrade = True
         else:
             self._rbac_upgrade = False
         for role in self.db.query(orm.Role).filter(
             orm.Role.name.notin_(init_role_names)
         ):
-            app_log.info(f"Deleting role {role.name}")
+            self.log.warning(f"Deleting role {role.name}")
             self.db.delete(role)
         self.db.commit()
         for role in init_roles:
@@ -2048,66 +2102,89 @@ class JupyterHub(Application):
         if config_admin_users:
             for role_spec in self.load_roles:
                 if role_spec['name'] == 'admin':
-                    app_log.warning(
+                    self.log.warning(
                         "Configuration specifies both admin_users and users in the admin role specification. "
-                        "If admin role is present in config, c.authenticator.admin_users should not be used."
+                        "If admin role is present in config, c.Authenticator.admin_users should not be used."
                     )
-                    app_log.info(
+                    self.log.info(
                         "Merging admin_users set with users list in admin role"
                     )
                     role_spec['users'] = set(role_spec.get('users', []))
                     role_spec['users'] |= config_admin_users
-        self.log.debug('Loading predefined roles from config file to database')
+        self.log.debug('Loading role assignments from config')
         has_admin_role_spec = {role_bearer: False for role_bearer in admin_role_objects}
-        for predef_role in self.load_roles:
-            predef_role_obj = orm.Role.find(db, name=predef_role['name'])
-            if predef_role['name'] == 'admin':
+        for role_spec in self.load_roles:
+            role = orm.Role.find(db, name=role_spec['name'])
+            role_name = role_spec["name"]
+            if role_name == 'admin':
                 for kind in admin_role_objects:
-                    has_admin_role_spec[kind] = kind in predef_role
+                    has_admin_role_spec[kind] = kind in role_spec
                     if has_admin_role_spec[kind]:
-                        app_log.info(f"Admin role specifies static {kind} list")
+                        self.log.info(f"Admin role specifies static {kind} list")
                     else:
-                        app_log.info(
+                        self.log.info(
                             f"Admin role does not specify {kind}, preserving admin membership in database"
                         )
             # add users, services, and/or groups,
             # tokens need to be checked for permissions
             for kind in kinds:
                 orm_role_bearers = []
-                if kind in predef_role.keys():
-                    for bname in predef_role[kind]:
+                if kind in role_spec:
+                    for name in role_spec[kind]:
                         if kind == 'users':
-                            bname = self.authenticator.normalize_username(bname)
+                            name = self.authenticator.normalize_username(name)
                             if not (
                                 await maybe_future(
-                                    self.authenticator.check_allowed(bname, None)
+                                    self.authenticator.check_allowed(name, None)
                                 )
                             ):
                                 raise ValueError(
-                                    "Username %r is not in Authenticator.allowed_users"
-                                    % bname
+                                    f"Username {name} is not in Authenticator.allowed_users"
                                 )
                         Class = orm.get_class(kind)
-                        orm_obj = Class.find(db, bname)
-                        if orm_obj:
+                        orm_obj = Class.find(db, name)
+                        if orm_obj is not None:
                             orm_role_bearers.append(orm_obj)
                         else:
-                            app_log.info(
-                                f"Found unexisting {kind} {bname} in role definition {predef_role['name']}"
+                            self.log.info(
+                                f"Found unexisting {kind} {name} in role definition {role_name}"
                             )
                             if kind == 'users':
-                                orm_obj = await self._get_or_create_user(bname)
+                                orm_obj = await self._get_or_create_user(name)
                                 orm_role_bearers.append(orm_obj)
+                            elif kind == 'groups':
+                                group = orm.Group(name=name)
+                                db.add(group)
+                                db.commit()
+                                orm_role_bearers.append(group)
                             else:
                                 raise ValueError(
-                                    f"{kind} {bname} defined in config role definition {predef_role['name']} but not present in database"
+                                    f"{kind} {name} defined in config role definition {role_name} but not present in database"
                                 )
                         # Ensure all with admin role have admin flag
-                        if predef_role['name'] == 'admin':
+                        if role_name == 'admin':
                             orm_obj.admin = True
-                    setattr(predef_role_obj, kind, orm_role_bearers)
+                    # explicitly defined list
+                    # ensure membership list is exact match (adds and revokes permissions)
+                    setattr(role, kind, orm_role_bearers)
+                else:
+                    # no defined members
+                    # leaving 'users' undefined in overrides of the default 'user' role
+                    # should not clear membership on startup
+                    # since allowed users could be managed by the authenticator
+                    if kind == "users" and role_name == "user":
+                        # Default user lists can be managed by the Authenticator,
+                        # if unspecified in role config
+                        pass
+                    else:
+                        # otherwise, omitting a member category is equivalent to specifying an empty list
+                        setattr(role, kind, [])
+
         db.commit()
         if self.authenticator.allowed_users:
+            self.log.debug(
+                f"Assigning {len(self.authenticator.allowed_users)} allowed_users to the user role"
+            )
             allowed_users = db.query(orm.User).filter(
                 orm.User.name.in_(self.authenticator.allowed_users)
             )
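The merge logic above means a role in `load_roles` that shares a name with a default role only needs to state the fields it wants to change; unspecified fields, including the `users` membership of the default `user` role, are left alone. A hedged config sketch (the scope list is only an illustration):

    # jupyterhub_config.py -- partially override the default 'user' role
    c.JupyterHub.load_roles = [
        {
            "name": "user",
            # only 'scopes' is overridden; the rest of the default spec,
            # including membership handling, is merged in
            "scopes": ["self", "read:users:name"],
        },
    ]
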
@@ -2124,8 +2201,8 @@ class JupyterHub(Application):
         db.commit()
         # make sure that on hub upgrade, all users, services and tokens have at least one role (update with default)
         if getattr(self, '_rbac_upgrade', False):
-            app_log.warning(
-                "No admin role found; assuming hub upgrade. Initializing default roles for all entities"
+            self.log.warning(
+                "No roles found; assuming hub upgrade. Initializing default roles for all entities"
             )
             for kind in kinds:
                 roles.check_for_default_roles(db, kind)
@@ -2331,7 +2408,7 @@ class JupyterHub(Application):
                 continue
             try:
                 await Server.from_orm(service.orm.server).wait_up(timeout=1, http=True)
-            except TimeoutError:
+            except AnyTimeoutError:
                 self.log.warning(
                     "Cannot connect to %s service %s at %s",
                     service.kind,
@@ -2409,7 +2486,7 @@ class JupyterHub(Application):
             )
             try:
                 await user._wait_up(spawner)
-            except TimeoutError:
+            except AnyTimeoutError:
                 self.log.error(
                     "%s does not appear to be running at %s, shutting it down.",
                     spawner._log_name,
@@ -2773,7 +2850,7 @@ class JupyterHub(Application):
             await gen.with_timeout(
                 timedelta(seconds=max(init_spawners_timeout, 1)), init_spawners_future
             )
-        except gen.TimeoutError:
+        except AnyTimeoutError:
             self.log.warning(
                 "init_spawners did not complete within %i seconds. "
                 "Allowing to complete in the background.",
@@ -3036,7 +3113,7 @@ class JupyterHub(Application):
                 await Server.from_orm(service.orm.server).wait_up(
                     http=True, timeout=1, ssl_context=ssl_context
                 )
-            except TimeoutError:
+            except AnyTimeoutError:
                 if service.managed:
                     status = await service.spawner.poll()
                     if status is not None:
@@ -3073,7 +3150,12 @@ class JupyterHub(Application):
             self.last_activity_callback = pc
             pc.start()
 
-        self.log.info("JupyterHub is now running at %s", self.proxy.public_url)
+        if self.proxy.should_start:
+            self.log.info("JupyterHub is now running at %s", self.proxy.public_url)
+        else:
+            self.log.info(
+                "JupyterHub is now running, internal Hub API at %s", self.hub.url
+            )
         # Use atexit for Windows, it doesn't have signal handling support
         if _mswindows:
             atexit.register(self.atexit)
@@ -3159,9 +3241,15 @@ class JupyterHub(Application):
             loop.make_current()
             loop.run_sync(self.cleanup)
 
-    async def shutdown_cancel_tasks(self, sig):
+    async def shutdown_cancel_tasks(self, sig=None):
         """Cancel all other tasks of the event loop and initiate cleanup"""
-        self.log.critical("Received signal %s, initiating shutdown...", sig.name)
+        if sig is None:
+            self.log.critical("Initiating shutdown...")
+        else:
+            self.log.critical("Received signal %s, initiating shutdown...", sig.name)
+
+        await self.cleanup()
+
         tasks = [t for t in asyncio_all_tasks() if t is not asyncio_current_task()]
 
         if tasks:
@@ -3178,7 +3266,6 @@ class JupyterHub(Application):
             tasks = [t for t in asyncio_all_tasks()]
             for t in tasks:
                 self.log.debug("Task status: %s", t)
-        await self.cleanup()
         asyncio.get_event_loop().stop()
 
     def stop(self):
@@ -3186,7 +3273,7 @@ class JupyterHub(Application):
             return
         if self.http_server:
             self.http_server.stop()
-        self.io_loop.add_callback(self.io_loop.stop)
+        self.io_loop.add_callback(self.shutdown_cancel_tasks)
 
     async def start_show_config(self):
         """Async wrapper around base start_show_config method"""
@@ -582,9 +582,13 @@ class Authenticator(LoggingConfigurable):
             or None if Authentication failed.
 
             The Authenticator may return a dict instead, which MUST have a
-            key `name` holding the username, and MAY have two optional keys
-            set: `auth_state`, a dictionary of of auth state that will be
-            persisted; and `admin`, the admin setting value for the user.
+            key `name` holding the username, and MAY have additional keys:
+
+            - `auth_state`, a dictionary of of auth state that will be
+              persisted;
+            - `admin`, the admin setting value for the user
+            - `groups`, the list of group names the user should be a member of,
+              if Authenticator.manage_groups is True.
         """
 
     def pre_spawn_start(self, user, spawner):
@@ -635,6 +639,19 @@ class Authenticator(LoggingConfigurable):
         """
         self.allowed_users.discard(user.name)
 
+    manage_groups = Bool(
+        False,
+        config=True,
+        help="""Let authenticator manage user groups
+
+        If True, Authenticator.authenticate and/or .refresh_user
+        may return a list of group names in the 'groups' field,
+        which will be assigned to the user.
+
+        All group-assignment APIs are disabled if this is True.
+        """,
+    )
+
     auto_login = Bool(
         False,
         config=True,
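With `manage_groups` enabled, group membership travels in the same dict that `authenticate()` already returns. A sketch of a custom Authenticator (class name, credential check, and group names are invented for illustration):

    from jupyterhub.auth import Authenticator

    class TeamAuthenticator(Authenticator):
        # hypothetical subclass for illustration
        manage_groups = True

        async def authenticate(self, handler, data):
            username = data["username"]
            # ... verify data["password"] against your identity provider here ...
            return {
                "name": username,
                # with manage_groups enabled, this list is applied to the user's groups
                "groups": ["researchers", "gpu-users"],
            }
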
@@ -958,16 +975,24 @@ class PAMAuthenticator(LocalAuthenticator):
     ).tag(config=True)
 
     open_sessions = Bool(
-        True,
+        False,
         help="""
         Whether to open a new PAM session when spawners are started.
 
-        This may trigger things like mounting shared filsystems,
-        loading credentials, etc. depending on system configuration,
-        but it does not always work.
+        This may trigger things like mounting shared filesystems,
+        loading credentials, etc. depending on system configuration.
+
+        The lifecycle of PAM sessions is not correct,
+        so many PAM session configurations will not work.
 
         If any errors are encountered when opening/closing PAM sessions,
         this is automatically set to False.
+
+        .. versionchanged:: 2.2
+
+            Due to longstanding problems in the session lifecycle,
+            this is now disabled by default.
+            You may opt-in to opening sessions by setting this to True.
         """,
     ).tag(config=True)
 
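Deployments that actually rely on PAM sessions (for example to mount filesystems when servers start) can opt back in explicitly:

    # jupyterhub_config.py
    c.PAMAuthenticator.open_sessions = True
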
@@ -1173,3 +1198,22 @@ class DummyAuthenticator(Authenticator):
                 return data['username']
             return None
         return data['username']
+
+
+class NullAuthenticator(Authenticator):
+    """Null Authenticator for JupyterHub
+
+    For cases where authentication should be disabled,
+    e.g. only allowing access via API tokens.
+
+    .. versionadded:: 2.0
+    """
+
+    # auto_login skips 'Login with...' page on Hub 0.8
+    auto_login = True
+
+    # for Hub 0.7, show 'login with...'
+    login_service = 'null'
+
+    def get_handlers(self, app):
+        return []
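Selecting the new class turns off interactive login entirely, so all access has to come through API tokens issued out of band. A `jupyterhub_config.py` sketch, assuming the class is importable as `jupyterhub.auth.NullAuthenticator`:

    # jupyterhub_config.py
    c.JupyterHub.authenticator_class = "jupyterhub.auth.NullAuthenticator"
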
@@ -45,9 +45,12 @@ from ..metrics import ServerSpawnStatus
 from ..metrics import ServerStopStatus
 from ..metrics import TOTAL_USERS
 from ..objects import Server
+from ..scopes import needs_scope
 from ..spawner import LocalProcessSpawner
 from ..user import User
+from ..utils import AnyTimeoutError
 from ..utils import get_accepted_mimetype
+from ..utils import get_browser_protocol
 from ..utils import maybe_future
 from ..utils import url_path_join
 
@@ -70,6 +73,12 @@ SESSION_COOKIE_NAME = 'jupyterhub-session-id'
 class BaseHandler(RequestHandler):
     """Base Handler class with access to common methods and properties."""
 
+    # by default, only accept cookie-based authentication
+    # The APIHandler base class enables token auth
+    # versionadded: 2.0
+    _accept_cookie_auth = True
+    _accept_token_auth = False
+
     async def prepare(self):
         """Identify the user during the prepare stage of each request
 
@@ -339,6 +348,7 @@ class BaseHandler(RequestHandler):
             auth_info['auth_state'] = await user.get_auth_state()
         return await self.auth_to_user(auth_info, user)
 
+    @functools.lru_cache()
     def get_token(self):
         """get token from authorization header"""
         token = self.get_auth_token()
@@ -409,9 +419,11 @@ class BaseHandler(RequestHandler):
     async def get_current_user(self):
         """get current username"""
         if not hasattr(self, '_jupyterhub_user'):
+            user = None
             try:
-                user = self.get_current_user_token()
-                if user is None:
+                if self._accept_token_auth:
+                    user = self.get_current_user_token()
+                if user is None and self._accept_cookie_auth:
                     user = self.get_current_user_cookie()
                 if user and isinstance(user, User):
                     user = await self.refresh_auth(user)
@@ -490,7 +502,7 @@ class BaseHandler(RequestHandler):
         session_id = self.get_session_cookie()
         if session_id:
             # clear session id
-            self.clear_cookie(SESSION_COOKIE_NAME, **kwargs)
+            self.clear_cookie(SESSION_COOKIE_NAME, path=self.base_url, **kwargs)
 
             if user:
                 # user is logged in, clear any tokens associated with the current session
@@ -514,10 +526,16 @@ class BaseHandler(RequestHandler):
                 path=url_path_join(self.base_url, 'services'),
                 **kwargs,
             )
-        # clear tornado cookie
+        # clear_cookie only accepts a subset of set_cookie's kwargs
+        clear_xsrf_cookie_kwargs = {
+            key: value
+            for key, value in self.settings.get('xsrf_cookie_kwargs', {}).items()
+            if key in {"path", "domain"}
+        }
+
         self.clear_cookie(
             '_xsrf',
-            **self.settings.get('xsrf_cookie_kwargs', {}),
+            **clear_xsrf_cookie_kwargs,
         )
         # Reset _jupyterhub_user
         self._jupyterhub_user = None
@@ -569,7 +587,9 @@ class BaseHandler(RequestHandler):
         so other services on this domain can read it.
         """
         session_id = uuid.uuid4().hex
-        self._set_cookie(SESSION_COOKIE_NAME, session_id, encrypted=False)
+        self._set_cookie(
+            SESSION_COOKIE_NAME, session_id, encrypted=False, path=self.base_url
+        )
         return session_id
 
     def set_service_cookie(self, user):
@@ -620,33 +640,34 @@ class BaseHandler(RequestHandler):
         next_url = self.get_argument('next', default='')
         # protect against some browsers' buggy handling of backslash as slash
         next_url = next_url.replace('\\', '%5C')
-        if (next_url + '/').startswith(
-            (
-                f'{self.request.protocol}://{self.request.host}/',
-                f'//{self.request.host}/',
-            )
-        ) or (
+        proto = get_browser_protocol(self.request)
+        host = self.request.host
+        if next_url.startswith("///"):
+            # strip more than 2 leading // down to 2
+            # because urlparse treats that as empty netloc,
+            # whereas browsers treat more than two leading // the same as //,
+            # so netloc is the first non-/ bit
+            next_url = "//" + next_url.lstrip("/")
+        parsed_next_url = urlparse(next_url)
+
+        if (next_url + '/').startswith((f'{proto}://{host}/', f'//{host}/',)) or (
             self.subdomain_host
-            and urlparse(next_url).netloc
-            and ("." + urlparse(next_url).netloc).endswith(
+            and parsed_next_url.netloc
+            and ("." + parsed_next_url.netloc).endswith(
                 "." + urlparse(self.subdomain_host).netloc
             )
         ):
             # treat absolute URLs for our host as absolute paths:
-            # below, redirects that aren't strictly paths
-            parsed = urlparse(next_url)
-            next_url = parsed.path
-            if parsed.query:
-                next_url = next_url + '?' + parsed.query
-            if parsed.fragment:
-                next_url = next_url + '#' + parsed.fragment
+            # below, redirects that aren't strictly paths are rejected
+            next_url = parsed_next_url.path
+            if parsed_next_url.query:
+                next_url = next_url + '?' + parsed_next_url.query
+            if parsed_next_url.fragment:
+                next_url = next_url + '#' + parsed_next_url.fragment
+            parsed_next_url = urlparse(next_url)
 
         # if it still has host info, it didn't match our above check for *this* host
-        if next_url and (
-            '://' in next_url
-            or next_url.startswith('//')
-            or not next_url.startswith('/')
-        ):
+        if next_url and (parsed_next_url.netloc or not next_url.startswith('/')):
             self.log.warning("Disallowing redirect outside JupyterHub: %r", next_url)
             next_url = ''
 
@@ -759,15 +780,25 @@ class BaseHandler(RequestHandler):
         # Only set `admin` if the authenticator returned an explicit value.
         if admin is not None and admin != user.admin:
             user.admin = admin
+        # always ensure default roles ('user', 'admin' if admin) are assigned
+        # after a successful login
         roles.assign_default_roles(self.db, entity=user)
-        self.db.commit()
+
+        # apply authenticator-managed groups
+        if self.authenticator.manage_groups:
+            group_names = authenticated.get("groups")
+            if group_names is not None:
+                user.sync_groups(group_names)
+
         # always set auth_state and commit,
         # because there could be key-rotation or clearing of previous values
         # going on.
         if not self.authenticator.enable_auth_state:
             # auth_state is not enabled. Force None.
             auth_state = None
+
         await user.save_auth_state(auth_state)
+
         return user
 
     async def login_user(self, data=None):
@@ -781,6 +812,7 @@ class BaseHandler(RequestHandler):
             self.set_login_cookie(user)
             self.statsd.incr('login.success')
             self.statsd.timing('login.authenticate.success', auth_timer.ms)
+
             self.log.info("User logged in: %s", user.name)
             user._auth_refreshed = time.monotonic()
             return user
@@ -1019,7 +1051,7 @@ class BaseHandler(RequestHandler):
             await gen.with_timeout(
                 timedelta(seconds=self.slow_spawn_timeout), finish_spawn_future
             )
-        except gen.TimeoutError:
+        except AnyTimeoutError:
             # waiting_for_response indicates server process has started,
             # but is yet to become responsive.
             if spawner._spawn_pending and not spawner._waiting_for_response:
@@ -1166,7 +1198,7 @@ class BaseHandler(RequestHandler):
 
         try:
             await gen.with_timeout(timedelta(seconds=self.slow_stop_timeout), future)
-        except gen.TimeoutError:
+        except AnyTimeoutError:
             # hit timeout, but stop is still pending
             self.log.warning(
                 "User %s:%s server is slow to stop (timeout=%s)",
@@ -1355,7 +1387,7 @@ class UserUrlHandler(BaseHandler):
 
     **Changed Behavior as of 1.0** This handler no longer triggers a spawn. Instead, it checks if:
 
-    1. server is not active, serve page prompting for spawn (status: 503)
+    1. server is not active, serve page prompting for spawn (status: 424)
     2. server is ready (This shouldn't happen! Proxy isn't updated yet. Wait a bit and redirect.)
     3. server is active, redirect to /hub/spawn-pending to monitor launch progress
        (will redirect back when finished)
@@ -1369,12 +1401,22 @@ class UserUrlHandler(BaseHandler):
     Note that this only occurs if bob's server is not already running.
     """
 
+    # accept token auth for API requests that are probably to non-running servers
+    _accept_token_auth = True
+
     def _fail_api_request(self, user_name='', server_name=''):
         """Fail an API request to a not-running server"""
         self.log.warning(
             "Failing suspected API request to not-running server: %s", self.request.path
         )
-        self.set_status(503)
+
+        # If we got here, the server is not running. To differentiate
+        # that the *server* itself is not running, rather than just the particular
+        # resource *in* the server is not found, we return a 424 instead of a 404.
+        # We allow retaining the old behavior to support older JupyterLab versions
+        self.set_status(
+            424 if not self.app.use_legacy_stopped_server_status_code else 503
+        )
         self.set_header("Content-Type", "application/json")
 
         spawn_url = urlparse(self.request.full_url())._replace(query="")
@@ -1426,54 +1468,24 @@ class UserUrlHandler(BaseHandler):
     delete = non_get
 
     @web.authenticated
+    @needs_scope("access:servers")
     async def get(self, user_name, user_path):
         if not user_path:
             user_path = '/'
         current_user = self.current_user
-        if (
-            current_user
-            and current_user.name != user_name
-            and current_user.admin
-            and self.settings.get('admin_access', False)
-        ):
-            # allow admins to spawn on behalf of users
+        if user_name != current_user.name:
             user = self.find_user(user_name)
             if user is None:
                 # no such user
-                raise web.HTTPError(404, "No such user %s" % user_name)
+                raise web.HTTPError(404, f"No such user {user_name}")
             self.log.info(
-                "Admin %s requesting spawn on behalf of %s",
-                current_user.name,
-                user.name,
+                f"User {current_user.name} requesting spawn on behalf of {user.name}"
             )
-            admin_spawn = True
-            should_spawn = True
-            redirect_to_self = False
         else:
             user = current_user
-            admin_spawn = False
-            # For non-admins, spawn if the user requested is the current user
-            # otherwise redirect users to their own server
-            should_spawn = current_user and current_user.name == user_name
-            redirect_to_self = not should_spawn
-
-        if redirect_to_self:
-            # logged in as a different non-admin user, redirect to user's own server
-            # this is only a stop-gap for a common mistake,
-            # because the same request will be a 403
-            # if the requested server is running
-            self.statsd.incr('redirects.user_to_user', 1)
-            self.log.warning(
-                "User %s requested server for %s, which they don't own",
-                current_user.name,
-                user_name,
-            )
-            target = url_path_join(current_user.url, user_path or '')
-            if self.request.query:
-                target = url_concat(target, parse_qsl(self.request.query))
-            self.redirect(target)
-            return
 
         # If people visit /user/:user_name directly on the Hub,
         # the redirects will just loop, because the proxy is bypassed.
@@ -1517,14 +1529,10 @@ class UserUrlHandler(BaseHandler):
 
         # if request is expecting JSON, assume it's an API request and fail with 503
         # because it won't like the redirect to the pending page
-        if (
-            get_accepted_mimetype(
-                self.request.headers.get('Accept', ''),
-                choices=['application/json', 'text/html'],
-            )
-            == 'application/json'
-            or 'api' in user_path.split('/')
-        ):
+        if get_accepted_mimetype(
+            self.request.headers.get('Accept', ''),
+            choices=['application/json', 'text/html'],
+        ) == 'application/json' or 'api' in user_path.split('/'):
             self._fail_api_request(user_name, server_name)
             return
 
@@ -1539,15 +1547,17 @@ class UserUrlHandler(BaseHandler):
             self.redirect(pending_url, status=303)
             return
 
-        # if we got here, the server is not running
-        # serve a page prompting for spawn and 503 error
-        # visiting /user/:name no longer triggers implicit spawn
-        # without explicit user action
+        # If we got here, the server is not running. To differentiate
+        # that the *server* itself is not running, rather than just the particular
+        # page *in* the server is not found, we return a 424 instead of a 404.
+        # We allow retaining the old behavior to support older JupyterLab versions
         spawn_url = url_concat(
             url_path_join(self.hub.base_url, "spawn", user.escaped_name, server_name),
             {"next": self.request.uri},
         )
-        self.set_status(503)
+        self.set_status(
+            424 if not self.app.use_legacy_stopped_server_status_code else 503
+        )
 
         auth_state = await user.get_auth_state()
         html = await self.render_template(
@@ -12,6 +12,8 @@ class MetricsHandler(BaseHandler):
     Handler to serve Prometheus metrics
     """
 
+    _accept_token_auth = True
+
     @metrics_authentication
     async def get(self):
         self.set_header('Content-Type', CONTENT_TYPE_LATEST)
@@ -106,22 +106,27 @@ class SpawnHandler(BaseHandler):
         )

     @web.authenticated
-    async def get(self, for_user=None, server_name=''):
+    def get(self, user_name=None, server_name=''):
         """GET renders form for spawning with user-specified options

         or triggers spawn via redirect if there is no form.
         """
+        # two-stage to get the right signature for @require_scopes filter on user_name
+        if user_name is None:
+            user_name = self.current_user.name
+        if server_name is None:
+            server_name = ""
+        return self._get(user_name=user_name, server_name=server_name)
+
+    @needs_scope("servers")
+    async def _get(self, user_name, server_name):
+        for_user = user_name
         user = current_user = self.current_user
-        if for_user is not None and for_user != user.name:
-            if not user.admin:
-                raise web.HTTPError(
-                    403, "Only admins can spawn on behalf of other users"
-                )
-
+        if for_user != user.name:
             user = self.find_user(for_user)
             if user is None:
-                raise web.HTTPError(404, "No such user: %s" % for_user)
+                raise web.HTTPError(404, f"No such user: {for_user}")

         if server_name:
             if not self.allow_named_servers:
@@ -141,15 +146,12 @@ class SpawnHandler(BaseHandler):
             )

         if not self.allow_named_servers and user.running:
-            url = self.get_next_url(user, default=user.server_url(server_name))
+            url = self.get_next_url(user, default=user.server_url(""))
             self.log.info("User is running: %s", user.name)
             self.redirect(url)
             return

-        if server_name is None:
-            server_name = ''
-
-        spawner = user.spawners[server_name]
+        spawner = user.get_spawner(server_name, replace_failed=True)

         pending_url = self._get_pending_url(user, server_name)

@@ -189,7 +191,6 @@ class SpawnHandler(BaseHandler):
                 spawner._log_name,
             )
             options = await maybe_future(spawner.options_from_query(query_options))
-            pending_url = self._get_pending_url(user, server_name)
             return await self._wrap_spawn_single_user(
                 user, server_name, spawner, pending_url, options
             )
@@ -219,19 +220,24 @@ class SpawnHandler(BaseHandler):
         )

     @web.authenticated
-    async def post(self, for_user=None, server_name=''):
+    def post(self, user_name=None, server_name=''):
         """POST spawns with user-specified options"""
+        if user_name is None:
+            user_name = self.current_user.name
+        if server_name is None:
+            server_name = ""
+        return self._post(user_name=user_name, server_name=server_name)
+
+    @needs_scope("servers")
+    async def _post(self, user_name, server_name):
+        for_user = user_name
         user = current_user = self.current_user
-        if for_user is not None and for_user != user.name:
-            if not user.admin:
-                raise web.HTTPError(
-                    403, "Only admins can spawn on behalf of other users"
-                )
+        if for_user != user.name:
             user = self.find_user(for_user)
             if user is None:
                 raise web.HTTPError(404, "No such user: %s" % for_user)

-        spawner = user.spawners[server_name]
+        spawner = user.get_spawner(server_name, replace_failed=True)

         if spawner.ready:
             raise web.HTTPError(400, "%s is already running" % (spawner._log_name))
@@ -249,7 +255,7 @@ class SpawnHandler(BaseHandler):
         self.log.debug(
             "Triggering spawn with supplied form options for %s", spawner._log_name
         )
-        options = await maybe_future(spawner.options_from_form(form_options))
+        options = await maybe_future(spawner.run_options_from_form(form_options))
         pending_url = self._get_pending_url(user, server_name)
         return await self._wrap_spawn_single_user(
             user, server_name, spawner, pending_url, options
@@ -308,10 +314,13 @@ class SpawnHandler(BaseHandler):
             # otherwise it may cause a redirect loop
             if f.done() and f.exception():
                 exc = f.exception()
+                self.log.exception(f"Error starting server {spawner._log_name}: {exc}")
+                if isinstance(exc, web.HTTPError):
+                    # allow custom HTTPErrors to pass through
+                    raise exc
                 raise web.HTTPError(
                     500,
-                    "Error in Authenticator.pre_spawn_start: %s %s"
-                    % (type(exc).__name__, str(exc)),
+                    f"Unhandled error starting server {spawner._log_name}",
                 )
         return self.redirect(pending_url)

@@ -334,13 +343,11 @@ class SpawnPendingHandler(BaseHandler):
     """

     @web.authenticated
-    async def get(self, for_user, server_name=''):
+    @needs_scope("servers")
+    async def get(self, user_name, server_name=''):
+        for_user = user_name
         user = current_user = self.current_user
-        if for_user is not None and for_user != current_user.name:
-            if not current_user.admin:
-                raise web.HTTPError(
-                    403, "Only admins can spawn on behalf of other users"
-                )
+        if for_user != current_user.name:
             user = self.find_user(for_user)
             if user is None:
                 raise web.HTTPError(404, "No such user: %s" % for_user)
@@ -362,13 +369,9 @@ class SpawnPendingHandler(BaseHandler):
         auth_state = await user.get_auth_state()

         # First, check for previous failure.
-        if (
-            not spawner.active
-            and spawner._spawn_future
-            and spawner._spawn_future.done()
-            and spawner._spawn_future.exception()
-        ):
-            # Condition: spawner not active and _spawn_future exists and contains an Exception
+        if not spawner.active and spawner._failed:
+            # Condition: spawner not active and last spawn failed
+            # (failure is available as spawner._spawn_future.exception()).
             # Implicit spawn on /user/:name is not allowed if the user's last spawn failed.
             # We should point the user to Home if the most recent spawn failed.
             exc = spawner._spawn_future.exception()
@@ -384,6 +387,7 @@ class SpawnPendingHandler(BaseHandler):
                 server_name=server_name,
                 spawn_url=spawn_url,
                 failed=True,
+                failed_html_message=getattr(exc, 'jupyterhub_html_message', ''),
                 failed_message=getattr(exc, 'jupyterhub_message', ''),
                 exception=exc,
             )
@@ -465,6 +469,7 @@ class AdminHandler(BaseHandler):
             named_server_limit_per_user=self.named_server_limit_per_user,
             server_version=f'{__version__} {self.version_hash}',
             api_page_limit=self.settings["api_page_default_limit"],
+            base_url=self.settings["base_url"],
         )
         self.finish(html)

@@ -493,7 +498,7 @@ class TokenPageHandler(BaseHandler):
                 continue
             if not token.client_id:
                 # token should have been deleted when client was deleted
-                self.log.warning("Deleting stale oauth token {token}")
+                self.log.warning(f"Deleting stale oauth token {token}")
                 self.db.delete(token)
                 self.db.commit()
                 continue
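The hunks above replace the old per-handler admin check with a two-stage handler: a synchronous `get`/`post` fills in defaults and then delegates to a private coroutine whose signature exposes the argument the scope decorator filters on. A minimal, self-contained sketch of that delegation pattern follows; the `needs_scope_on` decorator and `Handler` class here are illustrative stand-ins, not JupyterHub's own APIs.

```python
import asyncio
import inspect
from functools import wraps


def needs_scope_on(param):
    """Toy stand-in for a scope decorator: it requires the wrapped function
    to expose `param` in its signature so it can read the bound value."""
    def decorator(func):
        sig = inspect.signature(func)
        assert param in sig.parameters, f"{func.__name__} must accept {param!r}"

        @wraps(func)
        async def wrapped(self, *args, **kwargs):
            bound = sig.bind(self, *args, **kwargs)
            # a real implementation would check permissions here
            print(f"checking access for {param}={bound.arguments[param]!r}")
            return await func(self, *args, **kwargs)

        return wrapped

    return decorator


class Handler:
    current_user = "alice"

    # public entrypoint: fill in defaults, then delegate
    def get(self, user_name=None):
        if user_name is None:
            user_name = self.current_user
        return self._get(user_name=user_name)

    @needs_scope_on("user_name")
    async def _get(self, user_name):
        return f"spawn page for {user_name}"


print(asyncio.run(Handler().get()))
```

The point of the two stages is that the decorator can resolve its filter from an explicit keyword argument, while the public handler keeps its permissive default signature.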
@@ -44,6 +44,7 @@ from . import utils
 from .metrics import CHECK_ROUTES_DURATION_SECONDS
 from .metrics import PROXY_POLL_DURATION_SECONDS
 from .objects import Server
+from .utils import AnyTimeoutError
 from .utils import exponential_backoff
 from .utils import url_path_join
 from jupyterhub.traitlets import Command
@@ -718,7 +719,7 @@ class ConfigurableHTTPProxy(Proxy):
             _check_process()
             try:
                 await server.wait_up(1)
-            except TimeoutError:
+            except AnyTimeoutError:
                 continue
             else:
                 break
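The proxy hunk swaps the bare `TimeoutError` for `AnyTimeoutError` from `jupyterhub.utils`. The diff does not show its definition, but a plausible shape is simply a tuple of the timeout exception types that differ across Python and asyncio versions, which `except` accepts directly. A hedged sketch:

```python
import asyncio

# Assumption: an alias like this catches both the builtin and asyncio timeouts,
# which are distinct classes on Python < 3.11.
AnyTimeoutError = (TimeoutError, asyncio.TimeoutError)


async def wait_up(attempts=3):
    for i in range(attempts):
        try:
            # simulate a health check that never answers in time
            await asyncio.wait_for(asyncio.sleep(10), timeout=0.01)
        except AnyTimeoutError:
            print(f"attempt {i + 1} timed out, retrying")
            continue
        else:
            break


asyncio.run(wait_up())
```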
@@ -2,6 +2,7 @@
 # Copyright (c) Jupyter Development Team.
 # Distributed under the terms of the Modified BSD License.
 import re
+from functools import wraps
 from itertools import chain

 from sqlalchemy import func
@@ -44,6 +45,7 @@ def get_default_roles():
                 'access:services',
                 'access:servers',
                 'read:roles',
+                'read:metrics',
             ],
         },
         {
@@ -57,7 +59,7 @@ def get_default_roles():
         {
             'name': 'token',
             'description': 'Token with same permissions as its owner',
-            'scopes': ['all'],
+            'scopes': ['inherit'],
         },
     ]
     return default_roles
@@ -89,6 +91,7 @@ def expand_self_scope(name):
         'users:activity',
         'read:users:activity',
         'servers',
+        'delete:servers',
         'read:servers',
         'tokens',
         'read:tokens',
@@ -213,7 +216,7 @@ def _check_scopes(*args, rolename=None):
       or
       scopes (list): list of scopes to check

    Raises KeyError if scope does not exist
-    Raises NameError if scope does not exist
+    Raises KeyError if scope does not exist
    """

    allowed_scopes = set(scopes.scope_definitions.keys())
@@ -227,35 +230,17 @@ def _check_scopes(*args, rolename=None):
    for scope in args:
        scopename, _, filter_ = scope.partition('!')
        if scopename not in allowed_scopes:
-            raise NameError(f"Scope '{scope}' {log_role} does not exist")
+            if scopename == "all":
+                raise KeyError("Draft scope 'all' is now called 'inherit'")
+            raise KeyError(f"Scope '{scope}' {log_role} does not exist")
        if filter_:
            full_filter = f"!{filter_}"
            if not any(f in scope for f in allowed_filters):
-                raise NameError(
+                raise KeyError(
                    f"Scope filter '{full_filter}' in scope '{scope}' {log_role} does not exist"
                )


-def _overwrite_role(role, role_dict):
-    """Overwrites role's description and/or scopes with role_dict if role not 'admin'"""
-    for attr in role_dict.keys():
-        if attr == 'description' or attr == 'scopes':
-            if role.name == 'admin':
-                admin_role_spec = [
-                    r for r in get_default_roles() if r['name'] == 'admin'
-                ][0]
-                if role_dict[attr] != admin_role_spec[attr]:
-                    raise ValueError(
-                        'admin role description or scopes cannot be overwritten'
-                    )
-            else:
-                if role_dict[attr] != getattr(role, attr):
-                    setattr(role, attr, role_dict[attr])
-                    app_log.info(
-                        'Role %r %r attribute has been changed', role.name, attr
-                    )
-
-
 _role_name_pattern = re.compile(r'^[a-z][a-z0-9\-_~\.]{1,253}[a-z0-9]$')


@@ -290,6 +275,17 @@ def create_role(db, role_dict):
    description = role_dict.get('description')
    scopes = role_dict.get('scopes')

+    if name == "admin":
+        for _role in get_default_roles():
+            if _role["name"] == "admin":
+                admin_spec = _role
+                break
+        for key in ["description", "scopes"]:
+            if key in role_dict and role_dict[key] != admin_spec[key]:
+                raise ValueError(
+                    f"Cannot override admin role admin.{key} = {role_dict[key]}"
+                )
+
    # check if the provided scopes exist
    if scopes:
        _check_scopes(*scopes, rolename=role_dict['name'])
@@ -303,8 +299,22 @@ def create_role(db, role_dict):
        if role_dict not in default_roles:
            app_log.info('Role %s added to database', name)
    else:
-        _overwrite_role(role, role_dict)
+        for attr in ["description", "scopes"]:
+            try:
+                new_value = role_dict[attr]
+            except KeyError:
+                continue
+            old_value = getattr(role, attr)
+            if new_value != old_value:
+                setattr(role, attr, new_value)
+                app_log.info(
+                    f'Role attribute {role.name}.{attr} has been changed',
+                )
+                app_log.debug(
+                    f'Role attribute {role.name}.{attr} changed from %r to %r',
+                    old_value,
+                    new_value,
+                )

    db.commit()


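The `create_role` hunk replaces the old `_overwrite_role` helper with an inline guard that refuses changes to the default admin role's description or scopes. A small standalone sketch of that guard, using a hypothetical `admin_spec` dict in place of the real default-role lookup:

```python
def check_admin_override(role_dict, admin_spec):
    """Refuse changes to the default admin role's description or scopes,
    while ignoring keys that are absent or unchanged."""
    for key in ("description", "scopes"):
        if key in role_dict and role_dict[key] != admin_spec[key]:
            raise ValueError(f"Cannot override admin role admin.{key} = {role_dict[key]}")


admin_spec = {"description": "Admin role", "scopes": ["admin:users", "admin:groups"]}
check_admin_override({"description": "Admin role"}, admin_spec)  # unchanged: allowed
try:
    check_admin_override({"scopes": ["shutdown"]}, admin_spec)
except ValueError as e:
    print(e)
```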
@@ -321,81 +331,64 @@ def delete_role(db, rolename):
        db.commit()
        app_log.info('Role %s has been deleted', rolename)
    else:
-        raise NameError('Cannot remove role %r that does not exist', rolename)
+        raise KeyError('Cannot remove role %r that does not exist', rolename)


-def existing_only(func):
-    """Decorator for checking if objects and roles exist"""
+def _existing_only(func):
+    """Decorator for checking if roles exist"""

-    def _check_existence(db, entity, rolename):
+    @wraps(func)
+    def _check_existence(db, entity, role=None, *, rolename=None):
+        if isinstance(role, str):
+            rolename = role
+        if rolename is not None:
+            # if given as a str, lookup role by name
            role = orm.Role.find(db, rolename)
-        if entity is None:
-            raise ValueError(
-                f"{entity!r} of kind {type(entity).__name__!r} does not exist"
-            )
-        elif role is None:
-            raise ValueError("Role %r does not exist" % rolename)
-        else:
-            func(db, entity, role)
+            if role is None:
+                raise ValueError(f"Role {rolename} does not exist")
+        return func(db, entity, role)

    return _check_existence


-@existing_only
-def grant_role(db, entity, rolename):
+@_existing_only
+def grant_role(db, entity, role):
    """Adds a role for users, services, groups or tokens"""
    if isinstance(entity, orm.APIToken):
        entity_repr = entity
    else:
        entity_repr = entity.name

-    if rolename not in entity.roles:
-        entity.roles.append(rolename)
+    if role not in entity.roles:
+        entity.roles.append(role)
        db.commit()
        app_log.info(
            'Adding role %s for %s: %s',
-            rolename.name,
+            role.name,
            type(entity).__name__,
            entity_repr,
        )


-@existing_only
-def strip_role(db, entity, rolename):
+@_existing_only
+def strip_role(db, entity, role):
    """Removes a role for users, services, groups or tokens"""
    if isinstance(entity, orm.APIToken):
        entity_repr = entity
    else:
        entity_repr = entity.name
-    if rolename in entity.roles:
-        entity.roles.remove(rolename)
+    if role in entity.roles:
+        entity.roles.remove(role)
        db.commit()
        app_log.info(
            'Removing role %s for %s: %s',
-            rolename.name,
+            role.name,
            type(entity).__name__,
            entity_repr,
        )


-def _switch_default_role(db, obj, admin):
-    """Switch between default user/service and admin roles for users/services"""
-    user_role = orm.Role.find(db, 'user')
-    admin_role = orm.Role.find(db, 'admin')
-
-    def add_and_remove(db, obj, current_role, new_role):
-        if current_role in obj.roles:
-            strip_role(db, entity=obj, rolename=current_role.name)
-        # only add new default role if the user has no other roles
-        if len(obj.roles) < 1:
-            grant_role(db, entity=obj, rolename=new_role.name)
-
-    if admin:
-        add_and_remove(db, obj, user_role, admin_role)
-    else:
-        add_and_remove(db, obj, admin_role, user_role)
-
-
 def _token_allowed_role(db, token, role):
    """Checks if requested role for token does not grant the token
    higher permissions than the token's owner has
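The renamed `_existing_only` decorator above now accepts either a role object or a role name and fails early when the name cannot be resolved. The following sketch shows that shape with a toy `Role` registry standing in for the ORM; none of these names are JupyterHub's.

```python
from functools import wraps


class Role:
    registry = {}

    def __init__(self, name):
        self.name = name
        Role.registry[name] = self

    @classmethod
    def find(cls, name):
        return cls.registry.get(name)


def existing_only(func):
    """Accept a Role or a role name; look names up and fail early if missing."""
    @wraps(func)
    def wrapped(entity, role=None, *, rolename=None):
        if isinstance(role, str):
            rolename = role
        if rolename is not None:
            role = Role.find(rolename)
            if role is None:
                raise ValueError(f"Role {rolename} does not exist")
        return func(entity, role)

    return wrapped


@existing_only
def grant_role(entity, role):
    entity.setdefault("roles", []).append(role.name)
    return entity


user = {}
Role("admin")
print(grant_role(user, "admin"))       # lookup by name
print(grant_role(user, Role("user")))  # pass the object directly
```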
@@ -410,58 +403,73 @@ def _token_allowed_role(db, token, role):
    if owner is None:
        raise ValueError(f"Owner not found for {token}")

+    if role in owner.roles:
+        # shortcut: token is assigned an exact role the owner has
+        return True
+
    expanded_scopes = _get_subscopes(role, owner=owner)

-    implicit_permissions = {'all', 'read:all'}
+    implicit_permissions = {'inherit', 'read:inherit'}
    explicit_scopes = expanded_scopes - implicit_permissions
-    # ignore horizontal filters
-    no_filter_scopes = {
-        scope.split('!', 1)[0] if '!' in scope else scope for scope in explicit_scopes
-    }
    # find the owner's scopes
    expanded_owner_scopes = expand_roles_to_scopes(owner)
-    # ignore horizontal filters
-    no_filter_owner_scopes = {
-        scope.split('!', 1)[0] if '!' in scope else scope
-        for scope in expanded_owner_scopes
-    }
-    disallowed_scopes = no_filter_scopes.difference(no_filter_owner_scopes)
+    allowed_scopes = scopes._intersect_expanded_scopes(
+        explicit_scopes, expanded_owner_scopes, db
+    )
+    disallowed_scopes = explicit_scopes.difference(allowed_scopes)
    if not disallowed_scopes:
        # no scopes requested outside owner's own scopes
        return True
    else:
        app_log.warning(
-            f"Token requesting scopes exceeding owner {owner.name}: {disallowed_scopes}"
+            f"Token requesting role {role.name} with scopes not held by owner {owner.name}: {disallowed_scopes}"
        )
        return False


def assign_default_roles(db, entity):
-    """Assigns default role to an entity:
-    users and services get 'user' role, or admin role if they have admin flag
+    """Assigns default role(s) to an entity:
    tokens get 'token' role

+    users and services get 'admin' role if they are admin (removed if they are not)
+
+    users always get 'user' role
    """
    if isinstance(entity, orm.Group):
-        pass
-    elif isinstance(entity, orm.APIToken):
-        app_log.debug('Assigning default roles to tokens')
+        return
+    if isinstance(entity, orm.APIToken):
+        app_log.debug('Assigning default role to token')
        default_token_role = orm.Role.find(db, 'token')
        if not entity.roles and (entity.user or entity.service) is not None:
            default_token_role.tokens.append(entity)
            app_log.info('Added role %s to token %s', default_token_role.name, entity)
        db.commit()
-    # users and services can have 'user' or 'admin' roles as default
+    # users and services all have 'user' role by default
+    # and optionally 'admin' as well
    else:
        kind = type(entity).__name__
-        app_log.debug(f'Assigning default roles to {kind} {entity.name}')
-        _switch_default_role(db, entity, entity.admin)
+        app_log.debug(f'Assigning default role to {kind} {entity.name}')
+        if entity.admin:
+            grant_role(db, entity=entity, rolename="admin")
+        else:
+            admin_role = orm.Role.find(db, 'admin')
+            if admin_role in entity.roles:
+                strip_role(db, entity=entity, rolename="admin")
+        if kind == "User":
+            grant_role(db, entity=entity, rolename="user")


def update_roles(db, entity, roles):
-    """Updates object's roles checking for requested permissions
-    if object is orm.APIToken
+    """Add roles to an entity (token, user, etc.)
+
+    If it is an API token, check role permissions against token owner
+    prior to assignment to avoid permission expansion.
+
+    Otherwise, it just calls `grant_role` for each role.
    """
-    standard_permissions = {'all', 'read:all'}
    for rolename in roles:
        if isinstance(entity, orm.APIToken):
            role = orm.Role.find(db, rolename)
@@ -474,12 +482,11 @@ def update_roles(db, entity, roles):
                    app_log.info('Adding role %s to token: %s', role.name, entity)
                else:
                    raise ValueError(
-                        f'Requested token role {rolename} of {entity} has more permissions than the token owner'
+                        f'Requested token role {rolename} for {entity} has more permissions than the token owner'
                    )
            else:
-                raise NameError('Role %r does not exist' % rolename)
+                raise KeyError(f'Role {rolename} does not exist')
        else:
-            app_log.debug('Assigning default roles to %s', type(entity).__name__)
            grant_role(db, entity=entity, rolename=rolename)
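The `_token_allowed_role` change keeps the same invariant: a token may never hold scopes its owner does not. A minimal sketch of that check with plain sets; the real code intersects fully expanded, filtered scopes via `_intersect_expanded_scopes`, which this stand-in does not attempt.

```python
def token_scopes_allowed(requested, owner_scopes):
    """Return (ok, disallowed): any requested scope outside the owner's
    expanded scopes disqualifies the role (set logic only, no filters)."""
    disallowed = set(requested) - set(owner_scopes)
    return not disallowed, disallowed


ok, extra = token_scopes_allowed(
    {"read:users", "admin:users"},
    {"read:users", "users", "inherit"},
)
print(ok, extra)  # False {'admin:users'}
```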
@@ -30,19 +30,22 @@ scope_definitions = {
        'description': 'Your own resources',
        'doc_description': 'The user’s own resources _(metascope for users, resolves to (no_scope) for services)_',
    },
-    'all': {
+    'inherit': {
        'description': 'Anything you have access to',
        'doc_description': 'Everything that the token-owning entity can access _(metascope for tokens)_',
    },
    'admin:users': {
        'description': 'Read, write, create and delete users and their authentication state, not including their servers or tokens.',
-        'subscopes': ['admin:auth_state', 'users', 'read:roles:users'],
+        'subscopes': ['admin:auth_state', 'users', 'read:roles:users', 'delete:users'],
    },
    'admin:auth_state': {'description': 'Read a user’s authentication state.'},
    'users': {
        'description': 'Read and write permissions to user models (excluding servers, tokens and authentication state).',
        'subscopes': ['read:users', 'list:users', 'users:activity'],
    },
+    'delete:users': {
+        'description': "Delete users.",
+    },
    'list:users': {
        'description': 'List users, including at least their names.',
        'subscopes': ['read:users:name'],
@@ -76,12 +79,13 @@ scope_definitions = {
    'admin:server_state': {'description': 'Read and write users’ server state.'},
    'servers': {
        'description': 'Start and stop user servers.',
-        'subscopes': ['read:servers'],
+        'subscopes': ['read:servers', 'delete:servers'],
    },
    'read:servers': {
        'description': 'Read users’ names and their server models (excluding the server state).',
        'subscopes': ['read:users:name'],
    },
+    'delete:servers': {'description': "Stop and delete users' servers."},
    'tokens': {
        'description': 'Read, write, create and delete user tokens.',
        'subscopes': ['read:tokens'],
@@ -89,7 +93,7 @@ scope_definitions = {
    'read:tokens': {'description': 'Read user tokens.'},
    'admin:groups': {
        'description': 'Read and write group information, create and delete groups.',
-        'subscopes': ['groups', 'read:roles:groups'],
+        'subscopes': ['groups', 'read:roles:groups', 'delete:groups'],
    },
    'groups': {
        'description': 'Read and write group information, including adding/removing users to/from groups.',
@@ -104,6 +108,9 @@ scope_definitions = {
        'subscopes': ['read:groups:name'],
    },
    'read:groups:name': {'description': 'Read group names.'},
+    'delete:groups': {
+        'description': "Delete groups.",
+    },
    'list:services': {
        'description': 'List services, including at least their names.',
        'subscopes': ['read:services:name'],
@@ -124,6 +131,9 @@ scope_definitions = {
        'description': 'Read information about the proxy’s routing table, sync the Hub with the proxy and notify the Hub about a new proxy.'
    },
    'shutdown': {'description': 'Shutdown the hub.'},
+    'read:metrics': {
+        'description': "Read prometheus metrics.",
+    },
}


@@ -288,7 +298,7 @@ def get_scopes_for(orm_object):
        )

    if isinstance(orm_object, orm.APIToken):
-        app_log.warning(f"Authenticated with token {orm_object}")
+        app_log.debug(f"Authenticated with token {orm_object}")
        owner = orm_object.user or orm_object.service
        token_scopes = roles.expand_roles_to_scopes(orm_object)
        if orm_object.client_id != "jupyterhub":
@@ -310,13 +320,13 @@ def get_scopes_for(orm_object):

        owner_scopes = roles.expand_roles_to_scopes(owner)

-        if token_scopes == {'all'}:
-            # token_scopes is only 'all', return owner scopes as-is
+        if token_scopes == {'inherit'}:
+            # token_scopes is only 'inherit', return scopes inherited from owner as-is
            # short-circuit common case where we don't need to compute an intersection
            return owner_scopes

-        if 'all' in token_scopes:
-            token_scopes.remove('all')
+        if 'inherit' in token_scopes:
+            token_scopes.remove('inherit')
            token_scopes |= owner_scopes

        intersection = _intersect_expanded_scopes(
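The `get_scopes_for` hunk renames the draft `all` metascope to `inherit` and keeps the short-circuit for tokens that carry only that metascope. The sketch below shows the same control flow with plain set intersection standing in for the real filtered intersection; the function name and simplified semantics are illustrative only.

```python
def resolve_token_scopes(token_scopes, owner_scopes):
    """'inherit' handling: a bare {'inherit'} token gets exactly the owner's
    scopes; otherwise 'inherit' expands to the owner's scopes and the result
    is capped at what the owner actually holds."""
    token_scopes = set(token_scopes)
    if token_scopes == {"inherit"}:
        # short-circuit: nothing to intersect
        return set(owner_scopes)
    if "inherit" in token_scopes:
        token_scopes.discard("inherit")
        token_scopes |= set(owner_scopes)
    return token_scopes & set(owner_scopes)


print(resolve_token_scopes({"inherit"}, {"read:users", "servers"}))
print(resolve_token_scopes({"read:users", "shutdown"}, {"read:users", "servers"}))
```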
@@ -3,10 +3,24 @@
 Tokens are sent to the Hub for verification.
 The Hub replies with a JSON model describing the authenticated user.

-``HubAuth`` can be used in any application, even outside tornado.
+This contains two levels of authentication:

-``HubAuthenticated`` is a mixin class for tornado handlers that should
-authenticate with the Hub.
+- :class:`HubOAuth` - Use OAuth 2 to authenticate browsers with the Hub.
+  This should be used for any service that should respond to browser requests
+  (i.e. most services).
+
+- :class:`HubAuth` - token-only authentication, for a service that only need to handle token-authenticated API requests
+
+The ``Auth`` classes (:class:`HubAuth`, :class:`HubOAuth`)
+can be used in any application, even outside tornado.
+They contain reference implementations of talking to the Hub API
+to resolve a token to a user.
+
+The ``Authenticated`` classes (:class:`HubAuthenticated`, :class:`HubOAuthenticated`)
+are mixins for tornado handlers that should authenticate with the Hub.
+
+If you are using OAuth, you will also need to register an oauth callback handler to complete the oauth process.
+A tornado implementation is provided in :class:`HubOAuthCallbackHandler`.
+
 """
 import base64
@@ -39,6 +53,7 @@ from traitlets import validate
 from traitlets.config import SingletonConfigurable

 from ..scopes import _intersect_expanded_scopes
+from ..utils import get_browser_protocol
 from ..utils import url_path_join


@@ -212,6 +227,7 @@ class HubAuth(SingletonConfigurable):
         help="""The base API URL of the Hub.

         Typically `http://hub-ip:hub-port/hub/api`
+        Default: $JUPYTERHUB_API_URL
         """,
     ).tag(config=True)

@@ -227,7 +243,10 @@ class HubAuth(SingletonConfigurable):
         os.getenv('JUPYTERHUB_API_TOKEN', ''),
         help="""API key for accessing Hub API.

-        Generate with `jupyterhub token [username]` or add to JupyterHub.services config.
+        Default: $JUPYTERHUB_API_TOKEN
+
+        Loaded from services configuration in jupyterhub_config.
+        Will be auto-generated for hub-managed services.
         """,
     ).tag(config=True)

@@ -236,6 +255,7 @@ class HubAuth(SingletonConfigurable):
         help="""The URL prefix for the Hub itself.

         Typically /hub/
+        Default: $JUPYTERHUB_BASE_URL
         """,
     ).tag(config=True)

@@ -481,11 +501,17 @@ class HubAuth(SingletonConfigurable):
     auth_header_name = 'Authorization'
     auth_header_pat = re.compile(r'(?:token|bearer)\s+(.+)', re.IGNORECASE)

-    def get_token(self, handler):
-        """Get the user token from a request
+    def get_token(self, handler, in_cookie=True):
+        """Get the token authenticating a request
+
+        .. versionchanged:: 2.2
+           in_cookie added.
+           Previously, only URL params and header were considered.
+           Pass `in_cookie=False` to preserve that behavior.

         - in URL parameters: ?token=<token>
         - in header: Authorization: token <token>
+        - in cookie (stored after oauth), if in_cookie is True
         """

         user_token = handler.get_argument('token', '')
@@ -496,8 +522,14 @@ class HubAuth(SingletonConfigurable):
         )
         if m:
             user_token = m.group(1)
+        if not user_token and in_cookie:
+            user_token = self._get_token_cookie(handler)
         return user_token

+    def _get_token_cookie(self, handler):
+        """Base class doesn't store tokens in cookies"""
+        return None
+
     def _get_user_cookie(self, handler):
         """Get the user model from a cookie"""
         # overridden in HubOAuth to store the access token after oauth
@@ -533,8 +565,10 @@ class HubAuth(SingletonConfigurable):
         handler._cached_hub_user = user_model = None
         session_id = self.get_session_id(handler)

-        # check token first
-        token = self.get_token(handler)
+        # check token first, ignoring cookies
+        # because some checks are different when a request
+        # is token-authenticated (CORS-related)
+        token = self.get_token(handler, in_cookie=False)
         if token:
             user_model = self.user_for_token(token, session_id=session_id)
             if user_model:
@@ -594,11 +628,18 @@ class HubOAuth(HubAuth):
         """
         return self.cookie_name + '-oauth-state'

-    def _get_user_cookie(self, handler):
+    def _get_token_cookie(self, handler):
+        """Base class doesn't store tokens in cookies"""
         token = handler.get_secure_cookie(self.cookie_name)
+        if token:
+            # decode cookie bytes
+            token = token.decode('ascii', 'replace')
+        return token
+
+    def _get_user_cookie(self, handler):
+        token = self._get_token_cookie(handler)
         session_id = self.get_session_id(handler)
         if token:
-            token = token.decode('ascii', 'replace')
             user_model = self.user_for_token(token, session_id=session_id)
             if user_model is None:
                 app_log.warning("Token stored in cookie may have expired")
@@ -753,7 +794,7 @@ class HubOAuth(HubAuth):
             # OAuth that doesn't complete shouldn't linger too long.
             'max_age': 600,
         }
-        if handler.request.protocol == 'https':
+        if get_browser_protocol(handler.request) == 'https':
             kwargs['secure'] = True
         # load user cookie overrides
         kwargs.update(self.cookie_options)
@@ -793,7 +834,7 @@ class HubOAuth(HubAuth):
     def set_cookie(self, handler, access_token):
         """Set a cookie recording OAuth result"""
         kwargs = {'path': self.base_url, 'httponly': True}
-        if handler.request.protocol == 'https':
+        if get_browser_protocol(handler.request) == 'https':
             kwargs['secure'] = True
         # load user cookie overrides
         kwargs.update(self.cookie_options)
@@ -854,8 +895,6 @@ class HubAuthenticated:
     Examples::

         class MyHandler(HubAuthenticated, web.RequestHandler):
-            hub_users = {'inara', 'mal'}
-
             def initialize(self, hub_auth):
                 self.hub_auth = hub_auth

@@ -865,6 +904,7 @@ class HubAuthenticated:

     """

+    # deprecated, pre-2.0 allow sets
     hub_services = None  # set of allowed services
     hub_users = None  # set of allowed users
     hub_groups = None  # set of allowed groups
@@ -960,6 +1000,10 @@ class HubAuthenticated:
             raise UserNotAllowed(model)

         # proceed with the pre-2.0 way if hub_scopes is not set
+        warnings.warn(
+            "hub_scopes ($JUPYTERHUB not set, proceeding with pre-2.0 authentication",
+            DeprecationWarning,
+        )

         if self.allow_admin and model.get('admin', False):
             app_log.debug("Allowing Hub admin %s", name)
@@ -1023,8 +1067,8 @@ class HubAuthenticated:
             self._hub_auth_user_cache = None
             raise

-        # store tokens passed via url or header in a cookie for future requests
-        url_token = self.hub_auth.get_token(self)
+        # store ?token=... tokens passed via url in a cookie for future requests
+        url_token = self.get_argument('token', '')
         if (
             user_model
             and url_token
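The `get_token` hunks establish a lookup order: the `?token=` URL parameter, then the `Authorization` header, then (optionally) the OAuth cookie. A self-contained sketch of that order, with plain dicts standing in for the tornado handler:

```python
import re

AUTH_HEADER_PAT = re.compile(r"(?:token|bearer)\s+(.+)", re.IGNORECASE)


def get_token(url_args, headers, cookie=None, in_cookie=True):
    """Resolve a request token: URL param first, then Authorization header,
    then the stored cookie unless in_cookie=False (the pre-2.2 behavior)."""
    token = url_args.get("token", "")
    if not token:
        m = AUTH_HEADER_PAT.match(headers.get("Authorization", ""))
        if m:
            token = m.group(1)
    if not token and in_cookie:
        token = cookie or ""
    return token


print(get_token({}, {"Authorization": "token abc123"}))
print(get_token({}, {}, cookie="from-cookie"))
print(get_token({}, {}, cookie="from-cookie", in_cookie=False))
```

Keeping `in_cookie=False` for the first pass of `check_hub_user` preserves the distinction between token-authenticated and cookie-authenticated requests, which matters for the CORS checks mentioned in the comment above.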
@@ -1,7 +1,12 @@
 """Make a single-user app based on the environment:

 - $JUPYTERHUB_SINGLEUSER_APP, the base Application class, to be wrapped in JupyterHub authentication.
-  default: notebook.notebookapp.NotebookApp
+  default: jupyter_server.serverapp.ServerApp
+
+.. versionchanged:: 2.0
+
+   Default app changed to launch `jupyter labhub`.
+   Use JUPYTERHUB_SINGLEUSER_APP=notebook.notebookapp.NotebookApp for the legacy 'classic' notebook server.
 """
 import os

@@ -9,12 +14,55 @@ from traitlets import import_item

 from .mixins import make_singleuser_app

-JUPYTERHUB_SINGLEUSER_APP = (
-    os.environ.get("JUPYTERHUB_SINGLEUSER_APP") or "notebook.notebookapp.NotebookApp"
-)
+JUPYTERHUB_SINGLEUSER_APP = os.environ.get("JUPYTERHUB_SINGLEUSER_APP")


-App = import_item(JUPYTERHUB_SINGLEUSER_APP)
+if JUPYTERHUB_SINGLEUSER_APP:
+    App = import_item(JUPYTERHUB_SINGLEUSER_APP)
+else:
+    App = None
+    _import_error = None
+    for JUPYTERHUB_SINGLEUSER_APP in (
+        "jupyter_server.serverapp.ServerApp",
+        "notebook.notebookapp.NotebookApp",
+    ):
+        try:
+            App = import_item(JUPYTERHUB_SINGLEUSER_APP)
+        except ImportError as e:
+            if _import_error is None:
+                _import_error = e
+            continue
+        else:
+            break
+    if App is None:
+        raise _import_error


 SingleUserNotebookApp = make_singleuser_app(App)

-main = SingleUserNotebookApp.launch_instance
+
+def main():
+    """Launch a jupyterhub single-user server"""
+    if not os.environ.get("JUPYTERHUB_SINGLEUSER_APP"):
+        # app not specified, launch jupyter-labhub by default,
+        # if jupyterlab is recent enough (3.1).
+        # This is a minimally extended ServerApp that does:
+        # 1. ensure lab extension is enabled, and
+        # 2. set default URL to `/lab`
+        import re

+        _version_pat = re.compile(r"(\d+)\.(\d+)")
+        try:
+            import jupyterlab
+            from jupyterlab.labhubapp import SingleUserLabApp
+
+            m = _version_pat.match(jupyterlab.__version__)
+        except Exception:
+            m = None
+
+        if m is not None:
+            version_tuple = tuple(int(v) for v in m.groups())
+            if version_tuple >= (3, 1):
+                return SingleUserLabApp.launch_instance()
+
+    return SingleUserNotebookApp.launch_instance()
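The new `main()` above gates the JupyterLab launch on a parsed version tuple. A small standalone sketch of that gate; the helper name and the demo versions are illustrative only.

```python
import re


def lab_is_recent_enough(version, minimum=(3, 1)):
    """Parse 'major.minor' with a regex and compare as an integer tuple;
    anything unparseable counts as too old (same idea as the diff above)."""
    m = re.match(r"(\d+)\.(\d+)", version)
    if m is None:
        return False
    return tuple(int(v) for v in m.groups()) >= minimum


for v in ("3.1.0rc1", "3.0.16", "not-a-version"):
    print(v, lab_is_recent_enough(v))
```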
@@ -16,8 +16,8 @@ import random
 import secrets
 import sys
 import warnings
-from datetime import datetime
 from datetime import timezone
+from importlib import import_module
 from textwrap import dedent
 from urllib.parse import urlparse

@@ -606,10 +606,34 @@ class SingleUserNotebookAppMixin(Configurable):
             t = self.hub_activity_interval * (1 + 0.2 * (random.random() - 0.5))
             await asyncio.sleep(t)

+    def _log_app_versions(self):
+        """Log application versions at startup
+
+        Logs versions of jupyterhub and singleuser-server base versions (jupyterlab, jupyter_server, notebook)
+        """
+        self.log.info(f"Starting jupyterhub single-user server version {__version__}")
+
+        # don't log these package versions
+        seen = {"jupyterhub", "traitlets", "jupyter_core", "builtins"}
+
+        for cls in self.__class__.mro():
+            module_name = cls.__module__.partition(".")[0]
+            if module_name not in seen:
+                seen.add(module_name)
+                try:
+                    mod = import_module(module_name)
+                    mod_version = getattr(mod, "__version__")
+                except Exception:
+                    mod_version = ""
+                self.log.info(
+                    f"Extending {cls.__module__}.{cls.__name__} from {module_name} {mod_version}"
+                )
+
     def initialize(self, argv=None):
         # disable trash by default
         # this can be re-enabled by config
         self.config.FileContentsManager.delete_to_trash = False
+        self._log_app_versions()
         return super().initialize(argv)

     def start(self):
@@ -655,6 +679,7 @@ class SingleUserNotebookAppMixin(Configurable):
         s['hub_prefix'] = self.hub_prefix
         s['hub_host'] = self.hub_host
         s['hub_auth'] = self.hub_auth
+        s['page_config_hook'] = self.page_config_hook
         csp_report_uri = s['csp_report_uri'] = self.hub_host + url_path_join(
             self.hub_prefix, 'security/csp-report'
         )
@@ -682,6 +707,18 @@ class SingleUserNotebookAppMixin(Configurable):
         self.patch_default_headers()
         self.patch_templates()

+    def page_config_hook(self, handler, page_config):
+        """JupyterLab page config hook
+
+        Adds JupyterHub info to page config.
+
+        Places the JupyterHub API token in PageConfig.token.
+
+        Only has effect on jupyterlab_server >=2.9
+        """
+        page_config["token"] = self.hub_auth.get_token(handler) or ""
+        return page_config
+
     def patch_default_headers(self):
         if hasattr(RequestHandler, '_orig_set_default_headers'):
             return
@@ -715,6 +752,18 @@ class SingleUserNotebookAppMixin(Configurable):
         orig_loader = env.loader
         env.loader = ChoiceLoader([FunctionLoader(get_page), orig_loader])

+    def load_server_extensions(self):
+        # Loading LabApp sets $JUPYTERHUB_API_TOKEN on load, which is incorrect
+        r = super().load_server_extensions()
+        # clear the token in PageConfig at this step
+        # so that cookie auth is used
+        # FIXME: in the future,
+        # it would probably make sense to set page_config.token to the token
+        # from the current request.
+        if 'page_config_data' in self.web_app.settings:
+            self.web_app.settings['page_config_data']['token'] = ''
+        return r
+

 def detect_base_package(App):
     """Detect the base package for an App class
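`_log_app_versions` above walks the method resolution order and reports which top-level package each base class comes from. A standalone sketch of that MRO walk, runnable against any object (the stdlib `json.JSONDecoder` is used only as a demo subject):

```python
from importlib import import_module


def describe_bases(obj, skip=("builtins",)):
    """Walk the MRO and print each base class's top-level package and its
    __version__ when one is available."""
    seen = set(skip)
    for cls in type(obj).mro():
        module_name = cls.__module__.partition(".")[0]
        if module_name in seen:
            continue
        seen.add(module_name)
        try:
            version = getattr(import_module(module_name), "__version__", "")
        except Exception:
            version = ""
        print(f"{cls.__module__}.{cls.__name__} from {module_name} {version}")


import json  # any class with an interesting MRO works for the demo
describe_bases(json.JSONDecoder())
```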
@@ -11,12 +11,11 @@ import shutil
 import signal
 import sys
 import warnings
+from inspect import signature
 from subprocess import Popen
 from tempfile import mkdtemp
 from urllib.parse import urlparse

-if os.name == 'nt':
-    import psutil
 from async_generator import aclosing
 from sqlalchemy import inspect
 from tornado.ioloop import PeriodicCallback
@@ -38,12 +37,14 @@ from .objects import Server
 from .traitlets import ByteSpecification
 from .traitlets import Callable
 from .traitlets import Command
+from .utils import AnyTimeoutError
 from .utils import exponential_backoff
 from .utils import maybe_future
 from .utils import random_port
 from .utils import url_path_join

-# FIXME: remove when we drop Python 3.5 support
+if os.name == 'nt':
+    import psutil


 def _quote_safe(s):
@@ -96,10 +97,15 @@ class Spawner(LoggingConfigurable):

         Used in logging for consistency with named servers.
         """
-        if self.name:
-            return f'{self.user.name}:{self.name}'
+        if self.user:
+            user_name = self.user.name
         else:
-            return self.user.name
+            # no user, only happens in mock tests
+            user_name = "(no user)"
+        if self.name:
+            return f"{user_name}:{self.name}"
+        else:
+            return user_name

     @property
     def _failed(self):
@@ -183,17 +189,38 @@ class Spawner(LoggingConfigurable):
     def last_activity(self):
         return self.orm_spawner.last_activity

+    # Spawner.server is a wrapper of the ORM orm_spawner.server
+    # make sure it's always in sync with the underlying state
+    # this is harder to do with traitlets,
+    # which do not run on every access, only on set and first-get
+    _server = None
+
     @property
     def server(self):
-        if hasattr(self, '_server'):
+        # always check that we're in sync with orm_spawner
+        if not self.orm_spawner:
+            # no ORM spawner, nothing to check
+            return self._server
+
+        orm_server = self.orm_spawner.server
+
+        if orm_server is not None and (
+            self._server is None or orm_server is not self._server.orm_server
+        ):
+            # self._server is not connected to orm_spawner
+            self._server = Server(orm_server=self.orm_spawner.server)
+        elif orm_server is None:
+            # no ORM server, clear it
+            self._server = None
         return self._server
-        if self.orm_spawner and self.orm_spawner.server:
-            return Server(orm_server=self.orm_spawner.server)

     @server.setter
     def server(self, server):
         self._server = server
-        if self.orm_spawner:
+        if self.orm_spawner is not None:
+            if server is not None and server.orm_server == self.orm_spawner.server:
+                # no change
+                return
             if self.orm_spawner.server is not None:
                 # delete the old value
                 db = inspect(self.orm_spawner.server).session
@@ -201,7 +228,13 @@ class Spawner(LoggingConfigurable):
             if server is None:
                 self.orm_spawner.server = None
             else:
+                if server.orm_server is None:
+                    self.log.warning(f"No ORM server for {self._log_name}")
                 self.orm_spawner.server = server.orm_server
+        elif server is not None:
+            self.log.warning(
+                f"Setting Spawner.server for {self._log_name} with no underlying orm_spawner"
+            )

     @property
     def name(self):
@@ -424,6 +457,13 @@ class Spawner(LoggingConfigurable):
     def _default_options_from_form(self, form_data):
         return form_data

+    def run_options_from_form(self, form_data):
+        sig = signature(self.options_from_form)
+        if 'spawner' in sig.parameters:
+            return self.options_from_form(form_data, spawner=self)
+        else:
+            return self.options_from_form(form_data)
+
     def options_from_query(self, query_data):
         """Interpret query arguments passed to /spawn

@@ -836,9 +876,6 @@ class Spawner(LoggingConfigurable):

         if self.server:
             base_url = self.server.base_url
-            if self.ip or self.port:
-                self.server.ip = self.ip
-                self.server.port = self.port
             env['JUPYTERHUB_SERVICE_PREFIX'] = self.server.base_url
         else:
             # this should only occur in mock/testing scenarios
@@ -1263,7 +1300,7 @@ class Spawner(LoggingConfigurable):
                 timeout=timeout,
             )
             return r
-        except TimeoutError:
+        except AnyTimeoutError:
             return False


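The new `run_options_from_form` above uses `inspect.signature` so that newer `options_from_form` callables receive a `spawner=` argument while older one-argument callables keep working. A minimal sketch of that back-compat dispatch; the names below are illustrative, not JupyterHub's own.

```python
from inspect import signature


def call_options_from_form(callback, form_data, spawner=None):
    """Pass spawner= only if the callback's signature accepts it, so old
    one-argument callbacks keep working unchanged."""
    if "spawner" in signature(callback).parameters:
        return callback(form_data, spawner=spawner)
    return callback(form_data)


def old_style(form_data):
    return {"profile": form_data.get("profile", ["default"])[0]}


def new_style(form_data, spawner=None):
    return {"profile": form_data["profile"][0], "user": getattr(spawner, "user", None)}


form = {"profile": ["gpu"]}
print(call_options_from_form(old_style, form))
print(call_options_from_form(new_style, form, spawner=None))
```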
@@ -57,12 +57,14 @@ from .utils import add_user
 _db = None


-def pytest_collection_modifyitems(items):
+def _pytest_collection_modifyitems(items):
     """This function is automatically run by pytest passing all collected test
     functions.

     We use it to add asyncio marker to all async tests and assert we don't use
     test functions that are async generators which wouldn't make sense.
+
+    It is no longer required with pytest-asyncio >= 0.17
     """
     for item in items:
         if inspect.iscoroutinefunction(item.obj):
@@ -70,6 +72,13 @@ def pytest_collection_modifyitems(items):
             assert not inspect.isasyncgenfunction(item.obj)


+if sys.version_info < (3, 7):
+    # apply pytest-asyncio's 'auto' mode on Python 3.6.
+    # 'auto' mode is new in pytest-asyncio 0.17,
+    # which requires Python 3.7.
+    pytest_collection_modifyitems = _pytest_collection_modifyitems
+
+
 @fixture(scope='module')
 def ssl_tmpdir(tmpdir_factory):
     return tmpdir_factory.mktemp('ssl')
@@ -182,6 +191,8 @@ def cleanup_after(request, io_loop):
     if not MockHub.initialized():
         return
     app = MockHub.instance()
+    if app.db_file.closed:
+        return
     for uid, user in list(app.users.items()):
         for name, spawner in list(user.spawners.items()):
             if spawner.active:
@@ -333,26 +333,28 @@ class MockHub(JupyterHub):
         roles.assign_default_roles(self.db, entity=user)
         self.db.commit()

-    def stop(self):
-        super().stop()
+    _stop_called = False

+    def stop(self):
+        if self._stop_called:
+            return
+        self._stop_called = True
         # run cleanup in a background thread
         # to avoid multiple eventloops in the same thread errors from asyncio

         def cleanup():
-            asyncio.set_event_loop(asyncio.new_event_loop())
-            loop = IOLoop.current()
-            loop.run_sync(self.cleanup)
+            loop = asyncio.new_event_loop()
+            loop.run_until_complete(self.cleanup())
             loop.close()

-        pool = ThreadPoolExecutor(1)
+        with ThreadPoolExecutor(1) as pool:
             f = pool.submit(cleanup)
             # wait for cleanup to finish
             f.result()
-        pool.shutdown()

-        # ignore the call that will fire in atexit
-        self.cleanup = lambda: None
+        # prevent redundant atexit from running
+        self._atexit_ran = True
+        super().stop()
         self.db_file.close()

     async def login_user(self, name):
@@ -9,6 +9,7 @@ from datetime import timedelta
 from unittest import mock
 from urllib.parse import quote
 from urllib.parse import urlparse
+from urllib.parse import urlunparse

 from pytest import fixture
 from pytest import mark
@@ -65,7 +66,15 @@ async def test_auth_api(app):
     assert r.status_code == 403


-async def test_cors_checks(app):
+@mark.parametrize(
+    "content_type, status",
+    [
+        ("text/plain", 403),
+        # accepted, but invalid
+        ("application/json; charset=UTF-8", 400),
+    ],
+)
+async def test_post_content_type(app, content_type, status):
     url = ujoin(public_host(app), app.hub.base_url)
     host = urlparse(url).netloc
     # add admin user
@@ -74,42 +83,6 @@ async def test_cors_checks(app):
     user = add_user(app.db, name='admin', admin=True)
     cookies = await app.login_user('admin')

-    r = await api_request(
-        app, 'users', headers={'Authorization': '', 'Referer': 'null'}, cookies=cookies
-    )
-    assert r.status_code == 403
-
-    r = await api_request(
-        app,
-        'users',
-        headers={
-            'Authorization': '',
-            'Referer': 'http://attack.com/csrf/vulnerability',
-        },
-        cookies=cookies,
-    )
-    assert r.status_code == 403
-
-    r = await api_request(
-        app,
-        'users',
-        headers={'Authorization': '', 'Referer': url, 'Host': host},
-        cookies=cookies,
-    )
-    assert r.status_code == 200
-
-    r = await api_request(
-        app,
-        'users',
-        headers={
-            'Authorization': '',
-            'Referer': ujoin(url, 'foo/bar/baz/bat'),
-            'Host': host,
-        },
-        cookies=cookies,
-    )
-    assert r.status_code == 200
-
     r = await api_request(
         app,
         'users',
@@ -117,24 +90,115 @@ async def test_cors_checks(app):
         data='{}',
         headers={
             "Authorization": "",
-            "Content-Type": "text/plain",
+            "Content-Type": content_type,
         },
         cookies=cookies,
     )
-    assert r.status_code == 403
+    assert r.status_code == status


+@mark.parametrize(
+    "host, referer, extraheaders, status",
+    [
+        ('$host', '$url', {}, 200),
+        (None, None, {}, 200),
+        (None, 'null', {}, 403),
+        (None, 'http://attack.com/csrf/vulnerability', {}, 403),
+        ('$host', {"path": "/user/someuser"}, {}, 403),
+        ('$host', {"path": "{path}/foo/bar/subpath"}, {}, 200),
+        # mismatch host
+        ("mismatch.com", "$url", {}, 403),
+        # explicit host, matches
+        ("fake.example", {"netloc": "fake.example"}, {}, 200),
+        # explicit port, matches implicit port
+        ("fake.example:80", {"netloc": "fake.example"}, {}, 200),
+        # explicit port, mismatch
+        ("fake.example:81", {"netloc": "fake.example"}, {}, 403),
+        # implicit ports, mismatch proto
+        ("fake.example", {"netloc": "fake.example", "scheme": "https"}, {}, 403),
+        # explicit ports, match
+        ("fake.example:81", {"netloc": "fake.example:81"}, {}, 200),
+        # Test proxy protocol defined headers taken into account by utils.get_browser_protocol
+        (
+            "fake.example",
+            {"netloc": "fake.example", "scheme": "https"},
+            {'X-Scheme': 'https'},
+            200,
+        ),
+        (
+            "fake.example",
+            {"netloc": "fake.example", "scheme": "https"},
+            {'X-Forwarded-Proto': 'https'},
+            200,
+        ),
+        (
+            "fake.example",
+            {"netloc": "fake.example", "scheme": "https"},
+            {
+                'Forwarded': 'host=fake.example;proto=https,for=1.2.34;proto=http',
+                'X-Scheme': 'http',
+            },
+            200,
+        ),
+        (
+            "fake.example",
+            {"netloc": "fake.example", "scheme": "https"},
+            {
+                'Forwarded': 'host=fake.example;proto=http,for=1.2.34;proto=http',
+                'X-Scheme': 'https',
+            },
+            403,
+        ),
+        ("fake.example", {"netloc": "fake.example"}, {'X-Scheme': 'https'}, 403),
+        ("fake.example", {"netloc": "fake.example"}, {'X-Scheme': 'https, http'}, 403),
+    ],
+)
+async def test_cors_check(request, app, host, referer, extraheaders, status):
+    url = ujoin(public_host(app), app.hub.base_url)
+    real_host = urlparse(url).netloc
+    if host == "$host":
+        host = real_host
+
+    if referer == '$url':
+        referer = url
+    elif isinstance(referer, dict):
+        parsed_url = urlparse(url)
+        # apply {}
+        url_ns = {key: getattr(parsed_url, key) for key in parsed_url._fields}
+        for key, value in referer.items():
+            referer[key] = value.format(**url_ns)
+        referer = urlunparse(parsed_url._replace(**referer))
+
+    # disable default auth header, cors is for cookie auth
+    headers = {"Authorization": ""}
+    if host is not None:
+        headers['X-Forwarded-Host'] = host
+    if referer is not None:
+        headers['Referer'] = referer
+    headers.update(extraheaders)
+
+    # add admin user
+    user = find_user(app.db, 'admin')
+    if user is None:
+        user = add_user(app.db, name='admin', admin=True)
+    cookies = await app.login_user('admin')
+
+    # test custom forwarded_host_header behavior
+    app.forwarded_host_header = 'X-Forwarded-Host'
+
+    # reset the config after the test to avoid leaking state
+    def reset_header():
+        app.forwarded_host_header = ""
+
+    request.addfinalizer(reset_header)
+
     r = await api_request(
         app,
         'users',
-        method='post',
-        data='{}',
-        headers={
-            "Authorization": "",
-            "Content-Type": "application/json; charset=UTF-8",
-        },
+        headers=headers,
         cookies=cookies,
     )
-    assert r.status_code == 400 # accepted, but invalid
+    assert r.status_code == status


 # --------------
@@ -160,6 +224,8 @@ def normalize_user(user):
     """
     for key in ('created', 'last_activity'):
         user[key] = normalize_timestamp(user[key])
+    if 'roles' in user:
+        user['roles'] = sorted(user['roles'])
     if 'servers' in user:
         for server in user['servers'].values():
             for key in ('started', 'last_activity'):
@@ -212,7 +278,12 @@ async def test_get_users(app):
     }
     assert users == [
         fill_user(
-            {'name': 'admin', 'admin': True, 'roles': ['admin'], 'auth_state': None}
+            {
+                'name': 'admin',
+                'admin': True,
+                'roles': ['admin', 'user'],
+                'auth_state': None,
+            }
         ),
         fill_user(user_model),
     ]
@@ -400,6 +471,42 @@ async def test_get_users_state_filter(app, state):
     assert usernames == expected


+@mark.user
+async def test_get_users_name_filter(app):
+    db = app.db
+
+    add_user(db, app=app, name='q')
+    add_user(db, app=app, name='qr')
+    add_user(db, app=app, name='qrs')
+    add_user(db, app=app, name='qrst')
+    added_usernames = {'q', 'qr', 'qrs', 'qrst'}
+
+    r = await api_request(app, 'users')
+    assert r.status_code == 200
+    response_users = [u.get("name") for u in r.json()]
+    assert added_usernames.intersection(response_users) == added_usernames
+
+    r = await api_request(app, 'users?name_filter=q')
+    assert r.status_code == 200
+    response_users = [u.get("name") for u in r.json()]
+    assert response_users == ['q', 'qr', 'qrs', 'qrst']
+
+    r = await api_request(app, 'users?name_filter=qr')
+    assert r.status_code == 200
+    response_users = [u.get("name") for u in r.json()]
+    assert response_users == ['qr', 'qrs', 'qrst']
+
+    r = await api_request(app, 'users?name_filter=qrs')
+    assert r.status_code == 200
+    response_users = [u.get("name") for u in r.json()]
+    assert response_users == ['qrs', 'qrst']
+
+    r = await api_request(app, 'users?name_filter=qrst')
+    assert r.status_code == 200
+    response_users = [u.get("name") for u in r.json()]
+    assert response_users == ['qrst']
+
+
 @mark.user
 async def test_get_self(app):
     db = app.db
@@ -597,7 +704,7 @@ async def test_add_multi_user_admin(app):
         assert user is not None
         assert user.name == name
         assert user.admin
-        assert orm.Role.find(db, 'user') not in user.roles
+        assert orm.Role.find(db, 'user') in user.roles
         assert orm.Role.find(db, 'admin') in user.roles


@@ -637,7 +744,7 @@ async def test_add_admin(app):
     assert user.name == name
     assert user.admin
     # assert newadmin has default 'admin' role
-    assert orm.Role.find(db, 'user') not in user.roles
+    assert orm.Role.find(db, 'user') in user.roles
     assert orm.Role.find(db, 'admin') in user.roles


@@ -672,7 +779,7 @@ async def test_make_admin(app):
     assert user is not None
     assert user.name == name
     assert user.admin
-    assert orm.Role.find(db, 'user') not in user.roles
+    assert orm.Role.find(db, 'user') in user.roles
    assert orm.Role.find(db, 'admin') in user.roles


@@ -959,7 +1066,7 @@ async def test_never_spawn(app, no_patience, never_spawn):
     assert not app_user.spawner._spawn_pending
     status = await app_user.spawner.poll()
     assert status is not None
-    # failed spawn should decrements pending count
+    # failed spawn should decrement pending count
     assert app.users.count_active_users()['pending'] == 0


@@ -968,9 +1075,21 @@ async def test_bad_spawn(app, bad_spawn):
     name = 'prim'
     user = add_user(db, app=app, name=name)
     r = await api_request(app, 'users', name, 'server', method='post')
+    # check that we don't re-use spawners that failed
+    user.spawners[''].reused = True
     assert r.status_code == 500
     assert app.users.count_active_users()['pending'] == 0
+
+    r = await api_request(app, 'users', name, 'server', method='post')
+    # check that we don't re-use spawners that failed
+    spawner = user.spawners['']
+    assert not getattr(spawner, 'reused', False)


+async def test_spawn_nosuch_user(app):
+    r = await api_request(app, 'users', "nosuchuser", 'server', method='post')
+    assert r.status_code == 404
+
+
 async def test_slow_bad_spawn(app, no_patience, slow_bad_spawn):
     db = app.db
@@ -1366,8 +1485,8 @@ async def test_get_new_token_deprecated(app, headers, status):
 @mark.parametrize(
     "headers, status, note, expires_in",
     [
-        ({}, 200, 'test note', None),
-        ({}, 200, '', 100),
+        ({}, 201, 'test note', None),
+        ({}, 201, '', 100),
         ({'Authorization': 'token bad'}, 403, '', None),
     ],
 )
@@ -1386,7 +1505,7 @@ async def test_get_new_token(app, headers, status, note, expires_in):
         app, 'users/admin/tokens', method='post', headers=headers, data=body
     )
     assert r.status_code == status
-    if status != 200:
+    if status != 201:
         return
     # check the new-token reply
     reply = r.json()
@@ -1424,10 +1543,10 @@ async def test_get_new_token(app, headers, status, note, expires_in):
 @mark.parametrize(
     "as_user, for_user, status",
     [
-        ('admin', 'other', 200),
+        ('admin', 'other', 201),
         ('admin', 'missing', 403),
         ('user', 'other', 403),
-        ('user', 'user', 200),
+        ('user', 'user', 201),
     ],
 )
 async def test_token_for_user(app, as_user, for_user, status):
@@ -1448,7 +1567,7 @@ async def test_token_for_user(app, as_user, for_user, status):
     )
     assert r.status_code == status
     reply = r.json()
-    if status != 200:
+    if status != 201:
         return
     assert 'token' in reply

@@ -1486,7 +1605,7 @@ async def test_token_authenticator_noauth(app):
         data=json.dumps(data) if data else None,
         noauth=True,
     )
-    assert r.status_code == 200
+    assert r.status_code == 201
     reply = r.json()
     assert 'token' in reply
     r = await api_request(app, 'authorizations', 'token', reply['token'])
@@ -1509,7 +1628,7 @@ async def test_token_authenticator_dict_noauth(app):
         data=json.dumps(data) if data else None,
         noauth=True,
     )
-    assert r.status_code == 200
+    assert r.status_code == 201
     reply = r.json()
     assert 'token' in reply
     r = await api_request(app, 'authorizations', 'token', reply['token'])
@@ -1730,6 +1849,38 @@ async def test_group_add_delete_users(app):
     assert sorted(u.name for u in group.users) == sorted(names[2:])


+@mark.group
+async def test_auth_managed_groups(request, app, group, user):
+    group.users.append(user)
+    app.db.commit()
+    app.authenticator.manage_groups = True
+    request.addfinalizer(lambda: setattr(app.authenticator, "manage_groups", False))
+    # create groups
+    r = await api_request(app, 'groups', method='post')
+    assert r.status_code == 400
+    r = await api_request(app, 'groups/newgroup', method='post')
+    assert r.status_code == 400
+    # delete groups
+    r = await api_request(app, f'groups/{group.name}', method='delete')
+    assert r.status_code == 400
+    # add users to group
+    r = await api_request(
+        app,
+        f'groups/{group.name}/users',
+        method='post',
+        data=json.dumps({"users": [user.name]}),
+    )
+    assert r.status_code == 400
+    # remove users from group
+    r = await api_request(
+        app,
+        f'groups/{group.name}/users',
+        method='delete',
+        data=json.dumps({"users": [user.name]}),
+    )
+    assert r.status_code == 400
+
+
 # -----------------
 # Service API tests
 # -----------------
@@ -1953,14 +2104,23 @@ def test_shutdown(app):
         )
         return r

-    real_stop = loop.stop
+    real_stop = loop.asyncio_loop.stop

     def stop():
         stop.called = True
         loop.call_later(1, real_stop)

-    with mock.patch.object(loop, 'stop', stop):
+    real_cleanup = app.cleanup
+
+    def cleanup():
+        cleanup.called = True
+        return real_cleanup()
+
+    app.cleanup = cleanup
+
+    with mock.patch.object(loop.asyncio_loop, 'stop', stop):
         r = loop.run_sync(shutdown, timeout=5)
     r.raise_for_status()
     reply = r.json()
+    assert cleanup.called
     assert stop.called
@@ -6,7 +6,6 @@ import os
 import re
 import sys
 import time
-from distutils.version import LooseVersion as V
 from subprocess import check_output
 from subprocess import PIPE
 from subprocess import Popen
@@ -33,7 +32,7 @@ def test_help_all():
     assert '--JupyterHub.ip' in out


-@pytest.mark.skipif(V(traitlets.__version__) < V('5'), reason="requires traitlets 5")
+@pytest.mark.skipif(traitlets.version_info < (5,), reason="requires traitlets 5")
 def test_show_config(tmpdir):
     tmpdir.chdir()
     p = Popen(
@@ -247,6 +246,7 @@ async def test_load_groups(tmpdir, request):
     kwargs['internal_certs_location'] = str(tmpdir)
     hub = MockHub(**kwargs)
     hub.init_db()
+    await hub.init_role_creation()
     await hub.init_users()
     await hub.init_groups()
     db = hub.db
@@ -3,15 +3,20 @@
 # Distributed under the terms of the Modified BSD License.
 import logging
 from unittest import mock
+from urllib.parse import urlparse

 import pytest
 from requests import HTTPError
+from traitlets import Any
 from traitlets.config import Config

 from .mocking import MockPAMAuthenticator
 from .mocking import MockStructGroup
 from .mocking import MockStructPasswd
 from .utils import add_user
+from .utils import async_requests
+from .utils import get_page
+from .utils import public_url
 from jupyterhub import auth
 from jupyterhub import crypto
 from jupyterhub import orm
@@ -515,3 +520,80 @@ def test_deprecated_methods_subclass():
     assert authenticator.check_whitelist("subclass-allowed")
     assert not authenticator.check_allowed("otheruser")
     assert not authenticator.check_whitelist("otheruser")
+
+
+async def test_nullauthenticator(app):
+    with mock.patch.dict(
+        app.tornado_settings, {"authenticator": auth.NullAuthenticator(parent=app)}
+    ):
+        r = await async_requests.get(public_url(app))
+    assert urlparse(r.url).path.endswith("/hub/login")
+    assert r.status_code == 403
+
+
+class MockGroupsAuthenticator(auth.Authenticator):
+    authenticated_groups = Any()
+    refresh_groups = Any()
+
+    manage_groups = True
+
+    def authenticate(self, handler, data):
+        return {
+            "name": data["username"],
+            "groups": self.authenticated_groups,
+        }
+
+    async def refresh_user(self, user, handler):
+        return {
+            "name": user.name,
+            "groups": self.refresh_groups,
+        }
+
+
+@pytest.mark.parametrize(
+    "authenticated_groups, refresh_groups",
+    [
+        (None, None),
+        (["auth1"], None),
+        (None, ["auth1"]),
+        (["auth1"], ["auth1", "auth2"]),
+        (["auth1", "auth2"], ["auth1"]),
+        (["auth1", "auth2"], ["auth3"]),
+        (["auth1", "auth2"], ["auth3"]),
+    ],
+)
+async def test_auth_managed_groups(
+    app, user, group, authenticated_groups, refresh_groups
+):
+
+    authenticator = MockGroupsAuthenticator(
+        parent=app,
+        authenticated_groups=authenticated_groups,
+        refresh_groups=refresh_groups,
+    )
+
+    user.groups.append(group)
+    app.db.commit()
+    before_groups = [group.name]
+    if authenticated_groups is None:
+        expected_authenticated_groups = before_groups
+    else:
+        expected_authenticated_groups = authenticated_groups
+    if refresh_groups is None:
+        expected_refresh_groups = expected_authenticated_groups
+    else:
+        expected_refresh_groups = refresh_groups
+
+    with mock.patch.dict(app.tornado_settings, {"authenticator": authenticator}):
+        cookies = await app.login_user(user.name)
+        assert not app.db.dirty
+        groups = sorted(g.name for g in user.groups)
+        assert groups == expected_authenticated_groups
+
+        # force refresh_user on next request
+        user._auth_refreshed -= 10 + app.authenticator.auth_refresh_age
+        r = await get_page('home', app, cookies=cookies, allow_redirects=False)
+        assert r.status_code == 200
+        assert not app.db.dirty
+        groups = sorted(g.name for g in user.groups)
+        assert groups == expected_refresh_groups
@@ -1,16 +1,13 @@
 """Tests for jupyterhub internal_ssl connections"""
 import sys
 import time
-from subprocess import check_output
 from unittest import mock
-from urllib.parse import urlparse

 import pytest
 from requests.exceptions import ConnectionError
 from requests.exceptions import SSLError
-from tornado import gen

-import jupyterhub
+from ..utils import AnyTimeoutError
 from .test_api import add_user
 from .utils import async_requests

@@ -35,7 +32,7 @@ async def wait_for_spawner(spawner, timeout=10):
         assert status is None
         try:
             await wait()
-        except TimeoutError:
+        except AnyTimeoutError:
             continue
         else:
             break
@@ -1,9 +1,13 @@
 import json
+from unittest import mock

-from .utils import add_user
+import pytest
+
 from .utils import api_request
+from .utils import get_page
 from jupyterhub import metrics
 from jupyterhub import orm
+from jupyterhub import roles


 async def test_total_users(app):
@@ -32,3 +36,42 @@ async def test_total_users(app):

     sample = metrics.TOTAL_USERS.collect()[0].samples[0]
     assert sample.value == num_users
+
+
+@pytest.mark.parametrize(
+    "authenticate_prometheus, authenticated, authorized, success",
+    [
+        (True, True, True, True),
+        (True, True, False, False),
+        (True, False, False, False),
+        (False, True, True, True),
+        (False, False, False, True),
+    ],
+)
+async def test_metrics_auth(
+    app,
+    authenticate_prometheus,
+    authenticated,
+    authorized,
+    success,
+    create_temp_role,
+    user,
+):
+    if authorized:
+        role = create_temp_role(["read:metrics"])
+        roles.grant_role(app.db, user, role)
+
+    headers = {}
+    if authenticated:
+        token = user.new_api_token()
+        headers["Authorization"] = f"token {token}"
+
+    with mock.patch.dict(
+        app.tornado_settings, {"authenticate_prometheus": authenticate_prometheus}
+    ):
+        r = await get_page("metrics", app, headers=headers)
+    if success:
+        assert r.status_code == 200
+    else:
+        assert r.status_code == 403
+        assert 'read:metrics' in r.text
@@ -12,6 +12,7 @@ from tornado.escape import url_escape
 from tornado.httputil import url_concat

 from .. import orm
+from .. import roles
 from .. import scopes
 from ..auth import Authenticator
 from ..handlers import BaseHandler
@@ -20,7 +21,6 @@ from ..utils import url_path_join as ujoin
 from .mocking import FalsyCallableFormSpawner
 from .mocking import FormSpawner
 from .test_api import next_event
-from .utils import add_user
 from .utils import api_request
 from .utils import async_requests
 from .utils import AsyncSession
@@ -48,16 +48,16 @@ async def test_root_auth(app):
     # if spawning was quick, there will be one more entry that's public_url(user)


-async def test_root_redirect(app):
+async def test_root_redirect(app, user):
     name = 'wash'
     cookies = await app.login_user(name)
-    next_url = ujoin(app.base_url, 'user/other/test.ipynb')
+    next_url = ujoin(app.base_url, f'user/{user.name}/test.ipynb')
     url = '/?' + urlencode({'next': next_url})
     r = await get_page(url, app, cookies=cookies)
     path = urlparse(r.url).path
-    assert path == ujoin(app.base_url, 'hub/user/%s/test.ipynb' % name)
-    # serve "server not running" page, which has status 503
-    assert r.status_code == 503
+    assert path == ujoin(app.base_url, f'hub/user/{user.name}/test.ipynb')
+    # preserves choice to requested user, which 404s as unavailable without access
+    assert r.status_code == 404


 async def test_root_default_url_noauth(app):
@@ -128,11 +128,20 @@ async def test_admin_sort(app, sort):
     assert r.status_code == 200


-async def test_spawn_redirect(app):
+@pytest.mark.parametrize("last_failed", [True, False])
+async def test_spawn_redirect(app, last_failed):
     name = 'wash'
     cookies = await app.login_user(name)
     u = app.users[orm.User.find(app.db, name)]
+
+    if last_failed:
+        # mock a failed spawn
+        last_spawner = u.spawners['']
+        last_spawner._spawn_future = asyncio.Future()
+        last_spawner._spawn_future.set_exception(RuntimeError("I failed!"))
+    else:
+        last_spawner = None

     status = await u.spawner.poll()
     assert status is not None

@@ -141,6 +150,10 @@ async def test_spawn_redirect(app):
     r.raise_for_status()
     print(urlparse(r.url))
     path = urlparse(r.url).path
+
+    # ensure we got a new spawner
+    assert u.spawners[''] is not last_spawner
+
     # make sure we visited hub/spawn-pending after spawn
     # if spawn was really quick, we might get redirected all the way to the running server,
     # so check history instead of r.url
@@ -172,7 +185,7 @@ async def test_spawn_redirect(app):
     r = await get_page('user/' + name, app, hub=False, cookies=cookies)
     path = urlparse(r.url).path
     assert path == ujoin(app.base_url, 'hub/user/%s/' % name)
-    assert r.status_code == 503
+    assert r.status_code == 424


 async def test_spawn_handler_access(app):
@@ -203,13 +216,34 @@ async def test_spawn_handler_access(app):
     r.raise_for_status()


-async def test_spawn_admin_access(app, admin_access):
-    """GET /user/:name as admin with admin-access spawns user's server"""
-    cookies = await app.login_user('admin')
-    name = 'mariel'
-    user = add_user(app.db, app=app, name=name)
+@pytest.mark.parametrize("has_access", ["all", "user", "group", False])
+async def test_spawn_other_user(
+    app, user, username, group, create_temp_role, has_access
+):
+    """GET /user/:name as another user with access to spawns user's server"""
+    cookies = await app.login_user(username)
+    requester = app.users[username]
+    name = user.name
+
+    if has_access:
+        if has_access == "group":
+            group.users.append(user)
             app.db.commit()
+            scopes = [
+                f"access:servers!group={group.name}",
+                f"servers!group={group.name}",
+            ]
+        elif has_access == "all":
+            scopes = ["access:servers", "servers"]
+        elif has_access == "user":
+            scopes = [f"access:servers!user={user.name}", f"servers!user={user.name}"]
+        role = create_temp_role(scopes)
+        roles.grant_role(app.db, requester, role)
+
     r = await get_page('spawn/' + name, app, cookies=cookies)
+    if not has_access:
+        assert r.status_code == 404
+        return
     r.raise_for_status()

     while '/spawn-pending/' in r.url:
@@ -237,6 +271,25 @@ async def test_spawn_page(app):
     assert FormSpawner.options_form in r.text


+async def test_spawn_page_after_failed(app, user):
+    cookies = await app.login_user(user.name)
+
+    # mock a failed spawn
+    last_spawner = user.spawners['']
+    last_spawner._spawn_future = asyncio.Future()
+    last_spawner._spawn_future.set_exception(RuntimeError("I failed!"))
+
+    with mock.patch.dict(app.users.settings, {'spawner_class': FormSpawner}):
+        r = await get_page('spawn', app, cookies=cookies)
+        spawner = user.spawners['']
+        # make sure we didn't reuse last spawner
+        assert isinstance(spawner, FormSpawner)
+        assert spawner is not last_spawner
+        assert r.url.endswith('/spawn')
+        spawner = user.spawners['']
+        assert FormSpawner.options_form in r.text
+
+
 async def test_spawn_page_falsy_callable(app):
     with mock.patch.dict(
         app.users.settings, {'spawner_class': FalsyCallableFormSpawner}
@@ -248,14 +301,36 @@ async def test_spawn_page_falsy_callable(app):
     assert history[1] == ujoin(public_url(app), "hub/spawn-pending/erik")


-async def test_spawn_page_admin(app, admin_access):
+@pytest.mark.parametrize("has_access", ["all", "user", "group", False])
+async def test_spawn_page_access(
+    app, has_access, group, username, user, create_temp_role
+):
+    cookies = await app.login_user(username)
+    requester = app.users[username]
+    if has_access:
+        if has_access == "group":
+            group.users.append(user)
+            app.db.commit()
+            scopes = [
+                f"access:servers!group={group.name}",
+                f"servers!group={group.name}",
+            ]
+        elif has_access == "all":
+            scopes = ["access:servers", "servers"]
+        elif has_access == "user":
+            scopes = [f"access:servers!user={user.name}", f"servers!user={user.name}"]
+        role = create_temp_role(scopes)
+        roles.grant_role(app.db, requester, role)
+
     with mock.patch.dict(app.users.settings, {'spawner_class': FormSpawner}):
-        cookies = await app.login_user('admin')
-        u = add_user(app.db, app=app, name='melanie')
-        r = await get_page('spawn/' + u.name, app, cookies=cookies)
-        assert r.url.endswith('/spawn/' + u.name)
+        r = await get_page('spawn/' + user.name, app, cookies=cookies)
+        if not has_access:
+            assert r.status_code == 404
+            return
+        assert r.status_code == 200
+        assert r.url.endswith('/spawn/' + user.name)
         assert FormSpawner.options_form in r.text
-        assert f"Spawning server for {u.name}" in r.text
+        assert f"Spawning server for {user.name}" in r.text


 async def test_spawn_with_query_arguments(app):
@@ -322,18 +397,39 @@ async def test_spawn_form(app):
     }


-async def test_spawn_form_admin_access(app, admin_access):
+@pytest.mark.parametrize("has_access", ["all", "user", "group", False])
+async def test_spawn_form_other_user(
+    app, username, user, group, create_temp_role, has_access
+):
+    cookies = await app.login_user(username)
+    requester = app.users[username]
+    if has_access:
+        if has_access == "group":
+            group.users.append(user)
+            app.db.commit()
+            scopes = [
+                f"access:servers!group={group.name}",
+                f"servers!group={group.name}",
+            ]
+        elif has_access == "all":
+            scopes = ["access:servers", "servers"]
+        elif has_access == "user":
+            scopes = [f"access:servers!user={user.name}", f"servers!user={user.name}"]
+        role = create_temp_role(scopes)
+        roles.grant_role(app.db, requester, role)
+
     with mock.patch.dict(app.tornado_settings, {'spawner_class': FormSpawner}):
         base_url = ujoin(public_host(app), app.hub.base_url)
-        cookies = await app.login_user('admin')
-        u = add_user(app.db, app=app, name='martha')
-        next_url = ujoin(app.base_url, 'user', u.name, 'tree')
+        next_url = ujoin(app.base_url, 'user', user.name, 'tree')

         r = await async_requests.post(
-            url_concat(ujoin(base_url, 'spawn', u.name), {'next': next_url}),
+            url_concat(ujoin(base_url, 'spawn', user.name), {'next': next_url}),
             cookies=cookies,
             data={'bounds': ['-3', '3'], 'energy': '938MeV'},
         )
+        if not has_access:
+            assert r.status_code == 404
+            return
         r.raise_for_status()

         while '/spawn-pending/' in r.url:
@@ -342,8 +438,8 @@ async def test_spawn_form_admin_access(app, admin_access):
             r.raise_for_status()

     assert r.history
-    assert r.url.startswith(public_url(app, u))
-    assert u.spawner.user_options == {
+    assert r.url.startswith(public_url(app, user))
+    assert user.spawner.user_options == {
         'energy': '938MeV',
         'bounds': [-3, 3],
         'notspecified': 5,
@@ -498,31 +594,54 @@ async def test_user_redirect_hook(app, username):
     assert redirected_url.path == ujoin(app.base_url, 'user', username, 'terminals/1')


-async def test_user_redirect_deprecated(app, username):
-    """redirecting from /user/someonelse/ URLs (deprecated)"""
+@pytest.mark.parametrize("has_access", ["all", "user", "group", False])
+async def test_other_user_url(app, username, user, group, create_temp_role, has_access):
+    """Test accessing /user/someonelse/ URLs when the server is not running
+
+    Used to redirect to your own server,
+    which produced inconsistent behavior depending on whether the server was running.
+    """
     name = username
     cookies = await app.login_user(name)
+    other_user = user
+    requester = app.users[name]
+    other_user_url = f"/user/{other_user.name}"
+    if has_access:
+        if has_access == "group":
+            group.users.append(other_user)
+            app.db.commit()
+            scopes = [f"access:servers!group={group.name}"]
+        elif has_access == "all":
+            scopes = ["access:servers"]
+        elif has_access == "user":
+            scopes = [f"access:servers!user={other_user.name}"]
+        role = create_temp_role(scopes)
+        roles.grant_role(app.db, requester, role)
+        status = 424
+    else:
+        # 404 - access denied without revealing if the user exists
+        status = 404
+
-    r = await get_page('/user/baduser', app, cookies=cookies, hub=False)
+    r = await get_page(other_user_url, app, cookies=cookies, hub=False)
     print(urlparse(r.url))
     path = urlparse(r.url).path
-    assert path == ujoin(app.base_url, 'hub/user/%s/' % name)
-    assert r.status_code == 503
+    assert path == ujoin(app.base_url, f'hub/user/{other_user.name}/')
+    assert r.status_code == status

-    r = await get_page('/user/baduser/test.ipynb', app, cookies=cookies, hub=False)
+    r = await get_page(f'{other_user_url}/test.ipynb', app, cookies=cookies, hub=False)
     print(urlparse(r.url))
     path = urlparse(r.url).path
-    assert path == ujoin(app.base_url, 'hub/user/%s/test.ipynb' % name)
-    assert r.status_code == 503
+    assert path == ujoin(app.base_url, f'hub/user/{other_user.name}/test.ipynb')
+    assert r.status_code == status

-    r = await get_page('/user/baduser/test.ipynb', app, hub=False)
+    r = await get_page(f'{other_user_url}/test.ipynb', app, hub=False)
     r.raise_for_status()
     print(urlparse(r.url))
     path = urlparse(r.url).path
     assert path == ujoin(app.base_url, '/hub/login')
     query = urlparse(r.url).query
     assert query == urlencode(
-        {'next': ujoin(app.base_url, '/hub/user/baduser/test.ipynb')}
+        {'next': ujoin(app.base_url, f'/hub/user/{other_user.name}/test.ipynb')}
     )


@@ -578,6 +697,41 @@ async def test_login_page(app, url, params, redirected_url, form_action):
     assert action.endswith(form_action)


+@pytest.mark.parametrize(
+    "url, token_in",
+    [
+        ("/home", "url"),
+        ("/home", "header"),
+        ("/login", "url"),
+        ("/login", "header"),
+    ],
+)
+async def test_page_with_token(app, user, url, token_in):
+    cookies = await app.login_user(user.name)
+    token = user.new_api_token()
+    if token_in == "url":
+        url = url_concat(url, {"token": token})
+        headers = None
+    elif token_in == "header":
+        headers = {
+            "Authorization": f"token {token}",
+        }
+
+    # request a page with ?token= in URL shouldn't be allowed
+    r = await get_page(
+        url,
+        app,
+        headers=headers,
+        allow_redirects=False,
+    )
+    if "/hub/login" in r.url:
+        assert r.status_code == 200
+    else:
+        assert r.status_code == 302
+        assert r.headers["location"].partition("?")[0].endswith("/hub/login")
+    assert not r.cookies
+
+
 async def test_login_fail(app):
     name = 'wash'
     base_url = public_url(app)
@@ -617,6 +771,10 @@ async def test_login_strip(app):
         (False, '/user/other', '/hub/user/other', None),
         (False, '/absolute', '/absolute', None),
         (False, '/has?query#andhash', '/has?query#andhash', None),
+        # :// in query string or fragment
+        (False, '/has?repo=https/host.git', '/has?repo=https/host.git', None),
+        (False, '/has?repo=https://host.git', '/has?repo=https://host.git', None),
+        (False, '/has#repo=https://host.git', '/has#repo=https://host.git', None),
         # next_url outside is not allowed
         (False, 'relative/path', '', None),
         (False, 'https://other.domain', '', None),
@@ -656,7 +814,9 @@ async def test_login_redirect(app, running, next_url, location, params):
     if params:
         url = url_concat(url, params)
     if next_url:
-        if '//' not in next_url and next_url.startswith('/'):
+        if next_url.startswith('/') and not (
+            next_url.startswith("//") or urlparse(next_url).netloc
+        ):
             next_url = ujoin(app.base_url, next_url, '')
         url = url_concat(url, dict(next=next_url))

@@ -1061,24 +1221,18 @@ async def test_token_page(app):
 async def test_server_not_running_api_request(app):
     cookies = await app.login_user("bees")
     r = await get_page("user/bees/api/status", app, hub=False, cookies=cookies)
-    assert r.status_code == 503
+    assert r.status_code == 424
     assert r.headers["content-type"] == "application/json"
     message = r.json()['message']
     assert ujoin(app.base_url, "hub/spawn/bees") in message
     assert " /user/bees" in message


-async def test_metrics_no_auth(app):
-    r = await get_page("metrics", app)
-    assert r.status_code == 403
-
-async def test_metrics_auth(app):
-    cookies = await app.login_user('river')
-    metrics_url = ujoin(public_host(app), app.hub.base_url, 'metrics')
-    r = await get_page("metrics", app, cookies=cookies)
-    assert r.status_code == 200
-    assert r.url == metrics_url
+async def test_server_not_running_api_request_legacy_status(app):
+    app.use_legacy_stopped_server_status_code = True
+    cookies = await app.login_user("bees")
+    r = await get_page("user/bees/api/status", app, hub=False, cookies=cookies)
+    assert r.status_code == 503


 async def test_health_check_request(app):
@@ -1087,7 +1241,7 @@ async def test_health_check_request(app):


 async def test_pre_spawn_start_exc_no_form(app):
-    exc = "pre_spawn_start error"
+    exc = "Unhandled error starting server"

     # throw exception from pre_spawn_start
     async def mock_pre_spawn_start(user, spawner):
@@ -28,7 +28,7 @@ def test_orm_roles(db):
         user_role = orm.Role(name='user', scopes=['self'])
         db.add(user_role)
     if not token_role:
-        token_role = orm.Role(name='token', scopes=['all'])
+        token_role = orm.Role(name='token', scopes=['inherit'])
         db.add(token_role)
     if not service_role:
         service_role = orm.Role(name='service', scopes=[])
@@ -182,6 +182,7 @@ def test_orm_roles_delete_cascade(db):
         'admin:users',
         'admin:auth_state',
         'users',
+        'delete:users',
         'list:users',
         'read:users',
         'users:activity',
@@ -218,6 +219,7 @@ def test_orm_roles_delete_cascade(db):
     {
         'admin:groups',
         'groups',
+        'delete:groups',
         'list:groups',
         'read:groups',
         'read:roles:groups',
@@ -229,6 +231,7 @@ def test_orm_roles_delete_cascade(db):
     {
         'admin:groups',
         'groups',
+        'delete:groups',
         'list:groups',
         'read:groups',
         'read:roles:groups',
@@ -366,7 +369,7 @@ async def test_creating_roles(app, role, role_def, response_type, response):
             'info',
             app_log.info('Role user scopes attribute has been changed'),
         ),
-        ('non-existing', 'test-role2', 'error', NameError),
+        ('non-existing', 'test-role2', 'error', KeyError),
         ('default', 'user', 'error', ValueError),
     ],
 )
@@ -407,9 +410,9 @@ async def test_delete_roles(db, role_type, rolename, response_type, response):
             },
             'existing',
         ),
-        ({'name': 'test-scopes-2', 'scopes': ['uses']}, NameError),
-        ({'name': 'test-scopes-3', 'scopes': ['users:activities']}, NameError),
-        ({'name': 'test-scopes-4', 'scopes': ['groups!goup=class-A']}, NameError),
+        ({'name': 'test-scopes-2', 'scopes': ['uses']}, KeyError),
+        ({'name': 'test-scopes-3', 'scopes': ['users:activities']}, KeyError),
+        ({'name': 'test-scopes-4', 'scopes': ['groups!goup=class-A']}, KeyError),
     ],
 )
 async def test_scope_existence(tmpdir, request, role, response):
@@ -428,7 +431,7 @@ async def test_scope_existence(tmpdir, request, role, response):
         assert added_role is not None
         assert added_role.scopes == role['scopes']

-    elif response == NameError:
+    elif response == KeyError:
         with pytest.raises(response):
             roles.create_role(db, role)
         added_role = orm.Role.find(db, role['name'])
@@ -440,7 +443,14 @@ async def test_scope_existence(tmpdir, request, role, response):


 @mark.role
-async def test_load_roles_users(tmpdir, request):
+@mark.parametrize(
+    "explicit_allowed_users",
+    [
+        (True,),
+        (False,),
+    ],
+)
+async def test_load_roles_users(tmpdir, request, explicit_allowed_users):
     """Test loading predefined roles for users in app.py"""
     roles_to_load = [
         {
@@ -458,6 +468,7 @@ async def test_load_roles_users(tmpdir, request):
     hub.init_db()
     db = hub.db
     hub.authenticator.admin_users = ['admin']
+    if explicit_allowed_users:
         hub.authenticator.allowed_users = ['cyclops', 'gandalf', 'bilbo', 'gargamel']
     await hub.init_role_creation()
     await hub.init_users()
@@ -575,7 +586,7 @@ async def test_load_roles_groups(tmpdir, request):
             'name': 'head',
             'description': 'Whole user access',
             'scopes': ['users', 'admin:users'],
-            'groups': ['group3'],
+            'groups': ['group3', "group4"],
         },
     ]
     kwargs = {'load_groups': groups_to_load, 'load_roles': roles_to_load}
@@ -595,11 +606,13 @@ async def test_load_roles_groups(tmpdir, request):
     group1 = orm.Group.find(db, name='group1')
     group2 = orm.Group.find(db, name='group2')
     group3 = orm.Group.find(db, name='group3')
+    group4 = orm.Group.find(db, name='group4')

     # test group roles
     assert group1.roles == []
     assert group2 in assist_role.groups
     assert group3 in head_role.groups
+    assert group4 in head_role.groups

     # delete the test roles
     for role in roles_to_load:
@@ -658,11 +671,15 @@ async def test_load_roles_user_tokens(tmpdir, request):
     "headers, rolename, scopes, status",
     [
         # no role requested - gets default 'token' role
-        ({}, None, None, 200),
+        ({}, None, None, 201),
         # role scopes within the user's default 'user' role
-        ({}, 'self-reader', ['read:users'], 200),
+        ({}, 'self-reader', ['read:users!user'], 201),
+        # role scopes within the user's default 'user' role, but with disjoint filter
+        ({}, 'other-reader', ['read:users!user=other'], 403),
+        # role scopes within the user's default 'user' role, without filter
+        ({}, 'other-reader', ['read:users'], 403),
         # role scopes outside of the user's role but within the group's role scopes of which the user is a member
-        ({}, 'groups-reader', ['read:groups'], 200),
+        ({}, 'groups-reader', ['read:groups'], 201),
         # non-existing role request
         ({}, 'non-existing', [], 404),
         # role scopes outside of both user's role and group's role scopes
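The widened token-role cases above all turn on scope filters: a requested role is granted (201) only when every scope it carries is already covered by the requesting user's own expanded scopes, so the self-filtered `read:users!user` passes while the unfiltered `read:users` and the disjoint `read:users!user=other` are refused (403). A minimal sketch of that containment rule, assuming a simplified scope model (the helper below is illustrative, not a JupyterHub API):

```python
# Illustrative sketch only: a simplified version of the containment check
# implied by the 201/403 cases above; real JupyterHub scope resolution is richer.
def covered_by_owner(requested, owner_scopes, username):
    """Return True if every requested scope falls within the owner's scopes."""
    for scope in requested:
        if scope in owner_scopes:
            continue  # exact match, including identical filters
        # a bare '!user' filter resolves to the requesting user's own name
        if scope.endswith("!user") and f"{scope}={username}" in owner_scopes:
            continue
        return False  # unfiltered or disjoint-filtered requests are rejected
    return True


owner = {"read:users!user=wash"}  # e.g. expanded from the default 'user' role
assert covered_by_owner({"read:users!user"}, owner, "wash")            # -> 201
assert not covered_by_owner({"read:users"}, owner, "wash")             # -> 403
assert not covered_by_owner({"read:users!user=other"}, owner, "wash")  # -> 403
```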
@@ -1178,14 +1195,47 @@ async def test_no_admin_role_change():
     await hub.init_role_creation()


-async def test_user_config_respects_memberships():
-    role_spec = [
-        {
+@pytest.mark.parametrize(
+    "in_db, role_users, allowed_users, expected_members",
+    [
+        # users in the db, not specified in custom user role
+        # no change to membership
+        (["alpha", "beta"], None, None, ["alpha", "beta"]),
+        # allowed_users is additive, not strict
+        (["alpha", "beta"], None, {"gamma"}, ["alpha", "beta", "gamma"]),
+        # explicit empty revokes all assignments
+        (["alpha", "beta"], [], None, []),
+        # explicit value is respected exactly
+        (["alpha", "beta"], ["alpha", "gamma"], None, ["alpha", "gamma"]),
+    ],
+)
+async def test_user_role_from_config(
+    in_db, role_users, allowed_users, expected_members
+):
+    role_spec = {
         'name': 'user',
         'scopes': ['self', 'shutdown'],
     }
+    if role_users is not None:
+        role_spec['users'] = role_users
+    hub = MockHub(load_roles=[role_spec])
+    hub.init_db()
+    db = hub.db
+    hub.authenticator.admin_users = set()
+    if allowed_users:
+        hub.authenticator.allowed_users = allowed_users
+    await hub.init_role_creation()
+
+
+async def test_user_config_creates_default_role():
+    role_spec = [
+        {
+            'name': 'new-role',
+            'scopes': ['read:users'],
+            'users': ['not-yet-created-user'],
+        }
     ]
-    user_names = ['eddy', 'carol']
+    user_names = []
     hub = MockHub(load_roles=role_spec)
     hub.init_db()
     hub.authenticator.allowed_users = user_names
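The comments in the parametrized cases above spell out the membership rules for the default `user` role: users already in the database keep the role, `allowed_users` only adds members, and an explicit `users` list on the role (including an empty one) is applied exactly. A hedged sketch of how those knobs might look in a `jupyterhub_config.py`; the usernames are placeholders, not taken from the diff:

```python
# jupyterhub_config.py -- illustrative sketch of the membership rules above
c = get_config()  # noqa

# additive: grants access (and role membership) on top of users already in the db
c.Authenticator.allowed_users = {"gamma"}

# an explicit 'users' entry is respected exactly;
# an explicit empty list revokes the role from everyone
c.JupyterHub.load_roles = [
    {
        "name": "user",
        "scopes": ["self", "shutdown"],
        "users": ["alpha", "gamma"],  # omit this key to leave membership unchanged
    }
]
```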
@@ -1193,9 +1243,9 @@ async def test_user_config_respects_memberships():
     await hub.init_users()
     await hub.init_role_assignment()
     user_role = orm.Role.find(hub.db, 'user')
-    for user_name in user_names:
-        user = orm.User.find(hub.db, user_name)
-        assert user in user_role.users
+    new_role = orm.Role.find(hub.db, 'new-role')
+    assert orm.User.find(hub.db, 'not-yet-created-user') in new_role.users
+    assert orm.User.find(hub.db, 'not-yet-created-user') in user_role.users


 async def test_admin_role_respects_config():
@@ -1217,16 +1267,45 @@ async def test_admin_role_respects_config():
     assert user in admin_role.users


-async def test_empty_admin_spec():
-    role_spec = [{'name': 'admin', 'users': []}]
-    hub = MockHub(load_roles=role_spec)
+@pytest.mark.parametrize(
+    "in_db, role_users, admin_users, expected_members",
+    [
+        # users in the db, not specified in custom user role
+        # no change to membership
+        (["alpha", "beta"], None, None, ["alpha", "beta"]),
+        # admin_users is additive, not strict
+        (["alpha", "beta"], None, {"gamma"}, ["alpha", "beta", "gamma"]),
+        # explicit empty revokes all assignments
+        (["alpha", "beta"], [], None, []),
+        # explicit value is respected exactly
+        (["alpha", "beta"], ["alpha", "gamma"], None, ["alpha", "gamma"]),
+    ],
+)
+async def test_admin_role_membership(in_db, role_users, admin_users, expected_members):
+
+    load_roles = []
+    if role_users is not None:
+        load_roles.append({"name": "admin", "users": role_users})
+    if not admin_users:
+        admin_users = set()
+    hub = MockHub(load_roles=load_roles, db_url="sqlite:///:memory:")
     hub.init_db()
-    hub.authenticator.admin_users = []
     await hub.init_role_creation()
+    db = hub.db
+    hub.authenticator.admin_users = admin_users
+    # add in_db users to the database
+    # this is the 'before' state of who had the role before startup
+    for username in in_db or []:
+        user = orm.User(name=username)
+        db.add(user)
+        db.commit()
+        roles.grant_role(db, user, "admin")
+        db.commit()
     await hub.init_users()
     await hub.init_role_assignment()
-    admin_role = orm.Role.find(hub.db, 'admin')
-    assert not admin_role.users
+    admin_role = orm.Role.find(db, 'admin')
+    role_members = sorted(user.name for user in admin_role.users)
+    assert role_members == expected_members


 async def test_no_default_service_role():
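The admin-role test above checks the same rules for the `admin` role: `admin_users` is additive, while an explicit `users` list on the role, empty or not, is applied exactly at startup. A short hedged configuration sketch; the two settings are shown together only to contrast them, and the tests above exercise them separately:

```python
# jupyterhub_config.py -- illustrative sketch, usernames are placeholders
c = get_config()  # noqa

# additive: 'gamma' gains the admin role, existing holders keep it
c.Authenticator.admin_users = {"gamma"}

# strict: only the listed users keep the role;
# an empty list would revoke it from everyone
c.JupyterHub.load_roles = [{"name": "admin", "users": ["alpha", "gamma"]}]
```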
@@ -1327,3 +1406,20 @@ async def test_token_keep_roles_on_restart():
     for token in user.api_tokens:
         hub.db.delete(token)
     hub.db.commit()
+
+
+async def test_login_default_role(app, username):
+    cookies = await app.login_user(username)
+    user = app.users[username]
+    # assert login new user gets 'user' role
+    assert [role.name for role in user.roles] == ["user"]
+
+    # clear roles, keep user
+    user.roles = []
+    app.db.commit()
+
+    # login *again*; user exists,
+    # login should always trigger "user" role assignment
+    cookies = await app.login_user(username)
+    user = app.users[username]
+    assert [role.name for role in user.roles] == ["user"]
@@ -477,7 +477,7 @@ async def test_metascope_all_expansion(app, create_user_with_scopes):
     user = create_user_with_scopes('self')
     user.new_api_token()
     token = user.api_tokens[0]
-    # Check 'all' expansion
+    # Check 'inherit' expansion
     token_scope_set = get_scopes_for(token)
     user_scope_set = get_scopes_for(user)
     assert user_scope_set == token_scope_set
@@ -677,9 +677,14 @@ async def test_resolve_token_permissions(
     intersection_scopes,
 ):
     orm_user = create_user_with_scopes(*user_scopes).orm_user
+    # ensure user has full permissions when token is created
+    # to create tokens with permissions exceeding their owner
+    roles.grant_role(app.db, orm_user, "admin")
     create_temp_role(token_scopes, 'active-posting')
     api_token = orm_user.new_api_token(roles=['active-posting'])
     orm_api_token = orm.APIToken.find(app.db, token=api_token)
+    # drop admin so that filtering can be applied
+    roles.strip_role(app.db, orm_user, "admin")

     # get expanded !user filter scopes for check
     user_scopes = roles.expand_roles_to_scopes(orm_user)
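The two comments added above explain the fixture's trick: the owner is temporarily granted `admin` so a token can be minted with a role that exceeds the owner's own permissions, then the role is stripped so the owner/token scope intersection is what actually gets resolved. A condensed sketch of that pattern, reusing only the calls visible in the diff (the import path and helper name are assumptions):

```python
# Sketch of the grant-then-strip pattern used above; assumes an existing
# SQLAlchemy session `db` and an orm.User instance, as in the test module.
from jupyterhub import orm, roles


def mint_overprivileged_token(db, orm_user, role_name):
    """Create a token carrying a role beyond what its owner normally holds."""
    roles.grant_role(db, orm_user, "admin")   # lift the owner so creation succeeds
    api_token = orm_user.new_api_token(roles=[role_name])
    roles.strip_role(db, orm_user, "admin")   # drop it so intersection applies
    return orm.APIToken.find(db, token=api_token)
```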
Some files were not shown because too many files have changed in this diff.