Mirror of https://github.com/jupyterhub/jupyterhub.git (synced 2025-10-07 18:14:10 +00:00)

Compare commits: 151 commits
Commits in this comparison (SHA1):

b7b2558ab7 a77e57290e 506f931f15 b094381f79 e6e85eebc1 984a67932f
133dda26cc e797e31ef9 e2798a088f 3fa60e6849 aeeabbee07 999c58f584
513c61321f 715c8599b3 63e118f144 05e569cb42 d4c7d9748a d8f404d25e
4492b508a1 6221f27c19 77ae4401a1 df7ae422f6 a0dd715bf7 bfccb9af73
fd14165da3 5778d8fa48 cd51660eff 6af20e79cf 262557579f 77a6d75d70
6f3be4b697 d4bfbdfde2 10f507e83b 0bbda9a45e c8bb3a3679 1a65858968
2aa28e1a1f dbd90b1bfe 7a3ff4028a 44518d00c2 051848d1ef 5e57e0141a
6cfa789d6a 55c3211ec2 603ba309f5 6337b695bb ee9e509ab5 f0e049226d
7ffb0b0719 825e8aacea 55213f6f53 32dfe70a01 9db326fb7a 0e7689f277
b677655572 9adc871448 29d6540333 5a4949faa5 f2ab23b376 b61582420a
f11ae34b73 e91ab50d1b 4cb3a45ce4 4e8f9b4334 6131f2dbaa a9dc588454
537b2eaff6 7f8a981aed bc86e4c8f5 20f75c0018 689dc5ba24 d42a7261a4
bcbf136de2 55e9a0f5b5 d64d916abc da668b5e9a d54442ecbf c930d6bf6a
2ce263d45f 68f81fdc30 e7ab18a720 582467642c d65e2daa15 4eaa7c5eb3
02de44e551 4cdf0a65cd b0367c21f3 9d68107722 ad61c23873 c359221ef3
cc94d290ab da0a58cb9c 7ddd3b0589 ff71d09fd1 1eb0b1b073 9ea9902c76
6494017ce2 b0cd9eebe9 c3d4885521 2919aaae79 1986ba71c1 a2c39a4dbc
1e847c8710 83a8552a63 f60c633320 a5c7384228 27de930978 98e76d52bc
729aac9bd1 bc85c445ab 9f708fa10c d26c7cd6fc 0174083439 e6fc2aee4a
47513cfbd0 4e7147a495 5cfc0db0d5 eb862e2cbb 98799e4227 ea6a0e53cc
f2b42a50c8 43336f5b07 bf2d948366 271fd35bce 1d70986c25 ec017d1f1d
a8c804de5b 3578001fab b199110276 b69bba5a7d efdad701df 8a074b12b5
b5e5fe630d 5d23bf6da3 e5a8939481 0eca901c65 4a1964f881 131094b5ff
4544a98fb9 cbacdecb1e 64d8b2adc9 9c83c15f67 d2a545a01e a376f33af1
6f8a49569b a4c553a5c5 41445cffb4 dafd2d67f6 823ab58f3a ab7883e5c3
8fd1fb3234
.github/workflows/release.yml (vendored): 74 changes
@@ -30,16 +30,16 @@ on:
jobs:
build-release:
runs-on: ubuntu-20.04
runs-on: ubuntu-22.04
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
with:
python-version: "3.9"
python-version: "3.11"

- uses: actions/setup-node@v3
- uses: actions/setup-node@v4
with:
node-version: "14"
node-version: "20"

- name: install build requirements
run: |
@@ -67,7 +67,7 @@ jobs:
docker run --rm -v $PWD/dist:/dist:ro docker.io/library/python:3.9-slim-bullseye bash -c 'pip install /dist/jupyterhub-*.tar.gz'

# ref: https://github.com/actions/upload-artifact#readme
- uses: actions/upload-artifact@v3
- uses: actions/upload-artifact@v4
with:
name: jupyterhub-${{ github.sha }}
path: "dist/*"
@@ -83,8 +83,8 @@ jobs:
twine upload --skip-existing dist/*

publish-docker:
runs-on: ubuntu-20.04
timeout-minutes: 30
runs-on: ubuntu-22.04
timeout-minutes: 20

services:
# So that we can test this in PRs/branches
@@ -97,39 +97,35 @@ jobs:
- name: Should we push this image to a public registry?
run: |
if [ "${{ startsWith(github.ref, 'refs/tags/') || (github.ref == 'refs/heads/main') }}" = "true" ]; then
# Empty => Docker Hub
echo "REGISTRY=" >> $GITHUB_ENV
echo "REGISTRY=quay.io/" >> $GITHUB_ENV
else
echo "REGISTRY=localhost:5000/" >> $GITHUB_ENV
fi

- uses: actions/checkout@v3
- uses: actions/checkout@v4

# Setup docker to build for multiple platforms, see:
# https://github.com/docker/build-push-action/tree/v2.4.0#usage
# https://github.com/docker/build-push-action/blob/v2.4.0/docs/advanced/multi-platform.md
- name: Set up QEMU (for docker buildx)
uses: docker/setup-qemu-action@v2
uses: docker/setup-qemu-action@v3

- name: Set up Docker Buildx (for multi-arch builds)
uses: docker/setup-buildx-action@v2
uses: docker/setup-buildx-action@v3
with:
# Allows pushing to registry on localhost:5000
driver-opts: network=host

- name: Setup push rights to Docker Hub
# This was setup by...
# 1. Creating a Docker Hub service account "jupyterhubbot"
# 2. Creating a access token for the service account specific to this
# repository: https://hub.docker.com/settings/security
# 3. Making the account part of the "bots" team, and granting that team
# permissions to push to the relevant images:
# https://hub.docker.com/orgs/jupyterhub/teams/bots/permissions
# 4. Registering the username and token as a secret for this repo:
# https://github.com/jupyterhub/jupyterhub/settings/secrets/actions
# 1. Creating a [Robot Account](https://quay.io/organization/jupyterhub?tab=robots) in the JupyterHub
# . Quay.io org
# 2. Giving it enough permissions to push to the jupyterhub and singleuser images
# 3. Putting the robot account's username and password in GitHub actions environment
if: env.REGISTRY != 'localhost:5000/'
run: |
docker login -u "${{ secrets.DOCKERHUB_USERNAME }}" -p "${{ secrets.DOCKERHUB_TOKEN }}"
docker login -u "${{ secrets.QUAY_USERNAME }}" -p "${{ secrets.QUAY_PASSWORD }}" "${{ env.REGISTRY }}"
docker login -u "${{ secrets.DOCKERHUB_USERNAME }}" -p "${{ secrets.DOCKERHUB_TOKEN }}" docker.io

# image: jupyterhub/jupyterhub
#
@@ -142,15 +138,17 @@ jobs:
# If GITHUB_TOKEN isn't available (e.g. in PRs) returns no tags [].
- name: Get list of jupyterhub tags
id: jupyterhubtags
uses: jupyterhub/action-major-minor-tag-calculator@v2
uses: jupyterhub/action-major-minor-tag-calculator@v3
with:
githubToken: ${{ secrets.GITHUB_TOKEN }}
prefix: "${{ env.REGISTRY }}jupyterhub/jupyterhub:"
prefix: >-
${{ env.REGISTRY }}jupyterhub/jupyterhub:
jupyterhub/jupyterhub:
defaultTag: "${{ env.REGISTRY }}jupyterhub/jupyterhub:noref"
branchRegex: ^\w[\w-.]*$

- name: Build and push jupyterhub
uses: docker/build-push-action@3b5e8027fcad23fda98b2e3ac259d8d67585f671
uses: docker/build-push-action@v5
with:
context: .
platforms: linux/amd64,linux/arm64
@@ -163,15 +161,17 @@ jobs:
#
- name: Get list of jupyterhub-onbuild tags
id: onbuildtags
uses: jupyterhub/action-major-minor-tag-calculator@v2
uses: jupyterhub/action-major-minor-tag-calculator@v3
with:
githubToken: ${{ secrets.GITHUB_TOKEN }}
prefix: "${{ env.REGISTRY }}jupyterhub/jupyterhub-onbuild:"
prefix: >-
${{ env.REGISTRY }}jupyterhub/jupyterhub-onbuild:
jupyterhub/jupyterhub-onbuild:
defaultTag: "${{ env.REGISTRY }}jupyterhub/jupyterhub-onbuild:noref"
branchRegex: ^\w[\w-.]*$

- name: Build and push jupyterhub-onbuild
uses: docker/build-push-action@3b5e8027fcad23fda98b2e3ac259d8d67585f671
uses: docker/build-push-action@v5
with:
build-args: |
BASE_IMAGE=${{ fromJson(steps.jupyterhubtags.outputs.tags)[0] }}
@@ -184,15 +184,17 @@ jobs:
#
- name: Get list of jupyterhub-demo tags
id: demotags
uses: jupyterhub/action-major-minor-tag-calculator@v2
uses: jupyterhub/action-major-minor-tag-calculator@v3
with:
githubToken: ${{ secrets.GITHUB_TOKEN }}
prefix: "${{ env.REGISTRY }}jupyterhub/jupyterhub-demo:"
prefix: >-
${{ env.REGISTRY }}jupyterhub/jupyterhub-demo:
jupyterhub/jupyterhub-demo:
defaultTag: "${{ env.REGISTRY }}jupyterhub/jupyterhub-demo:noref"
branchRegex: ^\w[\w-.]*$

- name: Build and push jupyterhub-demo
uses: docker/build-push-action@3b5e8027fcad23fda98b2e3ac259d8d67585f671
uses: docker/build-push-action@v5
with:
build-args: |
BASE_IMAGE=${{ fromJson(steps.onbuildtags.outputs.tags)[0] }}
@@ -208,15 +210,17 @@ jobs:
#
- name: Get list of jupyterhub/singleuser tags
id: singleusertags
uses: jupyterhub/action-major-minor-tag-calculator@v2
uses: jupyterhub/action-major-minor-tag-calculator@v3
with:
githubToken: ${{ secrets.GITHUB_TOKEN }}
prefix: "${{ env.REGISTRY }}jupyterhub/singleuser:"
prefix: >-
${{ env.REGISTRY }}jupyterhub/singleuser:
jupyterhub/singleuser:
defaultTag: "${{ env.REGISTRY }}jupyterhub/singleuser:noref"
branchRegex: ^\w[\w-.]*$

- name: Build and push jupyterhub/singleuser
uses: docker/build-push-action@3b5e8027fcad23fda98b2e3ac259d8d67585f671
uses: docker/build-push-action@v5
with:
build-args: |
JUPYTERHUB_VERSION=${{ github.ref_type == 'tag' && github.ref_name || format('git:{0}', github.sha) }}
.github/workflows/support-bot.yml (vendored): 2 changes
@@ -25,7 +25,7 @@ jobs:

Our goal is to sustain a positive experience for both users and developers. We use GitHub issues for specific discussions related to changing a repository's content, and let the forum be where we can more generally help and inspire each other.

Thanks you for being an active member of our community! :heart:
Thank you for being an active member of our community! :heart:
close-issue: true
lock-issue: false
issue-lock-reason: "off-topic"
.github/workflows/test.yml (vendored): 16 changes
@@ -99,7 +99,10 @@ jobs:
noextension: noextension
subset: singleuser
- python: "3.11"
selenium: selenium
browser: browser
- python: "3.11"
subdomain: subdomain
browser: browser
- python: "3.11"
main_dependencies: main_dependencies

@@ -229,9 +232,13 @@ jobs:
DB=postgres bash ci/init-db.sh
fi

- name: Configure selenium tests
if: matrix.selenium
run: echo "PYTEST_ADDOPTS=$PYTEST_ADDOPTS -m selenium" >> "${GITHUB_ENV}"
- name: Configure browser tests
if: matrix.browser
run: echo "PYTEST_ADDOPTS=$PYTEST_ADDOPTS -m browser" >> "${GITHUB_ENV}"

- name: Ensure browsers are installed for playwright
if: matrix.browser
run: python -m playwright install --with-deps

- name: Run pytest
run: |
@@ -250,7 +257,6 @@ jobs:
run: |
DOCKER_BUILDKIT=1 docker build -t jupyterhub/jupyterhub .
docker build -t jupyterhub/jupyterhub-onbuild onbuild
docker build -t jupyterhub/jupyterhub:alpine -f dockerfiles/Dockerfile.alpine .
docker build -t jupyterhub/singleuser singleuser

- name: smoke test jupyterhub
@@ -16,7 +16,7 @@ ci:
repos:
# Autoformat: Python code, syntax patterns are modernized
- repo: https://github.com/asottile/pyupgrade
rev: v3.3.1
rev: v3.15.0
hooks:
- id: pyupgrade
args:
@@ -24,7 +24,7 @@ repos:

# Autoformat: Python code
- repo: https://github.com/PyCQA/autoflake
rev: v2.0.2
rev: v2.2.1
hooks:
- id: autoflake
# args ref: https://github.com/PyCQA/autoflake#advanced-usage
@@ -33,25 +33,25 @@ repos:

# Autoformat: Python code
- repo: https://github.com/pycqa/isort
rev: 5.12.0
rev: 5.13.2
hooks:
- id: isort

# Autoformat: Python code
- repo: https://github.com/psf/black
rev: 23.3.0
rev: 24.1.1
hooks:
- id: black

# Autoformat: markdown, yaml, javascript (see the file .prettierignore)
- repo: https://github.com/pre-commit/mirrors-prettier
rev: v3.0.0-alpha.6
rev: v4.0.0-alpha.8
hooks:
- id: prettier

# Autoformat and linting, misc. details
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.4.0
rev: v4.5.0
hooks:
- id: end-of-file-fixer
exclude: share/jupyterhub/static/js/admin-react.js
@@ -61,6 +61,6 @@ repos:

# Linting: Python code (see the file .flake8)
- repo: https://github.com/PyCQA/flake8
rev: "6.0.0"
rev: "7.0.0"
hooks:
- id: flake8
Dockerfile: 85 changes
@@ -6,7 +6,7 @@
#
# Option 1:
#
# FROM jupyterhub/jupyterhub:latest
# FROM quay.io/jupyterhub/jupyterhub:latest
#
# And put your configuration file jupyterhub_config.py in /srv/jupyterhub/jupyterhub_config.py.
#
@@ -14,44 +14,90 @@
#
# Or you can create your jupyterhub config and database on the host machine, and mount it with:
#
# docker run -v $PWD:/srv/jupyterhub -t jupyterhub/jupyterhub
# docker run -v $PWD:/srv/jupyterhub -t quay.io/jupyterhub/jupyterhub
#
# NOTE
# If you base on jupyterhub/jupyterhub-onbuild
# If you base on quay.io/jupyterhub/jupyterhub-onbuild
# your jupyterhub_config.py will be added automatically
# from your docker directory.

######################################################################
# This Dockerfile uses multi-stage builds with optimisations to build
# the JupyterHub wheel on the native architecture only
# https://www.docker.com/blog/faster-multi-platform-builds-dockerfile-cross-compilation-guide/

ARG BASE_IMAGE=ubuntu:22.04
FROM $BASE_IMAGE AS builder

######################################################################
# The JupyterHub wheel is pure Python so can be built for any platform
# on the native architecture (avoiding QEMU emulation)
FROM --platform=${BUILDPLATFORM:-linux/amd64} $BASE_IMAGE AS jupyterhub-builder

ENV DEBIAN_FRONTEND=noninteractive
WORKDIR /src/jupyterhub

RUN apt update -q \
&& apt install -yq --no-install-recommends \
# Don't clear apt cache, and don't combine RUN commands, so that cached layers can
# be reused in other stages

RUN apt-get update -qq \
&& apt-get install -yqq --no-install-recommends \
build-essential \
ca-certificates \
curl \
locales \
python3-dev \
python3-pip \
python3-pycurl \
python3-venv \
&& python3 -m pip install --no-cache-dir --upgrade setuptools pip build wheel
# Ubuntu 22.04 comes with Nodejs 12 which is too old for building JupyterHub JS
# It's fine at runtime though (used only by configurable-http-proxy)
RUN curl -fsSL https://deb.nodesource.com/setup_18.x | bash - \
&& apt-get install -yqq --no-install-recommends \
nodejs \
npm \
&& apt clean \
&& rm -rf /var/lib/apt/lists/* \
&& python3 -m pip install --no-cache-dir --upgrade setuptools pip build wheel \
&& npm install --global yarn

WORKDIR /src/jupyterhub
# copy everything except whats in .dockerignore, its a
# compromise between needing to rebuild and maintaining
# what needs to be part of the build
COPY . .

ARG PIP_CACHE_DIR=/tmp/pip-cache
RUN --mount=type=cache,target=${PIP_CACHE_DIR} \
python3 -m build --wheel \
&& python3 -m pip wheel --wheel-dir wheelhouse dist/*.whl
python3 -m build --wheel

######################################################################
# All other wheels required by JupyterHub, some are platform specific
FROM $BASE_IMAGE AS wheel-builder

ENV DEBIAN_FRONTEND=noninteractive

RUN apt-get update -qq \
&& apt-get install -yqq --no-install-recommends \
build-essential \
ca-certificates \
curl \
locales \
python3-dev \
python3-pip \
python3-pycurl \
python3-venv \
&& python3 -m pip install --no-cache-dir --upgrade setuptools pip build wheel

WORKDIR /src/jupyterhub

COPY --from=jupyterhub-builder /src/jupyterhub/dist/*.whl /src/jupyterhub/dist/
ARG PIP_CACHE_DIR=/tmp/pip-cache
RUN --mount=type=cache,target=${PIP_CACHE_DIR} \
python3 -m pip wheel --wheel-dir wheelhouse dist/*.whl

######################################################################
# The final JupyterHub image, platform specific
FROM $BASE_IMAGE AS jupyterhub

FROM $BASE_IMAGE
ENV DEBIAN_FRONTEND=noninteractive \
SHELL=/bin/bash \
LC_ALL=en_US.UTF-8 \
@@ -66,8 +112,8 @@ LABEL org.jupyter.service="jupyterhub"

WORKDIR /srv/jupyterhub

RUN apt update -q \
&& apt install -yq --no-install-recommends \
RUN apt-get update -qq \
&& apt-get install -yqq --no-install-recommends \
ca-certificates \
curl \
gnupg \
@@ -80,10 +126,9 @@ RUN apt update -q \
&& locale-gen $LC_ALL \
&& npm install -g configurable-http-proxy@^4.2.0 \
# clean cache and logs
&& rm -rf /var/lib/apt/lists/* /var/log/* /var/tmp/* ~/.npm \
&& find / -type d -name '__pycache__' -prune -exec rm -rf {} \;
# install the wheels we built in the first stage
RUN --mount=type=cache,from=builder,source=/src/jupyterhub/wheelhouse,target=/tmp/wheelhouse \
&& rm -rf /var/lib/apt/lists/* /var/log/* /var/tmp/* ~/.npm
# install the wheels we built in the previous stage
RUN --mount=type=cache,from=wheel-builder,source=/src/jupyterhub/wheelhouse,target=/tmp/wheelhouse \
# always make sure pip is up to date!
python3 -m pip install --no-compile --no-cache-dir --upgrade setuptools pip \
&& python3 -m pip install --no-compile --no-cache-dir /tmp/wheelhouse/*
@@ -14,7 +14,6 @@
[](https://anaconda.org/conda-forge/jupyterhub)
[](https://jupyterhub.readthedocs.org/en/latest/)
[](https://github.com/jupyterhub/jupyterhub/actions)
[](https://hub.docker.com/r/jupyterhub/jupyterhub/tags)
[](https://codecov.io/gh/jupyterhub/jupyterhub)
[](https://github.com/jupyterhub/jupyterhub/issues)
[](https://discourse.jupyter.org/c/jupyterhub)
@@ -160,10 +159,10 @@ To start the Hub on a specific url and port `10.0.1.2:443` with **https**:

## Docker

A starter [**docker image for JupyterHub**](https://hub.docker.com/r/jupyterhub/jupyterhub/)
A starter [**docker image for JupyterHub**](https://quay.io/repository/jupyterhub/jupyterhub)
gives a baseline deployment of JupyterHub using Docker.

**Important:** This `jupyterhub/jupyterhub` image contains only the Hub itself,
**Important:** This `quay.io/jupyterhub/jupyterhub` image contains only the Hub itself,
with no configuration. In general, one needs to make a derivative image, with
at least a `jupyterhub_config.py` setting up an Authenticator and/or a Spawner.
To run the single-user servers, which may be on the same system as the Hub or
@@ -171,7 +170,7 @@ not, Jupyter Notebook version 4 or greater must be installed.

The JupyterHub docker image can be started with the following command:

docker run -p 8000:8000 -d --name jupyterhub jupyterhub/jupyterhub jupyterhub
docker run -p 8000:8000 -d --name jupyterhub quay.io/jupyterhub/jupyterhub jupyterhub

This command will create a container named `jupyterhub` that you can
**stop and resume** with `docker stop/start`.
@@ -3,7 +3,7 @@
# This should only be used for demo or testing and not as a base image to build on.
#
# It includes the notebook package and it uses the DummyAuthenticator and the SimpleLocalProcessSpawner.
ARG BASE_IMAGE=jupyterhub/jupyterhub-onbuild
ARG BASE_IMAGE=quay.io/jupyterhub/jupyterhub-onbuild
FROM ${BASE_IMAGE}

# Install the notebook package
@@ -1,14 +0,0 @@
FROM alpine:3.13
ENV LANG=en_US.UTF-8
RUN apk add --no-cache \
python3 \
py3-pip \
py3-ruamel.yaml \
py3-cryptography \
py3-sqlalchemy

ARG JUPYTERHUB_VERSION=1.3.0
RUN pip3 install --no-cache jupyterhub==${JUPYTERHUB_VERSION}

USER nobody
CMD ["jupyterhub"]
@@ -1,22 +0,0 @@
## What is Dockerfile.alpine

Dockerfile.alpine contains the base image for jupyterhub. It does not work independently, but only as part of a full jupyterhub cluster

## How to use it?

You will need:

1. A running configurable-http-proxy, whose API is accessible.
2. A jupyterhub_config file.
3. Authentication and other libraries required by the specific jupyterhub_config file.

## Steps to test it outside a cluster

- start configurable-http-proxy in another container
- specify CONFIGPROXY_AUTH_TOKEN env in both containers
- put both containers on the same network (e.g. docker network create jupyterhub; docker run ... --net jupyterhub)
- tell jupyterhub where CHP is (e.g. c.ConfigurableHTTPProxy.api_url = 'http://chp:8001')
- tell jupyterhub not to start the proxy itself (c.ConfigurableHTTPProxy.should_start = False)
- Use a dummy authenticator for ease of testing. Update following in jupyterhub_config file
- c.JupyterHub.authenticator_class = 'dummyauthenticator.DummyAuthenticator'
- c.DummyAuthenticator.password = "your strong password"
@@ -6,7 +6,7 @@ info:
description: The REST API for JupyterHub
license:
name: BSD-3-Clause
version: 4.0.0
version: 4.1.1
servers:
- url: /hub/api
security:
@@ -562,9 +562,10 @@ paths:
properties:
expires_in:
type: number
example: 3600
description:
lifetime (in seconds) after which the requested token
will expire.
will expire. Omit, or specify null or 0 for no expiration.
note:
type: string
description: A note attached to the token for future bookkeeping
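For orientation, the `expires_in` field above is what a client sends when creating a token through the REST API. A rough sketch of such a request is shown below; the hub URL, username, and existing token are placeholders, not values taken from this diff:

```python
# Sketch: request a token that expires after one hour via POST /hub/api/users/{name}/tokens.
# Assumes an existing API token that is allowed to create tokens for this user.
import requests

resp = requests.post(
    "https://jupyterhub.example/hub/api/users/myname/tokens",
    headers={"Authorization": "token <existing-api-token>"},
    json={"expires_in": 3600, "note": "short-lived token for a batch job"},
)
resp.raise_for_status()
new_token = resp.json()["token"]  # the token value is only returned once
```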
@@ -1202,13 +1203,13 @@ components:
description: Timestamp of last-seen activity from the user
format: date-time
servers:
type: array
type: object
description: |
The servers for this user.
By default: only includes _active_ servers.
Changed in 3.0: if `?include_stopped_servers` parameter is specified,
stopped servers will be included as well.
items:
additionalProperties:
$ref: "#/components/schemas/Server"
auth_state:
type: object
@@ -187,7 +187,9 @@ linkcheck_ignore = [
"https://github.com/jupyterhub/jupyterhub/compare/", # too many comparisons in changelog
r"https?://(localhost|127.0.0.1).*", # ignore localhost references in auto-links
r".*/rest-api.html#.*", # ignore javascript-resolved internal rest-api links
r"https://jupyter.chameleoncloud.org", # FIXME: ignore (presumably) short-term SSL issue
r"https://linux.die.net/.*", # linux.die.net seems to block requests from CI with 403 sometimes
# don't check links to unpublished advisories
r"https://github.com/jupyterhub/jupyterhub/security/advisories/.*",
]
linkcheck_anchors_ignore = [
"/#!",
@@ -201,6 +203,7 @@ intersphinx_mapping = {
"python": ("https://docs.python.org/3/", None),
"tornado": ("https://www.tornadoweb.org/en/stable/", None),
"jupyter-server": ("https://jupyter-server.readthedocs.io/en/stable/", None),
"nbgitpuller": ("https://nbgitpuller.readthedocs.io/en/latest", None),
}

# -- Options for the opengraph extension -------------------------------------
@@ -130,8 +130,8 @@ configuration:
jupyterhub -f testing/jupyterhub_config.py
```

The default JupyterHub [authenticator](https://jupyterhub.readthedocs.io/en/stable/reference/authenticators.html#the-default-pam-authenticator)
& [spawner](https://jupyterhub.readthedocs.io/en/stable/api/spawner.html#localprocessspawner)
The default JupyterHub [authenticator](PAMAuthenticator)
& [spawner](LocalProcessSpawner)
require your system to have user accounts for each user you want to log in to
JupyterHub as.
@@ -16,7 +16,8 @@ works.

JupyterHub is designed to be a _simple multi-user server for modestly sized
groups_ of **semi-trusted** users. While the design reflects serving
semi-trusted users, JupyterHub can also be suitable for serving **untrusted** users.
semi-trusted users, JupyterHub can also be suitable for serving **untrusted** users,
but **is not suitable for untrusted users** in its default configuration.

As a result, using JupyterHub with **untrusted** users means more work by the
administrator, since much care is required to secure a Hub, with extra caution on
@@ -52,33 +53,67 @@ ensure that:
their single-user server;
- the modification of the configuration of the notebook server
(the `~/.jupyter` or `JUPYTER_CONFIG_DIR` directory).
- unrestricted selection of the base environment (e.g. the image used in container-based Spawners)

If any additional services are run on the same domain as the Hub, the services
**must never** display user-authored HTML that is neither _sanitized_ nor _sandboxed_
(e.g. IFramed) to any user that lacks authentication as the author of a file.
to any user that lacks authentication as the author of a file.

### Sharing access to servers

Because sharing access to servers (via `access:servers` scopes or the sharing feature in JupyterHub 5) by definition means users can serve each other files, enabling sharing is not suitable for untrusted users without also enabling per-user domains.

JupyterHub does not enable any sharing by default.
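For context, the `access:servers` scope mentioned above is granted through JupyterHub's RBAC configuration. A hedged sketch of what such a deliberately broad grant looks like in `jupyterhub_config.py`, using a hypothetical group name:

```python
# Sketch: grant a hypothetical "support-staff" group access to all user servers.
# This is exactly the kind of broad sharing the text above says is unsuitable for
# untrusted users unless per-user subdomains are enabled.
c = get_config()  # noqa - provided by JupyterHub when it loads this file
c.JupyterHub.load_roles = [
    {
        "name": "server-access",
        "scopes": ["access:servers"],
        "groups": ["support-staff"],
    }
]
```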
## Mitigate security issues

The several approaches to mitigating security issues with configuration
options provided by JupyterHub include:

### Enable subdomains
### Enable user subdomains

JupyterHub provides the ability to run single-user servers on their own
subdomains. This means the cross-origin protections between servers has the
desired effect, and user servers and the Hub are protected from each other. A
user's single-user server will be at `username.jupyter.mydomain.com`. This also
requires all user subdomains to point to the same address, which is most easily
accomplished with wildcard DNS. Since this spreads the service across multiple
domains, you will need wildcard SSL as well. Unfortunately, for many
institutional domains, wildcard DNS and SSL are not available. **If you do plan
to serve untrusted users, enabling subdomains is highly encouraged**, as it
resolves the cross-site issues.
domains. This means the cross-origin protections between servers has the
desired effect, and user servers and the Hub are protected from each other.

**Subdomains are the only way to reliably isolate user servers from each other.**

To enable subdomains, set:

```python
c.JupyterHub.subdomain_host = "https://jupyter.example.org"
```

When subdomains are enabled, each user's single-user server will be at e.g. `https://username.jupyter.example.org`.
This also requires all user subdomains to point to the same address,
which is most easily accomplished with wildcard DNS, where a single A record points to your server and a wildcard CNAME record points to your A record:

```
A jupyter.example.org 192.168.1.123
CNAME *.jupyter.example.org jupyter.example.org
```

Since this spreads the service across multiple domains, you will likely need wildcard SSL as well,
matching `*.jupyter.example.org`.

Unfortunately, for many institutional domains, wildcard DNS and SSL may not be available.

We also **strongly encourage** serving JupyterHub and user content on a domain that is _not_ a subdomain of any sensitive content.
For reasoning, see [GitHub's discussion of moving user content to github.io from \*.github.com](https://github.blog/2013-04-09-yummy-cookies-across-domains/).

**If you do plan to serve untrusted users, enabling subdomains is highly encouraged**,
as it resolves many security issues that are difficult or impossible to avoid when JupyterHub is on a single domain.

:::{important}
JupyterHub makes no guarantees about protecting users from each other unless subdomains are enabled.

If you want to protect users from each other, you **_must_** enable per-user domains.
:::

### Disable user config

If subdomains are unavailable or undesirable, JupyterHub provides a
configuration option `Spawner.disable_user_config`, which can be set to prevent
configuration option `Spawner.disable_user_config = True`, which can be set to prevent
the user-owned configuration files from being loaded. After implementing this
option, `PATH`s and package installation are the other things that the
admin must enforce.
@@ -88,21 +123,24 @@ admin must enforce.
For most Spawners, `PATH` is not something users can influence, but it's important that
the Spawner should _not_ evaluate shell configuration files prior to launching the server.
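As a minimal sketch, assuming the stock `jupyterhub_config.py` and the default spawner, the option above is set like this:

```python
# Sketch: prevent single-user servers from loading user-owned config files
c = get_config()  # noqa - provided by JupyterHub when it loads this file
c.Spawner.disable_user_config = True
```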
### Isolate packages using virtualenv
### Isolate packages in a read-only environment

Package isolation is most easily handled by running the single-user server in
a virtualenv with disabled system-site-packages. The user should not have
permission to install packages into this environment.
The user must not have permission to install packages into the environment where the singleuser-server runs.
On a shared system, package isolation is most easily handled by running the single-user server in
a root-owned virtualenv with disabled system-site-packages.
The user must not have permission to install packages into this environment.
The same principle extends to the images used by container-based deployments.
If users can select the images in which their servers run, they can disable all security for their own servers.

It is important to note that the control over the environment only affects the
single-user server, and not the environment(s) in which the user's kernel(s)
It is important to note that the control over the environment is only required for the
single-user server, and not the environment(s) in which the users' kernel(s)
may run. Installing additional packages in the kernel environment does not
pose additional risk to the web application's security.

### Encrypt internal connections with SSL/TLS

By default, all communications on the server, between the proxy, hub, and single
-user notebooks are performed unencrypted. Setting the `internal_ssl` flag in
By default, all communications within JupyterHub—between the proxy, hub, and single
-user notebooks—are performed unencrypted. Setting the `internal_ssl` flag in
`jupyterhub_config.py` secures the aforementioned routes. Turning this
feature on does require that the enabled `Spawner` can use the certificates
generated by the `Hub` (the default `LocalProcessSpawner` can, for instance).
@@ -116,6 +154,104 @@ Unix permissions to the communication sockets thereby restricting
communication to the socket owner. The `internal_ssl` option will eventually
extend to securing the `tcp` sockets as well.
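A minimal sketch of turning this on in `jupyterhub_config.py`, assuming a spawner that supports the Hub-generated certificates (such as the default `LocalProcessSpawner`):

```python
# Sketch: encrypt hub <-> proxy <-> single-user server traffic
c = get_config()  # noqa - provided by JupyterHub when it loads this file
c.JupyterHub.internal_ssl = True
# c.JupyterHub.internal_certs_location = "internal-ssl"  # optional: where generated certs are written
```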
### Mitigating same-origin deployments

While per-user domains are **required** for robust protection of users from each other,
you can mitigate many (but not all) cross-user issues.
First, it is critical that users cannot modify their server environments, as described above.
Second, it is important that users do not have `access:servers` permission to any server other than their own.

If users can access each others' servers, additional security measures must be enabled, some of which come with distinct user-experience costs.

Without the [Same-Origin Policy] (SOP) protecting user servers from each other,
each user server is considered a trusted origin for requests to each other user server (and the Hub itself).
Servers _cannot_ meaningfully distinguish requests originating from other user servers,
because SOP implies a great deal of trust, losing many restrictions applied to cross-origin requests.

That means pages served from each user server can:

1. arbitrarily modify the path in the Referer
2. make fully authorized requests with cookies
3. access full page contents served from the hub or other servers via popups

JupyterHub uses distinct xsrf tokens stored in cookies on each server path to attempt to limit requests across user prefixes.
This has limitations because not all requests are protected by these XSRF tokens,
and unless additional measures are taken, the XSRF tokens from other user prefixes may be retrieved.

[Same-Origin Policy]: https://developer.mozilla.org/en-US/docs/Web/Security/Same-origin_policy

For example:

- `Content-Security-Policy` header must prohibit popups and iframes from the same origin.
The following Content-Security-Policy rules are _insecure_ and readily enable users to access each others' servers:

- `frame-ancestors: 'self'`
- `frame-ancestors: '*'`
- `sandbox allow-popups`

- Ideally, pages should use the strictest `Content-Security-Policy: sandbox` available,
but this is not feasible in general for JupyterLab pages, which need at least `sandbox allow-same-origin allow-scripts` to work.

The default Content-Security-Policy for single-user servers is

```
frame-ancestors: 'none'
```

which prohibits iframe embedding, but not pop-ups.

A more secure Content-Security-Policy that has some costs to user experience is:

```
frame-ancestors: 'none'; sandbox allow-same-origin allow-scripts
```

`allow-popups` is not disabled by default because disabling it breaks legitimate functionality, like "Open this in a new tab", and the "JupyterHub Control Panel" menu item.
To reiterate, the right way to avoid these issues is to enable per-user domains, where none of these concerns come up.

Note: even this level of protection requires administrators maintaining full control over the user server environment.
If users can modify their server environment, these methods are ineffective, as users can readily disable them.
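One way to experiment with the stricter policy above is through the single-user server's `tornado_settings`. This is a sketch only, assuming a Jupyter Server based single-user server; whether it is appropriate, and where the header is best applied, depends on your deployment and server version:

```python
# Sketch for a single-user server config (e.g. jupyter_server_config.py).
# Not a drop-in hardening recipe; the header value is copied from the text above.
c = get_config()  # noqa
c.ServerApp.tornado_settings = {
    "headers": {
        "Content-Security-Policy": "frame-ancestors 'none'; sandbox allow-same-origin allow-scripts",
    }
}
```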
### Cookie tossing

Cookie tossing is a technique where another server on a subdomain or peer subdomain can set a cookie
which will be read on another domain.
This is not relevant unless there are other user-controlled servers on a peer domain.

"Domain-locked" cookies avoid this issue, but have their own restrictions:

- JupyterHub must be served over HTTPS
- All secure cookies must be set on `/`, not on sub-paths, which means they are shared by all JupyterHub components in a single-domain deployment.

As a result, this option is only recommended when per-user subdomains are enabled,
to prevent sending all jupyterhub cookies to all user servers.

To enable domain-locked cookies, set:

```python
c.JupyterHub.cookie_host_prefix_enabled = True
```

```{versionadded} 4.1

```

### Forced-login

Jupyter servers can share links with `?token=...`.
JupyterHub prior to 5.0 will accept this request and persist the token for future requests.
This is useful for enabling admins to create 'fully authenticated' links bypassing login.
However, it also means users can share their own links that will log other users into their own servers,
enabling them to serve each other notebooks and other arbitrary HTML, depending on server configuration.

```{versionadded} 4.1
Setting environment variable `JUPYTERHUB_ALLOW_TOKEN_IN_URL=0` in the single-user environment can opt out of accepting token auth in URL parameters.
```

```{versionadded} 5.0
Accepting tokens in URLs is disabled by default, and `JUPYTERHUB_ALLOW_TOKEN_IN_URL=1` environment variable must be set to _allow_ token auth in URL parameters.
```
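One convenient place to set that environment variable for every single-user server is the spawner's environment; a sketch is shown below, and any spawner-specific mechanism for injecting environment variables works just as well:

```python
# Sketch: opt out of token-in-URL auth for all single-user servers (JupyterHub 4.1)
c = get_config()  # noqa
c.Spawner.environment = {"JUPYTERHUB_ALLOW_TOKEN_IN_URL": "0"}
```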
## Security audits

We recommend that you do periodic reviews of your deployment's security. It's
@@ -2,35 +2,75 @@

## How do I share links to notebooks?

In short, where you see `/user/name/notebooks/foo.ipynb` use `/hub/user-redirect/notebooks/foo.ipynb` (replace `/user/name` with `/hub/user-redirect`).

Sharing links to notebooks is a common activity,
and can look different based on what you mean.
and can look different depending on what you mean by 'share.'
Your first instinct might be to copy the URL you see in the browser,
e.g. `hub.jupyter.org/user/yourname/notebooks/coolthing.ipynb`.
However, let's break down what this URL means:
e.g. `jupyterhub.example/user/yourname/notebooks/coolthing.ipynb`,
but this usually won't work, depending on the permissions of the person you share the link with.

`hub.jupyter.org/user/yourname/` is the URL prefix handled by _your server_,
which means that sharing this URL is asking the person you share the link with
to come to _your server_ and look at the exact same file.
In most circumstances, this is forbidden by permissions because the person you share with does not have access to your server.
What actually happens when someone visits this URL will depend on whether your server is running and other factors.
Unfortunately, 'share' means at least a few things to people in a JupyterHub context.
We'll cover 3 common cases here, when they are applicable, and what assumptions they make:

**But what is our actual goal?**
1. sharing links that will open the same file on the visitor's own server
2. sharing links that will bring the visitor to _your_ server (e.g. for real-time collaboration, or RTC)
3. publishing notebooks and sharing links that will download the notebook into the user's server

A typical situation is that you have some shared or common filesystem,
such that the same path corresponds to the same document
(either the exact same document or another copy of it).
Typically, what folks want when they do sharing like this
is for each visitor to open the same file _on their own server_,
so Breq would open `/user/breq/notebooks/foo.ipynb` and
Seivarden would open `/user/seivarden/notebooks/foo.ipynb`, etc.
### link to the same file on the visitor's server

JupyterHub has a special URL that does exactly this!
It's called `/hub/user-redirect/...`.
So if you replace `/user/yourname` in your URL bar
with `/hub/user-redirect` any visitor should get the same
URL on their own server, rather than visiting yours.
This is for the case where you have JupyterHub on a shared (or sufficiently similar) filesystem, where you want to share a link that will cause users to login and start their _own_ server, to view or edit the file.

In JupyterLab 2.0, this should also be the result of the "Copy Shareable Link"
action in the file browser.
**Assumption:** the same path on someone else's server is valid and points to the same file

This is useful in e.g. classes where you know students have certain files in certain locations, or collaborations where you know you have a shared filesystem where everyone has access to the same files.

A link should look like `https://jupyterhub.example/hub/user-redirect/lab/tree/foo.ipynb`.
You can hand-craft these URLs from the URL you are looking at, where you see `/user/name/lab/tree/foo.ipynb` use `/hub/user-redirect/lab/tree/foo.ipynb` (replace `/user/name/` with `/hub/user-redirect/`).
Or you can use JupyterLab's "copy shareable link" in the context menu in the file browser:



which will produce a correct URL with `/hub/user-redirect/` in it.
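The hand-crafting described above is just a prefix substitution. As a small illustration (the URL is the placeholder host used in the text, not a real deployment):

```python
# Sketch: rewrite a personal notebook URL into a /hub/user-redirect/ link
my_url = "https://jupyterhub.example/user/yourname/lab/tree/foo.ipynb"
shareable = my_url.replace("/user/yourname/", "/hub/user-redirect/", 1)
print(shareable)  # https://jupyterhub.example/hub/user-redirect/lab/tree/foo.ipynb
```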
### link to the file on your server

This is for the case where you want to both be using _your_ server, e.g. for real-time collaboration (RTC).

**Assumption:** the user has (or should have) access to your server.

**Assumption:** your server is running _or_ the user has permission to start it.

By default, JupyterHub users don't have access to each other's servers, but JupyterHub 2.0 administrators can grant users limited access permissions to each other's servers.
If the visitor doesn't have access to the server, these links will result in a 403 Permission Denied error.

In many cases, for this situation you can copy the link in your URL bar (`/user/yourname/lab`), or you can add `/tree/path/to/specific/notebook.ipynb` to open a specific file.

The [jupyterlab-link-share] JupyterLab extension generates these links, and even can _grant_ other users access to your server.

[jupyterlab-link-share]: https://github.com/jupyterlab-contrib/jupyterlab-link-share

:::{warning}
Note that the way the extension _grants_ access is handing over credentials to allow the other user to **_BECOME YOU_**.
This is usually not appropriate in JupyterHub.
:::

### link to a published copy

Another way to 'share' notebooks is to publish copies, e.g. pushing the notebook to a git repository and sharing a download link.
This way is especially useful for course materials,
where no assumptions are necessary about the user's environment,
except for having one package installed.

**Assumption:** The [nbgitpuller](inv:nbgitpuller#index) server extension is installed

Unlike the other two methods, nbgitpuller doesn't provide an extension to copy a shareable link for the document you're currently looking at,
but it does provide a [link generator](inv:nbgitpuller#link),
which uses the `user-redirect` approach above.

When visiting an nbgitpuller link:

- The visitor will be directed to their own server
- Your repo will be cloned (or updated if it's already been cloned)
- and then the file opened when it's ready

[nbgitpuller]: https://nbgitpuller.readthedocs.io
[nbgitpuller-link]: https://nbgitpuller.readthedocs.io/en/latest/link.html
@@ -66,7 +66,7 @@ Here is a sample of organizations that use JupyterHub:
- **Universities and colleges**: UC Berkeley, UC San Diego, Cal Poly SLO, Harvard University, University of Chicago,
University of Oslo, University of Sheffield, Université Paris Sud, University of Versailles
- **Research laboratories**: NASA, NCAR, NOAA, the Large Synoptic Survey Telescope, Brookhaven National Lab,
Minnesota Supercomputing Institute, ALCF, CERN, Lawrence Livermore National Laboratory
Minnesota Supercomputing Institute, ALCF, CERN, Lawrence Livermore National Laboratory, HUNT
- **Online communities**: Pangeo, Quantopian, mybinder.org, MathHub, Open Humans
- **Computing infrastructure providers**: NERSC, San Diego Supercomputing Center, Compute Canada
- **Companies**: Capital One, SANDVIK code, Globus
@@ -130,7 +130,7 @@ level for several years, and makes a number of "default" security decisions that
users.

- For security considerations in the base JupyterHub application,
[see the JupyterHub security page](https://jupyterhub.readthedocs.io/en/stable/reference/websecurity.html).
[see the JupyterHub security page](web-security).
- For security considerations when deploying JupyterHub on Kubernetes, see the
[JupyterHub on Kubernetes security page](https://z2jh.jupyter.org/en/latest/security.html).
@@ -46,13 +46,13 @@ things like inspect other users' servers or modify the user list at runtime).

### JupyterHub Docker container is not accessible at localhost

Even though the command to start your Docker container exposes port 8000
(`docker run -p 8000:8000 -d --name jupyterhub jupyterhub/jupyterhub jupyterhub`),
(`docker run -p 8000:8000 -d --name jupyterhub quay.io/jupyterhub/jupyterhub jupyterhub`),
it is possible that the IP address itself is not accessible/visible. As a result,
when you try http://localhost:8000 in your browser, you are unable to connect
even though the container is running properly. One workaround is to explicitly
tell Jupyterhub to start at `0.0.0.0` which is visible to everyone. Try this
command:
`docker run -p 8000:8000 -d --name jupyterhub jupyterhub/jupyterhub jupyterhub --ip 0.0.0.0 --port 8000`
`docker run -p 8000:8000 -d --name jupyterhub quay.io/jupyterhub/jupyterhub jupyterhub --ip 0.0.0.0 --port 8000`

### How can I kill ports from JupyterHub-managed services that have been orphaned?

@@ -167,7 +167,7 @@ When your whole JupyterHub sits behind an organization proxy (_not_ a reverse pr

### Launching Jupyter Notebooks to run as an externally managed JupyterHub service with the `jupyterhub-singleuser` command returns a `JUPYTERHUB_API_TOKEN` error

[JupyterHub services](https://jupyterhub.readthedocs.io/en/stable/reference/services.html) allow processes to interact with JupyterHub's REST API. Example use-cases include:
{ref}`services` allow processes to interact with JupyterHub's REST API. Example use-cases include:

- **Secure Testing**: provide a canonical Jupyter Notebook for testing production data to reduce the number of entry points into production systems.
- **Grading Assignments**: provide access to shared Jupyter Notebooks that may be used for management tasks such as grading assignments.
@@ -347,12 +347,12 @@ In order to resolve this issue, there are two potential options.

### Where do I find Docker images and Dockerfiles related to JupyterHub?

Docker images can be found at the [JupyterHub organization on DockerHub](https://hub.docker.com/u/jupyterhub/).
The Docker image [jupyterhub/singleuser](https://hub.docker.com/r/jupyterhub/singleuser/)
Docker images can be found at the [JupyterHub organization on Quay.io](https://quay.io/organization/jupyterhub).
The Docker image [jupyterhub/singleuser](https://quay.io/repository/jupyterhub/singleuser)
provides an example single-user notebook server for use with DockerSpawner.

Additional single-user notebook server images can be found at the [Jupyter
organization on DockerHub](https://hub.docker.com/r/jupyter/) and information
organization on Quay.io](https://quay.io/organization/jupyter) and information
about each image at the [jupyter/docker-stacks repo](https://github.com/jupyter/docker-stacks).

### How can I view the logs for JupyterHub or the user's Notebook servers when using the DockerSpawner?
@@ -45,7 +45,7 @@ additional packages.

## Configuring Jupyter and IPython

[Jupyter](https://jupyter-notebook.readthedocs.io/en/stable/config_overview.html)
[Jupyter](https://jupyter-notebook.readthedocs.io/en/stable/configuring/config_overview.html)
and [IPython](https://ipython.readthedocs.io/en/stable/development/config.html)
have their own configuration systems.

@@ -212,13 +212,31 @@ By default, the single-user server launches JupyterLab,
which is based on [Jupyter Server][].

This is the default server when running JupyterHub ≥ 2.0.
To switch to using the legacy Jupyter Notebook server, you can set the `JUPYTERHUB_SINGLEUSER_APP` environment variable
To switch to using the legacy Jupyter Notebook server (notebook < 7.0), you can set the `JUPYTERHUB_SINGLEUSER_APP` environment variable
(in the single-user environment) to:

```bash
export JUPYTERHUB_SINGLEUSER_APP='notebook.notebookapp.NotebookApp'
```

:::{note}

```
JUPYTERHUB_SINGLEUSER_APP='notebook.notebookapp.NotebookApp'
```

is only valid for notebook < 7. notebook v7 is based on jupyter-server,
and the default jupyter-server application must be used.
Selecting the new notebook UI is no longer a matter of selecting the server app to launch,
but only the default URL for users to visit.
To use notebook v7 with JupyterHub, leave the default singleuser app config alone (or specify `JUPYTERHUB_SINGLEUSER_APP=jupyter-server`) and set the default _URL_ for user servers:

```python
c.Spawner.default_url = '/tree/'
```

:::

[jupyter server]: https://jupyter-server.readthedocs.io
[jupyter notebook]: https://jupyter-notebook.readthedocs.io

@@ -33,36 +33,13 @@ such as:

To send requests using the JupyterHub API, you must pass an API token with
the request.

The preferred way of generating an API token is by running:

```bash
openssl rand -hex 32
```

This `openssl` command generates a potential token that can then be
added to JupyterHub using `.api_tokens` configuration setting in
`jupyterhub_config.py`.

```{note}
The api_tokens configuration has been softly deprecated since the introduction of services.
```

Alternatively, you can use the `jupyterhub token` command to generate a token
for a specific hub user by passing the **username**:

```bash
jupyterhub token <username>
```

This command generates a random string to use as a token and registers
it for the given user with the Hub's database.

In [version 0.8.0](changelog), a token request page for
generating an API token is available from the JupyterHub user interface:
While JupyterHub is running, any JupyterHub user can request a token via the `token` page.
This is accessible via a `token` link in the top nav bar from the JupyterHub home page,
or at the URL `/hub/token`.

:::{figure-md}



JupyterHub's API token page
:::
@@ -74,6 +51,40 @@ JupyterHub's token page after successfully requesting a token.

:::

### Register API tokens via configuration

Sometimes, you'll want to pre-generate a token for access to JupyterHub,
typically for use by external services,
so that both JupyterHub and the service have access to the same value.

First, you need to generate a good random secret.
A good way of generating an API token is by running:

```bash
openssl rand -hex 32
```

This `openssl` command generates a random token that can be added to the JupyterHub configuration in `jupyterhub_config.py`.
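If you would rather generate the secret from Python, for instance in a small helper script, the standard library's `secrets` module gives an equivalent value:

```python
# Sketch: generate a 32-byte hex secret, equivalent to `openssl rand -hex 32`
import secrets

api_token = secrets.token_hex(32)
print(api_token)
```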
For external services, this would be registered with JupyterHub via configuration:

```python
c.JupyterHub.services = [
    {
        "name": "my-service",
        "api_token": the_secret_value,
    },
]
```

At this point, requests authenticated with the token will be associated with the service `my-service`.
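As a quick sanity check (the hub URL and token value are placeholders), the service can ask the Hub which identity that token resolves to:

```python
# Sketch: verify which identity a token is associated with
import requests

r = requests.get(
    "https://jupyterhub.example/hub/api/user",
    headers={"Authorization": "token <the_secret_value>"},
)
r.raise_for_status()
print(r.json())  # identity model for the token's owner, here the service "my-service"
```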
```{note}
You can also load additional tokens for users via the `JupyterHub.api_tokens` configuration.

However, this option has been deprecated since the introduction of services.
```

## Assigning permissions to a token

Prior to JupyterHub 2.0, there were two levels of permissions:
@@ -70,7 +70,7 @@ need to configure the options there.

## Docker image

You can use [jupyterhub configurable-http-proxy docker
image](https://hub.docker.com/r/jupyterhub/configurable-http-proxy/)
image](https://quay.io/repository/jupyterhub/configurable-http-proxy)
to run the proxy.

## See also
BIN docs/source/images/shareable_link.webp (new file, 8.1 KiB; binary file not shown)
BIN (binary file changed, 103 KiB to 137 KiB; not shown)
BIN (binary file changed, 102 KiB to 99 KiB; not shown)
BIN (binary file removed, 52 KiB; not shown)
@@ -13,6 +13,7 @@ The files are:
This file is JupyterHub's REST API schema. Both a version and the RBAC
scopes descriptions are updated in it.
"""

import os
from collections import defaultdict
from pathlib import Path
@@ -39,6 +39,15 @@
"reference/server-api.md" "tutorial/server-api.md"
"reference/websecurity.md" "explanation/websecurity.md"

"api/app.md" "reference/api/app.md"
"api/auth.md" "reference/api/auth.md"
"api/index.md" "reference/api/index.md"
"api/proxy.md" "reference/api/proxy.md"
"api/service.md" "reference/api/service.md"
"api/services.auth.md" "reference/api/services.auth.md"
"api/spawner.md" "reference/api/spawner.md"
"api/user.md" "reference/api/user.md"

# -- JupyterHub 4.0 --
# redirects above are up-to-date as of JupyterHub 4.0
# add future redirects below
@@ -30,7 +30,6 @@ popular services:
- Globus
- Google
- MediaWiki
- Okpy
- OpenShift

A [generic implementation](https://github.com/jupyterhub/oauthenticator/blob/master/oauthenticator/generic.py), which you can use for OAuth authentication with any provider, is also available.
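
As a hedged sketch of what using that generic implementation can look like, a deployment might point it at its own identity provider from `jupyterhub_config.py`; every URL, credential, and claim name below is a placeholder, not something taken from this changeset:

```python
# jupyterhub_config.py -- illustrative sketch; all values are placeholders.
from oauthenticator.generic import GenericOAuthenticator

c.JupyterHub.authenticator_class = GenericOAuthenticator
c.GenericOAuthenticator.client_id = "my-client-id"
c.GenericOAuthenticator.client_secret = "my-client-secret"
c.GenericOAuthenticator.oauth_callback_url = "https://hub.example.org/hub/oauth_callback"
c.GenericOAuthenticator.authorize_url = "https://idp.example.org/oauth2/authorize"
c.GenericOAuthenticator.token_url = "https://idp.example.org/oauth2/token"
c.GenericOAuthenticator.userdata_url = "https://idp.example.org/oauth2/userinfo"
c.GenericOAuthenticator.username_claim = "preferred_username"
```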
@@ -8,11 +8,163 @@ command line for details.

## [Unreleased]

## 4.1

### 4.1.1 - 2024-03-23

4.1.1 fixes a compatibility regression in 4.1.0 for some extensions,
particularly jupyter-server-proxy.

([full changelog](https://github.com/jupyterhub/jupyterhub/compare/4.1.0...4.1.1))

#### Bugs fixed

- allow subclasses to override xsrf check [#4745](https://github.com/jupyterhub/jupyterhub/pull/4745) ([@minrk](https://github.com/minrk), [@consideRatio](https://github.com/consideRatio))

#### Contributors to this release

The following people contributed discussions, new ideas, code and documentation contributions, and review.
See [our definition of contributors](https://github-activity.readthedocs.io/en/latest/#how-does-this-tool-define-contributions-in-the-reports).

([GitHub contributors page for this release](https://github.com/jupyterhub/jupyterhub/graphs/contributors?from=2024-03-20&to=2024-03-23&type=c))

@consideRatio ([activity](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3AconsideRatio+updated%3A2024-03-20..2024-03-23&type=Issues)) | @minrk ([activity](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aminrk+updated%3A2024-03-20..2024-03-23&type=Issues))

### 4.1.0 - 2024-03-20

JupyterHub 4.1 is a security release, fixing [CVE-2024-28233].
All JupyterHub deployments are encouraged to upgrade,
especially those with other user content on peer domains to JupyterHub.

As always, JupyterHub deployments are especially encouraged to enable per-user domains if protecting users from each other is a concern.

For more information on securely deploying JupyterHub, see the [web security documentation](web-security).
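
Per-user domains are enabled from `jupyterhub_config.py`; the sketch below is an illustration only, with a placeholder hostname, and is not part of this release note:

```python
# jupyterhub_config.py -- illustrative sketch; the hostname is a placeholder.
# Per-user subdomains need a wildcard DNS entry and wildcard TLS certificate.
c.JupyterHub.subdomain_host = "https://hub.example.org"
```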

[CVE-2024-28233]: https://github.com/jupyterhub/jupyterhub/security/advisories/GHSA-7r3h-4ph8-w38g

([full changelog](https://github.com/jupyterhub/jupyterhub/compare/4.0.2...4.1.0))
|
||||
|
||||
#### Enhancements made
|
||||
|
||||
- Backport PR #4628 on branch 4.x (Include LDAP groups in local spawner gids) [#4735](https://github.com/jupyterhub/jupyterhub/pull/4735) ([@minrk](https://github.com/minrk))
|
||||
- Backport PR #4561 on branch 4.x (Improve debugging when waiting for servers) [#4714](https://github.com/jupyterhub/jupyterhub/pull/4714) ([@minrk](https://github.com/minrk))
|
||||
- Backport PR #4563 on branch 4.x (only set 'domain' field on session-id cookie) [#4707](https://github.com/jupyterhub/jupyterhub/pull/4707) ([@minrk](https://github.com/minrk))
|
||||
|
||||
#### Bugs fixed
|
||||
|
||||
- Backport PR #4733 on branch 4.x (Catch ValueError while waiting for server to be reachable) [#4734](https://github.com/jupyterhub/jupyterhub/pull/4734) ([@minrk](https://github.com/minrk))
|
||||
- Backport PR #4679 on branch 4.x (Unescape jinja username) [#4705](https://github.com/jupyterhub/jupyterhub/pull/4705) ([@minrk](https://github.com/minrk))
|
||||
- Backport PR #4630: avoid setting unused oauth state cookies on API requests [#4697](https://github.com/jupyterhub/jupyterhub/pull/4697) ([@minrk](https://github.com/minrk))
|
||||
- Backport PR #4632: simplify, avoid errors in parsing accept headers [#4696](https://github.com/jupyterhub/jupyterhub/pull/4696) ([@minrk](https://github.com/minrk))
|
||||
- Backport PR #4677 on branch 4.x (Improve validation, docs for token.expires_in) [#4692](https://github.com/jupyterhub/jupyterhub/pull/4692) ([@minrk](https://github.com/minrk))
|
||||
- Backport PR #4570 on branch 4.x (fix mutation of frozenset in scope intersection) [#4691](https://github.com/jupyterhub/jupyterhub/pull/4691) ([@minrk](https://github.com/minrk))
|
||||
- Backport PR #4562 on branch 4.x (Use `user.stop` to cleanup spawners that stopped while Hub was down) [#4690](https://github.com/jupyterhub/jupyterhub/pull/4690) ([@minrk](https://github.com/minrk))
|
||||
- Backport PR #4542 on branch 4.x (Fix include_stopped_servers in paginated next_url) [#4689](https://github.com/jupyterhub/jupyterhub/pull/4689) ([@minrk](https://github.com/minrk))
|
||||
- Backport PR #4651 on branch 4.x (avoid attempting to patch removed IPythonHandler with notebook v7) [#4688](https://github.com/jupyterhub/jupyterhub/pull/4688) ([@minrk](https://github.com/minrk))
|
||||
- Backport PR #4560 on branch 4.x (singleuser extension: persist token from ?token=... url in cookie) [#4687](https://github.com/jupyterhub/jupyterhub/pull/4687) ([@minrk](https://github.com/minrk))
|
||||
|
||||
#### Maintenance and upkeep improvements
|
||||
|
||||
- Backport quay.io publishing [#4698](https://github.com/jupyterhub/jupyterhub/pull/4698) ([@minrk](https://github.com/minrk))
|
||||
- Backport PR #4617: try to improve reliability of test_external_proxy [#4695](https://github.com/jupyterhub/jupyterhub/pull/4695) ([@minrk](https://github.com/minrk))
|
||||
- Backport PR #4618 on branch 4.x (browser test: wait for token request to finish before reloading) [#4694](https://github.com/jupyterhub/jupyterhub/pull/4694) ([@minrk](https://github.com/minrk))
|
||||
- preparing 4.x branch [#4685](https://github.com/jupyterhub/jupyterhub/pull/4685) ([@minrk](https://github.com/minrk), [@consideRatio](https://github.com/consideRatio))
|
||||
|
||||
#### Contributors to this release
|
||||
|
||||
The following people contributed discussions, new ideas, code and documentation contributions, and review.
|
||||
See [our definition of contributors](https://github-activity.readthedocs.io/en/latest/#how-does-this-tool-define-contributions-in-the-reports).
|
||||
|
||||
([GitHub contributors page for this release](https://github.com/jupyterhub/jupyterhub/graphs/contributors?from=2023-08-10&to=2024-03-19&type=c))
|
||||
|
||||
@Achele ([activity](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3AAchele+updated%3A2023-08-10..2024-03-19&type=Issues)) | @akashthedeveloper ([activity](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aakashthedeveloper+updated%3A2023-08-10..2024-03-19&type=Issues)) | @balajialg ([activity](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Abalajialg+updated%3A2023-08-10..2024-03-19&type=Issues)) | @BhavyaT-135 ([activity](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3ABhavyaT-135+updated%3A2023-08-10..2024-03-19&type=Issues)) | @blink1073 ([activity](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Ablink1073+updated%3A2023-08-10..2024-03-19&type=Issues)) | @consideRatio ([activity](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3AconsideRatio+updated%3A2023-08-10..2024-03-19&type=Issues)) | @fcollonval ([activity](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Afcollonval+updated%3A2023-08-10..2024-03-19&type=Issues)) | @I-Am-D-B ([activity](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3AI-Am-D-B+updated%3A2023-08-10..2024-03-19&type=Issues)) | @jakirkham ([activity](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Ajakirkham+updated%3A2023-08-10..2024-03-19&type=Issues)) | @ktaletsk ([activity](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aktaletsk+updated%3A2023-08-10..2024-03-19&type=Issues)) | @kzgrzendek ([activity](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Akzgrzendek+updated%3A2023-08-10..2024-03-19&type=Issues)) | @lumberbot-app ([activity](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Alumberbot-app+updated%3A2023-08-10..2024-03-19&type=Issues)) | @manics ([activity](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Amanics+updated%3A2023-08-10..2024-03-19&type=Issues)) | @mbiette ([activity](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Ambiette+updated%3A2023-08-10..2024-03-19&type=Issues)) | @minrk ([activity](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aminrk+updated%3A2023-08-10..2024-03-19&type=Issues)) | @rcthomas ([activity](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Arcthomas+updated%3A2023-08-10..2024-03-19&type=Issues)) | @ryanlovett ([activity](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aryanlovett+updated%3A2023-08-10..2024-03-19&type=Issues)) | @sgaist ([activity](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Asgaist+updated%3A2023-08-10..2024-03-19&type=Issues)) | @shubham0473 ([activity](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Ashubham0473+updated%3A2023-08-10..2024-03-19&type=Issues)) | @Temidayo32 ([activity](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3ATemidayo32+updated%3A2023-08-10..2024-03-19&type=Issues)) | @willingc ([activity](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Awillingc+updated%3A2023-08-10..2024-03-19&type=Issues)) | @yuvipanda ([activity](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Ayuvipanda+updated%3A2023-08-10..2024-03-19&type=Issues))
|
||||
|
||||
## 4.0
|
||||
|
||||
### 4.0.2 - 2023-08-10
|
||||
|
||||
([full changelog](https://github.com/jupyterhub/jupyterhub/compare/4.0.1...4.0.2))
|
||||
|
||||
#### Enhancements made
|
||||
|
||||
- avoid counting failed requests to not-running servers as 'activity' [#4491](https://github.com/jupyterhub/jupyterhub/pull/4491) ([@minrk](https://github.com/minrk), [@consideRatio](https://github.com/consideRatio))
|
||||
- improve permission-denied errors for various cases [#4489](https://github.com/jupyterhub/jupyterhub/pull/4489) ([@minrk](https://github.com/minrk), [@consideRatio](https://github.com/consideRatio))
|
||||
|
||||
#### Bugs fixed
|
||||
|
||||
- set root_dir when using singleuser extension [#4503](https://github.com/jupyterhub/jupyterhub/pull/4503) ([@minrk](https://github.com/minrk), [@consideRatio](https://github.com/consideRatio), [@manics](https://github.com/manics))
|
||||
- Allow setting custom log_function in tornado_settings in SingleUserServer [#4475](https://github.com/jupyterhub/jupyterhub/pull/4475) ([@grios-stratio](https://github.com/grios-stratio), [@minrk](https://github.com/minrk))
|
||||
|
||||
#### Documentation improvements
|
||||
|
||||
- doc: update notebook config URL [#4523](https://github.com/jupyterhub/jupyterhub/pull/4523) ([@minrk](https://github.com/minrk), [@manics](https://github.com/manics))
|
||||
- document how to use notebook v7 with jupyterhub [#4522](https://github.com/jupyterhub/jupyterhub/pull/4522) ([@minrk](https://github.com/minrk), [@manics](https://github.com/manics))
|
||||
|
||||
#### Contributors to this release
|
||||
|
||||
The following people contributed discussions, new ideas, code and documentation contributions, and review.
|
||||
See [our definition of contributors](https://github-activity.readthedocs.io/en/latest/#how-does-this-tool-define-contributions-in-the-reports).
|
||||
|
||||
([GitHub contributors page for this release](https://github.com/jupyterhub/jupyterhub/graphs/contributors?from=2023-06-08&to=2023-08-10&type=c))
|
||||
|
||||
@agelosnm ([activity](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aagelosnm+updated%3A2023-06-08..2023-08-10&type=Issues)) | @consideRatio ([activity](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3AconsideRatio+updated%3A2023-06-08..2023-08-10&type=Issues)) | @diocas ([activity](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Adiocas+updated%3A2023-06-08..2023-08-10&type=Issues)) | @grios-stratio ([activity](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Agrios-stratio+updated%3A2023-06-08..2023-08-10&type=Issues)) | @jhgoebbert ([activity](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Ajhgoebbert+updated%3A2023-06-08..2023-08-10&type=Issues)) | @jtpio ([activity](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Ajtpio+updated%3A2023-06-08..2023-08-10&type=Issues)) | @kosmonavtus ([activity](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Akosmonavtus+updated%3A2023-06-08..2023-08-10&type=Issues)) | @kreuzert ([activity](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Akreuzert+updated%3A2023-06-08..2023-08-10&type=Issues)) | @manics ([activity](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Amanics+updated%3A2023-06-08..2023-08-10&type=Issues)) | @martinRenou ([activity](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3AmartinRenou+updated%3A2023-06-08..2023-08-10&type=Issues)) | @minrk ([activity](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aminrk+updated%3A2023-06-08..2023-08-10&type=Issues)) | @opoplawski ([activity](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aopoplawski+updated%3A2023-06-08..2023-08-10&type=Issues)) | @Ph0tonic ([activity](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3APh0tonic+updated%3A2023-06-08..2023-08-10&type=Issues)) | @sgaist ([activity](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Asgaist+updated%3A2023-06-08..2023-08-10&type=Issues)) | @trungleduc ([activity](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Atrungleduc+updated%3A2023-06-08..2023-08-10&type=Issues)) | @yuvipanda ([activity](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Ayuvipanda+updated%3A2023-06-08..2023-08-10&type=Issues))
|
||||
|
||||
### 4.0.1 - 2023-06-08
|
||||
|
||||
([full changelog](https://github.com/jupyterhub/jupyterhub/compare/4.0.0...4.0.1))
|
||||
|
||||
#### Enhancements made
|
||||
|
||||
- Delete server button on admin page [#4457](https://github.com/jupyterhub/jupyterhub/pull/4457) ([@diocas](https://github.com/diocas), [@minrk](https://github.com/minrk))
|
||||
|
||||
#### Bugs fixed
|
||||
|
||||
- Abort informatively on unrecognized CLI options [#4467](https://github.com/jupyterhub/jupyterhub/pull/4467) ([@minrk](https://github.com/minrk), [@consideRatio](https://github.com/consideRatio))
|
||||
- Add xsrf to custom_html template context [#4464](https://github.com/jupyterhub/jupyterhub/pull/4464) ([@opoplawski](https://github.com/opoplawski), [@minrk](https://github.com/minrk))
|
||||
- preserve CLI > env priority config in jupyterhub-singleuser extension [#4451](https://github.com/jupyterhub/jupyterhub/pull/4451) ([@minrk](https://github.com/minrk), [@consideRatio](https://github.com/consideRatio), [@timeu](https://github.com/timeu), [@rcthomas](https://github.com/rcthomas))
|
||||
|
||||
#### Maintenance and upkeep improvements
|
||||
|
||||
- Fix link to collaboration accounts doc in example [#4448](https://github.com/jupyterhub/jupyterhub/pull/4448) ([@minrk](https://github.com/minrk))
|
||||
- Remove Dockerfile.alpine [#4444](https://github.com/jupyterhub/jupyterhub/pull/4444) ([@manics](https://github.com/manics), [@minrk](https://github.com/minrk))
|
||||
- Update jsx dependencies as much as possible [#4443](https://github.com/jupyterhub/jupyterhub/pull/4443) ([@manics](https://github.com/manics), [@minrk](https://github.com/minrk), [@consideRatio](https://github.com/consideRatio))
|
||||
- Remove unused admin JS code [#4438](https://github.com/jupyterhub/jupyterhub/pull/4438) ([@yuvipanda](https://github.com/yuvipanda), [@minrk](https://github.com/minrk))
|
||||
- Finish migrating browser tests from selenium to playwright [#4435](https://github.com/jupyterhub/jupyterhub/pull/4435) ([@mouse1203](https://github.com/mouse1203), [@minrk](https://github.com/minrk), [@consideRatio](https://github.com/consideRatio))
|
||||
- Migrate some tests from selenium to playwright [#4431](https://github.com/jupyterhub/jupyterhub/pull/4431) ([@mouse1203](https://github.com/mouse1203), [@minrk](https://github.com/minrk))
|
||||
- Begin setup of playwright tests [#4420](https://github.com/jupyterhub/jupyterhub/pull/4420) ([@mouse1203](https://github.com/mouse1203), [@minrk](https://github.com/minrk), [@manics](https://github.com/manics))
|
||||
|
||||
#### Documentation improvements
|
||||
|
||||
- Reorder token request docs [#4463](https://github.com/jupyterhub/jupyterhub/pull/4463) ([@minrk](https://github.com/minrk), [@manics](https://github.com/manics))
|
||||
- 'servers' should be a dict of dicts, not a list of dicts in rest-api.yml [#4458](https://github.com/jupyterhub/jupyterhub/pull/4458) ([@tfmark](https://github.com/tfmark), [@minrk](https://github.com/minrk))
|
||||
- Config reference: link to nicer(?) API docs first [#4456](https://github.com/jupyterhub/jupyterhub/pull/4456) ([@manics](https://github.com/manics), [@minrk](https://github.com/minrk), [@consideRatio](https://github.com/consideRatio))
|
||||
- Add CERN to Gallery of JupyterHub Deployments [#4454](https://github.com/jupyterhub/jupyterhub/pull/4454) ([@goseind](https://github.com/goseind), [@minrk](https://github.com/minrk), [@consideRatio](https://github.com/consideRatio))
|
||||
- Fix "Thanks" typo. [#4441](https://github.com/jupyterhub/jupyterhub/pull/4441) ([@ryanlovett](https://github.com/ryanlovett), [@minrk](https://github.com/minrk))
|
||||
- add HUNT into research institutions [#4432](https://github.com/jupyterhub/jupyterhub/pull/4432) ([@matuskosut](https://github.com/matuskosut), [@minrk](https://github.com/minrk), [@manics](https://github.com/manics))
|
||||
- docs: fix missing redirects for api to reference/api [#4429](https://github.com/jupyterhub/jupyterhub/pull/4429) ([@consideRatio](https://github.com/consideRatio), [@minrk](https://github.com/minrk), [@manics](https://github.com/manics))
|
||||
- update sharing faq for 2023 [#4428](https://github.com/jupyterhub/jupyterhub/pull/4428) ([@minrk](https://github.com/minrk))
|
||||
- Fix some public URL links within the docs [#4427](https://github.com/jupyterhub/jupyterhub/pull/4427) ([@minrk](https://github.com/minrk), [@consideRatio](https://github.com/consideRatio))
|
||||
- add upgrade note for 4.0 to changelog [#4426](https://github.com/jupyterhub/jupyterhub/pull/4426) ([@minrk](https://github.com/minrk), [@consideRatio](https://github.com/consideRatio))
|
||||
|
||||
#### Contributors to this release
|
||||
|
||||
The following people contributed discussions, new ideas, code and documentation contributions, and review.
|
||||
See [our definition of contributors](https://github-activity.readthedocs.io/en/latest/#how-does-this-tool-define-contributions-in-the-reports).
|
||||
|
||||
([GitHub contributors page for this release](https://github.com/jupyterhub/jupyterhub/graphs/contributors?from=2023-04-20&to=2023-06-07&type=c))
|
||||
|
||||
@consideRatio ([activity](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3AconsideRatio+updated%3A2023-04-20..2023-06-07&type=Issues)) | @diocas ([activity](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Adiocas+updated%3A2023-04-20..2023-06-07&type=Issues)) | @echarles ([activity](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aecharles+updated%3A2023-04-20..2023-06-07&type=Issues)) | @goseind ([activity](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Agoseind+updated%3A2023-04-20..2023-06-07&type=Issues)) | @hsadia538 ([activity](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Ahsadia538+updated%3A2023-04-20..2023-06-07&type=Issues)) | @mahamtariq58 ([activity](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Amahamtariq58+updated%3A2023-04-20..2023-06-07&type=Issues)) | @manics ([activity](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Amanics+updated%3A2023-04-20..2023-06-07&type=Issues)) | @matuskosut ([activity](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Amatuskosut+updated%3A2023-04-20..2023-06-07&type=Issues)) | @minrk ([activity](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aminrk+updated%3A2023-04-20..2023-06-07&type=Issues)) | @mouse1203 ([activity](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Amouse1203+updated%3A2023-04-20..2023-06-07&type=Issues)) | @opoplawski ([activity](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aopoplawski+updated%3A2023-04-20..2023-06-07&type=Issues)) | @rcthomas ([activity](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Arcthomas+updated%3A2023-04-20..2023-06-07&type=Issues)) | @ryanlovett ([activity](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aryanlovett+updated%3A2023-04-20..2023-06-07&type=Issues)) | @tfmark ([activity](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Atfmark+updated%3A2023-04-20..2023-06-07&type=Issues)) | @timeu ([activity](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Atimeu+updated%3A2023-04-20..2023-06-07&type=Issues)) | @yuvipanda ([activity](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Ayuvipanda+updated%3A2023-04-20..2023-06-07&type=Issues))
|
||||
|
||||
### 4.0.0 - 2023-04-20

4.0 is a major release, but a small one.

:::{admonition} Upgrade note

Upgrading from 3.1 to 4.0 should require no additional action beyond running `jupyterhub --upgrade-db` to upgrade the database schema after upgrading the package version.
It is otherwise a regular jupyterhub [upgrade](upgrading-jupyterhub).
:::

There are three major changes that _should_ be invisible to most users:

1. Groups can now have 'properties', editable via the admin page, which can be used by Spawners for their operations.
@@ -1372,7 +1524,7 @@ Thanks to everyone who has contributed to this release!
|
||||
- `JupyterHub.init_spawners_timeout` is introduced to combat slow startups on large JupyterHub deployments [#2721](https://github.com/jupyterhub/jupyterhub/pull/2721) ([@minrk](https://github.com/minrk))
|
||||
- The configuration `uids` for local authenticators is added to consistently assign users UNIX id's between installations [#2687](https://github.com/jupyterhub/jupyterhub/pull/2687) ([@rgerkin](https://github.com/rgerkin))
|
||||
- `JupyterHub.activity_resolution` is introduced with a default value of 30s improving performance by not updating the database with user activity too often [#2605](https://github.com/jupyterhub/jupyterhub/pull/2605) ([@minrk](https://github.com/minrk))
|
||||
- [HubAuth](https://jupyterhub.readthedocs.io/en/stable/api/services.auth.html#jupyterhub.services.auth.HubAuth)'s SSL configuration can now be set through environment variables [#2588](https://github.com/jupyterhub/jupyterhub/pull/2588) ([@cmd-ntrf](https://github.com/cmd-ntrf))
|
||||
- [HubAuth](jupyterhub.services.auth.HubAuth)'s SSL configuration can now be set through environment variables [#2588](https://github.com/jupyterhub/jupyterhub/pull/2588) ([@cmd-ntrf](https://github.com/cmd-ntrf))
|
||||
- Expose spawner.user_options in REST API. [#2755](https://github.com/jupyterhub/jupyterhub/pull/2755) ([@danielballan](https://github.com/danielballan))
|
||||
- add block for scripts included in head [#2828](https://github.com/jupyterhub/jupyterhub/pull/2828) ([@bitnik](https://github.com/bitnik))
|
||||
- Instrument JupyterHub to record events with jupyter_telemetry [Part II] [#2698](https://github.com/jupyterhub/jupyterhub/pull/2698) ([@Zsailer](https://github.com/Zsailer))
|
||||
|
@@ -14,6 +14,12 @@ section, the `jupyterhub_config.py` can be automatically generated via
> jupyterhub --generate-config
> ```

Most of this information is available in a nicer format in:

- [](./api/app.md)
- [](./api/auth.md)
- [](./api/spawner.md)

The following contains the output of that command for reference.

```{eval-rst}
@@ -16,8 +16,6 @@ Please submit pull requests to update information or to add new institutions or

- [BIDS - Berkeley Institute for Data Science](https://bids.berkeley.edu/)

  - [Teaching with Jupyter notebooks and JupyterHub](https://bids.berkeley.edu/resources/videos/teaching-ipythonjupyter-notebooks-and-jupyterhub)

- [Data 8](http://data8.org/)

  - [GitHub organization](https://github.com/data-8)

@@ -63,6 +61,15 @@ easy to do with RStudio too.

- [jupyterhub-deploy-teaching](https://github.com/jupyterhub/jupyterhub-deploy-teaching) based on work by Brian Granger for Cal Poly's Data Science 301 Course

### CERN

[CERN](https://home.cern/), also known as the European Organization for Nuclear Research, is a world-renowned scientific research centre and the home of the Large Hadron Collider (LHC).

Within CERN, there are two noteworthy JupyterHub deployments in operation:

- [SWAN](https://swan.web.cern.ch/swan/), which stands for Service for Web based Analysis, serves as an interactive data analysis platform primarily utilized at CERN.
- [VRE](https://vre-hub.github.io/), which stands for Virtual Research Environment, is an analysis platform developed within the [EOSC Project](https://eoscfuture.eu/) to cater to the needs of scientific communities involved in European projects.

### Chameleon

[Chameleon](https://www.chameleoncloud.org) is an NSF-funded configurable experimental environment for large-scale computer science systems research with [bare metal reconfigurability](https://chameleoncloud.readthedocs.io/en/latest/technical/baremetal.html). Chameleon users utilize JupyterHub to document and reproduce their complex CISE and networking experiments.
@@ -112,7 +112,6 @@ popular services:
- [Globus](https://oauthenticator.readthedocs.io/en/latest/reference/api/gen/oauthenticator.globus.html)
- [Google](https://oauthenticator.readthedocs.io/en/latest/reference/api/gen/oauthenticator.google.html)
- [MediaWiki](https://oauthenticator.readthedocs.io/en/latest/reference/api/gen/oauthenticator.mediawiki.html)
- [Okpy](https://oauthenticator.readthedocs.io/en/latest/reference/api/gen/oauthenticator.okpy.html)
- [OpenShift](https://oauthenticator.readthedocs.io/en/latest/reference/api/gen/oauthenticator.openshift.html)

A [generic implementation](https://oauthenticator.readthedocs.io/en/latest/reference/api/gen/oauthenticator.generic.html), which you can use for OAuth authentication
@@ -39,7 +39,7 @@ openssl rand -hex 32
In [version 0.8.0](changelog), a token request page for
generating an API token is available from the JupyterHub user interface:

![token request page](../images/token-request.png)
![token request page](../images/token-page.png)

![token-request-success](../images/token-request-success.png)
@@ -1,9 +1,9 @@
# Install JupyterHub with Docker

The JupyterHub [docker image](https://hub.docker.com/r/jupyterhub/jupyterhub/) is the fastest way to set up JupyterHub in your local development environment.
The JupyterHub [docker image](https://quay.io/repository/jupyterhub/jupyterhub) is the fastest way to set up JupyterHub in your local development environment.

:::{note}
This `jupyterhub/jupyterhub` docker image is only an image for running
This `quay.io/jupyterhub/jupyterhub` docker image is only an image for running
the Hub service itself. It does not provide the other Jupyter components,
such as Notebook installation, which are needed by the single-user servers.
To run the single-user servers, which may be on the same system as the Hub or
@@ -24,7 +24,7 @@ You should have [Docker] installed on a Linux/Unix based system.
To pull the latest JupyterHub image and start the `jupyterhub` container, run this command in your terminal.

```
docker run -d -p 8000:8000 --name jupyterhub jupyterhub/jupyterhub jupyterhub
docker run -d -p 8000:8000 --name jupyterhub quay.io/jupyterhub/jupyterhub jupyterhub
```

This command exposes the JupyterHub container on port 8000. Navigate to `http://localhost:8000` in a web browser to access the JupyterHub console.
@@ -2,6 +2,7 @@
Example for a Spawner.pre_spawn_hook
create a directory for the user before the spawner starts
"""

# pylint: disable=import-error
import os
import shutil
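
A hedged sketch of how such a hook can be wired up in `jupyterhub_config.py` (the directory layout and function name are assumptions for illustration, not this example's actual code):

```python
# jupyterhub_config.py -- illustrative sketch; paths are placeholders.
import os


def create_dir_hook(spawner):
    """Create a per-user directory before the single-user server starts."""
    username = spawner.user.name
    volume_path = os.path.join("/volumes/jupyterhub", username)
    os.makedirs(volume_path, mode=0o755, exist_ok=True)


c.Spawner.pre_spawn_hook = create_dir_hook
```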

@@ -2,4 +2,4 @@

An example of enabling real-time collaboration with dedicated accounts for collaborations.

See [collaboration account docs](docs/source/tutorial/collaboration-accounts.md) for details.
See [collaboration account docs](../../docs/source/tutorial/collaboration-users.md) for details.

@@ -3,6 +3,7 @@
Implements OAuth handshake manually
so all URLs and requests necessary for OAuth with JupyterHub should be in one place
"""

import json
import os
from urllib.parse import urlencode, urlparse

@@ -1,4 +1,4 @@
FROM jupyterhub/jupyterhub
FROM quay.io/jupyterhub/jupyterhub

# Create test user (PAM auth) and install single-user Jupyter
RUN useradd testuser --create-home --shell /bin/bash

@@ -4,6 +4,7 @@ This example service serves `/services/whoami-oauth/`,
authenticated with the Hub,
showing the user their own info.
"""

import json
import os
from urllib.parse import urlparse
@@ -4,6 +4,7 @@ This serves `/services/whoami-api/`, authenticated with the Hub, showing the use

HubAuthenticated only supports token-based access.
"""

import json
import os
from urllib.parse import urlparse
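
For context, a minimal sketch of the kind of handler this example builds around `HubAuthenticated` (the handler name and response body are assumptions for illustration, not this example's actual code):

```python
# Illustrative sketch only -- not the example's source.
import json

from jupyterhub.services.auth import HubAuthenticated
from tornado import web


class WhoAmIHandler(HubAuthenticated, web.RequestHandler):
    @web.authenticated
    def get(self):
        # current_user is the Hub user model resolved from the request's token
        user_model = self.get_current_user()
        self.set_header("Content-Type", "application/json")
        self.write(json.dumps(user_model, indent=1, sort_keys=True))
```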

@@ -1,6 +1,7 @@
"""
Example JupyterHub config allowing users to specify environment variables and notebook-server args
"""

import shlex

from jupyterhub.spawner import LocalProcessSpawner
@@ -25,50 +25,49 @@
|
||||
"moduleNameMapper": {
|
||||
"\\.(jpg|jpeg|png|gif|eot|otf|webp|svg|ttf|woff|woff2|mp4|webm|wav|mp3|m4a|aac|oga)$": "<rootDir>/__mocks__/fileMock.js",
|
||||
"\\.(css|less)$": "identity-obj-proxy"
|
||||
}
|
||||
},
|
||||
"testEnvironment": "jsdom"
|
||||
},
|
||||
"dependencies": {
|
||||
"bootstrap": "^4.5.3",
|
||||
"history": "^5.0.0",
|
||||
"lodash.debounce": "^4.0.8",
|
||||
"prop-types": "^15.7.2",
|
||||
"react": "^17.0.1",
|
||||
"react-bootstrap": "^2.1.1",
|
||||
"react-dom": "^17.0.1",
|
||||
"react-icons": "^4.1.0",
|
||||
"react-multi-select-component": "^3.0.7",
|
||||
"react-redux": "^7.2.2",
|
||||
"react-router": "^5.2.0",
|
||||
"react-router-dom": "^5.2.0",
|
||||
"recompose": "npm:react-recompose@^0.31.2",
|
||||
"redux": "^4.0.5",
|
||||
"regenerator-runtime": "^0.13.9"
|
||||
"bootstrap": "^5.2.3",
|
||||
"history": "^5.3.0",
|
||||
"lodash": "^4.17.21",
|
||||
"prop-types": "^15.8.1",
|
||||
"react": "^17.0.2",
|
||||
"react-bootstrap": "^2.7.4",
|
||||
"react-dom": "^17.0.2",
|
||||
"react-icons": "^4.8.0",
|
||||
"react-multi-select-component": "^4.3.4",
|
||||
"react-redux": "^7.2.8",
|
||||
"react-router-dom": "^5.3.4",
|
||||
"recompose": "npm:react-recompose@^0.33.0",
|
||||
"redux": "^4.2.1",
|
||||
"regenerator-runtime": "^0.13.11"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@babel/core": "^7.12.3",
|
||||
"@babel/preset-env": "^7.12.11",
|
||||
"@babel/preset-react": "^7.12.10",
|
||||
"@testing-library/jest-dom": "^5.15.1",
|
||||
"@testing-library/react": "^12.1.2",
|
||||
"@babel/core": "^7.21.4",
|
||||
"@babel/preset-env": "^7.21.4",
|
||||
"@babel/preset-react": "^7.18.6",
|
||||
"@testing-library/jest-dom": "^5.16.5",
|
||||
"@testing-library/react": "^12.1.5",
|
||||
"@testing-library/user-event": "^13.5.0",
|
||||
"@webpack-cli/serve": "^1.7.0",
|
||||
"@wojtekmaj/enzyme-adapter-react-17": "^0.6.5",
|
||||
"babel-jest": "^26.6.3",
|
||||
"babel-loader": "^8.2.1",
|
||||
"css-loader": "^5.0.1",
|
||||
"enzyme": "^3.11.0",
|
||||
"eslint": "^7.18.0",
|
||||
"eslint-plugin-prettier": "^3.3.1",
|
||||
"eslint-plugin-react": "^7.22.0",
|
||||
"eslint-plugin-unused-imports": "^1.1.1",
|
||||
"@webpack-cli/serve": "^2.0.1",
|
||||
"babel-jest": "^29.5.0",
|
||||
"babel-loader": "^9.1.2",
|
||||
"css-loader": "^6.7.3",
|
||||
"eslint": "^8.38.0",
|
||||
"eslint-plugin-prettier": "^4.2.1",
|
||||
"eslint-plugin-react": "^7.32.2",
|
||||
"eslint-plugin-unused-imports": "^2.0.0",
|
||||
"file-loader": "^6.2.0",
|
||||
"identity-obj-proxy": "^3.0.0",
|
||||
"jest": "^26.6.3",
|
||||
"prettier": "^2.2.1",
|
||||
"sinon": "^13.0.1",
|
||||
"style-loader": "^2.0.0",
|
||||
"webpack": "^5.76.0",
|
||||
"webpack-cli": "^4.10.0",
|
||||
"webpack-dev-server": "^4.9.3"
|
||||
"jest": "^29.5.0",
|
||||
"jest-environment-jsdom": "^29.5.0",
|
||||
"prettier": "^2.8.7",
|
||||
"sinon": "^15.0.3",
|
||||
"style-loader": "^3.3.2",
|
||||
"webpack": "^5.79.0",
|
||||
"webpack-cli": "^5.0.1",
|
||||
"webpack-dev-server": "^4.13.3"
|
||||
}
|
||||
}
|
||||
|
@@ -74,6 +74,7 @@ const ServerDashboard = (props) => {
|
||||
shutdownHub,
|
||||
startServer,
|
||||
stopServer,
|
||||
deleteServer,
|
||||
startAll,
|
||||
stopAll,
|
||||
history,
|
||||
@@ -167,6 +168,50 @@ const ServerDashboard = (props) => {
|
||||
);
|
||||
};
|
||||
|
||||
const DeleteServerButton = ({ serverName, userName }) => {
|
||||
if (serverName === "") {
|
||||
return null;
|
||||
}
|
||||
var [isDisabled, setIsDisabled] = useState(false);
|
||||
return (
|
||||
<button
|
||||
className="btn btn-danger btn-xs stop-button"
|
||||
// It's not possible to delete unnamed servers
|
||||
disabled={isDisabled}
|
||||
onClick={() => {
|
||||
setIsDisabled(true);
|
||||
deleteServer(userName, serverName)
|
||||
.then((res) => {
|
||||
if (res.status < 300) {
|
||||
updateUsers(...slice)
|
||||
.then((data) => {
|
||||
dispatchPageUpdate(
|
||||
data.items,
|
||||
data._pagination,
|
||||
name_filter,
|
||||
);
|
||||
})
|
||||
.catch(() => {
|
||||
setIsDisabled(false);
|
||||
setErrorAlert(`Failed to update users list.`);
|
||||
});
|
||||
} else {
|
||||
setErrorAlert(`Failed to delete server.`);
|
||||
setIsDisabled(false);
|
||||
}
|
||||
return res;
|
||||
})
|
||||
.catch(() => {
|
||||
setErrorAlert(`Failed to delete server.`);
|
||||
setIsDisabled(false);
|
||||
});
|
||||
}}
|
||||
>
|
||||
Delete Server
|
||||
</button>
|
||||
);
|
||||
};
|
||||
|
||||
const StartServerButton = ({ serverName, userName }) => {
|
||||
var [isDisabled, setIsDisabled] = useState(false);
|
||||
return (
|
||||
@@ -278,7 +323,11 @@ const ServerDashboard = (props) => {
|
||||
const userServerName = user.name + serverNameDash;
|
||||
const open = collapseStates[userServerName] || false;
|
||||
return [
|
||||
<tr key={`${userServerName}-row`} className="user-row">
|
||||
<tr
|
||||
key={`${userServerName}-row`}
|
||||
data-testid={`user-row-${userServerName}`}
|
||||
className="user-row"
|
||||
>
|
||||
<td data-testid="user-row-name">
|
||||
<span>
|
||||
<Button
|
||||
@@ -324,6 +373,10 @@ const ServerDashboard = (props) => {
|
||||
userName={user.name}
|
||||
style={{ marginRight: 20 }}
|
||||
/>
|
||||
<DeleteServerButton
|
||||
serverName={server.name}
|
||||
userName={user.name}
|
||||
/>
|
||||
<a
|
||||
href={`${base_url}spawn/${user.name}${
|
||||
server.name ? "/" + server.name : ""
|
||||
@@ -582,6 +635,7 @@ ServerDashboard.propTypes = {
|
||||
shutdownHub: PropTypes.func,
|
||||
startServer: PropTypes.func,
|
||||
stopServer: PropTypes.func,
|
||||
deleteServer: PropTypes.func,
|
||||
startAll: PropTypes.func,
|
||||
stopAll: PropTypes.func,
|
||||
dispatch: PropTypes.func,
|
||||
|
@@ -2,7 +2,7 @@ import React from "react";
|
||||
import "@testing-library/jest-dom";
|
||||
import { act } from "react-dom/test-utils";
|
||||
import userEvent from "@testing-library/user-event";
|
||||
import { render, screen, fireEvent } from "@testing-library/react";
|
||||
import { render, screen, fireEvent, getByText } from "@testing-library/react";
|
||||
import { HashRouter, Switch } from "react-router-dom";
|
||||
import { Provider, useSelector } from "react-redux";
|
||||
import { createStore } from "redux";
|
||||
@@ -43,6 +43,31 @@ var mockAsync = (data) =>
|
||||
var mockAsyncRejection = () =>
|
||||
jest.fn().mockImplementation(() => Promise.reject());
|
||||
|
||||
var bar_servers = {
|
||||
"": {
|
||||
name: "",
|
||||
last_activity: "2020-12-07T20:58:02.437408Z",
|
||||
started: "2020-12-07T20:58:01.508266Z",
|
||||
pending: null,
|
||||
ready: false,
|
||||
state: { pid: 12345 },
|
||||
url: "/user/bar/",
|
||||
user_options: {},
|
||||
progress_url: "/hub/api/users/bar/progress",
|
||||
},
|
||||
servername: {
|
||||
name: "servername",
|
||||
last_activity: "2020-12-07T20:58:02.437408Z",
|
||||
started: "2020-12-07T20:58:01.508266Z",
|
||||
pending: null,
|
||||
ready: false,
|
||||
state: { pid: 12345 },
|
||||
url: "/user/bar/servername",
|
||||
user_options: {},
|
||||
progress_url: "/hub/api/users/bar/servername/progress",
|
||||
},
|
||||
};
|
||||
|
||||
var mockAppState = () =>
|
||||
Object.assign({}, initialState, {
|
||||
user_data: [
|
||||
@@ -78,19 +103,7 @@ var mockAppState = () =>
|
||||
pending: null,
|
||||
created: "2020-12-07T18:46:27.115528Z",
|
||||
last_activity: "2020-12-07T20:43:51.013613Z",
|
||||
servers: {
|
||||
"": {
|
||||
name: "",
|
||||
last_activity: "2020-12-07T20:58:02.437408Z",
|
||||
started: "2020-12-07T20:58:01.508266Z",
|
||||
pending: null,
|
||||
ready: false,
|
||||
state: { pid: 12345 },
|
||||
url: "/user/bar/",
|
||||
user_options: {},
|
||||
progress_url: "/hub/api/users/bar/progress",
|
||||
},
|
||||
},
|
||||
servers: bar_servers,
|
||||
},
|
||||
],
|
||||
user_page: {
|
||||
@@ -150,9 +163,11 @@ test("Renders users from props.user_data into table", async () => {
|
||||
|
||||
let foo = screen.getByTestId("user-name-div-foo");
|
||||
let bar = screen.getByTestId("user-name-div-bar");
|
||||
let bar_server = screen.getByTestId("user-name-div-bar-servername");
|
||||
|
||||
expect(foo).toBeVisible();
|
||||
expect(bar).toBeVisible();
|
||||
expect(bar_server).toBeVisible();
|
||||
});
|
||||
|
||||
test("Renders correctly the status of a single-user server", async () => {
|
||||
@@ -162,10 +177,13 @@ test("Renders correctly the status of a single-user server", async () => {
|
||||
render(serverDashboardJsx(callbackSpy));
|
||||
});
|
||||
|
||||
let start = screen.getByText("Start Server");
|
||||
let stop = screen.getByText("Stop Server");
|
||||
let start_elems = screen.getAllByText("Start Server");
|
||||
expect(start_elems.length).toBe(Object.keys(bar_servers).length);
|
||||
start_elems.forEach((start) => {
|
||||
expect(start).toBeVisible();
|
||||
});
|
||||
|
||||
expect(start).toBeVisible();
|
||||
let stop = screen.getByText("Stop Server");
|
||||
expect(stop).toBeVisible();
|
||||
});
|
||||
|
||||
@@ -176,9 +194,12 @@ test("Renders spawn page link", async () => {
|
||||
render(serverDashboardJsx(callbackSpy));
|
||||
});
|
||||
|
||||
let link = screen.getByText("Spawn Page").closest("a");
|
||||
let url = new URL(link.href);
|
||||
expect(url.pathname).toEqual("/spawn/bar");
|
||||
for (let server in bar_servers) {
|
||||
let row = screen.getByTestId(`user-row-bar${server ? "-" + server : ""}`);
|
||||
let link = getByText(row, "Spawn Page").closest("a");
|
||||
let url = new URL(link.href);
|
||||
expect(url.pathname).toEqual("/spawn/bar" + (server ? "/" + server : ""));
|
||||
}
|
||||
});
|
||||
|
||||
test("Invokes the startServer event on button click", async () => {
|
||||
@@ -188,10 +209,11 @@ test("Invokes the startServer event on button click", async () => {
|
||||
render(serverDashboardJsx(callbackSpy));
|
||||
});
|
||||
|
||||
let start = screen.getByText("Start Server");
|
||||
let start_elems = screen.getAllByText("Start Server");
|
||||
expect(start_elems.length).toBe(Object.keys(bar_servers).length);
|
||||
|
||||
await act(async () => {
|
||||
fireEvent.click(start);
|
||||
fireEvent.click(start_elems[0]);
|
||||
});
|
||||
|
||||
expect(callbackSpy).toHaveBeenCalled();
|
||||
@@ -453,10 +475,11 @@ test("Shows a UI error dialogue when start user server fails", async () => {
|
||||
);
|
||||
});
|
||||
|
||||
let start = screen.getByText("Start Server");
|
||||
let start_elems = screen.getAllByText("Start Server");
|
||||
expect(start_elems.length).toBe(Object.keys(bar_servers).length);
|
||||
|
||||
await act(async () => {
|
||||
fireEvent.click(start);
|
||||
fireEvent.click(start_elems[0]);
|
||||
});
|
||||
|
||||
let errorDialog = screen.getByText("Failed to start server.");
|
||||
@@ -487,10 +510,11 @@ test("Shows a UI error dialogue when start user server returns an improper statu
|
||||
);
|
||||
});
|
||||
|
||||
let start = screen.getByText("Start Server");
|
||||
let start_elems = screen.getAllByText("Start Server");
|
||||
expect(start_elems.length).toBe(Object.keys(bar_servers).length);
|
||||
|
||||
await act(async () => {
|
||||
fireEvent.click(start);
|
||||
fireEvent.click(start_elems[0]);
|
||||
});
|
||||
|
||||
let errorDialog = screen.getByText("Failed to start server.");
|
||||
@@ -656,3 +680,20 @@ test("Interacting with PaginationFooter causes state update and refresh via useE
|
||||
// expect(callbackSpy.mock.calls).toHaveLength(2);
|
||||
// expect(callbackSpy).toHaveBeenCalledWith(2, 2, "");
|
||||
});
|
||||
|
||||
test("Server delete button exists for named servers", async () => {
|
||||
let callbackSpy = mockAsync();
|
||||
|
||||
await act(async () => {
|
||||
render(serverDashboardJsx(callbackSpy));
|
||||
});
|
||||
|
||||
for (let server in bar_servers) {
|
||||
if (server === "") {
|
||||
continue;
|
||||
}
|
||||
let row = screen.getByTestId(`user-row-bar-${server}`);
|
||||
let delete_button = getByText(row, "Delete Server");
|
||||
expect(delete_button).toBeEnabled();
|
||||
}
|
||||
});
|
||||
|
@@ -18,6 +18,12 @@ const withAPI = withProps(() => ({
|
||||
jhapiRequest("/users/" + name + "/servers/" + (serverName || ""), "POST"),
|
||||
stopServer: (name, serverName = "") =>
|
||||
jhapiRequest("/users/" + name + "/servers/" + (serverName || ""), "DELETE"),
|
||||
deleteServer: (name, serverName = "") =>
|
||||
jhapiRequest(
|
||||
"/users/" + name + "/servers/" + (serverName || ""),
|
||||
"DELETE",
|
||||
{ remove: true },
|
||||
),
|
||||
startAll: (names) =>
|
||||
names.map((e) => jhapiRequest("/users/" + e + "/server", "POST")),
|
||||
stopAll: (names) =>
|
||||
|
17021
jsx/yarn.lock
17021
jsx/yarn.lock
File diff suppressed because it is too large
Load Diff
@@ -3,6 +3,7 @@
|
||||
Note: a memoized function should always return an _immutable_
|
||||
result to avoid later modifications polluting cached results.
|
||||
"""
|
||||
|
||||
from collections import OrderedDict
|
||||
from functools import wraps
|
||||
|
||||
|
@@ -1,8 +1,9 @@
|
||||
"""JupyterHub version info"""
|
||||
|
||||
# Copyright (c) Jupyter Development Team.
|
||||
# Distributed under the terms of the Modified BSD License.
|
||||
# version_info updated by running `tbump`
|
||||
version_info = (4, 0, 0, "", "")
|
||||
version_info = (4, 1, 1, "", "")
|
||||
|
||||
# pep 440 version: no dot before beta/rc, but before .dev
|
||||
# 0.1.0rc1
|
||||
|
210
jupyterhub/_xsrf_utils.py
Normal file
210
jupyterhub/_xsrf_utils.py
Normal file
@@ -0,0 +1,210 @@
|
||||
"""utilities for XSRF
|
||||
|
||||
Extends tornado's xsrf token checks with the following:
|
||||
|
||||
- only set xsrf cookie on navigation requests (cannot be fetched)
|
||||
|
||||
This utility file enables the consistent reuse of these functions
|
||||
in both Hub and single-user code
|
||||
"""
|
||||
|
||||
import base64
|
||||
import hashlib
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from http.cookies import SimpleCookie
|
||||
|
||||
from tornado import web
|
||||
from tornado.httputil import format_timestamp
|
||||
from tornado.log import app_log
|
||||
|
||||
|
||||
def _get_signed_value_urlsafe(handler, name, b64_value):
|
||||
"""Like get_signed_value (used in get_secure_cookie), but for urlsafe values
|
||||
|
||||
Decodes urlsafe_base64-encoded signed values
|
||||
|
||||
Returns None if any decoding failed
|
||||
"""
|
||||
if b64_value is None:
|
||||
return None
|
||||
|
||||
if isinstance(b64_value, str):
|
||||
try:
|
||||
b64_value = b64_value.encode("ascii")
|
||||
except UnicodeEncodeError:
|
||||
app_log.warning("Invalid value %r", b64_value)
|
||||
return None
|
||||
# re-pad, since we stripped padding in _create_signed_value
|
||||
remainder = len(b64_value) % 4
|
||||
if remainder:
|
||||
b64_value = b64_value + (b'=' * (4 - remainder))
|
||||
try:
|
||||
value = base64.urlsafe_b64decode(b64_value)
|
||||
except ValueError:
|
||||
app_log.warning("Invalid base64 value %r", b64_value)
|
||||
return None
|
||||
|
||||
return web.decode_signed_value(
|
||||
handler.application.settings["cookie_secret"],
|
||||
name,
|
||||
value,
|
||||
max_age_days=31,
|
||||
min_version=2,
|
||||
)
|
||||
|
||||
|
||||
def _create_signed_value_urlsafe(handler, name, value):
|
||||
"""Like tornado's create_signed_value (used in set_secure_cookie), but returns urlsafe bytes"""
|
||||
|
||||
signed_value = handler.create_signed_value(name, value)
|
||||
return base64.urlsafe_b64encode(signed_value).rstrip(b"=")
|
||||
|
||||
|
||||
def _clear_invalid_xsrf_cookie(handler, cookie_path):
|
||||
"""
|
||||
Clear invalid XSRF cookie
|
||||
|
||||
This may an old XSRF token, or one set on / by another application.
|
||||
Because we cannot trust browsers or tornado to give us the more specific cookie,
|
||||
try to clear _both_ on / and on our prefix,
|
||||
then reload the page.
|
||||
"""
|
||||
|
||||
expired = format_timestamp(datetime.now(timezone.utc) - timedelta(days=366))
|
||||
cookie = SimpleCookie()
|
||||
cookie["_xsrf"] = ""
|
||||
morsel = cookie["_xsrf"]
|
||||
morsel["expires"] = expired
|
||||
morsel["path"] = "/"
|
||||
# use Set-Cookie directly,
|
||||
# because tornado's set_cookie and clear_cookie use a single _dict_,
|
||||
# so we can't clear a cookie on multiple paths and then set it
|
||||
handler.add_header("Set-Cookie", morsel.OutputString(None))
|
||||
if cookie_path != "/":
|
||||
# clear it multiple times!
|
||||
morsel["path"] = cookie_path
|
||||
handler.add_header("Set-Cookie", morsel.OutputString(None))
|
||||
|
||||
if (
|
||||
handler.request.method.lower() == "get"
|
||||
and handler.request.headers.get("Sec-Fetch-Mode", "navigate") == "navigate"
|
||||
):
|
||||
# reload current page because any subsequent set_cookie
|
||||
# will cancel the clearing of the cookie
|
||||
# this only makes sense on GET requests
|
||||
handler.redirect(handler.request.uri)
|
||||
# halt any other processing of the request
|
||||
raise web.Finish()
|
||||
|
||||
|
||||
def get_xsrf_token(handler, cookie_path=""):
|
||||
"""Override tornado's xsrf token to add further restrictions
|
||||
|
||||
- only set cookie for regular pages (not API requests)
|
||||
- include login info in xsrf token
|
||||
- verify signature
|
||||
"""
|
||||
# original: https://github.com/tornadoweb/tornado/blob/v6.4.0/tornado/web.py#L1455
|
||||
if hasattr(handler, "_xsrf_token"):
|
||||
return handler._xsrf_token
|
||||
|
||||
_set_cookie = False
|
||||
# the raw cookie is the token
|
||||
xsrf_token = xsrf_cookie = handler.get_cookie("_xsrf")
|
||||
if xsrf_token:
|
||||
try:
|
||||
xsrf_token = xsrf_token.encode("ascii")
|
||||
except UnicodeEncodeError:
|
||||
xsrf_token = None
|
||||
|
||||
xsrf_id_cookie = _get_signed_value_urlsafe(handler, "_xsrf", xsrf_token)
|
||||
if xsrf_cookie and not xsrf_id_cookie:
|
||||
# we have a cookie, but it's invalid!
|
||||
# handle possibility of _xsrf being set multiple times,
|
||||
# e.g. on / and on /hub/
|
||||
# this will reload the page if it's a GET request
|
||||
app_log.warning(
|
||||
"Attempting to clear invalid _xsrf cookie %r", xsrf_cookie[:4] + "..."
|
||||
)
|
||||
_clear_invalid_xsrf_cookie(handler, cookie_path)
|
||||
|
||||
# check the decoded, signed value for validity
|
||||
xsrf_id = handler._xsrf_token_id
|
||||
if xsrf_id_cookie != xsrf_id:
|
||||
# this will usually happen on the first page request after login,
|
||||
# which changes the inputs to the token id
|
||||
if xsrf_id_cookie:
|
||||
app_log.debug("xsrf id mismatch %r != %r", xsrf_id_cookie, xsrf_id)
|
||||
# generate new value
|
||||
xsrf_token = _create_signed_value_urlsafe(handler, "_xsrf", xsrf_id)
|
||||
# only set cookie on regular navigation pages
|
||||
# i.e. not API requests, etc.
|
||||
# insecure URLs (public hostname/ip, no https)
|
||||
# don't set Sec-Fetch-Mode.
|
||||
# consequence of assuming 'navigate': setting a cookie unnecessarily
|
||||
# consequence of assuming not 'navigate': xsrf never set, nothing works
|
||||
_set_cookie = (
|
||||
handler.request.headers.get("Sec-Fetch-Mode", "navigate") == "navigate"
|
||||
)
|
||||
|
||||
if _set_cookie:
|
||||
xsrf_cookie_kwargs = {}
|
||||
xsrf_cookie_kwargs.update(handler.settings.get('xsrf_cookie_kwargs', {}))
|
||||
xsrf_cookie_kwargs.setdefault("path", cookie_path)
|
||||
if not handler.current_user:
|
||||
# limit anonymous xsrf cookies to one hour
|
||||
xsrf_cookie_kwargs.pop("expires", None)
|
||||
xsrf_cookie_kwargs.pop("expires_days", None)
|
||||
xsrf_cookie_kwargs["max_age"] = 3600
|
||||
app_log.info(
|
||||
"Setting new xsrf cookie for %r %r",
|
||||
xsrf_id,
|
||||
xsrf_cookie_kwargs,
|
||||
)
|
||||
handler.set_cookie("_xsrf", xsrf_token, **xsrf_cookie_kwargs)
|
||||
handler._xsrf_token = xsrf_token
|
||||
return xsrf_token
|
||||
|
||||
|
||||
def check_xsrf_cookie(handler):
|
||||
"""Check that xsrf cookie matches xsrf token in request"""
|
||||
# overrides tornado's implementation
|
||||
# because we changed what a correct value should be in xsrf_token
|
||||
|
||||
token = (
|
||||
handler.get_argument("_xsrf", None)
|
||||
or handler.request.headers.get("X-Xsrftoken")
|
||||
or handler.request.headers.get("X-Csrftoken")
|
||||
)
|
||||
|
||||
if not token:
|
||||
raise web.HTTPError(
|
||||
403, f"'_xsrf' argument missing from {handler.request.method}"
|
||||
)
|
||||
|
||||
try:
|
||||
token = token.encode("utf8")
|
||||
except UnicodeEncodeError:
|
||||
raise web.HTTPError(403, "'_xsrf' argument invalid")
|
||||
|
||||
if token != handler.xsrf_token:
|
||||
raise web.HTTPError(
|
||||
403, f"XSRF cookie does not match {handler.request.method.upper()} argument"
|
||||
)
|
||||
|
||||
|
||||
def _anonymous_xsrf_id(handler):
|
||||
"""Generate an appropriate xsrf token id for an anonymous request
|
||||
|
||||
Currently uses hash of request ip and user-agent
|
||||
|
||||
These are typically used only for the initial login page,
|
||||
so only need to be valid for a few seconds to a few minutes
|
||||
(enough to submit a login form with MFA).
|
||||
"""
|
||||
hasher = hashlib.sha256()
|
||||
hasher.update(handler.request.remote_ip.encode("ascii"))
|
||||
hasher.update(
|
||||
handler.request.headers.get("User-Agent", "").encode("utf8", "replace")
|
||||
)
|
||||
return base64.urlsafe_b64encode(hasher.digest()).decode("ascii")
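
To make the intent of this new module concrete, here is a hedged sketch of how a tornado handler might delegate to these helpers; the handler name, the `_xsrf_token_id` scheme, and the import path are assumptions for illustration, not part of this changeset:

```python
# Illustrative sketch only -- how a handler could wire in the helpers above.
from tornado import web

from jupyterhub import _xsrf_utils


class ExampleHandler(web.RequestHandler):
    @property
    def _xsrf_token_id(self):
        # identify the token by the logged-in user (placeholder scheme)
        return f"user:{self.current_user or 'anonymous'}"

    @property
    def xsrf_token(self):
        # sets/refreshes the signed, urlsafe _xsrf cookie on navigation requests
        return _xsrf_utils.get_xsrf_token(
            self, cookie_path=self.settings.get("base_url", "/")
        )

    def check_xsrf_cookie(self):
        # compares the request's _xsrf argument/header with the cookie-derived token
        return _xsrf_utils.check_xsrf_cookie(self)
```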
|
@@ -5,6 +5,7 @@ Revises: 833da8570507
|
||||
Create Date: 2021-09-15 14:04:09.067024
|
||||
|
||||
"""
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '0eee8c825d24'
|
||||
down_revision = '651f5419b74d'
|
||||
|
@@ -5,6 +5,7 @@ Revises:
|
||||
Create Date: 2016-04-11 16:05:34.873288
|
||||
|
||||
"""
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '19c0846f6344'
|
||||
down_revision = None
|
||||
|
@@ -5,6 +5,7 @@ Revises: 3ec6993fe20c
|
||||
Create Date: 2017-12-07 14:43:51.500740
|
||||
|
||||
"""
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '1cebaf56856c'
|
||||
down_revision = '3ec6993fe20c'
|
||||
|
@@ -12,6 +12,7 @@ Revises: af4cbdb2d13c
|
||||
Create Date: 2017-07-28 16:44:40.413648
|
||||
|
||||
"""
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '3ec6993fe20c'
|
||||
down_revision = 'af4cbdb2d13c'
|
||||
|
@@ -5,6 +5,7 @@ Revises: 896818069c98
|
||||
Create Date: 2019-02-28 14:14:27.423927
|
||||
|
||||
"""
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '4dc2d5a8c53c'
|
||||
down_revision = '896818069c98'
|
||||
|
@@ -5,6 +5,7 @@ Revises: 1cebaf56856c
|
||||
Create Date: 2017-12-19 15:21:09.300513
|
||||
|
||||
"""
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '56cc5a70207e'
|
||||
down_revision = '1cebaf56856c'
|
||||
|
@@ -5,6 +5,7 @@ Revises: 833da8570507
|
||||
Create Date: 2022-02-28 12:42:55.149046
|
||||
|
||||
"""
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '651f5419b74d'
|
||||
down_revision = '833da8570507'
|
||||
|
@@ -6,6 +6,7 @@ Revises: 4dc2d5a8c53c
|
||||
Create Date: 2021-02-17 15:03:04.360368
|
||||
|
||||
"""
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '833da8570507'
|
||||
down_revision = '4dc2d5a8c53c'
|
||||
|
@@ -5,6 +5,7 @@ Revises: d68c98b66cd4
|
||||
Create Date: 2018-05-07 11:35:58.050542
|
||||
|
||||
"""
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '896818069c98'
|
||||
down_revision = 'd68c98b66cd4'
|
||||
|
@@ -5,6 +5,7 @@ Revises: 56cc5a70207e
|
||||
Create Date: 2018-03-21 14:27:17.466841
|
||||
|
||||
"""
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '99a28a4418e1'
|
||||
down_revision = '56cc5a70207e'
|
||||
|
@@ -5,6 +5,7 @@ Revises: eeb276e51423
|
||||
Create Date: 2016-07-28 16:16:38.245348
|
||||
|
||||
"""
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = 'af4cbdb2d13c'
|
||||
down_revision = 'eeb276e51423'
|
||||
|
@@ -5,6 +5,7 @@ Revises: 99a28a4418e1
|
||||
Create Date: 2018-04-13 10:50:17.968636
|
||||
|
||||
"""
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = 'd68c98b66cd4'
|
||||
down_revision = '99a28a4418e1'
|
||||
|
@@ -6,6 +6,7 @@ Revision ID: eeb276e51423
|
||||
Revises: 19c0846f6344
|
||||
Create Date: 2016-04-11 16:06:49.239831
|
||||
"""
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = 'eeb276e51423'
|
||||
down_revision = '19c0846f6344'
|
||||
|
@@ -1,4 +1,5 @@
|
||||
"""Authorization handlers"""
|
||||
|
||||
# Copyright (c) Jupyter Development Team.
|
||||
# Distributed under the terms of the Modified BSD License.
|
||||
import json
|
||||
|
@@ -1,4 +1,5 @@
|
||||
"""Base API handlers"""
|
||||
|
||||
# Copyright (c) Jupyter Development Team.
|
||||
# Distributed under the terms of the Modified BSD License.
|
||||
import json
|
||||

@@ -75,20 +76,18 @@ class APIHandler(BaseHandler):

return True

async def prepare(self):
await super().prepare()
# tornado only checks xsrf on non-GET
# we also check xsrf on GETs to API endpoints
# make sure this runs after auth, which happens in super().prepare()
if self.request.method not in {"HEAD", "OPTIONS"} and self.settings.get(
"xsrf_cookies"
):
self.check_xsrf_cookie()
# we also check xsrf on GETs to API endpoints
_xsrf_safe_methods = {"HEAD", "OPTIONS"}

def check_xsrf_cookie(self):
if not hasattr(self, '_jupyterhub_user'):
# called too early to check if we're token-authenticated
return
if self._jupyterhub_user is None and 'Origin' not in self.request.headers:
# don't raise xsrf if auth failed
# don't apply this shortcut to actual cross-site requests, which have an 'Origin' header,
# which would reveal if there are credentials present
return
if getattr(self, '_token_authenticated', False):
# if token-authenticated, ignore XSRF
return

@@ -475,7 +474,7 @@ class APIHandler(BaseHandler):
if next_offset < total_count:
# if there's a next page
next_url_parsed = urlparse(self.request.full_url())
query = parse_qs(next_url_parsed.query)
query = parse_qs(next_url_parsed.query, keep_blank_values=True)
query['offset'] = [next_offset]
query['limit'] = [limit]
next_url_parsed = next_url_parsed._replace(

@@ -1,4 +1,5 @@
"""Group handlers"""

# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
import json

@@ -1,4 +1,5 @@
"""API handlers for administering the Hub itself"""

# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
import json

@@ -1,4 +1,5 @@
"""Proxy handlers"""

# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
import json

@@ -2,6 +2,7 @@

Currently GET-only, no actions can be taken to modify services.
"""

# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
import json

@@ -1,4 +1,5 @@
"""User handlers"""

# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
import asyncio

@@ -445,7 +446,14 @@ class UserTokenListAPIHandler(APIHandler):
user_kind = 'user' if isinstance(user, User) else 'service'
self.log.info("%s %s requested new API token", user_kind.title(), user.name)
# retrieve the model
token_model = self.token_model(orm.APIToken.find(self.db, api_token))
orm_token = orm.APIToken.find(self.db, api_token)
if orm_token is None:
self.log.error(
"Failed to find token after creating it: %r. Maybe it expired already?",
body,
)
raise web.HTTPError(500, "Failed to create token")
token_model = self.token_model(orm_token)
token_model['token'] = api_token
self.write(json.dumps(token_model))
self.set_status(201)

@@ -9,6 +9,7 @@ import logging
|
||||
import os
|
||||
import re
|
||||
import secrets
|
||||
import shlex
|
||||
import signal
|
||||
import socket
|
||||
import ssl
|
||||
@@ -400,6 +401,25 @@ class JupyterHub(Application):
|
||||
Useful for daemonizing JupyterHub.
|
||||
""",
|
||||
).tag(config=True)
|
||||
|
||||
cookie_host_prefix_enabled = Bool(
|
||||
False,
|
||||
help="""Enable `__Host-` prefix on authentication cookies.
|
||||
|
||||
The `__Host-` prefix on JupyterHub cookies provides further
|
||||
protection against cookie tossing when untrusted servers
|
||||
may control subdomains of your jupyterhub deployment.
|
||||
|
||||
_However_, it also requires that cookies be set on the path `/`,
|
||||
which means they are shared by all JupyterHub components,
|
||||
so a compromised server component will have access to _all_ JupyterHub-related
|
||||
cookies of the visiting browser.
|
||||
It is recommended to only combine `__Host-` cookies with per-user domains.
|
||||
|
||||
.. versionadded:: 4.1
|
||||
""",
|
||||
).tag(config=True)
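
A minimal usage sketch (not part of the diff above, assuming the usual jupyterhub_config.py file): the help text above recommends combining `__Host-` cookies with per-user domains, which in configuration terms could look like:

    # jupyterhub_config.py -- hypothetical example, not taken from this changeset
    # __Host- cookies require Secure + path='/', so pair them with per-user subdomains
    c.JupyterHub.subdomain_host = "https://hub.example.com"  # assumed deployment host
    c.JupyterHub.cookie_host_prefix_enabled = True
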
|
||||
|
||||
cookie_max_age_days = Float(
|
||||
14,
|
||||
help="""Number of days for a login cookie to be valid.
|
||||
@@ -1897,6 +1917,8 @@ class JupyterHub(Application):
|
||||
hub_args['port'] = self.hub_port
|
||||
|
||||
self.hub = Hub(**hub_args)
|
||||
if self.cookie_host_prefix_enabled:
|
||||
self.hub.cookie_name = "__Host-" + self.hub.cookie_name
|
||||
|
||||
if not self.subdomain_host:
|
||||
api_prefix = url_path_join(self.hub.base_url, "api/")
|
||||
@@ -2581,9 +2603,13 @@ class JupyterHub(Application):
|
||||
"%s appears to have stopped while the Hub was down",
|
||||
spawner._log_name,
|
||||
)
|
||||
# remove server entry from db
|
||||
db.delete(spawner.orm_spawner.server)
|
||||
spawner.server = None
|
||||
try:
|
||||
await user.stop(name)
|
||||
except Exception:
|
||||
self.log.exception(
|
||||
f"Failed to cleanup {spawner._log_name} which appeared to stop while the Hub was down.",
|
||||
exc_info=True,
|
||||
)
|
||||
else:
|
||||
self.log.debug("%s not running", spawner._log_name)
|
||||
|
||||
@@ -2755,6 +2781,7 @@ class JupyterHub(Application):
|
||||
base_url=self.base_url,
|
||||
default_url=self.default_url,
|
||||
cookie_secret=self.cookie_secret,
|
||||
cookie_host_prefix_enabled=self.cookie_host_prefix_enabled,
|
||||
cookie_max_age_days=self.cookie_max_age_days,
|
||||
redirect_to_server=self.redirect_to_server,
|
||||
login_url=login_url,
|
||||
@@ -2840,6 +2867,10 @@ class JupyterHub(Application):
|
||||
super().initialize(*args, **kwargs)
|
||||
if self.generate_config or self.generate_certs or self.subapp:
|
||||
return
|
||||
if self.extra_args:
|
||||
self.exit(
|
||||
f"Unrecognized command-line arguments: {' '.join(shlex.quote(arg) for arg in self.extra_args)!r}"
|
||||
)
|
||||
self._start_future = asyncio.Future()
|
||||
|
||||
def record_start(f):
|
||||
|
@@ -1,4 +1,5 @@
|
||||
"""Base Authenticator class and the default PAM Authenticator"""
|
||||
|
||||
# Copyright (c) IPython Development Team.
|
||||
# Distributed under the terms of the Modified BSD License.
|
||||
import inspect
|
||||
|
@@ -1,4 +1,5 @@
|
||||
"""Database utilities for JupyterHub"""
|
||||
|
||||
# Copyright (c) Jupyter Development Team.
|
||||
# Distributed under the terms of the Modified BSD License.
|
||||
# Based on pgcontents.utils.migrate, used under the Apache license.
|
||||
|
@@ -1,4 +1,5 @@
|
||||
"""HTTP Handlers for the hub server"""
|
||||
|
||||
# Copyright (c) Jupyter Development Team.
|
||||
# Distributed under the terms of the Modified BSD License.
|
||||
import asyncio
|
||||
@@ -23,6 +24,7 @@ from tornado.log import app_log
|
||||
from tornado.web import RequestHandler, addslash
|
||||
|
||||
from .. import __version__, orm, roles, scopes
|
||||
from .._xsrf_utils import _anonymous_xsrf_id, check_xsrf_cookie, get_xsrf_token
|
||||
from ..metrics import (
|
||||
PROXY_ADD_DURATION_SECONDS,
|
||||
PROXY_DELETE_DURATION_SECONDS,
|
||||
@@ -98,7 +100,14 @@ class BaseHandler(RequestHandler):
|
||||
self.log.error("Rolling back session due to database error")
|
||||
self.db.rollback()
|
||||
self._resolve_roles_and_scopes()
|
||||
return await maybe_future(super().prepare())
|
||||
await maybe_future(super().prepare())
|
||||
# run xsrf check after prepare
|
||||
# because our version takes auth info into account
|
||||
if (
|
||||
self.request.method not in self._xsrf_safe_methods
|
||||
and self.application.settings.get("xsrf_cookies")
|
||||
):
|
||||
self.check_xsrf_cookie()
|
||||
|
||||
@property
|
||||
def log(self):
|
||||
@@ -199,9 +208,13 @@ class BaseHandler(RequestHandler):
|
||||
"""The default Content-Security-Policy header
|
||||
|
||||
Can be overridden by defining Content-Security-Policy in settings['headers']
|
||||
|
||||
.. versionchanged:: 4.1
|
||||
|
||||
Change default frame-ancestors from 'self' to 'none'
|
||||
"""
|
||||
return '; '.join(
|
||||
["frame-ancestors 'self'", "report-uri " + self.csp_report_uri]
|
||||
["frame-ancestors 'none'", "report-uri " + self.csp_report_uri]
|
||||
)
|
||||
|
||||
def get_content_type(self):
|
||||
@@ -211,7 +224,6 @@ class BaseHandler(RequestHandler):
|
||||
"""
|
||||
Set any headers passed as tornado_settings['headers'].
|
||||
|
||||
By default sets Content-Security-Policy of frame-ancestors 'self'.
|
||||
Also responsible for setting content-type header
|
||||
"""
|
||||
# wrap in HTTPHeaders for case-insensitivity
|
||||
@@ -233,15 +245,63 @@ class BaseHandler(RequestHandler):
|
||||
# Login and cookie-related
|
||||
# ---------------------------------------------------------------
|
||||
|
||||
_xsrf_safe_methods = {"GET", "HEAD", "OPTIONS"}
|
||||
|
||||
@property
|
||||
def _xsrf_token_id(self):
|
||||
"""Value to be signed/encrypted for xsrf token
|
||||
|
||||
include login info in xsrf token
|
||||
this means xsrf tokens are tied to logged-in users,
|
||||
and change after a user logs in.
|
||||
|
||||
While the user is not yet logged in,
|
||||
an anonymous value is used, to prevent portability.
|
||||
These anonymous values are short-lived.
|
||||
"""
|
||||
# cases:
|
||||
# 1. logged in, session id (session_id:user_id)
|
||||
# 2. logged in, no session id (anonymous_id:user_id)
|
||||
# 3. not logged in, session id (session_id:anonymous_id)
|
||||
# 4. no cookies at all, use single anonymous value (:anonymous_id)
|
||||
session_id = self.get_session_cookie()
|
||||
if self.current_user:
|
||||
if isinstance(self.current_user, User):
|
||||
user_id = self.current_user.cookie_id
|
||||
else:
|
||||
# this shouldn't happen, but may if e.g. a Service attempts to fetch a page,
|
||||
# which usually won't work, but this method should not be what raises
|
||||
user_id = ""
|
||||
if not session_id:
|
||||
# no session id, use non-portable anonymous id
|
||||
session_id = _anonymous_xsrf_id(self)
|
||||
else:
|
||||
# not logged in yet, use non-portable anonymous id
|
||||
user_id = _anonymous_xsrf_id(self)
|
||||
xsrf_id = f"{session_id}:{user_id}".encode("utf8", "replace")
|
||||
return xsrf_id
|
||||
|
||||
@property
|
||||
def xsrf_token(self):
|
||||
"""Override tornado's xsrf token with further restrictions
|
||||
|
||||
- only set cookie for regular pages
|
||||
- include login info in xsrf token
|
||||
- verify signature
|
||||
"""
|
||||
return get_xsrf_token(self, cookie_path=self.hub.base_url)
|
||||
|
||||
def check_xsrf_cookie(self):
|
||||
try:
|
||||
return super().check_xsrf_cookie()
|
||||
except Exception as e:
|
||||
# ensure _jupyterhub_user is defined on rejected requests
|
||||
if not hasattr(self, "_jupyterhub_user"):
|
||||
self._jupyterhub_user = None
|
||||
self._resolve_roles_and_scopes()
|
||||
raise
|
||||
"""Check that xsrf cookie matches xsrf token in request"""
|
||||
# overrides tornado's implementation
|
||||
# because we changed what a correct value should be in xsrf_token
|
||||
|
||||
if not hasattr(self, "_jupyterhub_user"):
|
||||
# run too early to check the value
|
||||
# tornado runs this before 'prepare',
|
||||
# but we run it again after so auth info is available, which happens in 'prepare'
|
||||
return None
|
||||
return check_xsrf_cookie(self)
|
||||
|
||||
@property
|
||||
def admin_users(self):
|
||||
@@ -514,15 +574,30 @@ class BaseHandler(RequestHandler):
|
||||
user = self._user_from_orm(u)
|
||||
return user
|
||||
|
||||
def clear_cookie(self, cookie_name, **kwargs):
|
||||
"""Clear a cookie
|
||||
|
||||
overrides RequestHandler to always handle __Host- prefix correctly
|
||||
"""
|
||||
if cookie_name.startswith("__Host-"):
|
||||
kwargs["path"] = "/"
|
||||
kwargs["secure"] = True
|
||||
return super().clear_cookie(cookie_name, **kwargs)
|
||||
|
||||
def clear_login_cookie(self, name=None):
|
||||
kwargs = {}
|
||||
if self.subdomain_host:
|
||||
kwargs['domain'] = self.domain
|
||||
user = self.get_current_user_cookie()
|
||||
session_id = self.get_session_cookie()
|
||||
if session_id:
|
||||
# clear session id
|
||||
self.clear_cookie(SESSION_COOKIE_NAME, path=self.base_url, **kwargs)
|
||||
session_cookie_kwargs = {}
|
||||
session_cookie_kwargs.update(kwargs)
|
||||
if self.subdomain_host:
|
||||
session_cookie_kwargs['domain'] = self.domain
|
||||
|
||||
self.clear_cookie(
|
||||
SESSION_COOKIE_NAME, path=self.base_url, **session_cookie_kwargs
|
||||
)
|
||||
|
||||
if user:
|
||||
# user is logged in, clear any tokens associated with the current session
|
||||
@@ -571,12 +646,15 @@ class BaseHandler(RequestHandler):
|
||||
kwargs = {'httponly': True}
|
||||
if self.request.protocol == 'https':
|
||||
kwargs['secure'] = True
|
||||
if self.subdomain_host:
|
||||
kwargs['domain'] = self.domain
|
||||
|
||||
kwargs.update(self.settings.get('cookie_options', {}))
|
||||
kwargs.update(overrides)
|
||||
|
||||
if key.startswith("__Host-"):
|
||||
# __Host- cookies must be secure and on /
|
||||
kwargs["path"] = "/"
|
||||
kwargs["secure"] = True
|
||||
|
||||
if encrypted:
|
||||
set_cookie = self.set_secure_cookie
|
||||
else:
|
||||
@@ -606,9 +684,21 @@ class BaseHandler(RequestHandler):
|
||||
Session id cookie is *not* encrypted,
|
||||
so other services on this domain can read it.
|
||||
"""
|
||||
session_id = uuid.uuid4().hex
|
||||
if not hasattr(self, "_session_id"):
|
||||
self._session_id = uuid.uuid4().hex
|
||||
session_id = self._session_id
|
||||
# if using subdomains, set session cookie on the domain,
|
||||
# which allows it to be shared by subdomains.
|
||||
# if domain is unspecified, it is _more_ restricted to only the setting domain
|
||||
kwargs = {}
|
||||
if self.subdomain_host:
|
||||
kwargs['domain'] = self.domain
|
||||
self._set_cookie(
|
||||
SESSION_COOKIE_NAME, session_id, encrypted=False, path=self.base_url
|
||||
SESSION_COOKIE_NAME,
|
||||
session_id,
|
||||
encrypted=False,
|
||||
path=self.base_url,
|
||||
**kwargs,
|
||||
)
|
||||
return session_id
|
||||
|
||||
@@ -1431,6 +1521,12 @@ class UserUrlHandler(BaseHandler):
|
||||
# accept token auth for API requests that are probably to non-running servers
|
||||
_accept_token_auth = True
|
||||
|
||||
# don't consider these redirects 'activity'
|
||||
# if the redirect is followed and the subsequent action taken,
|
||||
# _that_ is activity
|
||||
def _record_activity(self, obj, timestamp=None):
|
||||
return False
|
||||
|
||||
def _fail_api_request(self, user_name='', server_name=''):
|
||||
"""Fail an API request to a not-running server"""
|
||||
self.log.warning(
|
||||
|
@@ -1,4 +1,5 @@
|
||||
"""HTTP Handlers for the hub server"""
|
||||
|
||||
# Copyright (c) Jupyter Development Team.
|
||||
# Distributed under the terms of the Modified BSD License.
|
||||
import asyncio
|
||||
@@ -105,6 +106,7 @@ class LoginHandler(BaseHandler):
|
||||
'next': self.get_argument('next', ''),
|
||||
},
|
||||
),
|
||||
"xsrf": self.xsrf_token.decode('ascii'),
|
||||
}
|
||||
custom_html = Template(
|
||||
self.authenticator.get_custom_html(self.hub.base_url)
|
||||
|
@@ -1,4 +1,5 @@
|
||||
"""Handlers for serving prometheus metrics"""
|
||||
|
||||
from prometheus_client import CONTENT_TYPE_LATEST, REGISTRY, generate_latest
|
||||
|
||||
from ..utils import metrics_authentication
|
||||
|
@@ -1,4 +1,5 @@
|
||||
"""Basic html-rendering handlers."""
|
||||
|
||||
# Copyright (c) Jupyter Development Team.
|
||||
# Distributed under the terms of the Modified BSD License.
|
||||
import asyncio
|
||||
|
@@ -1,4 +1,5 @@
|
||||
"""logging utilities"""
|
||||
|
||||
# Copyright (c) Jupyter Development Team.
|
||||
# Distributed under the terms of the Modified BSD License.
|
||||
import json
|
||||
|
@@ -19,6 +19,7 @@ them manually here.
|
||||
|
||||
added ``jupyterhub_`` prefix to metric names.
|
||||
"""
|
||||
|
||||
from datetime import timedelta
|
||||
from enum import Enum
|
||||
|
||||
|
@@ -2,6 +2,7 @@
|
||||
|
||||
implements https://oauthlib.readthedocs.io/en/latest/oauth2/server.html
|
||||
"""
|
||||
|
||||
from oauthlib import uri_validate
|
||||
from oauthlib.oauth2 import RequestValidator, WebApplicationServer
|
||||
from oauthlib.oauth2.rfc6749.grant_types import authorization_code, base
|
||||
|
@@ -1,4 +1,5 @@
|
||||
"""Some general objects for use in JupyterHub"""
|
||||
|
||||
# Copyright (c) Jupyter Development Team.
|
||||
# Distributed under the terms of the Modified BSD License.
|
||||
import socket
|
||||
|
@@ -1,8 +1,10 @@
|
||||
"""sqlalchemy ORM tools for the state of the constellation of processes"""
|
||||
|
||||
# Copyright (c) Jupyter Development Team.
|
||||
# Distributed under the terms of the Modified BSD License.
|
||||
import enum
|
||||
import json
|
||||
import numbers
|
||||
from base64 import decodebytes, encodebytes
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
@@ -760,7 +762,18 @@ class APIToken(Hashed, Base):
|
||||
else:
|
||||
assert service.id is not None
|
||||
orm_token.service = service
|
||||
if expires_in is not None:
|
||||
if expires_in:
|
||||
if not isinstance(expires_in, numbers.Real):
|
||||
raise TypeError(
|
||||
f"expires_in must be a positive integer or null, not {expires_in!r}"
|
||||
)
|
||||
expires_in = int(expires_in)
|
||||
# tokens must always expire in the future
|
||||
if expires_in < 1:
|
||||
raise ValueError(
|
||||
f"expires_in must be a positive integer or null, not {expires_in!r}"
|
||||
)
|
||||
|
||||
orm_token.expires_at = cls.now() + timedelta(seconds=expires_in)
|
||||
|
||||
db.commit()
|
||||
|
@@ -14,6 +14,7 @@ Route Specification:
|
||||
'host.tld/path/' for host-based routing or '/path/' for default routing.
|
||||
- Route paths should be normalized to always start and end with '/'
|
||||
"""
|
||||
|
||||
# Copyright (c) Jupyter Development Team.
|
||||
# Distributed under the terms of the Modified BSD License.
|
||||
import asyncio
|
||||
@@ -48,7 +49,7 @@ from jupyterhub.traitlets import Command
|
||||
from . import utils
|
||||
from .metrics import CHECK_ROUTES_DURATION_SECONDS, PROXY_POLL_DURATION_SECONDS
|
||||
from .objects import Server
|
||||
from .utils import AnyTimeoutError, exponential_backoff, url_escape_path, url_path_join
|
||||
from .utils import exponential_backoff, url_escape_path, url_path_join
|
||||
|
||||
|
||||
def _one_at_a_time(method):
|
||||
@@ -766,24 +767,67 @@ class ConfigurableHTTPProxy(Proxy):
|
||||
|
||||
self._write_pid_file()
|
||||
|
||||
def _check_process():
|
||||
status = self.proxy_process.poll()
|
||||
if status is not None:
|
||||
with self.proxy_process:
|
||||
e = RuntimeError("Proxy failed to start with exit code %i" % status)
|
||||
raise e from None
|
||||
async def wait_for_process():
|
||||
"""Watch proxy process for early termination
|
||||
|
||||
Runs forever, checking every 0.5s if the process has exited
|
||||
so we don't keep waiting for endpoints after the proxy has stopped.
|
||||
|
||||
Raises RuntimeError if/when the proxy process exits,
|
||||
otherwise runs forever.
|
||||
Should be cancelled when servers become ready.
|
||||
"""
|
||||
while True:
|
||||
status = self.proxy_process.poll()
|
||||
if status is not None:
|
||||
with self.proxy_process:
|
||||
e = RuntimeError(
|
||||
f"Proxy failed to start with exit code {status}"
|
||||
)
|
||||
raise e from None
|
||||
await asyncio.sleep(0.5)
|
||||
|
||||
# process_exited can only resolve with a RuntimeError when the process has exited,
|
||||
# otherwise it must be cancelled.
|
||||
process_exited = asyncio.ensure_future(wait_for_process())
|
||||
|
||||
# wait for both servers to be ready (or one server to fail)
|
||||
server_futures = [
|
||||
asyncio.ensure_future(server.wait_up(10))
|
||||
for server in (public_server, api_server)
|
||||
]
|
||||
servers_ready = asyncio.gather(*server_futures)
|
||||
|
||||
# wait for process to crash or servers to be ready,
|
||||
# whichever comes first
|
||||
wait_timeout = 15
|
||||
ready, pending = await asyncio.wait(
|
||||
[
|
||||
process_exited,
|
||||
servers_ready,
|
||||
],
|
||||
return_when=asyncio.FIRST_COMPLETED,
|
||||
timeout=wait_timeout,
|
||||
)
|
||||
for task in [servers_ready, process_exited] + server_futures:
|
||||
# cancel any pending tasks
|
||||
if not task.done():
|
||||
task.cancel()
|
||||
if not ready:
|
||||
# timeouts passed to wait_up should prevent this,
|
||||
# but weird things like DNS delays may result in
|
||||
# wait_up taking a lot longer than it should
|
||||
raise TimeoutError(
|
||||
f"Waiting for proxy endpoints didn't complete in {wait_timeout}s"
|
||||
)
|
||||
if process_exited in ready:
|
||||
# process exited, this will raise RuntimeError
|
||||
await process_exited
|
||||
else:
|
||||
# if we got here, servers_ready is done
|
||||
# await it to make sure exceptions are raised
|
||||
await servers_ready
|
||||
|
||||
for server in (public_server, api_server):
|
||||
for i in range(10):
|
||||
_check_process()
|
||||
try:
|
||||
await server.wait_up(1)
|
||||
except AnyTimeoutError:
|
||||
continue
|
||||
else:
|
||||
break
|
||||
await server.wait_up(1)
|
||||
_check_process()
|
||||
self.log.debug("Proxy started and appears to be up")
|
||||
pc = PeriodicCallback(self.check_running, 1e3 * self.check_running_interval)
|
||||
self._check_running_callback = pc
|
||||
|
@@ -1,4 +1,5 @@
|
||||
"""Roles utils"""
|
||||
|
||||
# Copyright (c) Jupyter Development Team.
|
||||
# Distributed under the terms of the Modified BSD License.
|
||||
import re
|
||||
|
@@ -14,6 +14,7 @@ intersection : set of expanded scopes as intersection of 2 expanded scope sets
|
||||
identify scopes: set of expanded scopes needed for identify (whoami) endpoints
|
||||
reduced scopes: expanded scopes that have been reduced
|
||||
"""
|
||||
|
||||
import functools
|
||||
import inspect
|
||||
import re
|
||||
@@ -253,8 +254,12 @@ def _intersect_expanded_scopes(scopes_a, scopes_b, db=None):
|
||||
}
|
||||
|
||||
# resolve hierarchies (group/user/server) in both directions
|
||||
common_servers = common_filters[base].get("server", set())
|
||||
common_users = common_filters[base].get("user", set())
|
||||
common_servers = initial_common_servers = common_filters[base].get(
|
||||
"server", frozenset()
|
||||
)
|
||||
common_users = initial_common_users = common_filters[base].get(
|
||||
"user", frozenset()
|
||||
)
|
||||
|
||||
for a, b in [(filters_a, filters_b), (filters_b, filters_a)]:
|
||||
if 'server' in a and b.get('server') != a['server']:
|
||||
@@ -266,7 +271,7 @@ def _intersect_expanded_scopes(scopes_a, scopes_b, db=None):
|
||||
for server in servers:
|
||||
username, _, servername = server.partition("/")
|
||||
if username in b['user']:
|
||||
common_servers.add(server)
|
||||
common_servers = common_servers | {server}
|
||||
|
||||
# resolve group/server hierarchy if db available
|
||||
servers = servers.difference(common_servers)
|
||||
@@ -275,7 +280,7 @@ def _intersect_expanded_scopes(scopes_a, scopes_b, db=None):
|
||||
for server in servers:
|
||||
server_groups = groups_for_server(server)
|
||||
if server_groups & b['group']:
|
||||
common_servers.add(server)
|
||||
common_servers = common_servers | {server}
|
||||
|
||||
# resolve group/user hierarchy if db available and user sets aren't identical
|
||||
if (
|
||||
@@ -289,14 +294,16 @@ def _intersect_expanded_scopes(scopes_a, scopes_b, db=None):
|
||||
for username in users:
|
||||
groups = groups_for_user(username)
|
||||
if groups & b["group"]:
|
||||
common_users.add(username)
|
||||
common_users = common_users | {username}
|
||||
|
||||
# add server filter if there wasn't one before
|
||||
if common_servers and "server" not in common_filters[base]:
|
||||
# add server filter if it's non-empty
|
||||
# and it changed
|
||||
if common_servers and common_servers != initial_common_servers:
|
||||
common_filters[base]["server"] = common_servers
|
||||
|
||||
# add user filter if it's non-empty and there wasn't one before
|
||||
if common_users and "user" not in common_filters[base]:
|
||||
# add user filter if it's non-empty
|
||||
# and it changed
|
||||
if common_users and common_users != initial_common_users:
|
||||
common_filters[base]["user"] = common_users
|
||||
|
||||
intersection = unparse_scopes(common_filters)
|
||||
@@ -845,6 +852,15 @@ def needs_scope(*scopes):
|
||||
def scope_decorator(func):
|
||||
@functools.wraps(func)
|
||||
def _auth_func(self, *args, **kwargs):
|
||||
if not self.current_user:
|
||||
# not authenticated at all, fail with more generic message
|
||||
# this is the most likely permission error - missing or mis-specified credentials,
|
||||
# don't indicate that they have insufficient permissions.
|
||||
raise web.HTTPError(
|
||||
403,
|
||||
"Missing or invalid credentials.",
|
||||
)
|
||||
|
||||
sig = inspect.signature(func)
|
||||
bound_sig = sig.bind(self, *args, **kwargs)
|
||||
bound_sig.apply_defaults()
|
||||
@@ -853,6 +869,11 @@ def needs_scope(*scopes):
|
||||
self.expanded_scopes = {}
|
||||
self.parsed_scopes = {}
|
||||
|
||||
try:
|
||||
end_point = self.request.path
|
||||
except AttributeError:
|
||||
end_point = self.__name__
|
||||
|
||||
s_kwargs = {}
|
||||
for resource in {'user', 'server', 'group', 'service'}:
|
||||
resource_name = resource + '_name'
|
||||
@@ -860,14 +881,10 @@ def needs_scope(*scopes):
|
||||
resource_value = bound_sig.arguments[resource_name]
|
||||
s_kwargs[resource] = resource_value
|
||||
for scope in scopes:
|
||||
app_log.debug("Checking access via scope %s", scope)
|
||||
app_log.debug("Checking access to %s via scope %s", end_point, scope)
|
||||
has_access = _check_scope_access(self, scope, **s_kwargs)
|
||||
if has_access:
|
||||
return func(self, *args, **kwargs)
|
||||
try:
|
||||
end_point = self.request.path
|
||||
except AttributeError:
|
||||
end_point = self.__name__
|
||||
app_log.warning(
|
||||
"Not authorizing access to {}. Requires any of [{}], not derived from scopes [{}]".format(
|
||||
end_point, ", ".join(scopes), ", ".join(self.expanded_scopes)
|
||||
|
@@ -23,6 +23,7 @@ If you are using OAuth, you will also need to register an oauth callback handler
|
||||
A tornado implementation is provided in :class:`HubOAuthCallbackHandler`.
|
||||
|
||||
"""
|
||||
|
||||
import asyncio
|
||||
import base64
|
||||
import hashlib
|
||||
@@ -35,6 +36,7 @@ import string
|
||||
import time
|
||||
import uuid
|
||||
import warnings
|
||||
from functools import partial
|
||||
from http import HTTPStatus
|
||||
from unittest import mock
|
||||
from urllib.parse import urlencode
|
||||
@@ -45,6 +47,7 @@ from tornado.log import app_log
|
||||
from tornado.web import HTTPError, RequestHandler
|
||||
from traitlets import (
|
||||
Any,
|
||||
Bool,
|
||||
Dict,
|
||||
Instance,
|
||||
Integer,
|
||||
@@ -56,8 +59,9 @@ from traitlets import (
|
||||
)
|
||||
from traitlets.config import SingletonConfigurable
|
||||
|
||||
from .._xsrf_utils import _anonymous_xsrf_id, check_xsrf_cookie, get_xsrf_token
|
||||
from ..scopes import _intersect_expanded_scopes
|
||||
from ..utils import get_browser_protocol, url_path_join
|
||||
from ..utils import _bool_env, get_browser_protocol, url_path_join
|
||||
|
||||
|
||||
def check_scopes(required_scopes, scopes):
|
||||
@@ -305,6 +309,46 @@ class HubAuth(SingletonConfigurable):
|
||||
""",
|
||||
).tag(config=True)
|
||||
|
||||
allow_token_in_url = Bool(
|
||||
_bool_env("JUPYTERHUB_ALLOW_TOKEN_IN_URL", default=True),
|
||||
help="""Allow requests to pages with ?token=... in the URL
|
||||
|
||||
This allows starting a user session by sharing a URL with credentials,
|
||||
bypassing authentication with the Hub.
|
||||
|
||||
If False, tokens in URLs will be ignored by the server,
|
||||
except on websocket requests.
|
||||
|
||||
Has no effect on websocket requests,
|
||||
which can only reliably authenticate via token in the URL,
|
||||
as recommended by browser Websocket implementations.
|
||||
|
||||
This will default to False in JupyterHub 5.
|
||||
|
||||
.. versionadded:: 4.1
|
||||
|
||||
.. versionchanged:: 5.0
|
||||
default changed to False
|
||||
""",
|
||||
).tag(config=True)
|
||||
|
||||
allow_websocket_cookie_auth = Bool(
|
||||
_bool_env("JUPYTERHUB_ALLOW_WEBSOCKET_COOKIE_AUTH", default=True),
|
||||
help="""Allow websocket requests with only cookie for authentication
|
||||
|
||||
Cookie-authenticated websockets cannot be protected from other user servers unless per-user domains are used.
|
||||
Disabling cookie auth on websockets protects user servers from each other,
|
||||
but may break some user applications.
|
||||
Per-user domains eliminate the need to lock this down.
|
||||
|
||||
JupyterLab 4.1.2 and Notebook 6.5.6, 7.1.0 will not work
|
||||
because they rely on cookie authentication without
|
||||
API or XSRF tokens.
|
||||
|
||||
.. versionadded:: 4.1
|
||||
""",
|
||||
).tag(config=True)
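
Both of these HubAuth traits take their defaults from environment variables via `_bool_env` ($JUPYTERHUB_ALLOW_TOKEN_IN_URL and $JUPYTERHUB_ALLOW_WEBSOCKET_COOKIE_AUTH). As a hedged sketch, not part of this changeset, of how a deployment might opt its user servers out of both behaviors through the standard `Spawner.environment` trait:

    # jupyterhub_config.py -- hypothetical hardening example, not part of this changeset
    # per the _bool_env docstring, "0" is falsy, so both features are disabled
    # in the spawned single-user servers
    c.Spawner.environment = {
        "JUPYTERHUB_ALLOW_TOKEN_IN_URL": "0",
        "JUPYTERHUB_ALLOW_WEBSOCKET_COOKIE_AUTH": "0",
    }
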
|
||||
|
||||
cookie_options = Dict(
|
||||
help="""Additional options to pass when setting cookies.
|
||||
|
||||
@@ -323,6 +367,40 @@ class HubAuth(SingletonConfigurable):
|
||||
else:
|
||||
return {}
|
||||
|
||||
cookie_host_prefix_enabled = Bool(
|
||||
False,
|
||||
help="""Enable `__Host-` prefix on authentication cookies.
|
||||
|
||||
The `__Host-` prefix on JupyterHub cookies provides further
|
||||
protection against cookie tossing when untrusted servers
|
||||
may control subdomains of your jupyterhub deployment.
|
||||
|
||||
_However_, it also requires that cookies be set on the path `/`,
|
||||
which means they are shared by all JupyterHub components,
|
||||
so a compromised server component will have access to _all_ JupyterHub-related
|
||||
cookies of the visiting browser.
|
||||
It is recommended to only combine `__Host-` cookies with per-user domains.
|
||||
|
||||
Set via $JUPYTERHUB_COOKIE_HOST_PREFIX_ENABLED
|
||||
""",
|
||||
).tag(config=True)
|
||||
|
||||
@default("cookie_host_prefix_enabled")
|
||||
def _default_cookie_host_prefix_enabled(self):
|
||||
return _bool_env("JUPYTERHUB_COOKIE_HOST_PREFIX_ENABLED")
|
||||
|
||||
@property
|
||||
def cookie_path(self):
|
||||
"""
|
||||
Path prefix on which to set cookies
|
||||
|
||||
self.base_url, but '/' when cookie_host_prefix_enabled is True
|
||||
"""
|
||||
if self.cookie_host_prefix_enabled:
|
||||
return "/"
|
||||
else:
|
||||
return self.base_url
|
||||
|
||||
cookie_cache_max_age = Integer(help="DEPRECATED. Use cache_max_age")
|
||||
|
||||
@observe('cookie_cache_max_age')
|
||||
@@ -585,6 +663,17 @@ class HubAuth(SingletonConfigurable):
|
||||
auth_header_name = 'Authorization'
|
||||
auth_header_pat = re.compile(r'(?:token|bearer)\s+(.+)', re.IGNORECASE)
|
||||
|
||||
def _get_token_url(self, handler):
|
||||
"""Get the token from the URL
|
||||
|
||||
Always run for websockets,
|
||||
otherwise run only if self.allow_token_in_url
|
||||
"""
|
||||
fetch_mode = handler.request.headers.get("Sec-Fetch-Mode", "unspecified")
|
||||
if self.allow_token_in_url or fetch_mode == "websocket":
|
||||
return handler.get_argument("token", "")
|
||||
return ""
|
||||
|
||||
def get_token(self, handler, in_cookie=True):
|
||||
"""Get the token authenticating a request
|
||||
|
||||
@@ -597,8 +686,7 @@ class HubAuth(SingletonConfigurable):
|
||||
- in header: Authorization: token <token>
|
||||
- in cookie (stored after oauth), if in_cookie is True
|
||||
"""
|
||||
|
||||
user_token = handler.get_argument('token', '')
|
||||
user_token = self._get_token_url(handler)
|
||||
if not user_token:
|
||||
# get it from Authorization header
|
||||
m = self.auth_header_pat.match(
|
||||
@@ -645,6 +733,14 @@ class HubAuth(SingletonConfigurable):
|
||||
"""
|
||||
return self._call_coroutine(sync, self._get_user, handler)
|
||||
|
||||
def _patch_xsrf(self, handler):
|
||||
"""Overridden in HubOAuth
|
||||
|
||||
HubAuth base class doesn't handle xsrf,
|
||||
which is only relevant for cookie-based auth
|
||||
"""
|
||||
return
|
||||
|
||||
async def _get_user(self, handler):
|
||||
# only allow this to be called once per handler
|
||||
# avoids issues if an error is raised,
|
||||
@@ -652,6 +748,9 @@ class HubAuth(SingletonConfigurable):
|
||||
if hasattr(handler, '_cached_hub_user'):
|
||||
return handler._cached_hub_user
|
||||
|
||||
# patch XSRF checks, which will apply after user check
|
||||
self._patch_xsrf(handler)
|
||||
|
||||
handler._cached_hub_user = user_model = None
|
||||
session_id = self.get_session_id(handler)
|
||||
|
||||
@@ -680,6 +779,33 @@ class HubAuth(SingletonConfigurable):
|
||||
"""Check whether the user has required scope(s)"""
|
||||
return check_scopes(required_scopes, set(user["scopes"]))
|
||||
|
||||
def _persist_url_token_if_set(self, handler):
|
||||
"""Persist ?token=... from URL in cookie if set
|
||||
|
||||
for use in future cookie-authenticated requests.
|
||||
|
||||
Allows initiating an authenticated session
|
||||
via /user/name/?token=abc...,
|
||||
otherwise only the initial request will be authenticated.
|
||||
|
||||
No-op if no token URL parameter is given.
|
||||
"""
|
||||
url_token = handler.get_argument('token', '')
|
||||
if not url_token:
|
||||
# no token to persist
|
||||
return
|
||||
# only do this if the token in the URL is the source of authentication
|
||||
if not getattr(handler, '_token_authenticated', False):
|
||||
return
|
||||
if not hasattr(self, 'set_cookie'):
|
||||
# only HubOAuth can persist cookies
|
||||
return
|
||||
self.log.info(
|
||||
"Storing token from url in cookie for %s",
|
||||
handler.request.remote_ip,
|
||||
)
|
||||
self.set_cookie(handler, url_token)
|
||||
|
||||
|
||||
class HubOAuth(HubAuth):
|
||||
"""HubAuth using OAuth for login instead of cookies set by the Hub.
|
||||
@@ -710,7 +836,10 @@ class HubOAuth(HubAuth):
|
||||
because we don't want to use the same cookie name
|
||||
across OAuth clients.
|
||||
"""
|
||||
return self.oauth_client_id
|
||||
cookie_name = self.oauth_client_id
|
||||
if self.cookie_host_prefix_enabled:
|
||||
cookie_name = "__Host-" + cookie_name
|
||||
return cookie_name
|
||||
|
||||
@property
|
||||
def state_cookie_name(self):
|
||||
@@ -722,22 +851,131 @@ class HubOAuth(HubAuth):
|
||||
|
||||
def _get_token_cookie(self, handler):
|
||||
"""Base class doesn't store tokens in cookies"""
|
||||
|
||||
fetch_mode = handler.request.headers.get("Sec-Fetch-Mode", "unset")
|
||||
if fetch_mode == "websocket" and not self.allow_websocket_cookie_auth:
|
||||
# disallow cookie auth on websockets
|
||||
return None
|
||||
|
||||
token = handler.get_secure_cookie(self.cookie_name)
|
||||
if token:
|
||||
# decode cookie bytes
|
||||
token = token.decode('ascii', 'replace')
|
||||
return token
|
||||
|
||||
async def _get_user_cookie(self, handler):
|
||||
def _get_xsrf_token_id(self, handler):
|
||||
"""Get contents for xsrf token for a given Handler
|
||||
|
||||
This is the value to be encrypted & signed in the xsrf token
|
||||
"""
|
||||
token = self._get_token_cookie(handler)
|
||||
session_id = self.get_session_id(handler)
|
||||
if token:
|
||||
token_hash = hashlib.sha256(token.encode("ascii", "replace")).hexdigest()
|
||||
if not session_id:
|
||||
session_id = _anonymous_xsrf_id(handler)
|
||||
else:
|
||||
token_hash = _anonymous_xsrf_id(handler)
|
||||
return f"{session_id}:{token_hash}".encode("ascii", "replace")
|
||||
|
||||
def _patch_xsrf(self, handler):
|
||||
"""Patch handler to inject JuptyerHub xsrf token behavior"""
|
||||
if isinstance(handler, HubAuthenticated):
|
||||
# doesn't need patch
|
||||
return
|
||||
|
||||
# patch in our xsrf token handling
|
||||
# overrides tornado and jupyter_server defaults,
|
||||
# but not others.
|
||||
# subclasses will still inherit our overridden behavior,
|
||||
# but their overrides (if any) will take precedence over ours
|
||||
# such as jupyter-server-proxy
|
||||
for cls in handler.__class__.__mro__:
|
||||
# search for the nearest parent class defined
|
||||
# in one of the 'base' Handler-defining packages.
|
||||
# In current implementations, this will
|
||||
# generally be jupyter_server.base.handlers.JupyterHandler
|
||||
# or tornado.web.RequestHandler,
|
||||
# but doing it this way ensures consistent results
|
||||
if (cls.__module__ or '').partition('.')[0] not in {
|
||||
"jupyter_server",
|
||||
"notebook",
|
||||
"tornado",
|
||||
}:
|
||||
continue
|
||||
# override check_xsrf_cookie where it's defined
|
||||
if "check_xsrf_cookie" in cls.__dict__:
|
||||
if "_get_xsrf_token_id" in cls.__dict__:
|
||||
# already patched
|
||||
return
|
||||
cls._xsrf_token_id = property(self._get_xsrf_token_id)
|
||||
cls.xsrf_token = property(
|
||||
partial(get_xsrf_token, cookie_path=self.base_url)
|
||||
)
|
||||
cls.check_xsrf_cookie = lambda handler: self.check_xsrf_cookie(handler)
|
||||
|
||||
def check_xsrf_cookie(self, handler):
|
||||
"""check_xsrf_cookie patch
|
||||
|
||||
Applies JupyterHub check_xsrf_cookie if not token authenticated
|
||||
"""
|
||||
if getattr(handler, '_token_authenticated', False):
|
||||
return
|
||||
check_xsrf_cookie(handler)
|
||||
|
||||
def _clear_cookie(self, handler, cookie_name, **kwargs):
|
||||
"""Clear a cookie, handling __Host- prefix"""
|
||||
# Set-Cookie is rejected without 'secure',
|
||||
# this includes clearing cookies!
|
||||
if cookie_name.startswith("__Host-"):
|
||||
kwargs["path"] = "/"
|
||||
kwargs["secure"] = True
|
||||
return handler.clear_cookie(cookie_name, **kwargs)
|
||||
|
||||
def _needs_check_xsrf(self, handler):
|
||||
"""Does the given cookie-authenticated request need to check xsrf?"""
|
||||
if getattr(handler, "_token_authenticated", False):
|
||||
return False
|
||||
|
||||
fetch_mode = handler.request.headers.get("Sec-Fetch-Mode", "unspecified")
|
||||
if fetch_mode in {"websocket", "no-cors"} or (
|
||||
fetch_mode in {"navigate", "unspecified"}
|
||||
and handler.request.method.lower() in {"get", "head", "options"}
|
||||
):
|
||||
# no xsrf check needed for regular page views or no-cors
|
||||
# or websockets after allow_websocket_cookie_auth passes
|
||||
if fetch_mode == "unspecified":
|
||||
self.log.warning(
|
||||
f"Skipping XSRF check for insecure request {handler.request.method} {handler.request.path}"
|
||||
)
|
||||
return False
|
||||
else:
|
||||
return True
|
||||
|
||||
async def _get_user_cookie(self, handler):
|
||||
# check xsrf if needed
|
||||
token = self._get_token_cookie(handler)
|
||||
session_id = self.get_session_id(handler)
|
||||
if token and self._needs_check_xsrf(handler):
|
||||
# call handler.check_xsrf_cookie instead of self.check_xsrf_cookie
|
||||
# to allow subclass overrides
|
||||
try:
|
||||
handler.check_xsrf_cookie()
|
||||
except HTTPError as e:
|
||||
self.log.error(
|
||||
f"Not accepting cookie auth on {handler.request.method} {handler.request.path}: {e}"
|
||||
)
|
||||
# don't proceed with cookie auth unless xsrf is okay
|
||||
# don't raise either, because that makes a mess
|
||||
return None
|
||||
|
||||
if token:
|
||||
user_model = await self.user_for_token(
|
||||
token, session_id=session_id, sync=False
|
||||
)
|
||||
if user_model is None:
|
||||
app_log.warning("Token stored in cookie may have expired")
|
||||
handler.clear_cookie(self.cookie_name)
|
||||
self._clear_cookie(handler, self.cookie_name, path=self.cookie_path)
|
||||
return user_model
|
||||
|
||||
# HubOAuth API
|
||||
@@ -883,7 +1121,7 @@ class HubOAuth(HubAuth):
|
||||
cookie_name = self.state_cookie_name
|
||||
b64_state = self.generate_state(next_url, **extra_state)
|
||||
kwargs = {
|
||||
'path': self.base_url,
|
||||
'path': self.cookie_path,
|
||||
'httponly': True,
|
||||
# Expire oauth state cookie in ten minutes.
|
||||
# Usually this will be cleared by completed login
|
||||
@@ -891,8 +1129,12 @@ class HubOAuth(HubAuth):
|
||||
# OAuth that doesn't complete shouldn't linger too long.
|
||||
'max_age': 600,
|
||||
}
|
||||
if get_browser_protocol(handler.request) == 'https':
|
||||
if (
|
||||
get_browser_protocol(handler.request) == 'https'
|
||||
or self.cookie_host_prefix_enabled
|
||||
):
|
||||
kwargs['secure'] = True
|
||||
|
||||
# load user cookie overrides
|
||||
kwargs.update(self.cookie_options)
|
||||
handler.set_secure_cookie(cookie_name, b64_state, **kwargs)
|
||||
@@ -930,8 +1172,11 @@ class HubOAuth(HubAuth):
|
||||
|
||||
def set_cookie(self, handler, access_token):
|
||||
"""Set a cookie recording OAuth result"""
|
||||
kwargs = {'path': self.base_url, 'httponly': True}
|
||||
if get_browser_protocol(handler.request) == 'https':
|
||||
kwargs = {'path': self.cookie_path, 'httponly': True}
|
||||
if (
|
||||
get_browser_protocol(handler.request) == 'https'
|
||||
or self.cookie_host_prefix_enabled
|
||||
):
|
||||
kwargs['secure'] = True
|
||||
# load user cookie overrides
|
||||
kwargs.update(self.cookie_options)
|
||||
@@ -945,7 +1190,7 @@ class HubOAuth(HubAuth):
|
||||
|
||||
def clear_cookie(self, handler):
|
||||
"""Clear the OAuth cookie"""
|
||||
handler.clear_cookie(self.cookie_name, path=self.base_url)
|
||||
self._clear_cookie(handler, self.cookie_name, path=self.cookie_path)
|
||||
|
||||
|
||||
class UserNotAllowed(Exception):
|
||||
@@ -1042,19 +1287,30 @@ class HubAuthenticated:
|
||||
def hub_auth(self, auth):
|
||||
self._hub_auth = auth
|
||||
|
||||
_hub_login_url = None
|
||||
|
||||
def get_login_url(self):
|
||||
"""Return the Hub's login URL"""
|
||||
login_url = self.hub_auth.login_url
|
||||
if isinstance(self.hub_auth, HubOAuth):
|
||||
# add state argument to OAuth url
|
||||
state = self.hub_auth.set_state_cookie(self, next_url=self.request.uri)
|
||||
login_url = url_concat(login_url, {'state': state})
|
||||
if self._hub_login_url is not None:
|
||||
# cached value, don't call this more than once per handler
|
||||
return self._hub_login_url
|
||||
# temporary override at setting level,
|
||||
# to allow any subclass overrides of get_login_url to preserve their effect
|
||||
# for example, APIHandler raises 403 to prevent redirects
|
||||
with mock.patch.dict(self.application.settings, {"login_url": login_url}):
|
||||
app_log.debug("Redirecting to login url: %s", login_url)
|
||||
return super().get_login_url()
|
||||
with mock.patch.dict(
|
||||
self.application.settings, {"login_url": self.hub_auth.login_url}
|
||||
):
|
||||
login_url = super().get_login_url()
|
||||
app_log.debug("Redirecting to login url: %s", login_url)
|
||||
|
||||
if isinstance(self.hub_auth, HubOAuth):
|
||||
# add state argument to OAuth url
|
||||
# must do this _after_ allowing get_login_url to raise
|
||||
# so we don't set unused cookies
|
||||
state = self.hub_auth.set_state_cookie(self, next_url=self.request.uri)
|
||||
login_url = url_concat(login_url, {'state': state})
|
||||
self._hub_login_url = login_url
|
||||
return login_url
|
||||
|
||||
def check_hub_user(self, model):
|
||||
"""Check whether Hub-authenticated user or service should be allowed.
|
||||
@@ -1146,7 +1402,7 @@ class HubAuthenticated:
|
||||
return
|
||||
try:
|
||||
self._hub_auth_user_cache = self.check_hub_user(user_model)
|
||||
except UserNotAllowed as e:
|
||||
except UserNotAllowed:
|
||||
# cache None, in case get_user is called again while processing the error
|
||||
self._hub_auth_user_cache = None
|
||||
|
||||
@@ -1165,20 +1421,28 @@ class HubAuthenticated:
|
||||
self._hub_auth_user_cache = None
|
||||
raise
|
||||
|
||||
# store ?token=... tokens passed via url in a cookie for future requests
|
||||
url_token = self.get_argument('token', '')
|
||||
if (
|
||||
user_model
|
||||
and url_token
|
||||
and getattr(self, '_token_authenticated', False)
|
||||
and hasattr(self.hub_auth, 'set_cookie')
|
||||
):
|
||||
# authenticated via `?token=`
|
||||
# set a cookie for future requests
|
||||
# hub_auth.set_cookie is only available on HubOAuth
|
||||
self.hub_auth.set_cookie(self, url_token)
|
||||
self.hub_auth._persist_url_token_if_set(self)
|
||||
return self._hub_auth_user_cache
|
||||
|
||||
@property
|
||||
def _xsrf_token_id(self):
|
||||
if hasattr(self, "__xsrf_token_id"):
|
||||
return self.__xsrf_token_id
|
||||
if not isinstance(self.hub_auth, HubOAuth):
|
||||
return ""
|
||||
return self.hub_auth._get_xsrf_token_id(self)
|
||||
|
||||
@_xsrf_token_id.setter
|
||||
def _xsrf_token_id(self, value):
|
||||
self.__xsrf_token_id = value
|
||||
|
||||
@property
|
||||
def xsrf_token(self):
|
||||
return get_xsrf_token(self, cookie_path=self.hub_auth.base_url)
|
||||
|
||||
def check_xsrf_cookie(self):
|
||||
return self.hub_auth.check_xsrf_cookie(self)
|
||||
|
||||
|
||||
class HubOAuthenticated(HubAuthenticated):
|
||||
"""Simple subclass of HubAuthenticated using OAuth instead of old shared cookies"""
|
||||
@@ -1213,12 +1477,22 @@ class HubOAuthCallbackHandler(HubOAuthenticated, RequestHandler):
|
||||
cookie_name = self.hub_auth.get_state_cookie_name(arg_state)
|
||||
cookie_state = self.get_secure_cookie(cookie_name)
|
||||
# clear cookie state now that we've consumed it
|
||||
self.clear_cookie(cookie_name, path=self.hub_auth.base_url)
|
||||
clear_kwargs = {}
|
||||
if self.hub_auth.cookie_host_prefix_enabled:
|
||||
# Set-Cookie is rejected without 'secure',
|
||||
# this includes clearing cookies!
|
||||
clear_kwargs["secure"] = True
|
||||
self.hub_auth._clear_cookie(self, cookie_name, path=self.hub_auth.cookie_path)
|
||||
if isinstance(cookie_state, bytes):
|
||||
cookie_state = cookie_state.decode('ascii', 'replace')
|
||||
# check that state matches
|
||||
if arg_state != cookie_state:
|
||||
app_log.warning("oauth state %r != %r", arg_state, cookie_state)
|
||||
app_log.warning(
|
||||
"oauth state argument %r != cookie %s=%r",
|
||||
arg_state,
|
||||
cookie_name,
|
||||
cookie_state,
|
||||
)
|
||||
raise HTTPError(403, "oauth state does not match. Try logging in again.")
|
||||
next_url = self.hub_auth.get_next_url(cookie_state)
|
||||
|
||||
|
@@ -38,6 +38,7 @@ A hub-managed service with no URL::
|
||||
}
|
||||
|
||||
"""
|
||||
|
||||
import asyncio
|
||||
import copy
|
||||
import os
|
||||
|
@@ -12,6 +12,7 @@ Application subclass can be controlled with environment variables:
|
||||
- JUPYTERHUB_SINGLEUSER_EXTENSION=1 to opt-in to the extension (requires Jupyter Server 2)
|
||||
- JUPYTERHUB_SINGLEUSER_APP=notebook (or jupyter-server) to opt-in
|
||||
"""
|
||||
|
||||
import os
|
||||
|
||||
from .mixins import HubAuthenticatedHandler, make_singleuser_app
|
||||
|
@@ -6,8 +6,9 @@
|
||||
.. versionchanged:: 2.0
|
||||
|
||||
Default app changed to launch `jupyter labhub`.
|
||||
Use JUPYTERHUB_SINGLEUSER_APP=notebook.notebookapp.NotebookApp for the legacy 'classic' notebook server.
|
||||
Use JUPYTERHUB_SINGLEUSER_APP='notebook' for the legacy 'classic' notebook server (requires notebook<7).
|
||||
"""
|
||||
|
||||
import os
|
||||
|
||||
from traitlets import import_item
|
||||
@@ -27,7 +28,25 @@ JUPYTERHUB_SINGLEUSER_APP = _app_shortcuts.get(
|
||||
JUPYTERHUB_SINGLEUSER_APP.replace("_", "-"), JUPYTERHUB_SINGLEUSER_APP
|
||||
)
|
||||
|
||||
|
||||
if JUPYTERHUB_SINGLEUSER_APP:
|
||||
if JUPYTERHUB_SINGLEUSER_APP in {"notebook", _app_shortcuts["notebook"]}:
|
||||
# better error for notebook v7, which uses jupyter-server
|
||||
# when the legacy notebook server is requested
|
||||
try:
|
||||
from notebook import __version__
|
||||
except ImportError:
|
||||
# will raise later
|
||||
pass
|
||||
else:
|
||||
# check if this failed because of notebook v7
|
||||
_notebook_major_version = int(__version__.split(".", 1)[0])
|
||||
if _notebook_major_version >= 7:
|
||||
raise ImportError(
|
||||
f"JUPYTERHUB_SINGLEUSER_APP={JUPYTERHUB_SINGLEUSER_APP} is not valid with notebook>=7 (have notebook=={__version__}).\n"
|
||||
f"Leave $JUPYTERHUB_SINGLEUSER_APP unspecified (or use the default JUPYTERHUB_SINGLEUSER_APP=jupyter-server), "
|
||||
'and set `c.Spawner.default_url = "/tree"` to make notebook v7 the default UI.'
|
||||
)
|
||||
App = import_item(JUPYTERHUB_SINGLEUSER_APP)
|
||||
else:
|
||||
App = None
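
For notebook v7 deployments, the error message above already names the workaround; a minimal sketch of that configuration (hypothetical example, assuming the standard jupyterhub_config.py and Spawner options) would be:

    # jupyterhub_config.py -- hypothetical example following the error message above
    # keep the default jupyter-server singleuser app and open the classic tree UI
    c.Spawner.default_url = "/tree"
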
|
||||
|
@@ -44,6 +44,7 @@ from jupyterhub._version import __version__, _check_version
|
||||
from jupyterhub.log import log_request
|
||||
from jupyterhub.services.auth import HubOAuth, HubOAuthCallbackHandler
|
||||
from jupyterhub.utils import (
|
||||
_bool_env,
|
||||
exponential_backoff,
|
||||
isoformat,
|
||||
make_ssl_context,
|
||||
@@ -55,17 +56,6 @@ from ._disable_user_config import _disable_user_config
|
||||
SINGLEUSER_TEMPLATES_DIR = str(Path(__file__).parent.resolve().joinpath("templates"))
|
||||
|
||||
|
||||
def _bool_env(key):
|
||||
"""Cast an environment variable to bool
|
||||
|
||||
0, empty, or unset is False; All other values are True.
|
||||
"""
|
||||
if os.environ.get(key, "") in {"", "0"}:
|
||||
return False
|
||||
else:
|
||||
return True
|
||||
|
||||
|
||||
def _exclude_home(path_list):
|
||||
"""Filter out any entries in a path list that are in my home directory.
|
||||
|
||||
@@ -127,25 +117,36 @@ class JupyterHubIdentityProvider(IdentityProvider):
|
||||
# HubAuth gets most of its config from the environment
|
||||
return HubOAuth(parent=self)
|
||||
|
||||
def _patch_xsrf(self, handler):
|
||||
self.hub_auth._patch_xsrf(handler)
|
||||
|
||||
def _patch_get_login_url(self, handler):
|
||||
original_get_login_url = handler.get_login_url
|
||||
|
||||
_hub_login_url = None
|
||||
|
||||
def get_login_url():
|
||||
"""Return the Hub's login URL, to begin login redirect"""
|
||||
login_url = self.hub_auth.login_url
|
||||
# add state argument to OAuth url
|
||||
state = self.hub_auth.set_state_cookie(
|
||||
handler, next_url=handler.request.uri
|
||||
)
|
||||
login_url = url_concat(login_url, {'state': state})
|
||||
# temporary override at setting level,
|
||||
nonlocal _hub_login_url
|
||||
if _hub_login_url is not None:
|
||||
# cached value, don't call this more than once per handler
|
||||
return _hub_login_url
|
||||
# temporary override at settings level,
|
||||
# to allow any subclass overrides of get_login_url to preserve their effect;
|
||||
# for example, APIHandler raises 403 to prevent redirects
|
||||
with mock.patch.dict(
|
||||
handler.application.settings, {"login_url": login_url}
|
||||
handler.application.settings, {"login_url": self.hub_auth.login_url}
|
||||
):
|
||||
self.log.debug("Redirecting to login url: %s", login_url)
|
||||
return original_get_login_url()
|
||||
login_url = original_get_login_url()
|
||||
self.log.debug("Redirecting to login url: %s", login_url)
|
||||
# add state argument to OAuth url
|
||||
# must do this _after_ allowing get_login_url to raise
|
||||
# so we don't set unused cookies
|
||||
state = self.hub_auth.set_state_cookie(
|
||||
handler, next_url=handler.request.uri
|
||||
)
|
||||
_hub_login_url = url_concat(login_url, {'state': state})
|
||||
return _hub_login_url
|
||||
|
||||
handler.get_login_url = get_login_url
|
||||
|
||||
@@ -153,6 +154,7 @@ class JupyterHubIdentityProvider(IdentityProvider):
|
||||
if hasattr(handler, "_jupyterhub_user"):
|
||||
return handler._jupyterhub_user
|
||||
self._patch_get_login_url(handler)
|
||||
self._patch_xsrf(handler)
|
||||
user = await self.hub_auth.get_user(handler, sync=False)
|
||||
if user is None:
|
||||
handler._jupyterhub_user = None
|
||||
@@ -187,6 +189,7 @@ class JupyterHubIdentityProvider(IdentityProvider):
|
||||
|
||||
return None
|
||||
handler._jupyterhub_user = JupyterHubUser(user)
|
||||
self.hub_auth._persist_url_token_if_set(handler)
|
||||
return handler._jupyterhub_user
|
||||
|
||||
def get_handlers(self):
|
||||
@@ -483,6 +486,11 @@ class JupyterHubSingleUser(ExtensionApp):
|
||||
cfg.answer_yes = True
|
||||
self.config.FileContentsManager.delete_to_trash = False
|
||||
|
||||
# load Spawner.notebook_dir configuration, if given
|
||||
root_dir = os.getenv("JUPYTERHUB_ROOT_DIR", None)
|
||||
if root_dir:
|
||||
cfg.root_dir = os.path.expanduser(root_dir)
|
||||
|
||||
# load http server config from environment
|
||||
url = urlparse(os.environ['JUPYTERHUB_SERVICE_URL'])
|
||||
if url.port:
|
||||
@@ -511,16 +519,25 @@ class JupyterHubSingleUser(ExtensionApp):
|
||||
|
||||
# Jupyter Server default: config files have higher priority than extensions,
|
||||
# by:
|
||||
# 1. load config files
|
||||
# 1. load config files and CLI
|
||||
# 2. load extension config
|
||||
# 3. merge file config into extension config
|
||||
|
||||
# we invert that by merging our extension config into server config before
|
||||
# they get merged the other way
|
||||
# this way config from this extension should always have highest priority
|
||||
|
||||
# but this also puts our config above _CLI_ options,
|
||||
# and CLI should come before env,
|
||||
# so merge that into _our_ config before loading
|
||||
if self.serverapp.cli_config:
|
||||
for cls_name, cls_config in self.serverapp.cli_config.items():
|
||||
if cls_name in self.config:
|
||||
self.config[cls_name].merge(cls_config)
|
||||
|
||||
self.serverapp.update_config(self.config)
|
||||
|
||||
# add our custom templates
|
||||
# config below here has _lower_ priority than user config
|
||||
self.config.NotebookApp.extra_template_paths.append(SINGLEUSER_TEMPLATES_DIR)
|
||||
|
||||
@default("default_url")
|
||||
@@ -590,9 +607,9 @@ class JupyterHubSingleUser(ExtensionApp):
|
||||
jinja_template_vars['logo_url'] = self.hub_auth.hub_host + url_path_join(
|
||||
self.hub_auth.hub_prefix, 'logo'
|
||||
)
|
||||
jinja_template_vars[
|
||||
'hub_control_panel_url'
|
||||
] = self.hub_auth.hub_host + url_path_join(self.hub_auth.hub_prefix, 'home')
|
||||
jinja_template_vars['hub_control_panel_url'] = (
|
||||
self.hub_auth.hub_host + url_path_join(self.hub_auth.hub_prefix, 'home')
|
||||
)
|
||||
|
||||
_activity_task = None
|
||||
|
||||
@@ -605,10 +622,15 @@ class JupyterHubSingleUser(ExtensionApp):
|
||||
|
||||
super().initialize()
|
||||
app = self.serverapp
|
||||
app.web_app.settings[
|
||||
"page_config_hook"
|
||||
] = app.identity_provider.page_config_hook
|
||||
app.web_app.settings["log_function"] = log_request
|
||||
app.web_app.settings["page_config_hook"] = (
|
||||
app.identity_provider.page_config_hook
|
||||
)
|
||||
# disable xsrf_cookie checks by Tornado, which run too early
|
||||
# checks in Jupyter Server are unconditional
|
||||
app.web_app.settings["xsrf_cookies"] = False
|
||||
# if the user has configured a log function in the tornado settings, do not override it
|
||||
if not 'log_function' in app.config.ServerApp.get('tornado_settings', {}):
|
||||
app.web_app.settings["log_function"] = log_request
|
||||
# add jupyterhub version header
|
||||
headers = app.web_app.settings.setdefault("headers", {})
|
||||
headers["X-JupyterHub-Version"] = __version__
|
||||
@@ -616,6 +638,9 @@ class JupyterHubSingleUser(ExtensionApp):
|
||||
# check jupyterhub version
|
||||
app.io_loop.run_sync(self.check_hub_version)
|
||||
|
||||
# set default CSP to prevent iframe embedding across jupyterhub components
|
||||
headers.setdefault("Content-Security-Policy", "frame-ancestors 'none'")
|
||||
|
||||
async def _start_activity():
|
||||
self._activity_task = asyncio.ensure_future(self.keep_activity_updated())
|
||||
|
||||
|
@@ -44,21 +44,15 @@ from traitlets.config import Configurable
|
||||
from .._version import __version__, _check_version
|
||||
from ..log import log_request
|
||||
from ..services.auth import HubOAuth, HubOAuthCallbackHandler, HubOAuthenticated
|
||||
from ..utils import exponential_backoff, isoformat, make_ssl_context, url_path_join
|
||||
from ..utils import (
|
||||
_bool_env,
|
||||
exponential_backoff,
|
||||
isoformat,
|
||||
make_ssl_context,
|
||||
url_path_join,
|
||||
)
|
||||
from ._disable_user_config import _disable_user_config, _exclude_home
|
||||
|
||||
|
||||
def _bool_env(key):
|
||||
"""Cast an environment variable to bool
|
||||
|
||||
0, empty, or unset is False; All other values are True.
|
||||
"""
|
||||
if os.environ.get(key, "") in {"", "0"}:
|
||||
return False
|
||||
else:
|
||||
return True
|
||||
|
||||
|
||||
# Authenticate requests with the Hub
|
||||
|
||||
|
||||
@@ -669,7 +663,8 @@ class SingleUserNotebookAppMixin(Configurable):
|
||||
# load the hub-related settings into the tornado settings dict
|
||||
self.init_hub_auth()
|
||||
s = self.tornado_settings
|
||||
s['log_function'] = log_request
|
||||
# if the user has configured a log function in the tornado settings, do not override it
|
||||
s.setdefault('log_function', log_request)
|
||||
s['user'] = self.user
|
||||
s['group'] = self.group
|
||||
s['hub_prefix'] = self.hub_prefix
|
||||
@@ -681,10 +676,10 @@ class SingleUserNotebookAppMixin(Configurable):
|
||||
)
|
||||
headers = s.setdefault('headers', {})
|
||||
headers['X-JupyterHub-Version'] = __version__
|
||||
# set CSP header directly to workaround bugs in jupyter/notebook 5.0
|
||||
# set default CSP to prevent iframe embedding across jupyterhub components
|
||||
headers.setdefault(
|
||||
'Content-Security-Policy',
|
||||
';'.join(["frame-ancestors 'self'", "report-uri " + csp_report_uri]),
|
||||
';'.join(["frame-ancestors 'none'", "report-uri " + csp_report_uri]),
|
||||
)
|
||||
super().init_webapp()
|
||||
|
||||
@@ -733,9 +728,9 @@ class SingleUserNotebookAppMixin(Configurable):
|
||||
)
|
||||
self.jinja_template_vars['hub_host'] = self.hub_host
|
||||
self.jinja_template_vars['hub_prefix'] = self.hub_prefix
|
||||
self.jinja_template_vars[
|
||||
'hub_control_panel_url'
|
||||
] = self.hub_host + url_path_join(self.hub_prefix, 'home')
|
||||
self.jinja_template_vars['hub_control_panel_url'] = (
|
||||
self.hub_host + url_path_join(self.hub_prefix, 'home')
|
||||
)
|
||||
|
||||
settings = self.web_app.settings
|
||||
# patch classic notebook jinja env
|
||||
@@ -855,13 +850,21 @@ def _patch_app_base_handlers(app):
if BaseHandler is not None:
base_handlers.append(BaseHandler)

# patch juptyer_server and notebook handlers if they have been imported
# patch jupyter_server and notebook handlers if they have been imported
for base_handler_name in [
"jupyter_server.base.handlers.JupyterHandler",
"notebook.base.handlers.IPythonHandler",
]:
modname, _ = base_handler_name.rsplit(".", 1)
if modname in sys.modules:
root_mod = modname.partition(".")[0]
if root_mod == "notebook":
import notebook

if int(notebook.__version__.partition(".")[0]) >= 7:
# notebook 7 is a server extension,
# it doesn't have IPythonHandler anymore
continue
base_handlers.append(import_item(base_handler_name))

if not base_handlers:
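
The idiom here is "patch only what is already imported": a dotted handler name is resolved only if its module is in `sys.modules`, and notebook >= 7 is skipped because it no longer ships `IPythonHandler`. A minimal restatement of that lookup with `importlib` (the `resolve_if_imported` helper is mine, not from the diff; `import_item` above resolves dotted names similarly):

import importlib
import sys

def resolve_if_imported(dotted_name):
    """Return the named class only if its module is already imported."""
    modname, clsname = dotted_name.rsplit(".", 1)
    if modname not in sys.modules:
        return None
    return getattr(importlib.import_module(modname), clsname)

handler_cls = resolve_if_imported("jupyter_server.base.handlers.JupyterHandler")
if handler_cls is not None:
    print(f"would patch {handler_cls.__name__}")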
@@ -1,6 +1,7 @@
"""
Contains base Spawner class & default implementation
"""

# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
import ast
@@ -162,6 +163,7 @@ class Spawner(LoggingConfigurable):
hub = Any()
orm_spawner = Any()
cookie_options = Dict()
cookie_host_prefix_enabled = Bool()

db = Any()

@@ -274,8 +276,6 @@ class Spawner(LoggingConfigurable):
api_token = Unicode()
oauth_client_id = Unicode()

oauth_scopes = List(Unicode())

@property
def oauth_scopes(self):
warnings.warn(
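
Here the `oauth_scopes = List(Unicode())` trait is dropped and only the deprecation property remains. A sketch of that deprecated-alias pattern; the replacement attribute name (`oauth_access_scopes`) is an assumption, since the diff only shows the property and the `warnings.warn` call:

import warnings

class SpawnerSketch:
    # assumed replacement attribute; illustrative only
    oauth_access_scopes = []

    @property
    def oauth_scopes(self):
        warnings.warn(
            "oauth_scopes is deprecated, use oauth_access_scopes",
            DeprecationWarning,
            stacklevel=2,
        )
        return self.oauth_access_scopes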
@@ -971,6 +971,10 @@ class Spawner(LoggingConfigurable):
env['JUPYTERHUB_CLIENT_ID'] = self.oauth_client_id
if self.cookie_options:
env['JUPYTERHUB_COOKIE_OPTIONS'] = json.dumps(self.cookie_options)

env["JUPYTERHUB_COOKIE_HOST_PREFIX_ENABLED"] = str(
int(self.cookie_host_prefix_enabled)
)
env['JUPYTERHUB_HOST'] = self.hub.public_host
env['JUPYTERHUB_OAUTH_CALLBACK_URL'] = url_path_join(
self.user.url, url_escape_path(self.name), 'oauth_callback'
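
The Spawner now exports the new cookie settings as environment variables: JSON-encoded cookie options and a "0"/"1" flag for the host-prefix feature. A sketch of how a spawned process could read them back; the parsing and the `__Host-` interpretation are my illustration, not the actual single-user implementation:

import json
import os

cookie_options = json.loads(os.environ.get("JUPYTERHUB_COOKIE_OPTIONS", "{}"))
host_prefix_enabled = os.environ.get(
    "JUPYTERHUB_COOKIE_HOST_PREFIX_ENABLED", "0"
) not in {"", "0"}
# assumption: the flag toggles use of the __Host- cookie name prefix
cookie_name_prefix = "__Host-" if host_prefix_enabled else ""
print(cookie_name_prefix, cookie_options)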
@@ -1455,14 +1459,13 @@ def set_user_setuid(username, chdir=True):
Returned preexec_fn will set uid/gid, and attempt to chdir to the target user's
home directory.
"""
import grp
import pwd

user = pwd.getpwnam(username)
uid = user.pw_uid
gid = user.pw_gid
home = user.pw_dir
gids = [g.gr_gid for g in grp.getgrall() if username in g.gr_mem]
gids = os.getgrouplist(username, gid)

def preexec():
"""Set uid/gid of current process
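
The supplementary-group lookup switches from scanning `grp.getgrall()` to `os.getgrouplist(username, gid)`. A self-contained sketch of a preexec function in the spirit of `set_user_setuid`, with error handling for non-root callers omitted:

import os
import pwd

def make_preexec_fn(username, chdir=True):
    """Sketch of a preexec_fn along the lines of set_user_setuid above."""
    user = pwd.getpwnam(username)
    uid, gid, home = user.pw_uid, user.pw_gid, user.pw_dir
    gids = os.getgrouplist(username, gid)

    def preexec():
        # order matters: groups and gid while still privileged, uid last
        os.setgid(gid)
        os.setgroups(gids)
        os.setuid(uid)
        if chdir:
            os.chdir(home)

    return preexec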
jupyterhub/tests/browser/conftest.py (new file, 28 lines)
@@ -0,0 +1,28 @@
from collections import namedtuple

import pytest
from playwright.async_api import async_playwright

from ..conftest import add_user, new_username


@pytest.fixture()
async def browser():
# browser_type in ["chromium", "firefox", "webkit"]
async with async_playwright() as playwright:
browser = await playwright.firefox.launch(headless=True)
context = await browser.new_context()
page = await context.new_page()
yield page
await context.clear_cookies()
await browser.close()


@pytest.fixture
def user_special_chars(app):
"""Fixture for creating a temporary user with special characters in the name"""
user = add_user(app.db, app, name=new_username("testuser<'&\">"))
yield namedtuple('UserSpecialChars', ['user', 'urlname'])(
user,
user.name.replace("<'&\">", "%3C%27%26%22%3E"),
)
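
These Playwright fixtures yield a fresh page per test and a user whose name exercises URL escaping. A sketch of how a test might consume them; the test name, marker, and URL path are illustrative and not taken from the suppressed test file below:

import pytest
from jupyterhub.tests.utils import public_url

@pytest.mark.asyncio  # assuming pytest-asyncio / the repo's async test mode
async def test_login_page_renders(app, browser):
    await browser.goto(public_url(app) + "hub/login")
    assert "JupyterHub" in await browser.title()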
jupyterhub/tests/browser/test_browser.py (new file, 1343 lines)
File diff suppressed because it is too large
@@ -23,6 +23,7 @@ Fixtures to add functionality or spawning behavior
- `slow_bad_spawn`

"""

# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
import asyncio
@@ -30,6 +30,7 @@ class JupyterHubTestHandler(JupyterHandler):
info = {
"current_user": self.current_user,
"config": self.app.config,
"root_dir": self.contents_manager.root_dir,
"disable_user_config": getattr(self.app, "disable_user_config", None),
"settings": self.settings,
"config_file_paths": self.app.config_file_paths,
@@ -26,6 +26,7 @@ Other components
- public_url

"""

import asyncio
import os
import sys
@@ -42,8 +43,8 @@ from .. import metrics, orm, roles
from ..app import JupyterHub
from ..auth import PAMAuthenticator
from ..spawner import SimpleLocalProcessSpawner
from ..utils import random_port, utcnow
from .utils import async_requests, public_url, ssl_setup
from ..utils import random_port, url_path_join, utcnow
from .utils import AsyncSession, public_url, ssl_setup


def mock_authenticate(username, password, service, encoding):
@@ -355,29 +356,32 @@ class MockHub(JupyterHub):
async def login_user(self, name):
"""Login a user by name, returning her cookies."""
base_url = public_url(self)
external_ca = None
s = AsyncSession()
if self.internal_ssl:
external_ca = self.external_certs['files']['ca']
s.verify = self.external_certs['files']['ca']
login_url = base_url + 'hub/login'
r = await async_requests.get(login_url)
r = await s.get(login_url)
r.raise_for_status()
xsrf = r.cookies['_xsrf']

r = await async_requests.post(
r = await s.post(
url_concat(login_url, {"_xsrf": xsrf}),
cookies=r.cookies,
data={'username': name, 'password': name},
allow_redirects=False,
verify=external_ca,
)
r.raise_for_status()
r.cookies["_xsrf"] = xsrf
assert sorted(r.cookies.keys()) == [
# make second request to get updated xsrf cookie
r2 = await s.get(
url_path_join(base_url, "hub/home"),
allow_redirects=False,
)
assert r2.status_code == 200
assert sorted(s.cookies.keys()) == [
'_xsrf',
'jupyterhub-hub-login',
'jupyterhub-session-id',
]
return r.cookies
return s.cookies


class InstrumentedSpawner(MockSpawner):
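
The rewritten `login_user` uses a session so cookies accumulate across the GET (which sets `_xsrf`), the login POST, and a follow-up GET that picks up the rotated post-login xsrf cookie. The same flow, restated as a synchronous sketch with `requests` (AsyncSession in the tests is an async wrapper; the helper below is mine, not the test code):

import requests
from tornado.httputil import url_concat

def login(base_url, username, password):
    """Synchronous sketch of the xsrf-aware login flow above."""
    s = requests.Session()
    r = s.get(base_url + "hub/login")  # sets the _xsrf cookie
    r.raise_for_status()
    xsrf = s.cookies["_xsrf"]
    r = s.post(
        url_concat(base_url + "hub/login", {"_xsrf": xsrf}),
        data={"username": username, "password": password},
        allow_redirects=False,
    )
    r.raise_for_status()
    # follow-up GET picks up the rotated post-login xsrf cookie
    s.get(base_url + "hub/home", allow_redirects=False)
    return s.cookies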
@@ -1,4 +1,5 @@
"""Example JupyterServer app subclass"""

from jupyter_server.base.handlers import JupyterHandler
from jupyter_server.serverapp import ServerApp
from tornado import web
@@ -12,6 +12,7 @@ Handlers and their purpose include:
- WhoAmIHandler: returns name of user making a request (deprecated cookie login)
- OWhoAmIHandler: returns name of user making a request (OAuth login)
"""

import json
import os
import pprint
@@ -11,6 +11,7 @@ Handlers and their purpose include:
- ArgsHandler: allowing retrieval of `sys.argv`.

"""

import json
import os
import sys
@@ -4,6 +4,7 @@ Run with old versions of jupyterhub to test upgrade/downgrade

used in test_db.py
"""

from datetime import datetime
from functools import partial
@@ -1,23 +0,0 @@
import pytest
from selenium import webdriver


@pytest.fixture(scope="session")
def browser_session():
"""Re-use one browser instance for the test session"""
options = webdriver.FirefoxOptions()
options.add_argument("-headless")
driver = webdriver.Firefox(options=options)
yield driver
driver.close()
driver.quit()


@pytest.fixture
def browser(browser_session, cleanup_after):
"""Get the browser session for one test

cookies are cleared after each test
"""
yield browser_session
browser_session.delete_all_cookies()
Some files were not shown because too many files have changed in this diff