Merge commit '63b7defe1a40b3abc3582a65a0402c1e82a2e230' into group_property_feature

vpopescu committed 2022-06-14 14:57:06 +02:00
62 changed files with 905 additions and 554 deletions

.github/dependabot.yml vendored Normal file
View File

@@ -0,0 +1,15 @@
# dependabot.yml reference: https://docs.github.com/en/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file
#
# Notes:
# - Status and logs from dependabot are provided at
# https://github.com/jupyterhub/jupyterhub/network/updates.
#
version: 2
updates:
  # Maintain dependencies in our GitHub Workflows
  - package-ecosystem: github-actions
    directory: "/"
    schedule:
      interval: weekly
      time: "05:00"
      timezone: "Etc/UTC"

View File

@@ -32,17 +32,18 @@ jobs:
  build-release:
    runs-on: ubuntu-20.04
    steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v3
-      - uses: actions/setup-python@v2
+      - uses: actions/setup-python@v4
        with:
-          python-version: 3.8
+          python-version: "3.9"
-      - uses: actions/setup-node@v1
+      - uses: actions/setup-node@v3
        with:
          node-version: "14"
-      - name: install build package
+      - name: install build requirements
        run: |
+          npm install -g yarn
          pip install --upgrade pip
          pip install build
          pip freeze
@@ -52,28 +53,17 @@ jobs:
          python -m build --sdist --wheel .
          ls -l dist
-      - name: verify wheel
+      - name: verify sdist
        run: |
-          cd dist
-          pip install ./*.whl
-          # verify data-files are installed where they are found
-          cat <<EOF | python
-          import os
-          from jupyterhub._data import DATA_FILES_PATH
-          print(f"DATA_FILES_PATH={DATA_FILES_PATH}")
-          assert os.path.exists(DATA_FILES_PATH), DATA_FILES_PATH
-          for subpath in (
-              "templates/page.html",
-              "static/css/style.min.css",
-              "static/components/jquery/dist/jquery.js",
-          ):
-              path = os.path.join(DATA_FILES_PATH, subpath)
-              assert os.path.exists(path), path
-          print("OK")
-          EOF
+          ./ci/check_sdist.py dist/jupyterhub-*.tar.gz
+      - name: verify data-files are installed where they are found
+        run: |
+          pip install dist/*.whl
+          ./ci/check_installed_data.py
      # ref: https://github.com/actions/upload-artifact#readme
-      - uses: actions/upload-artifact@v2
+      - uses: actions/upload-artifact@v3
        with:
          name: jupyterhub-${{ github.sha }}
          path: "dist/*"
@@ -108,16 +98,16 @@ jobs:
echo "REGISTRY=localhost:5000/" >> $GITHUB_ENV echo "REGISTRY=localhost:5000/" >> $GITHUB_ENV
fi fi
- uses: actions/checkout@v2 - uses: actions/checkout@v3
# Setup docker to build for multiple platforms, see: # Setup docker to build for multiple platforms, see:
# https://github.com/docker/build-push-action/tree/v2.4.0#usage # https://github.com/docker/build-push-action/tree/v2.4.0#usage
# https://github.com/docker/build-push-action/blob/v2.4.0/docs/advanced/multi-platform.md # https://github.com/docker/build-push-action/blob/v2.4.0/docs/advanced/multi-platform.md
- name: Set up QEMU (for docker buildx) - name: Set up QEMU (for docker buildx)
uses: docker/setup-qemu-action@25f0500ff22e406f7191a2a8ba8cda16901ca018 # associated tag: v1.0.2 uses: docker/setup-qemu-action@8b122486cedac8393e77aa9734c3528886e4a1a8 # associated tag: v1.0.2
- name: Set up Docker Buildx (for multi-arch builds) - name: Set up Docker Buildx (for multi-arch builds)
uses: docker/setup-buildx-action@2a4b53665e15ce7d7049afb11ff1f70ff1610609 # associated tag: v1.1.2 uses: docker/setup-buildx-action@dc7b9719a96d48369863986a06765841d7ea23f6 # associated tag: v1.1.2
with: with:
# Allows pushing to registry on localhost:5000 # Allows pushing to registry on localhost:5000
driver-opts: network=host driver-opts: network=host
@@ -155,7 +145,7 @@ jobs:
          branchRegex: ^\w[\w-.]*$
      - name: Build and push jupyterhub
-        uses: docker/build-push-action@e1b7f96249f2e4c8e4ac1519b9608c0d48944a1f
+        uses: docker/build-push-action@e551b19e49efd4e98792db7592c17c09b89db8d8
        with:
          context: .
          platforms: linux/amd64,linux/arm64
@@ -176,7 +166,7 @@ jobs:
          branchRegex: ^\w[\w-.]*$
      - name: Build and push jupyterhub-onbuild
-        uses: docker/build-push-action@e1b7f96249f2e4c8e4ac1519b9608c0d48944a1f
+        uses: docker/build-push-action@e551b19e49efd4e98792db7592c17c09b89db8d8
        with:
          build-args: |
            BASE_IMAGE=${{ fromJson(steps.jupyterhubtags.outputs.tags)[0] }}
@@ -197,7 +187,7 @@ jobs:
          branchRegex: ^\w[\w-.]*$
      - name: Build and push jupyterhub-demo
-        uses: docker/build-push-action@e1b7f96249f2e4c8e4ac1519b9608c0d48944a1f
+        uses: docker/build-push-action@e551b19e49efd4e98792db7592c17c09b89db8d8
        with:
          build-args: |
            BASE_IMAGE=${{ fromJson(steps.onbuildtags.outputs.tags)[0] }}
@@ -221,7 +211,7 @@ jobs:
          branchRegex: ^\w[\w-.]*$
      - name: Build and push jupyterhub/singleuser
-        uses: docker/build-push-action@e1b7f96249f2e4c8e4ac1519b9608c0d48944a1f
+        uses: docker/build-push-action@e551b19e49efd4e98792db7592c17c09b89db8d8
        with:
          build-args: |
            JUPYTERHUB_VERSION=${{ github.ref_type == 'tag' && github.ref_name || format('git:{0}', github.sha) }}

View File

@@ -15,15 +15,13 @@ on:
- "docs/**" - "docs/**"
- "jupyterhub/_version.py" - "jupyterhub/_version.py"
- "jupyterhub/scopes.py" - "jupyterhub/scopes.py"
- ".github/workflows/*" - ".github/workflows/test-docs.yml"
- "!.github/workflows/test-docs.yml"
push: push:
paths: paths:
- "docs/**" - "docs/**"
- "jupyterhub/_version.py" - "jupyterhub/_version.py"
- "jupyterhub/scopes.py" - "jupyterhub/scopes.py"
- ".github/workflows/*" - ".github/workflows/test-docs.yml"
- "!.github/workflows/test-docs.yml"
branches-ignore: branches-ignore:
- "dependabot/**" - "dependabot/**"
- "pre-commit-ci-update-config" - "pre-commit-ci-update-config"
@@ -40,18 +38,18 @@ jobs:
  validate-rest-api-definition:
    runs-on: ubuntu-20.04
    steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v3
      - name: Validate REST API definition
-        uses: char0n/swagger-editor-validate@182d1a5d26ff5c2f4f452c43bd55e2c7d8064003
+        uses: char0n/swagger-editor-validate@v1.3.1
        with:
          definition-file: docs/source/_static/rest-api.yml

  test-docs:
    runs-on: ubuntu-20.04
    steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v3
-      - uses: actions/setup-python@v2
+      - uses: actions/setup-python@v4
        with:
          python-version: "3.9"

View File

@@ -19,6 +19,9 @@ on:
- "**" - "**"
workflow_dispatch: workflow_dispatch:
permissions:
contents: read
jobs: jobs:
# The ./jsx folder contains React based source code files that are to compile # The ./jsx folder contains React based source code files that are to compile
# to share/jupyterhub/static/js/admin-react.js. The ./jsx folder includes # to share/jupyterhub/static/js/admin-react.js. The ./jsx folder includes
@@ -29,8 +32,8 @@ jobs:
    timeout-minutes: 5
    steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v3
-      - uses: actions/setup-node@v1
+      - uses: actions/setup-node@v3
        with:
          node-version: "14"
@@ -47,62 +50,3 @@ jobs:
        run: |
          cd jsx
          yarn test
# The ./jsx folder contains React based source files that are to compile to
# share/jupyterhub/static/js/admin-react.js. This job makes sure that whatever
# we have in jsx/src matches the compiled asset that we package and
# distribute.
#
# This job's purpose is to make sure we don't forget to compile changes and to
# verify nobody sneaks in a change in the hard to review compiled asset.
#
# NOTE: In the future we may want to stop version controlling the compiled
# artifact and instead generate it whenever we package JupyterHub. If we
# do this, we are required to setup node and compile the source code
# more often, at the same time we could avoid having this check be made.
#
compile-jsx-admin-react:
runs-on: ubuntu-20.04
timeout-minutes: 5
steps:
- uses: actions/checkout@v2
- uses: actions/setup-node@v1
with:
node-version: "14"
- name: Install yarn
run: |
npm install -g yarn
- name: yarn
run: |
cd jsx
yarn
- name: yarn build
run: |
cd jsx
yarn build
- name: yarn place
run: |
cd jsx
yarn place
- name: Verify compiled jsx/src matches version controlled artifact
run: |
if [[ `git status --porcelain=v1` ]]; then
echo "The source code in ./jsx compiles to something different than found in ./share/jupyterhub/static/js/admin-react.js!"
echo
echo "Please re-compile the source code in ./jsx with the following commands:"
echo
echo "yarn"
echo "yarn build"
echo "yarn place"
echo
echo "See ./jsx/README.md for more details."
exit 1
else
echo "Compilation of jsx/src to share/jupyterhub/static/js/admin-react.js didn't lead to changes."
fi

View File

@@ -30,6 +30,9 @@ env:
  LANG: C.UTF-8
  PYTEST_ADDOPTS: "--verbose --color=yes"

+permissions:
+  contents: read
+
jobs:
  # Run "pytest jupyterhub/tests" in various configurations
  pytest:
@@ -110,25 +113,25 @@ jobs:
if [ "${{ matrix.jupyter_server }}" != "" ]; then if [ "${{ matrix.jupyter_server }}" != "" ]; then
echo "JUPYTERHUB_SINGLEUSER_APP=jupyterhub.tests.mockserverapp.MockServerApp" >> $GITHUB_ENV echo "JUPYTERHUB_SINGLEUSER_APP=jupyterhub.tests.mockserverapp.MockServerApp" >> $GITHUB_ENV
fi fi
- uses: actions/checkout@v2 - uses: actions/checkout@v3
# NOTE: actions/setup-node@v1 make use of a cache within the GitHub base # NOTE: actions/setup-node@v3 make use of a cache within the GitHub base
# environment and setup in a fraction of a second. # environment and setup in a fraction of a second.
- name: Install Node v14 - name: Install Node v14
uses: actions/setup-node@v1 uses: actions/setup-node@v3
with: with:
node-version: "14" node-version: "14"
- name: Install Node dependencies - name: Install Javascript dependencies
run: | run: |
npm install npm install
npm install -g configurable-http-proxy npm install -g configurable-http-proxy yarn
npm list npm list
# NOTE: actions/setup-python@v2 make use of a cache within the GitHub base # NOTE: actions/setup-python@v4 make use of a cache within the GitHub base
# environment and setup in a fraction of a second. # environment and setup in a fraction of a second.
- name: Install Python ${{ matrix.python }} - name: Install Python ${{ matrix.python }}
uses: actions/setup-python@v2 uses: actions/setup-python@v4
with: with:
python-version: ${{ matrix.python }} python-version: "${{ matrix.python }}"
- name: Install Python dependencies - name: Install Python dependencies
run: | run: |
pip install --upgrade pip pip install --upgrade pip
@@ -211,7 +214,7 @@ jobs:
    timeout-minutes: 20
    steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v3
      - name: build images
        run: |

.gitignore vendored
View File

@@ -10,6 +10,7 @@ docs/build
docs/source/_static/rest-api
docs/source/rbac/scope-table.md
.ipynb_checkpoints
+jsx/build/
# ignore config file at the top-level of the repo
# but not sub-dirs
/jupyterhub_config.py
@@ -19,6 +20,7 @@ package-lock.json
share/jupyterhub/static/components
share/jupyterhub/static/css/style.min.css
share/jupyterhub/static/css/style.min.css.map
+share/jupyterhub/static/js/admin-react.js*
*.egg-info
MANIFEST
.coverage

View File

@@ -11,7 +11,7 @@
repos:
  # Autoformat: Python code, syntax patterns are modernized
  - repo: https://github.com/asottile/pyupgrade
-    rev: v2.32.0
+    rev: v2.34.0
    hooks:
      - id: pyupgrade
        args:
@@ -28,7 +28,6 @@ repos:
    rev: 22.3.0
    hooks:
      - id: black
-        args: [--target-version=py36]

  # Autoformat: markdown, yaml, javascript (see the file .prettierignore)
  - repo: https://github.com/pre-commit/mirrors-prettier
@@ -38,7 +37,7 @@ repos:
  # Autoformat and linting, misc. details
  - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v4.2.0
+    rev: v4.3.0
    hooks:
      - id: end-of-file-fixer
        exclude: share/jupyterhub/static/js/admin-react.js

View File

@@ -6,134 +6,9 @@ you can follow the [Jupyter contributor guide](https://jupyter.readthedocs.io/en
Make sure to also follow [Project Jupyter's Code of Conduct](https://github.com/jupyter/governance/blob/HEAD/conduct/code_of_conduct.md)
for a friendly and welcoming collaborative environment.

-## Setting up a development environment
-
-<!--
-https://jupyterhub.readthedocs.io/en/stable/contributing/setup.html
-contains a lot of the same information. Should we merge the docs and
-just have this page link to that one?
--->
-
-JupyterHub requires Python >= 3.5 and nodejs.
+Please see our documentation on
+
+- [Setting up a development install](https://jupyterhub.readthedocs.io/en/latest/contributing/setup.html)
+- [Testing JupyterHub and linting code](https://jupyterhub.readthedocs.io/en/latest/contributing/tests.html)
+
+If you need some help, feel free to ask on [Gitter](https://gitter.im/jupyterhub/jupyterhub) or [Discourse](https://discourse.jupyter.org/).
As a Python project, a development install of JupyterHub follows standard practices for the basics (steps 1-2).
1. clone the repo
```bash
git clone https://github.com/jupyterhub/jupyterhub
```
2. do a development install with pip
```bash
cd jupyterhub
python3 -m pip install --editable .
```
3. install the development requirements,
which include things like testing tools
```bash
python3 -m pip install -r dev-requirements.txt
```
4. install configurable-http-proxy with npm:
```bash
npm install -g configurable-http-proxy
```
5. set up pre-commit hooks for automatic code formatting, etc.
```bash
pre-commit install
```
You can also invoke the pre-commit hook manually at any time with
```bash
pre-commit run
```
## Contributing
JupyterHub has adopted automatic code formatting so you shouldn't
need to worry too much about your code style.
As long as your code is valid,
the pre-commit hook should take care of how it should look.
You can invoke the pre-commit hook by hand at any time with:
```bash
pre-commit run
```
which should run any autoformatting on your code
and tell you about any errors it couldn't fix automatically.
You may also install [black integration](https://github.com/psf/black#editor-integration)
into your text editor to format code automatically.
If you have already committed files before setting up the pre-commit
hook with `pre-commit install`, you can fix everything up using
`pre-commit run --all-files`. You need to make the fixing commit
yourself after that.
## Testing
It's a good idea to write tests to exercise any new features,
or that trigger any bugs that you have fixed to catch regressions.
You can run the tests with:
```bash
pytest -v
```
in the repo directory. If you want to just run certain tests,
check out the [pytest docs](https://pytest.readthedocs.io/en/latest/usage.html)
for how pytest can be called.
For instance, to test only spawner-related things in the REST API:
```bash
pytest -v -k spawn jupyterhub/tests/test_api.py
```
The tests live in `jupyterhub/tests` and are organized roughly into:
1. `test_api.py` tests the REST API
2. `test_pages.py` tests loading the HTML pages
and other collections of tests for different components.
When writing a new test, there should usually be a test of
similar functionality already written and related tests should
be added nearby.
The fixtures live in `jupyterhub/tests/conftest.py`. There are
fixtures that can be used for JupyterHub components, such as:
- `app`: an instance of JupyterHub with mocked parts
- `auth_state_enabled`: enables persisting auth_state (like authentication tokens)
- `db`: a sqlite in-memory DB session
- `io_loop`: a Tornado event loop
- `event_loop`: a new asyncio event loop
- `user`: creates a new temporary user
- `admin_user`: creates a new temporary admin user
- single user servers
- `cleanup_after`: allows cleanup of single user servers between tests
- mocked service
- `MockServiceSpawner`: a spawner that mocks services for testing with a short poll interval
- `mockservice`: mocked service with no external service url
- `mockservice_url`: mocked service with a url to test external services
And fixtures to add functionality or spawning behavior:
- `admin_access`: grants admin access
- `no_patience`: sets slow-spawning timeouts to zero
- `slow_spawn`: enables the SlowSpawner (a spawner that takes a few seconds to start)
- `never_spawn`: enables the NeverSpawner (a spawner that will never start)
- `bad_spawn`: enables the BadSpawner (a spawner that fails immediately)
- `slow_bad_spawn`: enables the SlowBadSpawner (a spawner that fails after a short delay)
To read more about fixtures check out the
[pytest docs](https://docs.pytest.org/en/latest/fixture.html)
for how to use the existing fixtures, and how to create new ones.
When in doubt, feel free to [ask](https://gitter.im/jupyterhub/jupyterhub).

View File

@@ -37,6 +37,7 @@ RUN apt-get update \
    python3-pycurl \
    nodejs \
    npm \
+    yarn \
    && apt-get clean \
    && rm -rf /var/lib/apt/lists/*

View File

@@ -8,6 +8,7 @@ include *requirements.txt
include Dockerfile
graft onbuild
+graft jsx
graft jupyterhub
graft scripts
graft share
@@ -18,6 +19,10 @@ graft ci
graft docs
prune docs/node_modules

+# Intermediate javascript files
+prune jsx/node_modules
+prune jsx/build
+
# prune some large unused files from components
prune share/jupyterhub/static/components/bootstrap/dist/css
exclude share/jupyterhub/static/components/bootstrap/dist/fonts/*.svg

ci/check_installed_data.py Executable file
View File

@@ -0,0 +1,20 @@
#!/usr/bin/env python
# Check that installed package contains everything we expect
import os

from jupyterhub._data import DATA_FILES_PATH

print("Checking jupyterhub._data")
print(f"DATA_FILES_PATH={DATA_FILES_PATH}")
assert os.path.exists(DATA_FILES_PATH), DATA_FILES_PATH

for subpath in (
    "templates/page.html",
    "static/css/style.min.css",
    "static/components/jquery/dist/jquery.js",
    "static/js/admin-react.js",
):
    path = os.path.join(DATA_FILES_PATH, subpath)
    assert os.path.exists(path), path

print("OK")

ci/check_sdist.py Executable file
View File

@@ -0,0 +1,28 @@
#!/usr/bin/env python
# Check that sdist contains everything we expect
import sys
import tarfile
from tarfile import TarFile

expected_files = [
    "docs/requirements.txt",
    "jsx/package.json",
    "package.json",
    "README.md",
]

assert len(sys.argv) == 2, "Expected one file"
print(f"Checking {sys.argv[1]}")

tar = tarfile.open(name=sys.argv[1], mode="r:gz")
try:
    # Remove leading jupyterhub-VERSION/
    filelist = {f.partition('/')[2] for f in tar.getnames()}
finally:
    tar.close()

for e in expected_files:
    assert e in filelist, f"{e} not found"

print("OK")

View File

@@ -1391,6 +1391,9 @@ components:
          inherit:
            Everything that the token-owning entity can access _(metascope
            for tokens)_
+          admin-ui:
+            Access the admin page. Permission to take actions via the admin
+            page granted separately.
          admin:users:
            Read, write, create and delete users and their authentication
            state, not including their servers or tokens.
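The `admin-ui` scope added above decouples opening the admin page from permission to act on users or servers. As a hedged illustration (not part of this commit; the role and group names below are invented), a deployment could grant it through a custom role in `jupyterhub_config.py`:

```python
# jupyterhub_config.py -- illustrative sketch only
c.JupyterHub.load_roles = [
    {
        "name": "ui-only-admin",
        "description": "May open the admin page and list users, without modify rights",
        "scopes": ["admin-ui", "list:users", "read:users"],
        "groups": ["support-staff"],  # hypothetical group
    }
]
```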

File diff suppressed because one or more lines are too long

View File

@@ -16,7 +16,7 @@ Install Python
--------------

JupyterHub is written in the `Python <https://python.org>`_ programming language, and
-requires you have at least version 3.5 installed locally. If you havent
+requires you have at least version 3.6 installed locally. If you havent
installed Python before, the recommended way to install it is to use
`miniconda <https://conda.io/miniconda.html>`_. Remember to get the Python 3 version,
and **not** the Python 2 version!
@@ -24,11 +24,10 @@ and **not** the Python 2 version!
Install nodejs
--------------

-``configurable-http-proxy``, the default proxy implementation for
-JupyterHub, is written in Javascript to run on `NodeJS
-<https://nodejs.org/en/>`_. If you have not installed nodejs before, we
-recommend installing it in the ``miniconda`` environment you set up for
-Python. You can do so with ``conda install nodejs``.
+`NodeJS 12+ <https://nodejs.org/en/>`_ is required for building some JavaScript components.
+``configurable-http-proxy``, the default proxy implementation for JupyterHub, is written in Javascript.
+If you have not installed nodejs before, we recommend installing it in the ``miniconda`` environment you set up for Python.
+You can do so with ``conda install nodejs``.

Install git
-----------
@@ -46,7 +45,7 @@ their effects quickly. You need to do a developer install to make that
happen. happen.
.. note:: This guide does not attempt to dictate *how* development .. note:: This guide does not attempt to dictate *how* development
environements should be isolated since that is a personal preference and can environments should be isolated since that is a personal preference and can
be achieved in many ways, for example `tox`, `conda`, `docker`, etc. See this be achieved in many ways, for example `tox`, `conda`, `docker`, etc. See this
`forum thread <https://discourse.jupyter.org/t/thoughts-on-using-tox/3497>`_ for `forum thread <https://discourse.jupyter.org/t/thoughts-on-using-tox/3497>`_ for
a more detailed discussion. a more detailed discussion.
@@ -66,7 +65,7 @@ happen.
      python -V

-   This should return a version number greater than or equal to 3.5.
+   This should return a version number greater than or equal to 3.6.

   .. code:: bash
@@ -74,12 +73,11 @@ happen.
   This should return a version number greater than or equal to 5.0.

-3. Install ``configurable-http-proxy``. This is required to run
-   JupyterHub.
+3. Install ``configurable-http-proxy`` (required to run and test the default JupyterHub configuration) and ``yarn`` (required to build some components):

   .. code:: bash

-      npm install -g configurable-http-proxy
+      npm install -g configurable-http-proxy yarn

   If you get an error that says ``Error: EACCES: permission denied``,
   you might need to prefix the command with ``sudo``. If you do not
@@ -87,11 +85,17 @@ happen.
   .. code:: bash

-      npm install configurable-http-proxy
+      npm install configurable-http-proxy yarn
      export PATH=$PATH:$(pwd)/node_modules/.bin

   The second line needs to be run every time you open a new terminal.

+   If you are using conda you can instead run:
+
+   .. code:: bash
+
+      conda install configurable-http-proxy yarn

4. Install the python packages required for JupyterHub development.

   .. code:: bash
@@ -186,3 +190,4 @@ development updates, with:
   python3 setup.py js    # fetch updated client-side js
   python3 setup.py css   # recompile CSS from LESS sources
+   python3 setup.py jsx   # build React admin app

View File

@@ -1,8 +1,8 @@
.. _contributing/tests:

-==================
-Testing JupyterHub
-==================
+===================================
+Testing JupyterHub and linting code
+===================================

Unit test help validate that JupyterHub works the way we think it does,
and continues to do so when changes occur. They also help communicate
@@ -57,6 +57,50 @@ Running the tests
      pytest -v jupyterhub/tests/test_api.py::test_shutdown
See the `pytest usage documentation <https://pytest.readthedocs.io/en/latest/usage.html>`_ for more details.
Test organisation
=================
The tests live in ``jupyterhub/tests`` and are organized roughly into:
#. ``test_api.py`` tests the REST API
#. ``test_pages.py`` tests loading the HTML pages
and other collections of tests for different components.
When writing a new test, there should usually be a test of
similar functionality already written and related tests should
be added nearby.
The fixtures live in ``jupyterhub/tests/conftest.py``. There are
fixtures that can be used for JupyterHub components, such as:
- ``app``: an instance of JupyterHub with mocked parts
- ``auth_state_enabled``: enables persisting auth_state (like authentication tokens)
- ``db``: a sqlite in-memory DB session
- ``io_loop```: a Tornado event loop
- ``event_loop``: a new asyncio event loop
- ``user``: creates a new temporary user
- ``admin_user``: creates a new temporary admin user
- single user servers
- ``cleanup_after``: allows cleanup of single user servers between tests
- mocked service
- ``MockServiceSpawner``: a spawner that mocks services for testing with a short poll interval
- ``mockservice```: mocked service with no external service url
- ``mockservice_url``: mocked service with a url to test external services
And fixtures to add functionality or spawning behavior:
- ``admin_access``: grants admin access
- ``no_patience```: sets slow-spawning timeouts to zero
- ``slow_spawn``: enables the SlowSpawner (a spawner that takes a few seconds to start)
- ``never_spawn``: enables the NeverSpawner (a spawner that will never start)
- ``bad_spawn``: enables the BadSpawner (a spawner that fails immediately)
- ``slow_bad_spawn``: enables the SlowBadSpawner (a spawner that fails after a short delay)
See the `pytest fixtures documentation <https://pytest.readthedocs.io/en/latest/fixture.html>`_
for how to use the existing fixtures, and how to create new ones.
Troubleshooting Test Failures
=============================
@@ -66,3 +110,27 @@ All the tests are failing
Make sure you have completed all the steps in :ref:`contributing/setup` successfully, and
can launch ``jupyterhub`` from the terminal.
Code formatting and linting
===========================
JupyterHub has adopted automatic code formatting and linting.
As long as your code is valid, the pre-commit hook should take care of how it should look.
You can invoke the pre-commit hook by hand at any time with:
.. code:: bash
pre-commit run
which should run any autoformatting on your code and tell you about any errors it couldn't fix automatically.
You may also install `black integration <https://github.com/psf/black#editor-integration>`_
into your text editor to format code automatically.
If you have already committed files before running pre-commit you can fix everything using:
.. code:: bash
pre-commit run --all-files
And committing the changes.
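The fixtures listed above come from ``jupyterhub/tests/conftest.py``. A minimal sketch of how a new test might combine them, assuming the async test style and the ``api_request`` helper used elsewhere in ``jupyterhub/tests`` (the test body itself is invented, not part of this commit):

```python
# a hypothetical jupyterhub/tests/test_example.py
from .utils import api_request


async def test_user_visible_in_api(app, user):
    # `app` is a mocked JupyterHub instance, `user` a temporary user for this test
    r = await api_request(app, "users", user.name)
    r.raise_for_status()
    assert r.json()["name"] == user.name
```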

View File

@@ -72,13 +72,29 @@ Requested resources are filtered based on the filter of the corresponding scope.
In case a user resource is being accessed, any scopes with _group_ filters will be expanded to filters for each _user_ in those groups.

-### `!user` filter
+### Self-referencing filters
+
+There are some 'shortcut' filters,
+which can be applied to all scopes,
+that filter based on the entities associated with the request.

The `!user` filter is a special horizontal filter that strictly refers to the **"owner only"** scopes, where _owner_ is a user entity. The filter resolves internally into `!user=<ownerusername>` ensuring that only the owner's resources may be accessed through the associated scopes.

For example, the `server` role assigned by default to server tokens contains `access:servers!user` and `users:activity!user` scopes. This allows the token to access and post activity of only the servers owned by the token owner.

-The filter can be applied to any scope.
+:::{versionadded} 2.3
+`!service` and `!server` filters.
+:::
+
+In addition to `!user`, _tokens_ may have filters `!service`
+or `!server`, which expand similarly to `!service=servicename`
+and `!server=servername`.
+This only applies to tokens issued via the OAuth flow.
+In these cases, the name is the _issuing_ entity (a service or single-user server),
+so that access can be restricted to the issuing service,
+e.g. `access:servers!server` would grant access only to the server that requested the token.
+
+These filters can be applied to any scope.

(vertical-filtering-target)=
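As a hedged illustration of filtered scopes in practice (not part of this diff; the role, group, and names below are invented), explicit filters can be attached when a role is defined, while the self-referencing `!user`/`!service`/`!server` forms resolve only when a request is made:

```python
# jupyterhub_config.py -- illustrative sketch only
c.JupyterHub.load_roles = [
    {
        # members of 'class-a-teachers' may access servers of users in group 'class-a'
        "name": "class-a-access",
        "scopes": ["access:servers!group=class-a", "list:users!group=class-a"],
        "groups": ["class-a-teachers"],
    }
]
# By contrast, a token issued to a single-user server carrying `access:servers!server`
# expands at request time to `access:servers!server=<the issuing server>`.
```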

View File

@@ -231,8 +231,8 @@ In case of the need to run the jupyterhub under /jhub/ or other location please
httpd.conf amendments:

```bash
-RewriteRule /jhub/(.*) ws://127.0.0.1:8000/jhub/$1 [NE,P,L]
-RewriteRule /jhub/(.*) http://127.0.0.1:8000/jhub/$1 [NE,P,L]
+RewriteRule /jhub/(.*) ws://127.0.0.1:8000/jhub/$1 [P,L]
+RewriteRule /jhub/(.*) http://127.0.0.1:8000/jhub/$1 [P,L]

ProxyPass /jhub/ http://127.0.0.1:8000/jhub/
ProxyPassReverse /jhub/ http://127.0.0.1:8000/jhub/

View File

@@ -35,6 +35,8 @@ A Service may have the following properties:
  the service will be added to the proxy at `/services/:name`
- `api_token: str (default - None)` - For Externally-Managed Services you need to specify
  an API token to perform API requests to the Hub
+- `display: bool (default - True)` - When set to true, display a link to the
+  service's URL under the 'Services' dropdown in user's hub home page.

If a service is also to be managed by the Hub, it has a few extra options:
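For context, a hedged sketch of how the `display` property fits into a service definition (the service itself is hypothetical, not part of this commit):

```python
# jupyterhub_config.py -- illustrative sketch only
c.JupyterHub.services = [
    {
        "name": "internal-metrics",
        "url": "http://127.0.0.1:9999",
        "api_token": "replace-with-a-generated-token",
        # hide this service from the 'Services' dropdown on the hub home page
        "display": False,
    }
]
```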

View File

@@ -371,7 +371,7 @@ a JupyterHub deployment. The commands are:
- System and deployment information

  ```bash
-  jupyter troubleshooting
+  jupyter troubleshoot
  ```

- Kernel information

View File

@@ -1,56 +0,0 @@
/*
object-assign
(c) Sindre Sorhus
@license MIT
*/
/*!
Copyright (c) 2018 Jed Watson.
Licensed under the MIT License (MIT), see
http://jedwatson.github.io/classnames
*/
/** @license React v0.20.2
* scheduler.production.min.js
*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
/** @license React v16.13.1
* react-is.production.min.js
*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
/** @license React v17.0.2
* react-dom.production.min.js
*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
/** @license React v17.0.2
* react-jsx-runtime.production.min.js
*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
/** @license React v17.0.2
* react.production.min.js
*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/

View File

@@ -1,6 +0,0 @@
<!DOCTYPE html>
<head></head>
<body>
<div id="admin-react-hook"></div>
<script src="admin-react.js"></script>
</body>

View File

@@ -8,7 +8,7 @@
"scripts": { "scripts": {
"build": "yarn && webpack", "build": "yarn && webpack",
"hot": "webpack && webpack-dev-server", "hot": "webpack && webpack-dev-server",
"place": "cp -r build/admin-react.js ../share/jupyterhub/static/js/admin-react.js", "place": "cp build/admin-react.js* ../share/jupyterhub/static/js/",
"test": "jest --verbose", "test": "jest --verbose",
"snap": "jest --updateSnapshot", "snap": "jest --updateSnapshot",
"lint": "eslint --ext .jsx --ext .js src/", "lint": "eslint --ext .jsx --ext .js src/",

View File

@@ -60,7 +60,10 @@ const AddUser = (props) => {
            placeholder="usernames separated by line"
            data-testid="user-textarea"
            onBlur={(e) => {
-              let split_users = e.target.value.split("\n");
+              let split_users = e.target.value
+                .split("\n")
+                .map((u) => u.trim())
+                .filter((u) => u.length > 0);
              setUsers(split_users);
            }}
          ></textarea>
@@ -88,17 +91,7 @@ const AddUser = (props) => {
data-testid="submit" data-testid="submit"
className="btn btn-primary" className="btn btn-primary"
onClick={() => { onClick={() => {
let filtered_users = users.filter( addUsers(users, admin)
(e) =>
e.length > 2 &&
/[!@#$%^&*(),.?":{}|<>]/g.test(e) == false
);
if (filtered_users.length < users.length) {
setUsers(filtered_users);
failRegexEvent();
}
addUsers(filtered_users, admin)
.then((data) => .then((data) =>
data.status < 300 data.status < 300
? updateUsers(0, limit) ? updateUsers(0, limit)

View File

@@ -70,12 +70,12 @@ test("Removes users when they fail Regex", async () => {
  let textarea = screen.getByTestId("user-textarea");
  let submit = screen.getByTestId("submit");

-  fireEvent.blur(textarea, { target: { value: "foo\nbar\n!!*&*" } });
+  fireEvent.blur(textarea, { target: { value: "foo \n bar\na@b.co\n \n\n" } });
  await act(async () => {
    fireEvent.click(submit);
  });

-  expect(callbackSpy).toHaveBeenCalledWith(["foo", "bar"], false);
+  expect(callbackSpy).toHaveBeenCalledWith(["foo", "bar", "a@b.co"], false);
});

test("Correctly submits admin", async () => {

View File

@@ -59,7 +59,7 @@ const CreateGroup = (props) => {
            value={groupName}
            placeholder="group name..."
            onChange={(e) => {
-              setGroupName(e.target.value);
+              setGroupName(e.target.value.trim());
            }}
          ></input>
        </div>

View File

@@ -30,7 +30,7 @@ const AccessServerButton = ({ url }) => (
);

const ServerDashboard = (props) => {
-  let base_url = window.base_url;
+  let base_url = window.base_url || "/";
  // sort methods
  var usernameDesc = (e) => e.sort((a, b) => (a.name > b.name ? 1 : -1)),
    usernameAsc = (e) => e.sort((a, b) => (a.name < b.name ? 1 : -1)),
@@ -200,6 +200,25 @@ const ServerDashboard = (props) => {
  );
};

+const ServerRowTable = ({ data }) => {
+  return (
+    <ReactObjectTableViewer
+      className="table-striped table-bordered"
+      style={{
+        padding: "3px 6px",
+        margin: "auto",
+      }}
+      keyStyle={{
+        padding: "4px",
+      }}
+      valueStyle={{
+        padding: "4px",
+      }}
+      data={data}
+    />
+  );
+};
+
const serverRow = (user, server) => {
  const { servers, ...userNoServers } = user;
  const serverNameDash = server.name ? `-${server.name}` : "";
@@ -258,7 +277,7 @@ const ServerDashboard = (props) => {
          />
          <a
            href={`${base_url}spawn/${user.name}${
-              server.name && "/" + server.name
+              server.name ? "/" + server.name : ""
            }`}
          >
            <button
@@ -286,37 +305,11 @@ const ServerDashboard = (props) => {
        >
          <Card style={{ width: "100%", padding: 3, margin: "0 auto" }}>
            <Card.Title>User</Card.Title>
-            <ReactObjectTableViewer
-              className="table-striped table-bordered admin-table-head"
-              style={{
-                padding: "3px 6px",
-                margin: "auto",
-              }}
-              keyStyle={{
-                padding: "4px",
-              }}
-              valueStyle={{
-                padding: "4px",
-              }}
-              data={userNoServers}
-            />
+            <ServerRowTable data={userNoServers} />
          </Card>
          <Card style={{ width: "100%", padding: 3, margin: "0 auto" }}>
            <Card.Title>Server</Card.Title>
-            <ReactObjectTableViewer
-              className="table-striped table-bordered admin-table-head"
-              style={{
-                padding: "3px 6px",
-                margin: "auto",
-              }}
-              keyStyle={{
-                padding: "4px",
-              }}
-              valueStyle={{
-                padding: "4px",
-              }}
-              data={server}
-            />
+            <ServerRowTable data={server} />
          </Card>
        </CardGroup>
      </Collapse>

View File

@@ -98,6 +98,18 @@ test("Renders correctly the status of a single-user server", async () => {
  expect(stop).toBeVisible();
});

+test("Renders spawn page link", async () => {
+  let callbackSpy = mockAsync();
+
+  await act(async () => {
+    render(serverDashboardJsx(callbackSpy));
+  });
+
+  let link = screen.getByText("Spawn Page").closest("a");
+  let url = new URL(link.href);
+  expect(url.pathname).toEqual("/spawn/bar");
+});
+
test("Invokes the startServer event on button click", async () => {
  let callbackSpy = mockAsync();

View File

@@ -1,5 +1,5 @@
export const jhapiRequest = (endpoint, method, data) => {
-  let base_url = window.base_url,
+  let base_url = window.base_url || "/",
    api_url = `${base_url}hub/api`;
  return fetch(api_url + endpoint, {
    method: method,

View File

@@ -1974,9 +1974,9 @@ async-limiter@~1.0.0:
  integrity sha512-csOlWGAcRFJaI6m+F2WKdnMKr4HhdhFVBk0H/QbJFMCr+uO2kwohwXQPxw/9OCxp05r5ghVBFSyioixx3gfkNQ==

async@^2.6.2:
-  version "2.6.3"
-  resolved "https://registry.yarnpkg.com/async/-/async-2.6.3.tgz#d72625e2344a3656e3a3ad4fa749fa83299d82ff"
-  integrity sha512-zflvls11DCy+dQWzTW2dzuilv8Z5X/pjfmZOWba6TNIVDm+2UDaJmXSOXlasHKfNBs8oo3M0aT50fDEWfKZjXg==
+  version "2.6.4"
+  resolved "https://registry.yarnpkg.com/async/-/async-2.6.4.tgz#706b7ff6084664cd7eae713f6f965433b5504221"
+  integrity sha512-mzo5dfJYwAn29PeiJ0zvwTo04zj8HDJj0Mn8TD7sno7q12prdbnasKJHhkm2c1LgrhlJ0teaea8860oxi51mGA==
  dependencies:
    lodash "^4.17.14"
@@ -3294,9 +3294,9 @@ events@^3.2.0:
  integrity sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==

eventsource@^1.1.0:
-  version "1.1.0"
-  resolved "https://registry.yarnpkg.com/eventsource/-/eventsource-1.1.0.tgz#00e8ca7c92109e94b0ddf32dac677d841028cfaf"
-  integrity sha512-VSJjT5oCNrFvCS6igjzPAt5hBzQ2qPBFIbJ03zLI9SE0mxwZpMw6BfJrbFHm1a141AavMEB8JHmBhWAd66PfCg==
+  version "1.1.1"
+  resolved "https://registry.yarnpkg.com/eventsource/-/eventsource-1.1.1.tgz#4544a35a57d7120fba4fa4c86cb4023b2c09df2f"
+  integrity sha512-qV5ZC0h7jYIAOhArFJgSfdyz6rALJyb270714o7ZtNnw2WSJ+eexhKtE0O8LYPRsHZHf2osHKZBxGPvm3kPkCA==
  dependencies:
    original "^1.0.0"

View File

@@ -12,7 +12,7 @@ from tornado import web
from .. import orm
from ..handlers import BaseHandler
from ..scopes import get_scopes_for
-from ..utils import get_browser_protocol, isoformat, url_path_join
+from ..utils import get_browser_protocol, isoformat, url_escape_path, url_path_join

PAGINATION_MEDIA_TYPE = "application/jupyterhub-pagination+json"
@@ -196,7 +196,7 @@ class APIHandler(BaseHandler):
            'started': isoformat(spawner.orm_spawner.started),
            'pending': spawner.pending,
            'ready': spawner.ready,
-            'url': url_path_join(spawner.user.url, spawner.name, '/'),
+            'url': url_path_join(spawner.user.url, url_escape_path(spawner.name), '/'),
            'user_options': spawner.user_options,
            'progress_url': spawner._progress_url,
        }

View File

@@ -49,7 +49,7 @@ class GroupListAPIHandler(_GroupAPIHandler):
            # the only valid filter is group=...
            # don't expand invalid !server=x to all groups!
            self.log.warning(
-                "Invalid filter on list:group for {self.current_user}: {sub_scope}"
+                f"Invalid filter on list:group for {self.current_user}: {sub_scope}"
            )
            raise web.HTTPError(403)
        query = query.filter(orm.Group.name.in_(sub_scope['group']))

View File

@@ -47,9 +47,8 @@ class ShutdownAPIHandler(APIHandler):
        self.set_status(202)
        self.finish(json.dumps({"message": "Shutting down Hub"}))

-        # stop the eventloop, which will trigger cleanup
-        loop = IOLoop.current()
-        loop.add_callback(loop.stop)
+        # instruct the app to stop, which will trigger cleanup
+        app.stop()


class RootAPIHandler(APIHandler):

View File

@@ -15,7 +15,13 @@ from .. import orm, scopes
from ..roles import assign_default_roles
from ..scopes import needs_scope
from ..user import User
-from ..utils import isoformat, iterate_until, maybe_future, url_path_join
+from ..utils import (
+    isoformat,
+    iterate_until,
+    maybe_future,
+    url_escape_path,
+    url_path_join,
+)
from .base import APIHandler
@@ -124,7 +130,7 @@ class UserListAPIHandler(APIHandler):
            if not set(sub_scope).issubset({'group', 'user'}):
                # don't expand invalid !server=x filter to all users!
                self.log.warning(
-                    "Invalid filter on list:user for {self.current_user}: {sub_scope}"
+                    f"Invalid filter on list:user for {self.current_user}: {sub_scope}"
                )
                raise web.HTTPError(403)
            filters = []
@@ -685,7 +691,7 @@ class SpawnProgressAPIHandler(APIHandler):
        # - spawner not running at all
        # - spawner failed
        # - spawner pending start (what we expect)
-        url = url_path_join(user.url, server_name, '/')
+        url = url_path_join(user.url, url_escape_path(server_name), '/')
        ready_event = {
            'progress': 100,
            'ready': True,

View File

@@ -1129,7 +1129,7 @@ class JupyterHub(Application):
    @default('authenticator')
    def _authenticator_default(self):
-        return self.authenticator_class(parent=self, db=self.db)
+        return self.authenticator_class(parent=self, _deprecated_db_session=self.db)

    implicit_spawn_seconds = Float(
        0,
@@ -1317,11 +1317,14 @@ class JupyterHub(Application):
    admin_access = Bool(
        False,
-        help="""Grant admin users permission to access single-user servers.
-
-        Users should be properly informed if this is enabled.
+        help="""DEPRECATED since version 2.0.0.
+
+        The default admin role has full permissions, use custom RBAC scopes instead to
+        create restricted administrator roles.
+        https://jupyterhub.readthedocs.io/en/stable/rbac/index.html
        """,
    ).tag(config=True)

    admin_users = Set(
        help="""DEPRECATED since version 0.7.2, use Authenticator.admin_users instead."""
    ).tag(config=True)
@@ -1699,7 +1702,9 @@ class JupyterHub(Application):
        for authority, files in self.internal_ssl_authorities.items():
            if files:
                self.log.info("Adding CA for %s", authority)
-                certipy.store.add_record(authority, is_ca=True, files=files)
+                certipy.store.add_record(
+                    authority, is_ca=True, files=files, overwrite=True
+                )

        self.internal_trust_bundles = certipy.trust_from_graph(
            self.internal_ssl_components_trust
@@ -3234,9 +3239,15 @@ class JupyterHub(Application):
            loop.make_current()
            loop.run_sync(self.cleanup)

-    async def shutdown_cancel_tasks(self, sig):
+    async def shutdown_cancel_tasks(self, sig=None):
        """Cancel all other tasks of the event loop and initiate cleanup"""
-        self.log.critical("Received signal %s, initiating shutdown...", sig.name)
+        if sig is None:
+            self.log.critical("Initiating shutdown...")
+        else:
+            self.log.critical("Received signal %s, initiating shutdown...", sig.name)
+
+        await self.cleanup()

        tasks = [t for t in asyncio_all_tasks() if t is not asyncio_current_task()]
        if tasks:
@@ -3253,7 +3264,6 @@ class JupyterHub(Application):
        tasks = [t for t in asyncio_all_tasks()]
        for t in tasks:
            self.log.debug("Task status: %s", t)
-        await self.cleanup()
        asyncio.get_event_loop().stop()

    def stop(self):
@@ -3261,7 +3271,7 @@ class JupyterHub(Application):
            return
        if self.http_server:
            self.http_server.stop()
-        self.io_loop.add_callback(self.io_loop.stop)
+        self.io_loop.add_callback(self.shutdown_cancel_tasks)

    async def start_show_config(self):
        """Async wrapper around base start_show_config method"""

View File

@@ -10,6 +10,7 @@ from concurrent.futures import ThreadPoolExecutor
from functools import partial
from shutil import which
from subprocess import PIPE, STDOUT, Popen
+from textwrap import dedent

try:
    import pamela
@@ -31,6 +32,23 @@ class Authenticator(LoggingConfigurable):
    db = Any()

+    @default("db")
+    def _deprecated_db(self):
+        self.log.warning(
+            dedent(
+                """
+                The shared database session at Authenticator.db is deprecated, and will be removed.
+                Please manage your own database and connections.
+
+                Contact JupyterHub at https://github.com/jupyterhub/jupyterhub/issues/3700
+                if you have questions or ideas about direct database needs for your Authenticator.
+                """
+            ),
+        )
+        return self._deprecated_db_session
+
+    _deprecated_db_session = Any()

    enable_auth_state = Bool(
        False,
        config=True,
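Because the shared `Authenticator.db` session is deprecated here, an authenticator that really needs persistence should own its connection. A rough sketch under the assumption of SQLAlchemy and an invented `db_url` trait (not part of this commit):

```python
# illustrative sketch only
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from traitlets import Unicode

from jupyterhub.auth import PAMAuthenticator


class AuditingPAMAuthenticator(PAMAuthenticator):
    """Hypothetical: PAM auth plus an audit trail in the authenticator's own database."""

    db_url = Unicode("sqlite:///auth_audit.sqlite", config=True)

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # manage our own engine/session instead of the deprecated shared Authenticator.db
        self._engine = create_engine(self.db_url)
        self._Session = sessionmaker(bind=self._engine)

    async def authenticate(self, handler, data):
        authenticated = await super().authenticate(handler, data)
        session = self._Session()
        try:
            ...  # e.g. insert an audit row recording data["username"] and the outcome
            session.commit()
        finally:
            session.close()
        return authenticated
```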

View File

@@ -43,6 +43,7 @@ from ..utils import (
    get_accepted_mimetype,
    get_browser_protocol,
    maybe_future,
+    url_escape_path,
    url_path_join,
)
@@ -623,33 +624,34 @@ class BaseHandler(RequestHandler):
        next_url = self.get_argument('next', default='')
        # protect against some browsers' buggy handling of backslash as slash
        next_url = next_url.replace('\\', '%5C')
-        if (next_url + '/').startswith(
-            (
-                f'{self.request.protocol}://{self.request.host}/',
-                f'//{self.request.host}/',
-            )
-        ) or (
+        proto = get_browser_protocol(self.request)
+        host = self.request.host
+        if next_url.startswith("///"):
+            # strip more than 2 leading // down to 2
+            # because urlparse treats that as empty netloc,
+            # whereas browsers treat more than two leading // the same as //,
+            # so netloc is the first non-/ bit
+            next_url = "//" + next_url.lstrip("/")
+        parsed_next_url = urlparse(next_url)
+
+        if (next_url + '/').startswith((f'{proto}://{host}/', f'//{host}/',)) or (
            self.subdomain_host
-            and urlparse(next_url).netloc
-            and ("." + urlparse(next_url).netloc).endswith(
+            and parsed_next_url.netloc
+            and ("." + parsed_next_url.netloc).endswith(
                "." + urlparse(self.subdomain_host).netloc
            )
        ):
            # treat absolute URLs for our host as absolute paths:
-            # below, redirects that aren't strictly paths
-            parsed = urlparse(next_url)
-            next_url = parsed.path
-            if parsed.query:
-                next_url = next_url + '?' + parsed.query
-            if parsed.fragment:
-                next_url = next_url + '#' + parsed.fragment
+            # below, redirects that aren't strictly paths are rejected
+            next_url = parsed_next_url.path
+            if parsed_next_url.query:
+                next_url = next_url + '?' + parsed_next_url.query
+            if parsed_next_url.fragment:
+                next_url = next_url + '#' + parsed_next_url.fragment
+            parsed_next_url = urlparse(next_url)

        # if it still has host info, it didn't match our above check for *this* host
-        if next_url and (
-            '://' in next_url
-            or next_url.startswith('//')
-            or not next_url.startswith('/')
-        ):
+        if next_url and (parsed_next_url.netloc or not next_url.startswith('/')):
            self.log.warning("Disallowing redirect outside JupyterHub: %r", next_url)
            next_url = ''
@@ -833,6 +835,12 @@ class BaseHandler(RequestHandler):
        user_server_name = user.name

        if server_name:
+            if '/' in server_name:
+                error_message = (
+                    f"Invalid server_name (may not contain '/'): {server_name}"
+                )
+                self.log.error(error_message)
+                raise web.HTTPError(400, error_message)
+
            user_server_name = f'{user.name}:{server_name}'

        if server_name in user.spawners and user.spawners[server_name].pending:
@@ -1518,6 +1526,7 @@ class UserUrlHandler(BaseHandler):
                server_name = ''
        else:
            server_name = ''
+        escaped_server_name = url_escape_path(server_name)
        spawner = user.spawners[server_name]

        if spawner.ready:
@@ -1536,7 +1545,10 @@ class UserUrlHandler(BaseHandler):
            pending_url = url_concat(
                url_path_join(
-                    self.hub.base_url, 'spawn-pending', user.escaped_name, server_name
+                    self.hub.base_url,
+                    'spawn-pending',
+                    user.escaped_name,
+                    escaped_server_name,
                ),
                {'next': self.request.uri},
            )
@@ -1550,7 +1562,9 @@ class UserUrlHandler(BaseHandler):
        # page *in* the server is not found, we return a 424 instead of a 404.
        # We allow retaining the old behavior to support older JupyterLab versions
        spawn_url = url_concat(
-            url_path_join(self.hub.base_url, "spawn", user.escaped_name, server_name),
+            url_path_join(
+                self.hub.base_url, "spawn", user.escaped_name, escaped_server_name
+            ),
            {"next": self.request.uri},
        )
        self.set_status(
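The `get_next_url` rewrite above hardens the open-redirect check. A simplified, standalone sketch of the same idea (this is not JupyterHub's exact code; the function name and arguments are illustrative):

```python
from urllib.parse import urlparse


def sanitize_next_url(next_url, proto, host):
    """Collapse extra leading slashes, keep same-host absolute URLs as paths,
    and reject anything that still points off-host."""
    next_url = next_url.replace('\\', '%5C')
    if next_url.startswith("///"):
        # browsers treat 3+ leading slashes like "//", but urlparse would see an empty netloc
        next_url = "//" + next_url.lstrip("/")
    parsed = urlparse(next_url)
    if (next_url + "/").startswith((f"{proto}://{host}/", f"//{host}/")):
        # same-host absolute URL: keep only path, query, and fragment
        next_url = parsed.path
        if parsed.query:
            next_url += "?" + parsed.query
        if parsed.fragment:
            next_url += "#" + parsed.fragment
        parsed = urlparse(next_url)
    if next_url and (parsed.netloc or not next_url.startswith("/")):
        # still carries host info, or is not an absolute path: disallow
        return ""
    return next_url


# sanitize_next_url("////evil.example/x", "https", "hub.example.org") -> ""
# sanitize_next_url("https://hub.example.org/hub/home", "https", "hub.example.org") -> "/hub/home"
```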

View File

@@ -14,7 +14,7 @@ from tornado.httputil import url_concat
from .. import __version__
from ..metrics import SERVER_POLL_DURATION_SECONDS, ServerPollStatus
from ..scopes import needs_scope
-from ..utils import maybe_future, url_path_join
+from ..utils import maybe_future, url_escape_path, url_path_join
from .base import BaseHandler
@@ -268,15 +268,6 @@ class SpawnHandler(BaseHandler):
            )
            self.finish(form)
            return
-        if current_user is user:
-            self.set_login_cookie(user)
-        next_url = self.get_next_url(
-            user,
-            default=url_path_join(
-                self.hub.base_url, "spawn-pending", user.escaped_name, server_name
-            ),
-        )
-        self.redirect(next_url)

    def _get_pending_url(self, user, server_name):
# resolve `?next=...`, falling back on the spawn-pending url # resolve `?next=...`, falling back on the spawn-pending url
@@ -284,7 +275,10 @@ class SpawnHandler(BaseHandler):
        # which may get handled by the default server if they aren't ready yet
        pending_url = url_path_join(
-            self.hub.base_url, "spawn-pending", user.escaped_name, server_name
+            self.hub.base_url,
+            "spawn-pending",
+            user.escaped_name,
+            url_escape_path(server_name),
        )

        pending_url = self.append_query_parameters(pending_url, exclude=['next'])
@@ -353,6 +347,7 @@ class SpawnPendingHandler(BaseHandler):
        if server_name and server_name not in user.spawners:
            raise web.HTTPError(404, f"{user.name} has no such server {server_name}")

+        escaped_server_name = url_escape_path(server_name)
        spawner = user.spawners[server_name]

        if spawner.ready:
@@ -375,7 +370,7 @@ class SpawnPendingHandler(BaseHandler):
            exc = spawner._spawn_future.exception()
            self.log.error("Previous spawn for %s failed: %s", spawner._log_name, exc)
            spawn_url = url_path_join(
-                self.hub.base_url, "spawn", user.escaped_name, server_name
+                self.hub.base_url, "spawn", user.escaped_name, escaped_server_name
            )
            self.set_status(500)
            html = await self.render_template(
@@ -428,7 +423,7 @@ class SpawnPendingHandler(BaseHandler):
# serving the expected page # serving the expected page
if status is not None: if status is not None:
spawn_url = url_path_join( spawn_url = url_path_join(
self.hub.base_url, "spawn", user.escaped_name, server_name self.hub.base_url, "spawn", user.escaped_name, escaped_server_name
) )
html = await self.render_template( html = await self.render_template(
"not_running.html", "not_running.html",
@@ -454,15 +449,14 @@ class AdminHandler(BaseHandler):
@web.authenticated @web.authenticated
# stacked decorators: all scopes must be present # stacked decorators: all scopes must be present
# note: keep in sync with admin link condition in page.html # note: keep in sync with admin link condition in page.html
@needs_scope('admin:users') @needs_scope('admin-ui')
@needs_scope('admin:servers')
async def get(self): async def get(self):
auth_state = await self.current_user.get_auth_state() auth_state = await self.current_user.get_auth_state()
html = await self.render_template( html = await self.render_template(
'admin.html', 'admin.html',
current_user=self.current_user, current_user=self.current_user,
auth_state=auth_state, auth_state=auth_state,
admin_access=self.settings.get('admin_access', False), admin_access=True,
allow_named_servers=self.allow_named_servers, allow_named_servers=self.allow_named_servers,
named_server_limit_per_user=self.named_server_limit_per_user, named_server_limit_per_user=self.named_server_limit_per_user,
server_version=f'{__version__} {self.version_hash}', server_version=f'{__version__} {self.version_hash}',
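The admin page is now gated on the single `admin-ui` scope instead of requiring both `admin:users` and `admin:servers`. A minimal sketch of reusing `needs_scope` the same way in a hypothetical custom hub handler (the handler name and response body are made up; registration via extra handlers is omitted):

from tornado import web

from jupyterhub.handlers.base import BaseHandler
from jupyterhub.scopes import needs_scope


class MyAdminToolHandler(BaseHandler):
    """Hypothetical extra handler, reachable only with the new 'admin-ui' scope."""

    @web.authenticated
    @needs_scope('admin-ui')
    async def get(self):
        self.write("admin tools")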
@@ -496,7 +490,7 @@ class TokenPageHandler(BaseHandler):
continue continue
if not token.client_id: if not token.client_id:
# token should have been deleted when client was deleted # token should have been deleted when client was deleted
self.log.warning("Deleting stale oauth token {token}") self.log.warning(f"Deleting stale oauth token {token}")
self.db.delete(token) self.db.delete(token)
self.db.commit() self.db.commit()
continue continue

View File

@@ -351,7 +351,7 @@ class JupyterHubRequestValidator(RequestValidator):
# APIToken.new commits the token to the db # APIToken.new commits the token to the db
orm.APIToken.new( orm.APIToken.new(
client_id=client.identifier, oauth_client=client,
expires_in=token['expires_in'], expires_in=token['expires_in'],
scopes=request.scopes, scopes=request.scopes,
token=token['access_token'], token=token['access_token'],

View File

@@ -529,9 +529,7 @@ class Hashed(Expiring):
prefix = token[: cls.prefix_length] prefix = token[: cls.prefix_length]
# since we can't filter on hashed values, filter on prefix # since we can't filter on hashed values, filter on prefix
# so we aren't comparing with all tokens # so we aren't comparing with all tokens
prefix_match = db.query(cls).filter( prefix_match = db.query(cls).filter_by(prefix=prefix)
bindparam('prefix', prefix).startswith(cls.prefix)
)
prefix_match = prefix_match.filter( prefix_match = prefix_match.filter(
or_(cls.expires_at == None, cls.expires_at >= cls.now()) or_(cls.expires_at == None, cls.expires_at >= cls.now())
) )
@@ -683,7 +681,8 @@ class APIToken(Hashed, Base):
generated=True, generated=True,
session_id=None, session_id=None,
expires_in=None, expires_in=None,
client_id='jupyterhub', client_id=None,
oauth_client=None,
return_orm=False, return_orm=False,
): ):
"""Generate a new API token for a user or service""" """Generate a new API token for a user or service"""
@@ -727,11 +726,20 @@ class APIToken(Hashed, Base):
orm_roles.append(role) orm_roles.append(role)
scopes = roles_to_scopes(orm_roles) scopes = roles_to_scopes(orm_roles)
if oauth_client is None:
# lookup oauth client by identifier
if client_id is None:
# default: global 'jupyterhub' client
client_id = "jupyterhub"
oauth_client = db.query(OAuthClient).filter_by(identifier=client_id).one()
if client_id is None:
client_id = oauth_client.identifier
# avoid circular import # avoid circular import
from .scopes import _check_scopes_exist, _check_token_scopes from .scopes import _check_scopes_exist, _check_token_scopes
_check_scopes_exist(scopes, who_for="token") _check_scopes_exist(scopes, who_for="token")
_check_token_scopes(scopes, owner=user or service) _check_token_scopes(scopes, owner=user or service, oauth_client=oauth_client)
# two stages to ensure orm_token.generated has been set # two stages to ensure orm_token.generated has been set
# before token setter is called # before token setter is called
@@ -761,7 +769,9 @@ class APIToken(Hashed, Base):
from .scopes import _check_scopes_exist, _check_token_scopes from .scopes import _check_scopes_exist, _check_token_scopes
_check_scopes_exist(new_scopes, who_for="token") _check_scopes_exist(new_scopes, who_for="token")
_check_token_scopes(new_scopes, owner=self.owner) _check_token_scopes(
new_scopes, owner=self.owner, oauth_client=self.oauth_client
)
self.scopes = new_scopes self.scopes = new_scopes

View File

@@ -36,7 +36,7 @@ from jupyterhub.traitlets import Command
from . import utils from . import utils
from .metrics import CHECK_ROUTES_DURATION_SECONDS, PROXY_POLL_DURATION_SECONDS from .metrics import CHECK_ROUTES_DURATION_SECONDS, PROXY_POLL_DURATION_SECONDS
from .objects import Server from .objects import Server
from .utils import AnyTimeoutError, exponential_backoff, url_path_join from .utils import AnyTimeoutError, exponential_backoff, url_escape_path, url_path_join
def _one_at_a_time(method): def _one_at_a_time(method):
@@ -295,7 +295,9 @@ class Proxy(LoggingConfigurable):
"""Remove a user's server from the proxy table.""" """Remove a user's server from the proxy table."""
routespec = user.proxy_spec routespec = user.proxy_spec
if server_name: if server_name:
routespec = url_path_join(user.proxy_spec, server_name, '/') routespec = url_path_join(
user.proxy_spec, url_escape_path(server_name), '/'
)
self.log.info("Removing user %s from proxy (%s)", user.name, routespec) self.log.info("Removing user %s from proxy (%s)", user.name, routespec)
await self.delete_route(routespec) await self.delete_route(routespec)
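For reference, a small sketch of the routespec this now produces for a named server whose name needs escaping, using the hub's own helpers (assumes a jupyterhub version that includes url_escape_path, a user "walnut", and the default base_url):

from jupyterhub.utils import url_escape_path, url_path_join

proxy_spec = "/user/walnut/"        # user.proxy_spec under the default base_url (assumed)
server_name = "$p~c|a! ch@rs"
routespec = url_path_join(proxy_spec, url_escape_path(server_name), "/")
print(routespec)  # /user/walnut/%24p~c%7Ca%21%20ch@rs/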

View File

@@ -31,6 +31,7 @@ def get_default_roles():
'name': 'admin', 'name': 'admin',
'description': 'Elevated privileges (can do anything)', 'description': 'Elevated privileges (can do anything)',
'scopes': [ 'scopes': [
'admin-ui',
'admin:users', 'admin:users',
'admin:servers', 'admin:servers',
'tokens', 'tokens',
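Since `admin-ui` is now part of the default `admin` role, a deployment can also grant just the admin page on its own. A sketch of such a role in jupyterhub_config.py (the role and group names here are made up):

c.JupyterHub.load_roles = [
    {
        "name": "admin-ui-only",
        "description": "Can open the admin page, but cannot act on users or servers",
        "scopes": ["admin-ui", "list:users"],
        "groups": ["ops"],  # hypothetical group
    }
]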

View File

@@ -42,6 +42,10 @@ scope_definitions = {
'description': 'Anything you have access to', 'description': 'Anything you have access to',
'doc_description': 'Everything that the token-owning entity can access _(metascope for tokens)_', 'doc_description': 'Everything that the token-owning entity can access _(metascope for tokens)_',
}, },
'admin-ui': {
'description': 'Access the admin page.',
'doc_description': 'Access the admin page. Permission to take actions via the admin page is granted separately.',
},
'admin:users': { 'admin:users': {
'description': 'Read, write, create and delete users and their authentication state, not including their servers or tokens.', 'description': 'Read, write, create and delete users and their authentication state, not including their servers or tokens.',
'subscopes': ['admin:auth_state', 'users', 'read:roles:users', 'delete:users'], 'subscopes': ['admin:auth_state', 'users', 'read:roles:users', 'delete:users'],
@@ -341,7 +345,13 @@ def get_scopes_for(orm_object):
# only thing we miss by short-circuiting here: warning about excluded extra scopes # only thing we miss by short-circuiting here: warning about excluded extra scopes
return owner_scopes return owner_scopes
token_scopes = set(expand_scopes(token_scopes, owner=owner)) token_scopes = set(
expand_scopes(
token_scopes,
owner=owner,
oauth_client=orm_object.oauth_client,
)
)
if orm_object.client_id != "jupyterhub": if orm_object.client_id != "jupyterhub":
# oauth tokens can be used to access the service issuing the token, # oauth tokens can be used to access the service issuing the token,
@@ -468,7 +478,7 @@ def _expand_scope(scope):
return frozenset(expanded_scopes) return frozenset(expanded_scopes)
def _expand_scopes_key(scopes, owner=None): def _expand_scopes_key(scopes, owner=None, oauth_client=None):
"""Cache key function for expand_scopes """Cache key function for expand_scopes
scopes is usually a mutable list or set, scopes is usually a mutable list or set,
@@ -484,11 +494,15 @@ def _expand_scopes_key(scopes, owner=None):
else: else:
# owner key is the type and name # owner key is the type and name
owner_key = (type(owner).__name__, owner.name) owner_key = (type(owner).__name__, owner.name)
return (frozen_scopes, owner_key) if oauth_client is None:
oauth_client_key = None
else:
oauth_client_key = oauth_client.identifier
return (frozen_scopes, owner_key, oauth_client_key)
@lru_cache_key(_expand_scopes_key) @lru_cache_key(_expand_scopes_key)
def expand_scopes(scopes, owner=None): def expand_scopes(scopes, owner=None, oauth_client=None):
"""Returns a set of fully expanded scopes for a collection of raw scopes """Returns a set of fully expanded scopes for a collection of raw scopes
Arguments: Arguments:
@@ -496,38 +510,57 @@ def expand_scopes(scopes, owner=None):
owner (obj, optional): orm.User or orm.Service as owner of orm.APIToken owner (obj, optional): orm.User or orm.Service as owner of orm.APIToken
Used for expansion of metascopes such as `self` Used for expansion of metascopes such as `self`
and owner-based filters such as `!user` and owner-based filters such as `!user`
oauth_client (obj, optional): orm.OAuthClient
The issuing OAuth client of an API token.
Returns: Returns:
expanded scopes (set): set of all expanded scopes, with filters applied for the owner expanded scopes (set): set of all expanded scopes, with filters applied for the owner
""" """
expanded_scopes = set(chain.from_iterable(map(_expand_scope, scopes))) expanded_scopes = set(chain.from_iterable(map(_expand_scope, scopes)))
filter_replacements = {
"user": None,
"service": None,
"server": None,
}
user_name = None
if isinstance(owner, orm.User): if isinstance(owner, orm.User):
owner_name = owner.name user_name = owner.name
else: filter_replacements["user"] = f"user={user_name}"
owner_name = None elif isinstance(owner, orm.Service):
filter_replacements["service"] = f"service={owner.name}"
if oauth_client is not None:
if oauth_client.service is not None:
filter_replacements["service"] = f"service={oauth_client.service.name}"
elif oauth_client.spawner is not None:
spawner = oauth_client.spawner
filter_replacements["server"] = f"server={spawner.user.name}/{spawner.name}"
for scope in expanded_scopes.copy(): for scope in expanded_scopes.copy():
base_scope, _, filter = scope.partition('!') base_scope, _, filter = scope.partition('!')
if filter == 'user': if filter in filter_replacements:
# translate !user into !user={username} # translate !user into !user={username}
# and !service into !service={servicename}
# and !server into !server={username}/{servername}
expanded_scopes.remove(scope) expanded_scopes.remove(scope)
if owner_name: expanded_filter = filter_replacements[filter]
if expanded_filter:
# translate # translate
expanded_scopes.add(f'{base_scope}!user={owner_name}') expanded_scopes.add(f'{base_scope}!{expanded_filter}')
else: else:
warnings.warn( warnings.warn(
f"Not expanding !user filter without owner in {scope}", f"Not expanding !{filter} filter without target {filter} in {scope}",
stacklevel=2, stacklevel=2,
) )
if 'self' in expanded_scopes: if 'self' in expanded_scopes:
expanded_scopes.remove('self') expanded_scopes.remove('self')
if owner_name: if user_name:
expanded_scopes |= _expand_self_scope(owner_name) expanded_scopes |= _expand_self_scope(user_name)
else: else:
warnings.warn( warnings.warn(
"Not expanding 'self' scope without owner", f"Not expanding 'self' scope for owner {owner} which is not a User",
stacklevel=2, stacklevel=2,
) )
@@ -610,7 +643,8 @@ def _check_scopes_exist(scopes, who_for=None):
""" """
allowed_scopes = set(scope_definitions.keys()) allowed_scopes = set(scope_definitions.keys())
allowed_filters = ('!user=', '!service=', '!group=', '!server=', '!user') filter_prefixes = ('!user=', '!service=', '!group=', '!server=')
exact_filters = {"!user", "!service", "!server"}
if who_for: if who_for:
log_for = f"for {who_for}" log_for = f"for {who_for}"
@@ -625,13 +659,15 @@ def _check_scopes_exist(scopes, who_for=None):
raise KeyError(f"Scope '{scope}' {log_for} does not exist") raise KeyError(f"Scope '{scope}' {log_for} does not exist")
if filter_: if filter_:
full_filter = f"!{filter_}" full_filter = f"!{filter_}"
if not full_filter.startswith(allowed_filters): if full_filter not in exact_filters and not full_filter.startswith(
filter_prefixes
):
raise KeyError( raise KeyError(
f"Scope filter {filter_} '{full_filter}' in scope '{scope}' {log_for} does not exist" f"Scope filter {filter_} '{full_filter}' in scope '{scope}' {log_for} does not exist"
) )
def _check_token_scopes(scopes, owner): def _check_token_scopes(scopes, owner, oauth_client):
"""Check that scopes to be assigned to a token """Check that scopes to be assigned to a token
are in fact are in fact
@@ -648,7 +684,7 @@ def _check_token_scopes(scopes, owner):
return return
scopes.discard("inherit") scopes.discard("inherit")
# common short circuit # common short circuit
token_scopes = expand_scopes(scopes, owner=owner) token_scopes = expand_scopes(scopes, owner=owner, oauth_client=oauth_client)
if not token_scopes: if not token_scopes:
return return
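A simplified, standalone sketch of the new filter expansion (not the real jupyterhub.scopes code): bare filters such as `!user`, `!service`, and `!server` are rewritten into fully qualified filters based on the token owner and the issuing OAuth client.

def expand_short_filters(scopes, user_name=None, service_name=None, server=None):
    """Rewrite bare !user/!service/!server filters into concrete ones."""
    replacements = {
        "user": f"user={user_name}" if user_name else None,
        "service": f"service={service_name}" if service_name else None,
        "server": f"server={server}" if server else None,
    }
    expanded = set(scopes)
    for scope in list(expanded):
        base, _, filt = scope.partition("!")
        if filt in replacements:
            expanded.discard(scope)
            if replacements[filt]:
                expanded.add(f"{base}!{replacements[filt]}")
    return expanded


# e.g. a token issued by the oauth client of the server "walnut/salmon":
print(expand_short_filters({"access:servers!server"}, server="walnut/salmon"))
# {'access:servers!server=walnut/salmon'}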

View File

@@ -175,6 +175,7 @@ page_template = """
<span> <span>
<a href='{{hub_control_panel_url}}' <a href='{{hub_control_panel_url}}'
id='jupyterhub-control-panel-link'
class='btn btn-default btn-sm navbar-btn pull-right' class='btn btn-default btn-sm navbar-btn pull-right'
style='margin-right: 4px; margin-left: 2px;'> style='margin-right: 4px; margin-left: 2px;'>
Control Panel Control Panel
@@ -603,7 +604,15 @@ class SingleUserNotebookAppMixin(Configurable):
# disable trash by default # disable trash by default
# this can be re-enabled by config # this can be re-enabled by config
self.config.FileContentsManager.delete_to_trash = False self.config.FileContentsManager.delete_to_trash = False
return super().initialize(argv) # load the default-url env variable at higher priority than any `@default` method,
# whose dynamic default should not override explicit default_url config
# set via e.g. c.Spawner.default_url. Seen in jupyterlab's SingleUserLabApp.
default_url = os.environ.get("JUPYTERHUB_DEFAULT_URL")
if default_url:
self.config[self.__class__.__name__].default_url = default_url
self._log_app_versions()
super().initialize(argv)
self.patch_templates()
def start(self): def start(self):
self.log.info("Starting jupyterhub-singleuser server version %s", __version__) self.log.info("Starting jupyterhub-singleuser server version %s", __version__)
@@ -673,7 +682,6 @@ class SingleUserNotebookAppMixin(Configurable):
# apply X-JupyterHub-Version to *all* request handlers (even redirects) # apply X-JupyterHub-Version to *all* request handlers (even redirects)
self.patch_default_headers() self.patch_default_headers()
self.patch_templates()
def patch_default_headers(self): def patch_default_headers(self):
if hasattr(RequestHandler, '_orig_set_default_headers'): if hasattr(RequestHandler, '_orig_set_default_headers'):
@@ -694,19 +702,30 @@ class SingleUserNotebookAppMixin(Configurable):
) )
self.jinja_template_vars['hub_host'] = self.hub_host self.jinja_template_vars['hub_host'] = self.hub_host
self.jinja_template_vars['hub_prefix'] = self.hub_prefix self.jinja_template_vars['hub_prefix'] = self.hub_prefix
env = self.web_app.settings['jinja2_env'] self.jinja_template_vars[
'hub_control_panel_url'
] = self.hub_host + url_path_join(self.hub_prefix, 'home')
env.globals['hub_control_panel_url'] = self.hub_host + url_path_join( settings = self.web_app.settings
self.hub_prefix, 'home' # patch classic notebook jinja env
) jinja_envs = []
if 'jinja2_env' in settings:
# default jinja env (should we do this on jupyter-server, or only notebook?)
jinja_envs.append(settings['jinja2_env'])
if 'notebook_jinja2_env' in settings:
# when running with jupyter-server, classic notebook (nbclassic server extension)
# gets its own jinja env, which needs the same patch
jinja_envs.append(settings['notebook_jinja2_env'])
# patch jinja env loading to modify page template # patch jinja env loading to get modified template, only for base page.html
def get_page(name): def get_page(name):
if name == 'page.html': if name == 'page.html':
return page_template return page_template
orig_loader = env.loader for jinja_env in jinja_envs:
env.loader = ChoiceLoader([FunctionLoader(get_page), orig_loader]) jinja_env.loader = ChoiceLoader(
[FunctionLoader(get_page), jinja_env.loader]
)
def load_server_extensions(self): def load_server_extensions(self):
# Loading LabApp sets $JUPYTERHUB_API_TOKEN on load, which is incorrect # Loading LabApp sets $JUPYTERHUB_API_TOKEN on load, which is incorrect

View File

@@ -14,6 +14,7 @@ import warnings
from inspect import signature from inspect import signature
from subprocess import Popen from subprocess import Popen
from tempfile import mkdtemp from tempfile import mkdtemp
from textwrap import dedent
from urllib.parse import urlparse from urllib.parse import urlparse
from async_generator import aclosing from async_generator import aclosing
@@ -42,6 +43,7 @@ from .utils import (
exponential_backoff, exponential_backoff,
maybe_future, maybe_future,
random_port, random_port,
url_escape_path,
url_path_join, url_path_join,
) )
@@ -99,10 +101,15 @@ class Spawner(LoggingConfigurable):
Used in logging for consistency with named servers. Used in logging for consistency with named servers.
""" """
if self.name: if self.user:
return f'{self.user.name}:{self.name}' user_name = self.user.name
else: else:
return self.user.name # no user, only happens in mock tests
user_name = "(no user)"
if self.name:
return f"{user_name}:{self.name}"
else:
return user_name
@property @property
def _failed(self): def _failed(self):
@@ -152,9 +159,27 @@ class Spawner(LoggingConfigurable):
authenticator = Any() authenticator = Any()
hub = Any() hub = Any()
orm_spawner = Any() orm_spawner = Any()
db = Any()
cookie_options = Dict() cookie_options = Dict()
db = Any()
@default("db")
def _deprecated_db(self):
self.log.warning(
dedent(
"""
The shared database session at Spawner.db is deprecated, and will be removed.
Please manage your own database and connections.
Contact JupyterHub at https://github.com/jupyterhub/jupyterhub/issues/3700
if you have questions or ideas about direct database needs for your Spawner.
"""
),
)
return self._deprecated_db_session
_deprecated_db_session = Any()
@observe('orm_spawner') @observe('orm_spawner')
def _orm_spawner_changed(self, change): def _orm_spawner_changed(self, change):
if change.new and change.new.server: if change.new and change.new.server:
@@ -230,7 +255,7 @@ class Spawner(LoggingConfigurable):
self.orm_spawner.server = server.orm_server self.orm_spawner.server = server.orm_server
elif server is not None: elif server is not None:
self.log.warning( self.log.warning(
"Setting Spawner.server for {self._log_name} with no underlying orm_spawner" f"Setting Spawner.server for {self._log_name} with no underlying orm_spawner"
) )
@property @property
@@ -847,7 +872,7 @@ class Spawner(LoggingConfigurable):
env['JUPYTERHUB_COOKIE_OPTIONS'] = json.dumps(self.cookie_options) env['JUPYTERHUB_COOKIE_OPTIONS'] = json.dumps(self.cookie_options)
env['JUPYTERHUB_HOST'] = self.hub.public_host env['JUPYTERHUB_HOST'] = self.hub.public_host
env['JUPYTERHUB_OAUTH_CALLBACK_URL'] = url_path_join( env['JUPYTERHUB_OAUTH_CALLBACK_URL'] = url_path_join(
self.user.url, self.name, 'oauth_callback' self.user.url, url_escape_path(self.name), 'oauth_callback'
) )
env['JUPYTERHUB_OAUTH_SCOPES'] = json.dumps(self.oauth_scopes) env['JUPYTERHUB_OAUTH_SCOPES'] = json.dumps(self.oauth_scopes)
@@ -1118,10 +1143,7 @@ class Spawner(LoggingConfigurable):
async def run_auth_state_hook(self, auth_state): async def run_auth_state_hook(self, auth_state):
"""Run the auth_state_hook if defined""" """Run the auth_state_hook if defined"""
if self.auth_state_hook is not None: if self.auth_state_hook is not None:
try: await maybe_future(self.auth_state_hook(self, auth_state))
await maybe_future(self.auth_state_hook(self, auth_state))
except Exception:
self.log.exception("auth_state_hook failed with exception: %s", self)
@property @property
def _progress_url(self): def _progress_url(self):

View File

@@ -188,6 +188,8 @@ def cleanup_after(request, io_loop):
if not MockHub.initialized(): if not MockHub.initialized():
return return
app = MockHub.instance() app = MockHub.instance()
if app.db_file.closed:
return
for uid, user in list(app.users.items()): for uid, user in list(app.users.items()):
for name, spawner in list(user.spawners.items()): for name, spawner in list(user.spawners.items()):
if spawner.active: if spawner.active:
@@ -285,7 +287,22 @@ class MockServiceSpawner(jupyterhub.services.service._ServiceSpawner):
_mock_service_counter = 0 _mock_service_counter = 0
def _mockservice(request, app, url=False): def _mockservice(request, app, external=False, url=False):
"""
Add a service to the application
Args:
request: pytest request fixture
app: MockHub application
external (bool):
If False (default), launch the service.
Otherwise, consider it 'external',
registering a service in the database,
but don't start it.
url (bool):
If True, register the service at a URL
(as opposed to headless, API-only).
"""
global _mock_service_counter global _mock_service_counter
_mock_service_counter += 1 _mock_service_counter += 1
name = 'mock-service-%i' % _mock_service_counter name = 'mock-service-%i' % _mock_service_counter
@@ -296,6 +313,10 @@ def _mockservice(request, app, url=False):
else: else:
spec['url'] = 'http://127.0.0.1:%i' % random_port() spec['url'] = 'http://127.0.0.1:%i' % random_port()
if external:
spec['oauth_redirect_uri'] = 'http://127.0.0.1:%i' % random_port()
io_loop = app.io_loop io_loop = app.io_loop
with mock.patch.object( with mock.patch.object(
@@ -313,17 +334,20 @@ def _mockservice(request, app, url=False):
await app.proxy.add_all_services(app._service_map) await app.proxy.add_all_services(app._service_map)
await service.start() await service.start()
io_loop.run_sync(start) if not external:
io_loop.run_sync(start)
def cleanup(): def cleanup():
asyncio.get_event_loop().run_until_complete(service.stop()) if not external:
asyncio.get_event_loop().run_until_complete(service.stop())
app.services[:] = [] app.services[:] = []
app._service_map.clear() app._service_map.clear()
request.addfinalizer(cleanup) request.addfinalizer(cleanup)
# ensure process finishes starting # ensure process finishes starting
with raises(TimeoutExpired): if not external:
service.proc.wait(1) with raises(TimeoutExpired):
service.proc.wait(1)
if url: if url:
io_loop.run_sync(partial(service.server.wait_up, http=True)) io_loop.run_sync(partial(service.server.wait_up, http=True))
return service return service
@@ -335,6 +359,12 @@ def mockservice(request, app):
yield _mockservice(request, app, url=False) yield _mockservice(request, app, url=False)
@fixture
def mockservice_external(request, app):
"""Mock an externally managed service (don't start anything)"""
yield _mockservice(request, app, external=True, url=False)
@fixture @fixture
def mockservice_url(request, app): def mockservice_url(request, app):
"""Mock a service with its own url to test external services""" """Mock a service with its own url to test external services"""

View File

@@ -325,26 +325,28 @@ class MockHub(JupyterHub):
roles.assign_default_roles(self.db, entity=user) roles.assign_default_roles(self.db, entity=user)
self.db.commit() self.db.commit()
def stop(self): _stop_called = False
super().stop()
def stop(self):
if self._stop_called:
return
self._stop_called = True
# run cleanup in a background thread # run cleanup in a background thread
# to avoid multiple eventloops in the same thread errors from asyncio # to avoid multiple eventloops in the same thread errors from asyncio
def cleanup(): def cleanup():
asyncio.set_event_loop(asyncio.new_event_loop()) loop = asyncio.new_event_loop()
loop = IOLoop.current() loop.run_until_complete(self.cleanup())
loop.run_sync(self.cleanup)
loop.close() loop.close()
pool = ThreadPoolExecutor(1) with ThreadPoolExecutor(1) as pool:
f = pool.submit(cleanup) f = pool.submit(cleanup)
# wait for cleanup to finish # wait for cleanup to finish
f.result() f.result()
pool.shutdown()
# ignore the call that will fire in atexit # prevent redundant atexit from running
self.cleanup = lambda: None self._atexit_ran = True
super().stop()
self.db_file.close() self.db_file.close()
async def login_user(self, name): async def login_user(self, name):
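The pattern used here — running an async cleanup in a fresh event loop on a worker thread so it cannot collide with the caller's own loop — as a minimal standalone sketch (cleanup() stands in for MockHub.cleanup()):

import asyncio
from concurrent.futures import ThreadPoolExecutor


async def cleanup():
    # stand-in for MockHub.cleanup()
    await asyncio.sleep(0)


def stop():
    def _run_cleanup():
        loop = asyncio.new_event_loop()
        try:
            loop.run_until_complete(cleanup())
        finally:
            loop.close()

    # wait for cleanup to finish before continuing shutdown
    with ThreadPoolExecutor(1) as pool:
        pool.submit(_run_cleanup).result()


stop()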

View File

@@ -2158,14 +2158,23 @@ def test_shutdown(app):
) )
return r return r
real_stop = loop.stop real_stop = loop.asyncio_loop.stop
def stop(): def stop():
stop.called = True stop.called = True
loop.call_later(1, real_stop) loop.call_later(1, real_stop)
with mock.patch.object(loop, 'stop', stop): real_cleanup = app.cleanup
def cleanup():
cleanup.called = True
return real_cleanup()
app.cleanup = cleanup
with mock.patch.object(loop.asyncio_loop, 'stop', stop):
r = loop.run_sync(shutdown, timeout=5) r = loop.run_sync(shutdown, timeout=5)
r.raise_for_status() r.raise_for_status()
reply = r.json() reply = r.json()
assert cleanup.called
assert stop.called assert stop.called

View File

@@ -2,12 +2,14 @@
import asyncio import asyncio
import json import json
from unittest import mock from unittest import mock
from urllib.parse import urlencode, urlparse from urllib.parse import unquote, urlencode, urlparse
import pytest import pytest
from requests.exceptions import HTTPError
from tornado.httputil import url_concat from tornado.httputil import url_concat
from ..utils import url_path_join from .. import orm
from ..utils import url_escape_path, url_path_join
from .mocking import FormSpawner, public_url from .mocking import FormSpawner, public_url
from .test_api import TIMESTAMP, add_user, api_request, fill_user, normalize_user from .test_api import TIMESTAMP, add_user, api_request, fill_user, normalize_user
from .utils import async_requests, get_page from .utils import async_requests, get_page
@@ -83,29 +85,55 @@ async def test_default_server(app, named_servers):
) )
async def test_create_named_server(app, named_servers): @pytest.mark.parametrize(
'servername,escapedname,caller_escape',
[
('trevor', 'trevor', False),
('$p~c|a! ch@rs', '%24p~c%7Ca%21%20ch@rs', False),
('$p~c|a! ch@rs', '%24p~c%7Ca%21%20ch@rs', True),
('hash#?question', 'hash%23%3Fquestion', True),
],
)
async def test_create_named_server(
app, named_servers, servername, escapedname, caller_escape
):
username = 'walnut' username = 'walnut'
user = add_user(app.db, app, name=username) user = add_user(app.db, app, name=username)
# assert user.allow_named_servers == True # assert user.allow_named_servers == True
cookies = await app.login_user(username) cookies = await app.login_user(username)
servername = 'trevor' request_servername = servername
r = await api_request(app, 'users', username, 'servers', servername, method='post') if caller_escape:
request_servername = url_escape_path(servername)
r = await api_request(
app, 'users', username, 'servers', request_servername, method='post'
)
r.raise_for_status() r.raise_for_status()
assert r.status_code == 201 assert r.status_code == 201
assert r.text == '' assert r.text == ''
url = url_path_join(public_url(app, user), servername, 'env') url = url_path_join(public_url(app, user), request_servername, 'env')
expected_url = url_path_join(public_url(app, user), escapedname, 'env')
r = await async_requests.get(url, cookies=cookies) r = await async_requests.get(url, cookies=cookies)
r.raise_for_status() r.raise_for_status()
assert r.url == url # requests doesn't fully encode the servername: "$p~c%7Ca!%20ch@rs".
# Since this is the internal requests representation and not the JupyterHub
# representation it just needs to be equivalent.
assert unquote(r.url) == unquote(expected_url)
env = r.json() env = r.json()
prefix = env.get('JUPYTERHUB_SERVICE_PREFIX') prefix = env.get('JUPYTERHUB_SERVICE_PREFIX')
assert prefix == user.spawners[servername].server.base_url assert prefix == user.spawners[servername].server.base_url
assert prefix.endswith(f'/user/{username}/{servername}/') assert prefix.endswith(f'/user/{username}/{escapedname}/')
r = await api_request(app, 'users', username) r = await api_request(app, 'users', username)
r.raise_for_status() r.raise_for_status()
# Ensure the unescaped name is stored in the DB
db_server_names = set(
app.db.query(orm.User).filter_by(name=username).first().orm_spawners.keys()
)
assert db_server_names == {"", servername}
user_model = normalize_user(r.json()) user_model = normalize_user(r.json())
assert user_model == fill_user( assert user_model == fill_user(
{ {
@@ -117,11 +145,11 @@ async def test_create_named_server(app, named_servers):
'name': name, 'name': name,
'started': TIMESTAMP, 'started': TIMESTAMP,
'last_activity': TIMESTAMP, 'last_activity': TIMESTAMP,
'url': url_path_join(user.url, name, '/'), 'url': url_path_join(user.url, escapedname, '/'),
'pending': None, 'pending': None,
'ready': True, 'ready': True,
'progress_url': 'PREFIX/hub/api/users/{}/servers/{}/progress'.format( 'progress_url': 'PREFIX/hub/api/users/{}/servers/{}/progress'.format(
username, servername username, escapedname
), ),
'state': {'pid': 0}, 'state': {'pid': 0},
'user_options': {}, 'user_options': {},
@@ -132,6 +160,26 @@ async def test_create_named_server(app, named_servers):
) )
async def test_create_invalid_named_server(app, named_servers):
username = 'walnut'
user = add_user(app.db, app, name=username)
# assert user.allow_named_servers == True
cookies = await app.login_user(username)
server_name = "a$/b"
request_servername = 'a%24%2fb'
r = await api_request(
app, 'users', username, 'servers', request_servername, method='post'
)
with pytest.raises(HTTPError) as exc:
r.raise_for_status()
assert exc.value.response.json() == {
'status': 400,
'message': "Invalid server_name (may not contain '/'): a$/b",
}
async def test_delete_named_server(app, named_servers): async def test_delete_named_server(app, named_servers):
username = 'donaar' username = 'donaar'
user = add_user(app.db, app, name=username) user = add_user(app.db, app, name=username)
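What the tests above exercise, from a client's point of view: creating a named server whose name needs escaping via the REST API. A sketch assuming a local hub at its default API address, an existing user "walnut", and a token with permission on that user (address and token are placeholders):

from urllib.parse import quote

import requests

hub_api = "http://127.0.0.1:8081/hub/api"   # assumed default hub API address
token = "abc123"                            # hypothetical API token
server_name = "$p~c|a! ch@rs"

r = requests.post(
    f"{hub_api}/users/walnut/servers/{quote(server_name, safe='@~')}",
    headers={"Authorization": f"token {token}"},
)
r.raise_for_status()   # 201 Created once the spawn request is accepted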

View File

@@ -768,6 +768,10 @@ async def test_login_strip(app):
(False, '/user/other', '/hub/user/other', None), (False, '/user/other', '/hub/user/other', None),
(False, '/absolute', '/absolute', None), (False, '/absolute', '/absolute', None),
(False, '/has?query#andhash', '/has?query#andhash', None), (False, '/has?query#andhash', '/has?query#andhash', None),
# :// in query string or fragment
(False, '/has?repo=https/host.git', '/has?repo=https/host.git', None),
(False, '/has?repo=https://host.git', '/has?repo=https://host.git', None),
(False, '/has#repo=https://host.git', '/has#repo=https://host.git', None),
# next_url outside is not allowed # next_url outside is not allowed
(False, 'relative/path', '', None), (False, 'relative/path', '', None),
(False, 'https://other.domain', '', None), (False, 'https://other.domain', '', None),
@@ -807,7 +811,9 @@ async def test_login_redirect(app, running, next_url, location, params):
if params: if params:
url = url_concat(url, params) url = url_concat(url, params)
if next_url: if next_url:
if '//' not in next_url and next_url.startswith('/'): if next_url.startswith('/') and not (
next_url.startswith("//") or urlparse(next_url).netloc
):
next_url = ujoin(app.base_url, next_url, '') next_url = ujoin(app.base_url, next_url, '')
url = url_concat(url, dict(next=next_url)) url = url_concat(url, dict(next=next_url))
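The updated check is worth spelling out: a next URL is treated as local only if it is an absolute path with no authority component, so "://" appearing in a query string or fragment no longer disqualifies it. A standalone sketch of the same condition:

from urllib.parse import urlparse


def is_local_path(next_url):
    """True only for same-host absolute paths (no scheme, no authority)."""
    return next_url.startswith("/") and not (
        next_url.startswith("//") or urlparse(next_url).netloc
    )


print(is_local_path("/has?repo=https://host.git"))  # True: '//' only in the query
print(is_local_path("//other.domain/path"))         # False: protocol-relative URL
print(is_local_path("https://other.domain"))        # False: absolute URL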
@@ -1105,17 +1111,27 @@ async def test_bad_oauth_get(app, params):
[ [
(["users"], False), (["users"], False),
(["admin:users"], False), (["admin:users"], False),
(["users", "admin:users", "admin:servers"], True), (["users", "admin:users", "admin:servers"], False),
(["admin-ui"], True),
], ],
) )
async def test_admin_page_access(app, scopes, has_access, create_user_with_scopes): async def test_admin_page_access(app, scopes, has_access, create_user_with_scopes):
user = create_user_with_scopes(*scopes) user = create_user_with_scopes(*scopes)
cookies = await app.login_user(user.name) cookies = await app.login_user(user.name)
r = await get_page("/admin", app, cookies=cookies) home_resp = await get_page("/home", app, cookies=cookies)
admin_resp = await get_page("/admin", app, cookies=cookies)
assert home_resp.status_code == 200
soup = BeautifulSoup(home_resp.text, "html.parser")
nav = soup.find("div", id="thenavbar")
links = [a["href"] for a in nav.find_all("a")]
admin_url = app.base_url + "hub/admin"
if has_access: if has_access:
assert r.status_code == 200 assert admin_resp.status_code == 200
assert admin_url in links
else: else:
assert r.status_code == 403 assert admin_resp.status_code == 403
assert admin_url not in links
async def test_oauth_page_scope_appearance( async def test_oauth_page_scope_appearance(

View File

@@ -1152,28 +1152,52 @@ async def test_user_filter_expansion(app, create_user_with_scopes):
@pytest.mark.parametrize( @pytest.mark.parametrize(
"scopes, expected", "scopes, expected",
[ [
("read:users:name!user", ["read:users:name!user=$user"]), ("read:users:name!user", ["read:users:name!user={user}"]),
( (
"users:activity!user", "users:activity!user",
[ [
"read:users:activity!user=$user", "read:users:activity!user={user}",
"users:activity!user=$user", "users:activity!user={user}",
], ],
), ),
("self", ["*"]), ("self", ["*"]),
(["access:services", "access:services!service=x"], ["access:services"]), (["access:services", "access:services!service=x"], ["access:services"]),
("access:services!service", ["access:services!service={service}"]),
("access:servers!server", ["access:servers!server={server}"]),
], ],
) )
def test_expand_scopes(user, scopes, expected): def test_expand_scopes(app, user, scopes, expected, mockservice_external):
if isinstance(scopes, str): if isinstance(scopes, str):
scopes = [scopes] scopes = [scopes]
scopes = {s.replace("$user", user.name) for s in scopes}
expected = {s.replace("$user", user.name) for s in expected} db = app.db
service = mockservice_external
spawner_name = "salmon"
server_name = f"{user.name}/{spawner_name}"
if 'server' in str(scopes):
oauth_client = orm.OAuthClient()
db.add(oauth_client)
spawner = user.spawners[spawner_name]
spawner.orm_spawner.oauth_client = oauth_client
db.commit()
assert oauth_client.spawner is spawner.orm_spawner
else:
oauth_client = service.oauth_client
assert oauth_client is not None
def format_scopes(scopes):
return {
s.format(service=service.name, server=server_name, user=user.name)
for s in scopes
}
scopes = format_scopes(scopes)
expected = format_scopes(expected)
if "*" in expected: if "*" in expected:
expected.remove("*") expected.remove("*")
expected.update(_expand_self_scope(user.name)) expected.update(_expand_self_scope(user.name))
expanded = expand_scopes(scopes, owner=user.orm_user) expanded = expand_scopes(scopes, owner=user.orm_user, oauth_client=oauth_client)
assert isinstance(expanded, frozenset) assert isinstance(expanded, frozenset)
assert sorted(expanded) == sorted(expected) assert sorted(expanded) == sorted(expected)

View File

@@ -4,16 +4,17 @@ import sys
from contextlib import contextmanager from contextlib import contextmanager
from subprocess import CalledProcessError, check_output from subprocess import CalledProcessError, check_output
from unittest import mock from unittest import mock
from urllib.parse import urlparse from urllib.parse import urlencode, urlparse
import pytest import pytest
from bs4 import BeautifulSoup
import jupyterhub import jupyterhub
from .. import orm from .. import orm
from ..utils import url_path_join from ..utils import url_path_join
from .mocking import StubSingleUserSpawner, public_url from .mocking import StubSingleUserSpawner, public_url
from .utils import AsyncSession, async_requests from .utils import AsyncSession, async_requests, get_page
@contextmanager @contextmanager
@@ -223,3 +224,22 @@ def test_singleuser_app_class(JUPYTERHUB_SINGLEUSER_APP):
else: else:
assert '--ServerApp.' in out assert '--ServerApp.' in out
assert '--NotebookApp.' not in out assert '--NotebookApp.' not in out
async def test_nbclassic_control_panel(app, user):
# use StubSingleUserSpawner to launch a single-user app in a thread
app.spawner_class = StubSingleUserSpawner
app.tornado_settings['spawner_class'] = StubSingleUserSpawner
# login, start the server
await user.spawn()
cookies = await app.login_user(user.name)
next_url = url_path_join(user.url, "tree/")
url = '/?' + urlencode({'next': next_url})
r = await get_page(url, app, cookies=cookies)
r.raise_for_status()
assert urlparse(r.url).path == urlparse(next_url).path
page = BeautifulSoup(r.text, "html.parser")
link = page.find("a", id="jupyterhub-control-panel-link")
assert link, f"Missing jupyterhub-control-panel-link in {page}"
assert link["href"] == url_path_join(app.base_url, "hub/home")

View File

@@ -3,7 +3,14 @@ Traitlets that are used in JupyterHub
""" """
# Copyright (c) Jupyter Development Team. # Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License. # Distributed under the terms of the Modified BSD License.
import entrypoints import sys
# See compatibility note on `group` keyword in https://docs.python.org/3/library/importlib.metadata.html#entry-points
if sys.version_info < (3, 10):
from importlib_metadata import entry_points
else:
from importlib.metadata import entry_points
from traitlets import Integer, List, TraitError, TraitType, Type, Undefined, Unicode from traitlets import Integer, List, TraitError, TraitType, Type, Undefined, Unicode
@@ -125,11 +132,7 @@ class EntryPointType(Type):
chunks = [self._original_help] chunks = [self._original_help]
chunks.append("Currently installed: ") chunks.append("Currently installed: ")
for key, entry_point in self.load_entry_points().items(): for key, entry_point in self.load_entry_points().items():
chunks.append( chunks.append(f" - {key}: {entry_point.module}.{entry_point.attr}")
" - {}: {}.{}".format(
key, entry_point.module_name, entry_point.object_name
)
)
return '\n'.join(chunks) return '\n'.join(chunks)
@help.setter @help.setter
@@ -137,11 +140,14 @@ class EntryPointType(Type):
self._original_help = value self._original_help = value
def load_entry_points(self): def load_entry_points(self):
"""Load my entry point group""" """Load my entry point group
# load the group
group = entrypoints.get_group_named(self.entry_point_group) Returns a dict whose keys are lowercase entrypoint names
# make it case-insensitive """
return {key.lower(): value for key, value in group.items()} return {
entry_point.name.lower(): entry_point
for entry_point in entry_points(group=self.entry_point_group)
}
def validate(self, obj, value): def validate(self, obj, value):
if isinstance(value, str): if isinstance(value, str):

View File

@@ -17,7 +17,13 @@ from .crypto import CryptKeeper, EncryptionUnavailable, InvalidToken, decrypt, e
from .metrics import RUNNING_SERVERS, TOTAL_USERS from .metrics import RUNNING_SERVERS, TOTAL_USERS
from .objects import Server from .objects import Server
from .spawner import LocalProcessSpawner from .spawner import LocalProcessSpawner
from .utils import AnyTimeoutError, make_ssl_context, maybe_future, url_path_join from .utils import (
AnyTimeoutError,
make_ssl_context,
maybe_future,
url_escape_path,
url_path_join,
)
# detailed messages about the most common failure-to-start errors, # detailed messages about the most common failure-to-start errors,
# which manifest timeouts during start # which manifest timeouts during start
@@ -269,9 +275,9 @@ class User:
new_groups = set(group_names).difference(current_groups) new_groups = set(group_names).difference(current_groups)
removed_groups = current_groups.difference(group_names) removed_groups = current_groups.difference(group_names)
if new_groups: if new_groups:
self.log.info("Adding user {self.name} to group(s): {new_groups}") self.log.info(f"Adding user {self.name} to group(s): {new_groups}")
if removed_groups: if removed_groups:
self.log.info("Removing user {self.name} from group(s): {removed_groups}") self.log.info(f"Removing user {self.name} from group(s): {removed_groups}")
if group_names: if group_names:
groups = ( groups = (
@@ -410,8 +416,10 @@ class User:
hub=self.settings.get('hub'), hub=self.settings.get('hub'),
authenticator=self.authenticator, authenticator=self.authenticator,
config=self.settings.get('config'), config=self.settings.get('config'),
proxy_spec=url_path_join(self.proxy_spec, server_name, '/'), proxy_spec=url_path_join(
db=self.db, self.proxy_spec, url_escape_path(server_name), '/'
),
_deprecated_db_session=self.db,
oauth_client_id=client_id, oauth_client_id=client_id,
cookie_options=self.settings.get('cookie_options', {}), cookie_options=self.settings.get('cookie_options', {}),
trusted_alt_names=trusted_alt_names, trusted_alt_names=trusted_alt_names,
@@ -494,7 +502,7 @@ class User:
@property @property
def escaped_name(self): def escaped_name(self):
"""My name, escaped for use in URLs, cookies, etc.""" """My name, escaped for use in URLs, cookies, etc."""
return quote(self.name, safe='@~') return url_escape_path(self.name)
@property @property
def json_escaped_name(self): def json_escaped_name(self):
@@ -543,13 +551,13 @@ class User:
if not server_name: if not server_name:
return self.url return self.url
else: else:
return url_path_join(self.url, server_name) return url_path_join(self.url, url_escape_path(server_name))
def progress_url(self, server_name=''): def progress_url(self, server_name=''):
"""API URL for progress endpoint for a server with a given name""" """API URL for progress endpoint for a server with a given name"""
url_parts = [self.settings['hub'].base_url, 'api/users', self.escaped_name] url_parts = [self.settings['hub'].base_url, 'api/users', self.escaped_name]
if server_name: if server_name:
url_parts.extend(['servers', server_name, 'progress']) url_parts.extend(['servers', url_escape_path(server_name), 'progress'])
else: else:
url_parts.extend(['server/progress']) url_parts.extend(['server/progress'])
return url_path_join(*url_parts) return url_path_join(*url_parts)
@@ -623,7 +631,7 @@ class User:
if handler: if handler:
await self.refresh_auth(handler) await self.refresh_auth(handler)
base_url = url_path_join(self.base_url, server_name) + '/' base_url = url_path_join(self.base_url, url_escape_path(server_name)) + '/'
orm_server = orm.Server(base_url=base_url) orm_server = orm.Server(base_url=base_url)
db.add(orm_server) db.add(orm_server)
@@ -678,7 +686,7 @@ class User:
oauth_client = oauth_provider.add_client( oauth_client = oauth_provider.add_client(
client_id, client_id,
api_token, api_token,
url_path_join(self.url, server_name, 'oauth_callback'), url_path_join(self.url, url_escape_path(server_name), 'oauth_callback'),
allowed_roles=allowed_roles, allowed_roles=allowed_roles,
description="Server at %s" description="Server at %s"
% (url_path_join(self.base_url, server_name) + '/'), % (url_path_join(self.base_url, server_name) + '/'),
@@ -785,7 +793,9 @@ class User:
oauth_provider.add_client( oauth_provider.add_client(
client_id, client_id,
spawner.api_token, spawner.api_token,
url_path_join(self.url, server_name, 'oauth_callback'), url_path_join(
self.url, url_escape_path(server_name), 'oauth_callback'
),
) )
db.commit() db.commit()
@@ -799,7 +809,7 @@ class User:
e.reason = 'timeout' e.reason = 'timeout'
self.settings['statsd'].incr('spawner.failure.timeout') self.settings['statsd'].incr('spawner.failure.timeout')
else: else:
self.log.error( self.log.exception(
"Unhandled error starting {user}'s server: {error}".format( "Unhandled error starting {user}'s server: {error}".format(
user=self.name, error=e user=self.name, error=e
) )
@@ -809,7 +819,7 @@ class User:
try: try:
await self.stop(spawner.name) await self.stop(spawner.name)
except Exception: except Exception:
self.log.error( self.log.exception(
"Failed to cleanup {user}'s server that failed to start".format( "Failed to cleanup {user}'s server that failed to start".format(
user=self.name user=self.name
), ),
@@ -857,7 +867,7 @@ class User:
self.settings['statsd'].incr('spawner.failure.http_timeout') self.settings['statsd'].incr('spawner.failure.http_timeout')
else: else:
e.reason = 'error' e.reason = 'error'
self.log.error( self.log.exception(
"Unhandled error waiting for {user}'s server to show up at {url}: {error}".format( "Unhandled error waiting for {user}'s server to show up at {url}: {error}".format(
user=self.name, url=server.url, error=e user=self.name, url=server.url, error=e
) )
@@ -866,7 +876,7 @@ class User:
try: try:
await self.stop(spawner.name) await self.stop(spawner.name)
except Exception: except Exception:
self.log.error( self.log.exception(
"Failed to cleanup {user}'s server that failed to start".format( "Failed to cleanup {user}'s server that failed to start".format(
user=self.name user=self.name
), ),

View File

@@ -19,6 +19,7 @@ from binascii import b2a_hex
from datetime import datetime, timezone from datetime import datetime, timezone
from hmac import compare_digest from hmac import compare_digest
from operator import itemgetter from operator import itemgetter
from urllib.parse import quote
from async_generator import aclosing from async_generator import aclosing
from sqlalchemy.exc import SQLAlchemyError from sqlalchemy.exc import SQLAlchemyError
@@ -371,6 +372,11 @@ def compare_token(compare, token):
return False return False
def url_escape_path(value):
"""Escape a value to be used in URLs, cookies, etc."""
return quote(value, safe='@~')
def url_path_join(*pieces): def url_path_join(*pieces):
"""Join components of url into a relative url. """Join components of url into a relative url.

View File

@@ -22,7 +22,7 @@
"bootstrap": "^3.4.1", "bootstrap": "^3.4.1",
"font-awesome": "^4.7.0", "font-awesome": "^4.7.0",
"jquery": "^3.5.1", "jquery": "^3.5.1",
"moment": "^2.24.0", "moment": "^2.29.2",
"requirejs": "^2.3.6" "requirejs": "^2.3.6"
} }
} }

View File

@@ -3,10 +3,14 @@ profile = "black"
[tool.black] [tool.black]
skip-string-normalization = true skip-string-normalization = true
# target-version should be all supported versions, see
# https://github.com/psf/black/issues/751#issuecomment-473066811
target_version = [ target_version = [
"py36", "py36",
"py37", "py37",
"py38", "py38",
"py39",
"py310",
] ]
[tool.tbump] [tool.tbump]

View File

@@ -1,7 +1,7 @@
alembic>=1.4 alembic>=1.4
async_generator>=1.9 async_generator>=1.9
certipy>=0.1.2 certipy>=0.1.2
entrypoints importlib_metadata>=3.6; python_version < '3.10'
jinja2>=2.11.0 jinja2>=2.11.0
jupyter_telemetry>=0.1.0 jupyter_telemetry>=0.1.0
oauthlib>=3.0 oauthlib>=3.0

View File

@@ -135,6 +135,19 @@ def mtime(path):
return os.stat(path).st_mtime return os.stat(path).st_mtime
def recursive_mtime(path):
"""Recursively get newest mtime of files"""
if os.path.isfile(path):
return mtime(path)
current = 0
for dirname, _, filenames in os.walk(path):
if filenames:
current = max(
current, max(mtime(os.path.join(dirname, f)) for f in filenames)
)
return current
class BaseCommand(Command): class BaseCommand(Command):
"""Dumb empty command because Command needs subclasses to override too much""" """Dumb empty command because Command needs subclasses to override too much"""
@@ -250,12 +263,72 @@ class CSS(BaseCommand):
self.distribution.data_files = get_data_files() self.distribution.data_files = get_data_files()
class JSX(BaseCommand):
description = "build admin app"
jsx_dir = pjoin(here, 'jsx')
js_target = pjoin(static, 'js', 'admin-react.js')
def should_run(self):
if os.getenv('READTHEDOCS'):
# yarn not available on RTD
return False
if not os.path.exists(self.js_target):
return True
js_target_mtime = mtime(self.js_target)
jsx_mtime = recursive_mtime(self.jsx_dir)
if js_target_mtime < jsx_mtime:
return True
return False
def run(self):
if not self.should_run():
print("JSX admin app is up to date")
return
# jlpm is a version of yarn bundled with JupyterLab
if shutil.which('yarn'):
yarn = 'yarn'
elif shutil.which('jlpm'):
print("yarn not found, using jlpm")
yarn = 'jlpm'
else:
raise Exception('JSX needs to be updated but yarn is not installed')
print("Installing JSX admin app requirements")
check_call(
[yarn],
cwd=self.jsx_dir,
shell=shell,
)
print("Building JSX admin app")
check_call(
[yarn, 'build'],
cwd=self.jsx_dir,
shell=shell,
)
print("Copying JSX admin app to static/js")
check_call(
[yarn, 'place'],
cwd=self.jsx_dir,
shell=shell,
)
# update data-files in case this created new files
self.distribution.data_files = get_data_files()
def js_css_first(cls, strict=True): def js_css_first(cls, strict=True):
class Command(cls): class Command(cls):
def run(self): def run(self):
try: try:
self.run_command('js') self.run_command('js')
self.run_command('css') self.run_command('css')
self.run_command('jsx')
except Exception: except Exception:
if strict: if strict:
raise raise
@@ -282,6 +355,7 @@ class bdist_egg_disabled(bdist_egg):
setup_args['cmdclass'] = { setup_args['cmdclass'] = {
'js': NPM, 'js': NPM,
'css': CSS, 'css': CSS,
'jsx': JSX,
'build_py': js_css_first(build_py, strict=is_repo), 'build_py': js_css_first(build_py, strict=is_repo),
'sdist': js_css_first(sdist, strict=True), 'sdist': js_css_first(sdist, strict=True),
'bdist_egg': bdist_egg if 'bdist_egg' in sys.argv else bdist_egg_disabled, 'bdist_egg': bdist_egg if 'bdist_egg' in sys.argv else bdist_egg_disabled,

File diff suppressed because one or more lines are too long

View File

@@ -6,7 +6,7 @@
window.api_page_limit = parseInt("{{ api_page_limit|safe }}") window.api_page_limit = parseInt("{{ api_page_limit|safe }}")
window.base_url = "{{ base_url|safe }}" window.base_url = "{{ base_url|safe }}"
</script> </script>
<script src="static/js/admin-react.js"></script> <script src={{ static_url("js/admin-react.js") }}></script>
</div> </div>
{% endblock %} {% endblock %}

View File

@@ -122,7 +122,7 @@
{% block nav_bar_left_items %} {% block nav_bar_left_items %}
<li><a href="{{base_url}}home">Home</a></li> <li><a href="{{base_url}}home">Home</a></li>
<li><a href="{{base_url}}token">Token</a></li> <li><a href="{{base_url}}token">Token</a></li>
{% if 'admin:users' in parsed_scopes and 'admin:servers' in parsed_scopes %} {% if 'admin-ui' in parsed_scopes %}
<li><a href="{{base_url}}admin">Admin</a></li> <li><a href="{{base_url}}admin">Admin</a></li>
{% endif %} {% endif %}
{% if services %} {% if services %}