diff --git a/.circleci/config.yml b/.circleci/config.yml deleted file mode 100644 index 03efe746..00000000 --- a/.circleci/config.yml +++ /dev/null @@ -1,21 +0,0 @@ -# Python CircleCI 2.0 configuration file -# Updating CircleCI configuration from v1 to v2 -# Check https://circleci.com/docs/2.0/language-python/ for more details -# -version: 2 -jobs: - build: - machine: true - steps: - - checkout - - run: - name: build images - command: | - docker build -t jupyterhub/jupyterhub . - docker build -t jupyterhub/jupyterhub-onbuild onbuild - docker build -t jupyterhub/jupyterhub:alpine -f dockerfiles/Dockerfile.alpine . - docker build -t jupyterhub/singleuser singleuser - - run: - name: smoke test jupyterhub - command: | - docker run --rm -it jupyterhub/jupyterhub jupyterhub --help diff --git a/.flake8 b/.flake8 index 58538de2..41852dd2 100644 --- a/.flake8 +++ b/.flake8 @@ -3,14 +3,9 @@ # E: style errors # W: style warnings # C: complexity -# F401: module imported but unused -# F403: import * -# F811: redefinition of unused `name` from line `N` +# D: docstring warnings (unused pydocstyle extension) # F841: local variable assigned but never used -# E402: module level import not at top of file -# I100: Import statements are in the wrong order -# I101: Imported names are in the wrong order. Should be -ignore = E, C, W, F401, F403, F811, F841, E402, I100, I101, D400 +ignore = E, C, W, D, F841 builtins = c, get_config exclude = .cache, diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md deleted file mode 100644 index 476d8bb6..00000000 --- a/.github/ISSUE_TEMPLATE/bug_report.md +++ /dev/null @@ -1,37 +0,0 @@ ---- -name: Bug report -about: Create a report to help us improve - ---- - -Hi! Thanks for using JupyterHub. - -If you are reporting an issue with JupyterHub, please use the [GitHub issue](https://github.com/jupyterhub/jupyterhub/issues) search feature to check if your issue has been asked already. 
If it has, please add your comments to the existing issue. - -**Describe the bug** -A clear and concise description of what the bug is. - -**To Reproduce** -Steps to reproduce the behavior: -1. Go to '...' -2. Click on '....' -3. Scroll down to '....' -4. See error - -**Expected behavior** -A clear and concise description of what you expected to happen. - -**Screenshots** -If applicable, add screenshots to help explain your problem. - -**Desktop (please complete the following information):** - - OS: [e.g. iOS] - - Browser [e.g. chrome, safari] - - Version [e.g. 22] - -**Additional context** -Add any other context about the problem here. - -- Running `jupyter troubleshoot` from the command line, if possible, and posting -its output would also be helpful. -- Running in `--debug` mode can also be helpful for troubleshooting. diff --git a/.github/ISSUE_TEMPLATE/installation-and-configuration-issues.md b/.github/ISSUE_TEMPLATE/installation-and-configuration-issues.md deleted file mode 100644 index 9a6c731b..00000000 --- a/.github/ISSUE_TEMPLATE/installation-and-configuration-issues.md +++ /dev/null @@ -1,7 +0,0 @@ ---- -name: Installation and configuration issues -about: Installation and configuration assistance - ---- - -If you are having issues with installation or configuration, you may ask for help on the JupyterHub gitter channel or file an issue here. 
diff --git a/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE/PULL_REQUEST_TEMPLATE.md similarity index 100% rename from PULL_REQUEST_TEMPLATE.md rename to .github/PULL_REQUEST_TEMPLATE/PULL_REQUEST_TEMPLATE.md diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 00000000..c724be77 --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,15 @@ +# dependabot.yml reference: https://docs.github.com/en/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file +# +# Notes: +# - Status and logs from dependabot are provided at +# https://github.com/jupyterhub/jupyterhub/network/updates. +# +version: 2 +updates: + # Maintain dependencies in our GitHub Workflows + - package-ecosystem: github-actions + directory: "/" + schedule: + interval: weekly + time: "05:00" + timezone: "Etc/UTC" diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml new file mode 100644 index 00000000..aa3a6f5e --- /dev/null +++ b/.github/workflows/release.yml @@ -0,0 +1,225 @@ +# This is a GitHub workflow defining a set of jobs with a set of steps. +# ref: https://docs.github.com/en/actions/learn-github-actions/workflow-syntax-for-github-actions +# +# Test build release artifacts (PyPI package, Docker images) and publish them on +# pushed git tags. 
+# +name: Release + +on: + pull_request: + paths-ignore: + - "docs/**" + - "**.md" + - "**.rst" + - ".github/workflows/*" + - "!.github/workflows/release.yml" + push: + paths-ignore: + - "docs/**" + - "**.md" + - "**.rst" + - ".github/workflows/*" + - "!.github/workflows/release.yml" + branches-ignore: + - "dependabot/**" + - "pre-commit-ci-update-config" + tags: + - "**" + workflow_dispatch: + +jobs: + build-release: + runs-on: ubuntu-20.04 + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-python@v4 + with: + python-version: "3.9" + + - uses: actions/setup-node@v3 + with: + node-version: "14" + + - name: install build requirements + run: | + npm install -g yarn + pip install --upgrade pip + pip install build + pip freeze + + - name: build release + run: | + python -m build --sdist --wheel . + ls -l dist + + - name: verify sdist + run: | + ./ci/check_sdist.py dist/jupyterhub-*.tar.gz + + - name: verify data-files are installed where they are found + run: | + pip install dist/*.whl + ./ci/check_installed_data.py + + - name: verify sdist can be installed without npm/yarn + run: | + docker run --rm -v $PWD/dist:/dist:ro docker.io/library/python:3.9-slim-bullseye bash -c 'pip install /dist/jupyterhub-*.tar.gz' + + # ref: https://github.com/actions/upload-artifact#readme + - uses: actions/upload-artifact@v3 + with: + name: jupyterhub-${{ github.sha }} + path: "dist/*" + if-no-files-found: error + + - name: Publish to PyPI + if: startsWith(github.ref, 'refs/tags/') + env: + TWINE_USERNAME: __token__ + TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }} + run: | + pip install twine + twine upload --skip-existing dist/* + + publish-docker: + runs-on: ubuntu-20.04 + + services: + # So that we can test this in PRs/branches + local-registry: + image: registry:2 + ports: + - 5000:5000 + + steps: + - name: Should we push this image to a public registry? 
+ run: | + if [ "${{ startsWith(github.ref, 'refs/tags/') || (github.ref == 'refs/heads/main') }}" = "true" ]; then + # Empty => Docker Hub + echo "REGISTRY=" >> $GITHUB_ENV + else + echo "REGISTRY=localhost:5000/" >> $GITHUB_ENV + fi + + - uses: actions/checkout@v3 + + # Setup docker to build for multiple platforms, see: + # https://github.com/docker/build-push-action/tree/v2.4.0#usage + # https://github.com/docker/build-push-action/blob/v2.4.0/docs/advanced/multi-platform.md + - name: Set up QEMU (for docker buildx) + uses: docker/setup-qemu-action@e81a89b1732b9c48d79cd809d8d81d79c4647a18 # associated tag: v1.0.2 + + - name: Set up Docker Buildx (for multi-arch builds) + uses: docker/setup-buildx-action@8c0edbc76e98fa90f69d9a2c020dcb50019dc325 + with: + # Allows pushing to registry on localhost:5000 + driver-opts: network=host + + - name: Setup push rights to Docker Hub + # This was setup by... + # 1. Creating a Docker Hub service account "jupyterhubbot" + # 2. Creating a access token for the service account specific to this + # repository: https://hub.docker.com/settings/security + # 3. Making the account part of the "bots" team, and granting that team + # permissions to push to the relevant images: + # https://hub.docker.com/orgs/jupyterhub/teams/bots/permissions + # 4. Registering the username and token as a secret for this repo: + # https://github.com/jupyterhub/jupyterhub/settings/secrets/actions + if: env.REGISTRY != 'localhost:5000/' + run: | + docker login -u "${{ secrets.DOCKERHUB_USERNAME }}" -p "${{ secrets.DOCKERHUB_TOKEN }}" + + # image: jupyterhub/jupyterhub + # + # https://github.com/jupyterhub/action-major-minor-tag-calculator + # If this is a tagged build this will return additional parent tags. + # E.g. 1.2.3 is expanded to Docker tags + # [{prefix}:1.2.3, {prefix}:1.2, {prefix}:1, {prefix}:latest] unless + # this is a backported tag in which case the newer tags aren't updated. + # For branches this will return the branch name. 
+ # If GITHUB_TOKEN isn't available (e.g. in PRs) returns no tags []. + - name: Get list of jupyterhub tags + id: jupyterhubtags + uses: jupyterhub/action-major-minor-tag-calculator@v2 + with: + githubToken: ${{ secrets.GITHUB_TOKEN }} + prefix: "${{ env.REGISTRY }}jupyterhub/jupyterhub:" + defaultTag: "${{ env.REGISTRY }}jupyterhub/jupyterhub:noref" + branchRegex: ^\w[\w-.]*$ + + - name: Build and push jupyterhub + uses: docker/build-push-action@c56af957549030174b10d6867f20e78cfd7debc5 + with: + context: . + platforms: linux/amd64,linux/arm64 + push: true + # tags parameter must be a string input so convert `gettags` JSON + # array into a comma separated list of tags + tags: ${{ join(fromJson(steps.jupyterhubtags.outputs.tags)) }} + + # image: jupyterhub/jupyterhub-onbuild + # + - name: Get list of jupyterhub-onbuild tags + id: onbuildtags + uses: jupyterhub/action-major-minor-tag-calculator@v2 + with: + githubToken: ${{ secrets.GITHUB_TOKEN }} + prefix: "${{ env.REGISTRY }}jupyterhub/jupyterhub-onbuild:" + defaultTag: "${{ env.REGISTRY }}jupyterhub/jupyterhub-onbuild:noref" + branchRegex: ^\w[\w-.]*$ + + - name: Build and push jupyterhub-onbuild + uses: docker/build-push-action@c56af957549030174b10d6867f20e78cfd7debc5 + with: + build-args: | + BASE_IMAGE=${{ fromJson(steps.jupyterhubtags.outputs.tags)[0] }} + context: onbuild + platforms: linux/amd64,linux/arm64 + push: true + tags: ${{ join(fromJson(steps.onbuildtags.outputs.tags)) }} + + # image: jupyterhub/jupyterhub-demo + # + - name: Get list of jupyterhub-demo tags + id: demotags + uses: jupyterhub/action-major-minor-tag-calculator@v2 + with: + githubToken: ${{ secrets.GITHUB_TOKEN }} + prefix: "${{ env.REGISTRY }}jupyterhub/jupyterhub-demo:" + defaultTag: "${{ env.REGISTRY }}jupyterhub/jupyterhub-demo:noref" + branchRegex: ^\w[\w-.]*$ + + - name: Build and push jupyterhub-demo + uses: docker/build-push-action@c56af957549030174b10d6867f20e78cfd7debc5 + with: + build-args: | + BASE_IMAGE=${{ 
fromJson(steps.onbuildtags.outputs.tags)[0] }} + context: demo-image + # linux/arm64 currently fails: + # ERROR: Could not build wheels for argon2-cffi which use PEP 517 and cannot be installed directly + # ERROR: executor failed running [/bin/sh -c python3 -m pip install notebook]: exit code: 1 + platforms: linux/amd64 + push: true + tags: ${{ join(fromJson(steps.demotags.outputs.tags)) }} + + # image: jupyterhub/singleuser + # + - name: Get list of jupyterhub/singleuser tags + id: singleusertags + uses: jupyterhub/action-major-minor-tag-calculator@v2 + with: + githubToken: ${{ secrets.GITHUB_TOKEN }} + prefix: "${{ env.REGISTRY }}jupyterhub/singleuser:" + defaultTag: "${{ env.REGISTRY }}jupyterhub/singleuser:noref" + branchRegex: ^\w[\w-.]*$ + + - name: Build and push jupyterhub/singleuser + uses: docker/build-push-action@c56af957549030174b10d6867f20e78cfd7debc5 + with: + build-args: | + JUPYTERHUB_VERSION=${{ github.ref_type == 'tag' && github.ref_name || format('git:{0}', github.sha) }} + context: singleuser + platforms: linux/amd64,linux/arm64 + push: true + tags: ${{ join(fromJson(steps.singleusertags.outputs.tags)) }} diff --git a/.github/workflows/support-bot.yml b/.github/workflows/support-bot.yml new file mode 100644 index 00000000..341d5e41 --- /dev/null +++ b/.github/workflows/support-bot.yml @@ -0,0 +1,31 @@ +# https://github.com/dessant/support-requests +name: "Support Requests" + +on: + issues: + types: [labeled, unlabeled, reopened] + +permissions: + issues: write + +jobs: + action: + runs-on: ubuntu-latest + steps: + - uses: dessant/support-requests@v2 + with: + github-token: ${{ github.token }} + support-label: "support" + issue-comment: | + Hi there @{issue-author} :wave:! + + I closed this issue because it was labelled as a support question. + + Please help us organize discussion by posting this on the http://discourse.jupyter.org/ forum. + + Our goal is to sustain a positive experience for both users and developers. 
We use GitHub issues for specific discussions related to changing a repository's content, and let the forum be where we can more generally help and inspire each other. + + Thank you for being an active member of our community! :heart: + close-issue: true + lock-issue: false + issue-lock-reason: "off-topic" diff --git a/.github/workflows/test-docs.yml b/.github/workflows/test-docs.yml new file mode 100644 index 00000000..0e96a2ec --- /dev/null +++ b/.github/workflows/test-docs.yml @@ -0,0 +1,62 @@ +# This is a GitHub workflow defining a set of jobs with a set of steps. +# ref: https://docs.github.com/en/actions/learn-github-actions/workflow-syntax-for-github-actions +# +# This workflow validates the REST API definition and runs the pytest tests in +# the docs/ folder. This workflow does not build the documentation. That is +# instead tested via ReadTheDocs (https://readthedocs.org/projects/jupyterhub/). +# +name: Test docs + +# The tests defined in docs/ are currently influenced by changes to _version.py +# and scopes.py. +on: + pull_request: + paths: + - "docs/**" + - "jupyterhub/_version.py" + - "jupyterhub/scopes.py" + - ".github/workflows/test-docs.yml" + push: + paths: + - "docs/**" + - "jupyterhub/_version.py" + - "jupyterhub/scopes.py" + - ".github/workflows/test-docs.yml" + branches-ignore: + - "dependabot/**" + - "pre-commit-ci-update-config" + tags: + - "**" + workflow_dispatch: + +env: + # UTF-8 content may be interpreted as ascii and causes errors without this. 
LANG: C.UTF-8 + PYTEST_ADDOPTS: "--verbose --color=yes" + +jobs: + validate-rest-api-definition: + runs-on: ubuntu-20.04 + steps: + - uses: actions/checkout@v3 + + - name: Validate REST API definition + uses: char0n/swagger-editor-validate@v1.3.2 + with: + definition-file: docs/source/_static/rest-api.yml + + test-docs: + runs-on: ubuntu-20.04 + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-python@v4 + with: + python-version: "3.9" + + - name: Install requirements + run: | + pip install -r docs/requirements.txt pytest + + - name: pytest docs/ + run: | + pytest docs/ diff --git a/.github/workflows/test-jsx.yml b/.github/workflows/test-jsx.yml new file mode 100644 index 00000000..02e8bd07 --- /dev/null +++ b/.github/workflows/test-jsx.yml @@ -0,0 +1,52 @@ +# This is a GitHub workflow defining a set of jobs with a set of steps. +# ref: https://docs.github.com/en/actions/learn-github-actions/workflow-syntax-for-github-actions +# +name: Test jsx (admin-react.js) + +on: + pull_request: + paths: + - "jsx/**" + - ".github/workflows/test-jsx.yml" + push: + paths: + - "jsx/**" + - ".github/workflows/test-jsx.yml" + branches-ignore: + - "dependabot/**" + - "pre-commit-ci-update-config" + tags: + - "**" + workflow_dispatch: + +permissions: + contents: read + +jobs: + # The ./jsx folder contains React based source code files that are to compile + # to share/jupyterhub/static/js/admin-react.js. The ./jsx folder includes + # tests that this job is meant to run with `yarn test` + # according to the documentation in jsx/README.md. 
+ test-jsx-admin-react: + runs-on: ubuntu-20.04 + timeout-minutes: 5 + + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-node@v3 + with: + node-version: "14" + + - name: Install yarn + run: | + npm install -g yarn + + - name: yarn + run: | + cd jsx + yarn + + - name: yarn test + run: | + cd jsx + yarn test diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml new file mode 100644 index 00000000..78bb9b8f --- /dev/null +++ b/.github/workflows/test.yml @@ -0,0 +1,249 @@ +# This is a GitHub workflow defining a set of jobs with a set of steps. +# ref: https://docs.github.com/en/actions/learn-github-actions/workflow-syntax-for-github-actions +# +name: Test + +on: + pull_request: + paths-ignore: + - "docs/**" + - "**.md" + - "**.rst" + - ".github/workflows/*" + - "!.github/workflows/test.yml" + push: + paths-ignore: + - "docs/**" + - "**.md" + - "**.rst" + - ".github/workflows/*" + - "!.github/workflows/test.yml" + branches-ignore: + - "dependabot/**" + - "pre-commit-ci-update-config" + tags: + - "**" + workflow_dispatch: + +env: + # UTF-8 content may be interpreted as ascii and causes errors without this. + LANG: C.UTF-8 + PYTEST_ADDOPTS: "--verbose --color=yes" + +permissions: + contents: read + +jobs: + # Run "pytest jupyterhub/tests" in various configurations + pytest: + runs-on: ubuntu-20.04 + timeout-minutes: 15 + + strategy: + # Keep running even if one variation of the job fail + fail-fast: false + matrix: + # We run this job multiple times with different parameterization + # specified below, these parameters have no meaning on their own and + # gain meaning on how job steps use them. + # + # subdomain: + # Tests everything when JupyterHub is configured to add routes for + # users with dedicated subdomains like user1.jupyter.example.com + # rather than jupyter.example.com/user/user1. + # + # db: [mysql/postgres] + # Tests everything when JupyterHub works against a dedicated mysql or + # postgresql server. 
+ # + # legacy_notebook: + # Tests everything when the user instances are started with + # the legacy notebook server instead of jupyter_server. + # + # ssl: + # Tests everything using internal SSL connections instead of + # unencrypted HTTP + # + # main_dependencies: + # Tests everything when we use the latest available dependencies + # from: traitlets. + # + # NOTE: Since only the values of these parameters are presented in the + # GitHub UI when the workflow runs, we avoid using true/false as + # values by instead duplicating the name to signal true. + # Python versions available at: + # https://github.com/actions/python-versions/blob/HEAD/versions-manifest.json + include: + - python: "3.7" + oldest_dependencies: oldest_dependencies + legacy_notebook: legacy_notebook + - python: "3.8" + legacy_notebook: legacy_notebook + - python: "3.9" + db: mysql + - python: "3.10" + db: postgres + - python: "3.11" + subdomain: subdomain + - python: "3.11" + ssl: ssl + - python: "3.11" + selenium: selenium + - python: "3.11" + main_dependencies: main_dependencies + + steps: + # NOTE: In GitHub workflows, environment variables are set by writing + # assignment statements to a file. They will be set in the following + # steps as if we had used `export MY_ENV=my-value`. + - name: Configure environment variables + run: | + if [ "${{ matrix.subdomain }}" != "" ]; then + echo "JUPYTERHUB_TEST_SUBDOMAIN_HOST=http://localhost.jovyan.org:8000" >> $GITHUB_ENV + fi + if [ "${{ matrix.db }}" == "mysql" ]; then + echo "MYSQL_HOST=127.0.0.1" >> $GITHUB_ENV + echo "JUPYTERHUB_TEST_DB_URL=mysql+mysqlconnector://root@127.0.0.1:3306/jupyterhub" >> $GITHUB_ENV + fi + if [ "${{ matrix.ssl }}" == "ssl" ]; then + echo "SSL_ENABLED=1" >> $GITHUB_ENV + fi + if [ "${{ matrix.db }}" == "postgres" ]; then + echo "PGHOST=127.0.0.1" >> $GITHUB_ENV + echo "PGUSER=test_user" >> $GITHUB_ENV + echo "PGPASSWORD=hub[test/:?" 
>> $GITHUB_ENV + echo "JUPYTERHUB_TEST_DB_URL=postgresql://test_user:hub%5Btest%2F%3A%3F@127.0.0.1:5432/jupyterhub" >> $GITHUB_ENV + fi + if [ "${{ matrix.jupyter_server }}" != "" ]; then + echo "JUPYTERHUB_SINGLEUSER_APP=jupyterhub.tests.mockserverapp.MockServerApp" >> $GITHUB_ENV + fi + - uses: actions/checkout@v3 + # NOTE: actions/setup-node@v3 make use of a cache within the GitHub base + # environment and setup in a fraction of a second. + - name: Install Node v14 + uses: actions/setup-node@v3 + with: + node-version: "14" + - name: Install Javascript dependencies + run: | + npm install + npm install -g configurable-http-proxy yarn + npm list + + # NOTE: actions/setup-python@v4 make use of a cache within the GitHub base + # environment and setup in a fraction of a second. + - name: Install Python ${{ matrix.python }} + uses: actions/setup-python@v4 + with: + python-version: "${{ matrix.python }}" + + - name: Install Python dependencies + run: | + pip install --upgrade pip + pip install ".[test]" + + if [ "${{ matrix.oldest_dependencies }}" != "" ]; then + # take any dependencies in requirements.txt such as tornado>=5.0 + # and transform them to tornado==5.0 so we can run tests with + # the earliest-supported versions + cat requirements.txt | grep '>=' | sed -e 's@>=@==@g' > oldest-requirements.txt + pip install -r oldest-requirements.txt + fi + + if [ "${{ matrix.main_dependencies }}" != "" ]; then + pip install git+https://github.com/ipython/traitlets#egg=traitlets --force + fi + if [ "${{ matrix.legacy_notebook }}" != "" ]; then + pip uninstall jupyter_server --yes + pip install 'notebook<7' + fi + if [ "${{ matrix.db }}" == "mysql" ]; then + pip install mysql-connector-python + fi + if [ "${{ matrix.db }}" == "postgres" ]; then + pip install psycopg2-binary + fi + + pip freeze + + # NOTE: If you need to debug this DB setup step, consider the following. + # + # 1. 
mysql/postgresql are database servers we start as docker containers, + # and we use clients named mysql/psql. + # + # 2. When we start a database server we need to pass environment variables + # explicitly as part of the `docker run` command. These environment + # variables are named differently from the similarly named environment + # variables used by the clients. + # + # - mysql server ref: https://hub.docker.com/_/mysql/ + # - mysql client ref: https://dev.mysql.com/doc/refman/5.7/en/environment-variables.html + # - postgres server ref: https://hub.docker.com/_/postgres/ + # - psql client ref: https://www.postgresql.org/docs/9.5/libpq-envars.html + # + # 3. When we connect, they should use 127.0.0.1 rather than the + # default way of connecting which leads to errors like below both for + # mysql and postgresql unless we set MYSQL_HOST/PGHOST to 127.0.0.1. + # + # - ERROR 2002 (HY000): Can't connect to local MySQL server through socket '/var/run/mysqld/mysqld.sock' (2) + # + - name: Start a database server (${{ matrix.db }}) + if: ${{ matrix.db }} + run: | + if [ "${{ matrix.db }}" == "mysql" ]; then + if [[ -z "$(which mysql)" ]]; then + sudo apt-get update + sudo apt-get install -y mysql-client + fi + DB=mysql bash ci/docker-db.sh + DB=mysql bash ci/init-db.sh + fi + if [ "${{ matrix.db }}" == "postgres" ]; then + if [[ -z "$(which psql)" ]]; then + sudo apt-get update + sudo apt-get install -y postgresql-client + fi + DB=postgres bash ci/docker-db.sh + DB=postgres bash ci/init-db.sh + fi + - name: Setup Firefox + if: matrix.selenium + uses: browser-actions/setup-firefox@latest + with: + firefox-version: latest + + - name: Setup Geckodriver + if: matrix.selenium + uses: browser-actions/setup-geckodriver@latest + + - name: Configure selenium tests + if: matrix.selenium + run: echo "PYTEST_ADDOPTS=$PYTEST_ADDOPTS -m selenium" >> "${GITHUB_ENV}" + + - name: Run pytest + run: | + pytest --maxfail=2 --cov=jupyterhub jupyterhub/tests + + - uses: 
codecov/codecov-action@v3 + + docker-build: + runs-on: ubuntu-20.04 + timeout-minutes: 20 + + steps: + - uses: actions/checkout@v3 + + - name: build images + run: | + docker build -t jupyterhub/jupyterhub . + docker build -t jupyterhub/jupyterhub-onbuild onbuild + docker build -t jupyterhub/jupyterhub:alpine -f dockerfiles/Dockerfile.alpine . + docker build -t jupyterhub/singleuser singleuser + + - name: smoke test jupyterhub + run: | + docker run --rm -t jupyterhub/jupyterhub jupyterhub --help + + - name: verify static files + run: | + docker run --rm -t -v $PWD/dockerfiles:/io jupyterhub/jupyterhub python3 /io/test.py diff --git a/.gitignore b/.gitignore index 90608475..f7d5ea40 100644 --- a/.gitignore +++ b/.gitignore @@ -8,7 +8,9 @@ dist docs/_build docs/build docs/source/_static/rest-api +docs/source/rbac/scope-table.md .ipynb_checkpoints +jsx/build/ # ignore config file at the top-level of the repo # but not sub-dirs /jupyterhub_config.py @@ -18,11 +20,17 @@ package-lock.json share/jupyterhub/static/components share/jupyterhub/static/css/style.min.css share/jupyterhub/static/css/style.min.css.map +share/jupyterhub/static/js/admin-react.js* *.egg-info MANIFEST .coverage .coverage.* htmlcov .idea/ +.vscode/ .pytest_cache pip-wheel-metadata +docs/source/reference/metrics.rst +oldest-requirements.txt +jupyterhub-proxy.pid +examples/server-api/service-token diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index ae6a999d..392d23cd 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,20 +1,61 @@ +# pre-commit is a tool to perform a predefined set of tasks manually and/or +# automatically before git commits are made. 
+# +# Config reference: https://pre-commit.com/#pre-commit-configyaml---top-level +# +# Common tasks +# +# - Run on all files: pre-commit run --all-files +# - Register git hooks: pre-commit install --install-hooks +# repos: -- repo: https://github.com/asottile/reorder_python_imports - rev: v1.3.5 - hooks: - - id: reorder-python-imports - language_version: python3.6 -- repo: https://github.com/ambv/black - rev: 18.9b0 - hooks: - - id: black -- repo: https://github.com/pre-commit/pre-commit-hooks - rev: v2.1.0 - hooks: - - id: end-of-file-fixer - - id: check-json - - id: check-yaml - - id: check-case-conflict - - id: check-executables-have-shebangs - - id: requirements-txt-fixer - - id: flake8 + # Autoformat: Python code, syntax patterns are modernized + - repo: https://github.com/asottile/pyupgrade + rev: v3.2.2 + hooks: + - id: pyupgrade + args: + - --py36-plus + + # Autoformat: Python code + - repo: https://github.com/PyCQA/autoflake + rev: v1.7.7 + hooks: + - id: autoflake + # args ref: https://github.com/PyCQA/autoflake#advanced-usage + args: + - --in-place + + # Autoformat: Python code + - repo: https://github.com/pycqa/isort + rev: 5.10.1 + hooks: + - id: isort + + # Autoformat: Python code + - repo: https://github.com/psf/black + rev: 22.10.0 + hooks: + - id: black + + # Autoformat: markdown, yaml, javascript (see the file .prettierignore) + - repo: https://github.com/pre-commit/mirrors-prettier + rev: v3.0.0-alpha.4 + hooks: + - id: prettier + + # Autoformat and linting, misc. 
details + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.3.0 + hooks: + - id: end-of-file-fixer + exclude: share/jupyterhub/static/js/admin-react.js + - id: requirements-txt-fixer + - id: check-case-conflict + - id: check-executables-have-shebangs + + # Linting: Python code (see the file .flake8) + - repo: https://github.com/PyCQA/flake8 + rev: "5.0.4" + hooks: + - id: flake8 diff --git a/.prettierignore b/.prettierignore new file mode 100644 index 00000000..d0730bb6 --- /dev/null +++ b/.prettierignore @@ -0,0 +1,2 @@ +share/jupyterhub/templates/ +share/jupyterhub/static/js/admin-react.js diff --git a/.readthedocs.yaml b/.readthedocs.yaml new file mode 100644 index 00000000..d2d85361 --- /dev/null +++ b/.readthedocs.yaml @@ -0,0 +1,25 @@ +# Configuration on how ReadTheDocs (RTD) builds our documentation +# ref: https://readthedocs.org/projects/jupyterhub/ +# ref: https://docs.readthedocs.io/en/stable/config-file/v2.html +# +version: 2 + +sphinx: + configuration: docs/source/conf.py + +build: + os: ubuntu-20.04 + tools: + nodejs: "16" + python: "3.9" + +python: + install: + - requirements: docs/requirements.txt + +formats: + # Adding htmlzip enables a Downloads section in the rendered website's RTD + # menu where the html build can be downloaded. This doesn't require any + # additional configuration in docs/source/conf.py. 
+ # + - htmlzip diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index 788bfa4f..00000000 --- a/.travis.yml +++ /dev/null @@ -1,100 +0,0 @@ -language: python -sudo: false -cache: - - pip -python: - - 3.6 - - 3.5 - - nightly -env: - global: - - ASYNC_TEST_TIMEOUT=15 - - MYSQL_HOST=127.0.0.1 - - MYSQL_TCP_PORT=13306 -services: - - postgres - - docker - -# installing dependencies -before_install: - - set -e - - nvm install 6; nvm use 6 - - npm install - - npm install -g configurable-http-proxy - - | - # setup database - if [[ $JUPYTERHUB_TEST_DB_URL == mysql* ]]; then - unset MYSQL_UNIX_PORT - DB=mysql bash ci/docker-db.sh - DB=mysql bash ci/init-db.sh - # FIXME: mysql-connector-python 8.0.16 incorrectly decodes bytes to str - # ref: https://bugs.mysql.com/bug.php?id=94944 - pip install 'mysql-connector-python==8.0.15' - elif [[ $JUPYTERHUB_TEST_DB_URL == postgresql* ]]; then - psql -c "CREATE USER $PGUSER WITH PASSWORD '$PGPASSWORD';" -U postgres - DB=postgres bash ci/init-db.sh - pip install psycopg2-binary - fi -install: - - pip install --upgrade pip - - pip install --upgrade --pre -r dev-requirements.txt . 
- - pip freeze - -# running tests -script: - - | - # run tests - if [[ -z "$TEST" ]]; then - pytest -v --maxfail=2 --cov=jupyterhub jupyterhub/tests - fi - - | - # run autoformat - if [[ "$TEST" == "lint" ]]; then - pre-commit run --all-files - fi - - | - # build docs - if [[ "$TEST" == "docs" ]]; then - pushd docs - pip install --upgrade -r requirements.txt - pip install --upgrade alabaster_jupyterhub - make html - popd - fi -after_success: - - codecov -after_failure: - - | - # point to auto-lint-fix - if [[ "$TEST" == "lint" ]]; then - echo "You can install pre-commit hooks to automatically run formatting" - echo "on each commit with:" - echo " pre-commit install" - echo "or you can run by hand on staged files with" - echo " pre-commit run" - echo "or after-the-fact on already committed files with" - echo " pre-commit run --all-files" - fi - -matrix: - fast_finish: true - include: - - python: 3.6 - env: TEST=lint - - python: 3.6 - env: TEST=docs - - python: 3.6 - env: JUPYTERHUB_TEST_SUBDOMAIN_HOST=http://localhost.jovyan.org:8000 - - python: 3.6 - env: - - JUPYTERHUB_TEST_DB_URL=mysql+mysqlconnector://root@127.0.0.1:$MYSQL_TCP_PORT/jupyterhub - - python: 3.6 - env: - - PGUSER=jupyterhub - - PGPASSWORD=hub[test/:? 
- # password in url is url-encoded (urllib.parse.quote($PGPASSWORD, safe='')) - - JUPYTERHUB_TEST_DB_URL=postgresql://jupyterhub:hub%5Btest%2F%3A%3F@127.0.0.1/jupyterhub - - python: 3.7 - dist: xenial - allow_failures: - - python: nightly diff --git a/CHECKLIST-Release.md b/CHECKLIST-Release.md deleted file mode 100644 index 9c4fb2da..00000000 --- a/CHECKLIST-Release.md +++ /dev/null @@ -1,26 +0,0 @@ -# Release checklist - -- [ ] Upgrade Docs prior to Release - - - [ ] Change log - - [ ] New features documented - - [ ] Update the contributor list - thank you page - -- [ ] Upgrade and test Reference Deployments - -- [ ] Release software - - - [ ] Make sure 0 issues in milestone - - [ ] Follow release process steps - - [ ] Send builds to PyPI (Warehouse) and Conda Forge - -- [ ] Blog post and/or release note - -- [ ] Notify users of release - - - [ ] Email Jupyter and Jupyter In Education mailing lists - - [ ] Tweet (optional) - -- [ ] Increment the version number for the next release - -- [ ] Update roadmap diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md index fb710f7c..12d406b7 100644 --- a/CODE_OF_CONDUCT.md +++ b/CODE_OF_CONDUCT.md @@ -1 +1 @@ -Please refer to [Project Jupyter's Code of Conduct](https://github.com/jupyter/governance/blob/master/conduct/code_of_conduct.md). +Please refer to [Project Jupyter's Code of Conduct](https://github.com/jupyter/governance/blob/HEAD/conduct/code_of_conduct.md). diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index cfced3c8..cf4e4755 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -1,102 +1,14 @@ # Contributing to JupyterHub Welcome! As a [Jupyter](https://jupyter.org) project, -you can follow the [Jupyter contributor guide](https://jupyter.readthedocs.io/en/latest/contributor/content-contributor.html). +you can follow the [Jupyter contributor guide](https://jupyter.readthedocs.io/en/latest/contributing/content-contributor.html). 
-Make sure to also follow [Project Jupyter's Code of Conduct](https://github.com/jupyter/governance/blob/master/conduct/code_of_conduct.md) +Make sure to also follow [Project Jupyter's Code of Conduct](https://github.com/jupyter/governance/blob/HEAD/conduct/code_of_conduct.md) for a friendly and welcoming collaborative environment. -## Setting up a development environment +Please see our documentation on -JupyterHub requires Python >= 3.5 and nodejs. +- [Setting up a development install](https://jupyterhub.readthedocs.io/en/latest/contributing/setup.html) +- [Testing JupyterHub and linting code](https://jupyterhub.readthedocs.io/en/latest/contributing/tests.html) -As a Python project, a development install of JupyterHub follows standard practices for the basics (steps 1-2). - - -1. clone the repo - ```bash - git clone https://github.com/jupyterhub/jupyterhub - ``` -2. do a development install with pip - - ```bash - cd jupyterhub - python3 -m pip install --editable . - ``` -3. install the development requirements, - which include things like testing tools - - ```bash - python3 -m pip install -r dev-requirements.txt - ``` -4. install configurable-http-proxy with npm: - - ```bash - npm install -g configurable-http-proxy - ``` -5. set up pre-commit hooks for automatic code formatting, etc. - - ```bash - pre-commit install - ``` - - You can also invoke the pre-commit hook manually at any time with - - ```bash - pre-commit run - ``` - -## Contributing - -JupyterHub has adopted automatic code formatting so you shouldn't -need to worry too much about your code style. -As long as your code is valid, -the pre-commit hook should take care of how it should look. -You can invoke the pre-commit hook by hand at any time with: - -```bash -pre-commit run -``` - -which should run any autoformatting on your code -and tell you about any errors it couldn't fix automatically. 
-You may also install [black integration](https://github.com/ambv/black#editor-integration) -into your text editor to format code automatically. - -If you have already committed files before setting up the pre-commit -hook with `pre-commit install`, you can fix everything up using -`pre-commit run --all-files`. You need to make the fixing commit -yourself after that. - -## Testing - -It's a good idea to write tests to exercise any new features, -or that trigger any bugs that you have fixed to catch regressions. - -You can run the tests with: - -```bash -pytest -v -``` - -in the repo directory. If you want to just run certain tests, -check out the [pytest docs](https://pytest.readthedocs.io/en/latest/usage.html) -for how pytest can be called. -For instance, to test only spawner-related things in the REST API: - -```bash -pytest -v -k spawn jupyterhub/tests/test_api.py -``` - -The tests live in `jupyterhub/tests` and are organized roughly into: - -1. `test_api.py` tests the REST API -2. `test_pages.py` tests loading the HTML pages - -and other collections of tests for different components. -When writing a new test, there should usually be a test of -similar functionality already written and related tests should -be added nearby. -When in doubt, feel free to ask. - -TODO: describe some details about fixtures, etc. +If you need some help, feel free to ask on [Gitter](https://gitter.im/jupyterhub/jupyterhub) or [Discourse](https://discourse.jupyter.org/). diff --git a/COPYING.md b/COPYING.md index d62c3724..cd834f89 100644 --- a/COPYING.md +++ b/COPYING.md @@ -24,7 +24,7 @@ software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE +DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER @@ -46,8 +46,8 @@ Jupyter uses a shared copyright model. Each contributor maintains copyright over their contributions to Jupyter. But, it is important to note that these contributions are typically only changes to the repositories. Thus, the Jupyter source code, in its entirety is not the copyright of any single person or -institution. Instead, it is the collective copyright of the entire Jupyter -Development Team. If individual contributors want to maintain a record of what +institution. Instead, it is the collective copyright of the entire Jupyter +Development Team. If individual contributors want to maintain a record of what changes/contributions they have specific copyright on, they should indicate their copyright in the commit message of the change, when they commit the change to one of the Jupyter repositories. diff --git a/Dockerfile b/Dockerfile index 9277cdc6..e8060ecb 100644 --- a/Dockerfile +++ b/Dockerfile @@ -21,40 +21,83 @@ # your jupyterhub_config.py will be added automatically # from your docker directory. 
-FROM ubuntu:18.04 -LABEL maintainer="Jupyter Project " +ARG BASE_IMAGE=ubuntu:22.04 +FROM $BASE_IMAGE AS builder + +USER root -# install nodejs, utf8 locale, set CDN because default httpredir is unreliable ENV DEBIAN_FRONTEND noninteractive -RUN apt-get -y update && \ - apt-get -y upgrade && \ - apt-get -y install wget git bzip2 && \ - apt-get purge && \ - apt-get clean && \ - rm -rf /var/lib/apt/lists/* -ENV LANG C.UTF-8 +RUN apt-get update \ + && apt-get install -yq --no-install-recommends \ + build-essential \ + ca-certificates \ + locales \ + python3-dev \ + python3-pip \ + python3-pycurl \ + python3-venv \ + nodejs \ + npm \ + && apt-get clean \ + && rm -rf /var/lib/apt/lists/* -# install Python + NodeJS with conda -RUN wget -q https://repo.continuum.io/miniconda/Miniconda3-4.5.11-Linux-x86_64.sh -O /tmp/miniconda.sh && \ - echo 'e1045ee415162f944b6aebfe560b8fee */tmp/miniconda.sh' | md5sum -c - && \ - bash /tmp/miniconda.sh -f -b -p /opt/conda && \ - /opt/conda/bin/conda install --yes -c conda-forge \ - python=3.6 sqlalchemy tornado jinja2 traitlets requests pip pycurl \ - nodejs configurable-http-proxy && \ - /opt/conda/bin/pip install --upgrade pip && \ - rm /tmp/miniconda.sh -ENV PATH=/opt/conda/bin:$PATH +RUN python3 -m pip install --upgrade setuptools pip build wheel +RUN npm install --global yarn -ADD . /src/jupyterhub +# copy everything except whats in .dockerignore, its a +# compromise between needing to rebuild and maintaining +# what needs to be part of the build +COPY . /src/jupyterhub/ WORKDIR /src/jupyterhub -RUN pip install . && \ - rm -rf $PWD ~/.cache ~/.npm +# Build client component packages (they will be copied into ./share and +# packaged with the built wheel.) 
+RUN python3 -m build --wheel +RUN python3 -m pip wheel --wheel-dir wheelhouse dist/*.whl + + +FROM $BASE_IMAGE + +USER root + +ENV DEBIAN_FRONTEND=noninteractive + +RUN apt-get update \ + && apt-get install -yq --no-install-recommends \ + ca-certificates \ + curl \ + gnupg \ + locales \ + python3-pip \ + python3-pycurl \ + nodejs \ + npm \ + && apt-get clean \ + && rm -rf /var/lib/apt/lists/* + +ENV SHELL=/bin/bash \ + LC_ALL=en_US.UTF-8 \ + LANG=en_US.UTF-8 \ + LANGUAGE=en_US.UTF-8 + +RUN locale-gen $LC_ALL + +# always make sure pip is up to date! +RUN python3 -m pip install --no-cache --upgrade setuptools pip + +RUN npm install -g configurable-http-proxy@^4.2.0 \ + && rm -rf ~/.npm + +# install the wheels we built in the first stage +COPY --from=builder /src/jupyterhub/wheelhouse /tmp/wheelhouse +RUN python3 -m pip install --no-cache /tmp/wheelhouse/* RUN mkdir -p /srv/jupyterhub/ WORKDIR /srv/jupyterhub/ + EXPOSE 8000 +LABEL maintainer="Jupyter Project " LABEL org.jupyter.service="jupyterhub" CMD ["jupyterhub"] diff --git a/MANIFEST.in b/MANIFEST.in index 8b6e380e..9c46d79b 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -8,6 +8,7 @@ include *requirements.txt include Dockerfile graft onbuild +graft jsx graft jupyterhub graft scripts graft share @@ -18,6 +19,10 @@ graft ci graft docs prune docs/node_modules +# Intermediate javascript files +prune jsx/node_modules +prune jsx/build + # prune some large unused files from components prune share/jupyterhub/static/components/bootstrap/dist/css exclude share/jupyterhub/static/components/bootstrap/dist/fonts/*.svg diff --git a/README.md b/README.md index 9e9c504f..4ca2fde9 100644 --- a/README.md +++ b/README.md @@ -6,27 +6,37 @@ **[License](#license)** | **[Help and Resources](#help-and-resources)** +--- + +Please note that this repository is participating in a study into the sustainability of open source projects. Data will be gathered about this repository for approximately the next 12 months, starting from 2021-06-11. 
+ +Data collected will include the number of contributors, number of PRs, time taken to close/merge these PRs, and issues closed. + +For more information, please visit +[our informational page](https://sustainable-open-science-and-software.github.io/) or download our [participant information sheet](https://sustainable-open-science-and-software.github.io/assets/PIS_sustainable_software.pdf). + +--- # [JupyterHub](https://github.com/jupyterhub/jupyterhub) - -[![PyPI](https://img.shields.io/pypi/v/jupyterhub.svg)](https://pypi.python.org/pypi/jupyterhub) -[![Documentation Status](https://readthedocs.org/projects/jupyterhub/badge/?version=latest)](https://jupyterhub.readthedocs.org/en/latest/?badge=latest) -[![Build Status](https://travis-ci.org/jupyterhub/jupyterhub.svg?branch=master)](https://travis-ci.org/jupyterhub/jupyterhub) -[![Circle CI](https://circleci.com/gh/jupyterhub/jupyterhub.svg?style=shield&circle-token=b5b65862eb2617b9a8d39e79340b0a6b816da8cc)](https://circleci.com/gh/jupyterhub/jupyterhub) -[![codecov.io](https://codecov.io/github/jupyterhub/jupyterhub/coverage.svg?branch=master)](https://codecov.io/github/jupyterhub/jupyterhub?branch=master) -[![GitHub](https://img.shields.io/badge/issue_tracking-github-blue.svg)](https://github.com/jupyterhub/jupyterhub/issues) -[![Discourse](https://img.shields.io/badge/help_forum-discourse-blue.svg)](https://discourse.jupyter.org/c/jupyterhub) -[![Gitter](https://img.shields.io/badge/social_chat-gitter-blue.svg)](https://gitter.im/jupyterhub/jupyterhub) +[![Latest PyPI version](https://img.shields.io/pypi/v/jupyterhub?logo=pypi)](https://pypi.python.org/pypi/jupyterhub) +[![Latest conda-forge version](https://img.shields.io/conda/vn/conda-forge/jupyterhub?logo=conda-forge)](https://anaconda.org/conda-forge/jupyterhub) +[![Documentation build status](https://img.shields.io/readthedocs/jupyterhub?logo=read-the-docs)](https://jupyterhub.readthedocs.org/en/latest/) +[![GitHub Workflow Status - 
Test](https://img.shields.io/github/workflow/status/jupyterhub/jupyterhub/Test?logo=github&label=tests)](https://github.com/jupyterhub/jupyterhub/actions) +[![DockerHub build status](https://img.shields.io/docker/build/jupyterhub/jupyterhub?logo=docker&label=build)](https://hub.docker.com/r/jupyterhub/jupyterhub/tags) +[![Test coverage of code](https://codecov.io/gh/jupyterhub/jupyterhub/branch/main/graph/badge.svg)](https://codecov.io/gh/jupyterhub/jupyterhub) +[![GitHub](https://img.shields.io/badge/issue_tracking-github-blue?logo=github)](https://github.com/jupyterhub/jupyterhub/issues) +[![Discourse](https://img.shields.io/badge/help_forum-discourse-blue?logo=discourse)](https://discourse.jupyter.org/c/jupyterhub) +[![Gitter](https://img.shields.io/badge/social_chat-gitter-blue?logo=gitter)](https://gitter.im/jupyterhub/jupyterhub) With [JupyterHub](https://jupyterhub.readthedocs.io) you can create a -**multi-user Hub** which spawns, manages, and proxies multiple instances of the +**multi-user Hub** that spawns, manages, and proxies multiple instances of the single-user [Jupyter notebook](https://jupyter-notebook.readthedocs.io) server. [Project Jupyter](https://jupyter.org) created JupyterHub to support many users. The Hub can offer notebook servers to a class of students, a corporate -data science workgroup, a scientific research project, or a high performance +data science workgroup, a scientific research project, or a high-performance computing group. ## Technical overview @@ -40,38 +50,32 @@ Three main actors make up JupyterHub: Basic principles for operation are: - Hub launches a proxy. -- Proxy forwards all requests to Hub by default. -- Hub handles login, and spawns single-user servers on demand. -- Hub configures proxy to forward url prefixes to the single-user notebook +- The Proxy forwards all requests to Hub by default. +- Hub handles login and spawns single-user servers on demand. 
+- Hub configures proxy to forward URL prefixes to the single-user notebook servers. JupyterHub also provides a -[REST API](http://petstore.swagger.io/?url=https://raw.githubusercontent.com/jupyter/jupyterhub/master/docs/rest-api.yml#/default) +[REST API][] for administration of the Hub and its users. -## Installation +[rest api]: https://jupyterhub.readthedocs.io/en/latest/reference/rest-api.html +## Installation ### Check prerequisites - A Linux/Unix based system -- [Python](https://www.python.org/downloads/) 3.5 or greater +- [Python](https://www.python.org/downloads/) 3.6 or greater - [nodejs/npm](https://www.npmjs.com/) - * If you are using **`conda`**, the nodejs and npm dependencies will be installed for + - If you are using **`conda`**, the nodejs and npm dependencies will be installed for you by conda. - * If you are using **`pip`**, install a recent version of + - If you are using **`pip`**, install a recent version (at least 12.0) of [nodejs/npm](https://docs.npmjs.com/getting-started/installing-node). - For example, install it on Linux (Debian/Ubuntu) using: - - ``` - sudo apt-get install npm nodejs-legacy - ``` - - The `nodejs-legacy` package installs the `node` executable and is currently - required for npm to work on Debian/Ubuntu. +- If using the default PAM Authenticator, a [pluggable authentication module (PAM)](https://en.wikipedia.org/wiki/Pluggable_authentication_module). 
- TLS certificate and key for HTTPS communication - Domain name @@ -85,12 +89,11 @@ To install JupyterHub along with its dependencies including nodejs/npm: conda install -c conda-forge jupyterhub ``` -If you plan to run notebook servers locally, install the Jupyter notebook -or JupyterLab: +If you plan to run notebook servers locally, install JupyterLab or Jupyter notebook: ```bash -conda install notebook conda install jupyterlab +conda install notebook ``` #### Using `pip` @@ -99,13 +102,13 @@ JupyterHub can be installed with `pip`, and the proxy with `npm`: ```bash npm install -g configurable-http-proxy -python3 -m pip install jupyterhub +python3 -m pip install jupyterhub ``` -If you plan to run notebook servers locally, you will need to install the -[Jupyter notebook](https://jupyter.readthedocs.io/en/latest/install.html) -package: +If you plan to run notebook servers locally, you will need to install +[JupyterLab or Jupyter notebook](https://jupyter.readthedocs.io/en/latest/install.html): + python3 -m pip install --upgrade jupyterlab python3 -m pip install --upgrade notebook ### Run the Hub server @@ -114,13 +117,12 @@ To start the Hub server, run the command: jupyterhub -Visit `https://localhost:8000` in your browser, and sign in with your unix -PAM credentials. +Visit `http://localhost:8000` in your browser, and sign in with your system username and password. -*Note*: To allow multiple users to sign into the server, you will need to -run the `jupyterhub` command as a *privileged user*, such as root. +_Note_: To allow multiple users to sign in to the server, you will need to +run the `jupyterhub` command as a _privileged user_, such as root. The [wiki](https://github.com/jupyterhub/jupyterhub/wiki/Using-sudo-to-run-JupyterHub-without-root-privileges) -describes how to run the server as a *less privileged user*, which requires +describes how to run the server as a _less privileged user_, which requires more configuration of the system. 
## Configuration @@ -139,7 +141,7 @@ To generate a default config file with settings and descriptions: ### Start the Hub -To start the Hub on a specific url and port ``10.0.1.2:443`` with **https**: +To start the Hub on a specific url and port `10.0.1.2:443` with **https**: jupyterhub --ip 10.0.1.2 --port 443 --ssl-key my_ssl.key --ssl-cert my_ssl.cert @@ -201,7 +203,7 @@ These accounts will be used for authentication in JupyterHub's default configura ## Contributing If you would like to contribute to the project, please read our -[contributor documentation](http://jupyter.readthedocs.io/en/latest/contributor/content-contributor.html) +[contributor documentation](https://jupyter.readthedocs.io/en/latest/contributing/content-contributor.html) and the [`CONTRIBUTING.md`](CONTRIBUTING.md). The `CONTRIBUTING.md` file explains how to set up a development installation, how to run the test suite, and how to contribute to documentation. @@ -228,20 +230,20 @@ docker container or Linux VM. We use a shared copyright model that enables all contributors to maintain the copyright on their contributions. -All code is licensed under the terms of the revised BSD license. +All code is licensed under the terms of the [revised BSD license](./COPYING.md). ## Help and resources -We encourage you to ask questions on the [Jupyter mailing list](https://groups.google.com/forum/#!forum/jupyter). -To participate in development discussions or get help, talk with us on -our JupyterHub [Gitter](https://gitter.im/jupyterhub/jupyterhub) channel. +We encourage you to ask questions and share ideas on the [Jupyter community forum](https://discourse.jupyter.org/). +You can also talk with us on our JupyterHub [Gitter](https://gitter.im/jupyterhub/jupyterhub) channel. 
- [Reporting Issues](https://github.com/jupyterhub/jupyterhub/issues) - [JupyterHub tutorial](https://github.com/jupyterhub/jupyterhub-tutorial) - [Documentation for JupyterHub](https://jupyterhub.readthedocs.io/en/latest/) | [PDF (latest)](https://media.readthedocs.org/pdf/jupyterhub/latest/jupyterhub.pdf) | [PDF (stable)](https://media.readthedocs.org/pdf/jupyterhub/stable/jupyterhub.pdf) -- [Documentation for JupyterHub's REST API](http://petstore.swagger.io/?url=https://raw.githubusercontent.com/jupyter/jupyterhub/master/docs/rest-api.yml#/default) +- [Documentation for JupyterHub's REST API][rest api] - [Documentation for Project Jupyter](http://jupyter.readthedocs.io/en/latest/index.html) | [PDF](https://media.readthedocs.org/pdf/jupyter/latest/jupyter.pdf) - [Project Jupyter website](https://jupyter.org) +- [Project Jupyter community](https://jupyter.org/community) JupyterHub follows the Jupyter [Community Guides](https://jupyter.readthedocs.io/en/latest/community/content-community.html). diff --git a/RELEASE.md b/RELEASE.md new file mode 100644 index 00000000..b00a089e --- /dev/null +++ b/RELEASE.md @@ -0,0 +1,55 @@ +# How to make a release + +`jupyterhub` is a package available on [PyPI][] and [conda-forge][]. +These are instructions on how to make a release. + +## Pre-requisites + +- Push rights to [jupyterhub/jupyterhub][] +- Push rights to [conda-forge/jupyterhub-feedstock][] + +## Steps to make a release + +1. Create a PR updating `docs/source/changelog.md` with [github-activity][] and + continue only when its merged. + + ```shell + pip install github-activity + + github-activity --heading-level=3 jupyterhub/jupyterhub + ``` + +1. Checkout main and make sure it is up to date. + + ```shell + git checkout main + git fetch origin main + git reset --hard origin/main + ``` + +1. Update the version, make commits, and push a git tag with `tbump`. 
+ + ```shell + pip install tbump + tbump --dry-run ${VERSION} + + tbump ${VERSION} + ``` + + Following this, the [CI system][] will build and publish a release. + +1. Reset the version back to dev, e.g. `2.1.0.dev` after releasing `2.0.0` + + ```shell + tbump --no-tag ${NEXT_VERSION}.dev + ``` + +1. Following the release to PyPI, an automated PR should arrive to + [conda-forge/jupyterhub-feedstock][] with instructions. + +[pypi]: https://pypi.org/project/jupyterhub/ +[conda-forge]: https://anaconda.org/conda-forge/jupyterhub +[jupyterhub/jupyterhub]: https://github.com/jupyterhub/jupyterhub +[conda-forge/jupyterhub-feedstock]: https://github.com/conda-forge/jupyterhub-feedstock +[github-activity]: https://github.com/executablebooks/github-activity +[ci system]: https://github.com/jupyterhub/jupyterhub/actions/workflows/release.yml diff --git a/SECURITY.md b/SECURITY.md new file mode 100644 index 00000000..10d32e23 --- /dev/null +++ b/SECURITY.md @@ -0,0 +1,5 @@ +# Reporting a Vulnerability + +If you believe you’ve found a security vulnerability in a Jupyter +project, please report it to security@ipython.org. If you prefer to +encrypt your security reports, you can use [this PGP public key](https://jupyter-notebook.readthedocs.io/en/stable/_downloads/1d303a645f2505a8fd283826fafc9908/ipython_security.asc). 
diff --git a/bower-lite b/bower-lite index 5276b6c8..f6b176bd 100755 --- a/bower-lite +++ b/bower-lite @@ -29,5 +29,5 @@ dependencies = package_json['dependencies'] for dep in dependencies: src = join(node_modules, dep) dest = join(components, dep) - print("%s -> %s" % (src, dest)) + print(f"{src} -> {dest}") shutil.copytree(src, dest) diff --git a/ci/check_installed_data.py b/ci/check_installed_data.py new file mode 100755 index 00000000..726c0408 --- /dev/null +++ b/ci/check_installed_data.py @@ -0,0 +1,20 @@ +#!/usr/bin/env python +# Check that installed package contains everything we expect + + +import os + +from jupyterhub._data import DATA_FILES_PATH + +print("Checking jupyterhub._data") +print(f"DATA_FILES_PATH={DATA_FILES_PATH}") +assert os.path.exists(DATA_FILES_PATH), DATA_FILES_PATH +for subpath in ( + "templates/page.html", + "static/css/style.min.css", + "static/components/jquery/dist/jquery.js", + "static/js/admin-react.js", +): + path = os.path.join(DATA_FILES_PATH, subpath) + assert os.path.exists(path), path +print("OK") diff --git a/ci/check_sdist.py b/ci/check_sdist.py new file mode 100755 index 00000000..b3ff18de --- /dev/null +++ b/ci/check_sdist.py @@ -0,0 +1,27 @@ +#!/usr/bin/env python +# Check that sdist contains everything we expect + +import sys +import tarfile + +expected_files = [ + "docs/requirements.txt", + "jsx/package.json", + "package.json", + "README.md", +] + +assert len(sys.argv) == 2, "Expected one file" +print(f"Checking {sys.argv[1]}") + +tar = tarfile.open(name=sys.argv[1], mode="r:gz") +try: + # Remove leading jupyterhub-VERSION/ + filelist = {f.partition('/')[2] for f in tar.getnames()} +finally: + tar.close() + +for e in expected_files: + assert e in filelist, f"{e} not found" + +print("OK") diff --git a/ci/docker-db.sh b/ci/docker-db.sh index 14e06388..9bb69b73 100755 --- a/ci/docker-db.sh +++ b/ci/docker-db.sh @@ -1,59 +1,60 @@ #!/usr/bin/env bash -# source this file to setup postgres and mysql -# for local testing (as 
similar as possible to docker) +# The goal of this script is to start a database server as a docker container. +# +# Required environment variables: +# - DB: The database server to start, either "postgres" or "mysql". +# +# - PGUSER/PGPASSWORD: For the creation of a postgresql user with associated +# password. set -eu -export MYSQL_HOST=127.0.0.1 -export MYSQL_TCP_PORT=${MYSQL_TCP_PORT:-13306} -export PGHOST=127.0.0.1 -NAME="hub-test-$DB" -DOCKER_RUN="docker run -d --name $NAME" +# Stop and remove any existing database container +DOCKER_CONTAINER="hub-test-$DB" +docker rm -f "$DOCKER_CONTAINER" 2>/dev/null || true -docker rm -f "$NAME" 2>/dev/null || true +# Prepare environment variables to startup and await readiness of either a mysql +# or postgresql server. +if [[ "$DB" == "mysql" ]]; then + # Environment variables can influence both the mysql server in the docker + # container and the mysql client. + # + # ref server: https://hub.docker.com/_/mysql/ + # ref client: https://dev.mysql.com/doc/refman/5.7/en/setting-environment-variables.html + # + DOCKER_RUN_ARGS="-p 3306:3306 --env MYSQL_ALLOW_EMPTY_PASSWORD=1 mysql:5.7" + READINESS_CHECK="mysql --user root --execute \q" +elif [[ "$DB" == "postgres" ]]; then + # Environment variables can influence both the postgresql server in the + # docker container and the postgresql client (psql). + # + # ref server: https://hub.docker.com/_/postgres/ + # ref client: https://www.postgresql.org/docs/9.5/libpq-envars.html + # + # POSTGRES_USER / POSTGRES_PASSWORD will create a user on startup of the + # postgres server, but PGUSER and PGPASSWORD are the environment variables + # used by the postgresql client psql, so we configure the user based on how + # we want to connect. 
+ # + DOCKER_RUN_ARGS="-p 5432:5432 --env "POSTGRES_USER=${PGUSER}" --env "POSTGRES_PASSWORD=${PGPASSWORD}" postgres:9.5" + READINESS_CHECK="psql --command \q" +else + echo '$DB must be mysql or postgres' + exit 1 +fi -case "$DB" in -"mysql") - RUN_ARGS="-e MYSQL_ALLOW_EMPTY_PASSWORD=1 -p $MYSQL_TCP_PORT:3306 mysql:5.7" - CHECK="mysql --host $MYSQL_HOST --port $MYSQL_TCP_PORT --user root -e \q" - ;; -"postgres") - RUN_ARGS="-p 5432:5432 postgres:9.5" - CHECK="psql --user postgres -c \q" - ;; -*) - echo '$DB must be mysql or postgres' - exit 1 -esac - -$DOCKER_RUN $RUN_ARGS +# Start the database server +docker run --detach --name "$DOCKER_CONTAINER" $DOCKER_RUN_ARGS +# Wait for the database server to start echo -n "waiting for $DB " for i in {1..60}; do - if $CHECK; then - echo 'done' - break - else - echo -n '.' - sleep 1 - fi + if $READINESS_CHECK; then + echo 'done' + break + else + echo -n '.' + sleep 1 + fi done -$CHECK - -case "$DB" in -"mysql") - ;; -"postgres") - # create the user - psql --user postgres -c "CREATE USER $PGUSER WITH PASSWORD '$PGPASSWORD';" - ;; -*) -esac - -echo -e " -Set these environment variables: - - export MYSQL_HOST=127.0.0.1 - export MYSQL_TCP_PORT=$MYSQL_TCP_PORT - export PGHOST=127.0.0.1 -" +$READINESS_CHECK diff --git a/ci/init-db.sh b/ci/init-db.sh index dfeb12b4..46672cf6 100755 --- a/ci/init-db.sh +++ b/ci/init-db.sh @@ -1,27 +1,26 @@ #!/usr/bin/env bash -# initialize jupyterhub databases for testing +# The goal of this script is to initialize a running database server with clean +# databases for use during tests. +# +# Required environment variables: +# - DB: The database server to start, either "postgres" or "mysql". 
set -eu -MYSQL="mysql --user root --host $MYSQL_HOST --port $MYSQL_TCP_PORT -e " -PSQL="psql --user postgres -c " - -case "$DB" in -"mysql") - EXTRA_CREATE='CHARACTER SET utf8 COLLATE utf8_general_ci' - SQL="$MYSQL" - ;; -"postgres") - SQL="$PSQL" - ;; -*) - echo '$DB must be mysql or postgres' - exit 1 -esac +# Prepare env vars SQL_CLIENT and EXTRA_CREATE_DATABASE_ARGS +if [[ "$DB" == "mysql" ]]; then + SQL_CLIENT="mysql --user root --execute " + EXTRA_CREATE_DATABASE_ARGS='CHARACTER SET utf8 COLLATE utf8_general_ci' +elif [[ "$DB" == "postgres" ]]; then + SQL_CLIENT="psql --command " +else + echo '$DB must be mysql or postgres' + exit 1 +fi +# Configure a set of databases in the database server for upgrade tests set -x - -for SUFFIX in '' _upgrade_072 _upgrade_081 _upgrade_094; do - $SQL "DROP DATABASE jupyterhub${SUFFIX};" 2>/dev/null || true - $SQL "CREATE DATABASE jupyterhub${SUFFIX} ${EXTRA_CREATE:-};" +for SUFFIX in '' _upgrade_100 _upgrade_122 _upgrade_130 _upgrade_150 _upgrade_211; do + $SQL_CLIENT "DROP DATABASE jupyterhub${SUFFIX};" 2>/dev/null || true + $SQL_CLIENT "CREATE DATABASE jupyterhub${SUFFIX} ${EXTRA_CREATE_DATABASE_ARGS:-};" done diff --git a/demo-image/Dockerfile b/demo-image/Dockerfile new file mode 100644 index 00000000..c5add75e --- /dev/null +++ b/demo-image/Dockerfile @@ -0,0 +1,16 @@ +# Demo JupyterHub Docker image +# +# This should only be used for demo or testing and not as a base image to build on. +# +# It includes the notebook package and it uses the DummyAuthenticator and the SimpleLocalProcessSpawner. +ARG BASE_IMAGE=jupyterhub/jupyterhub-onbuild +FROM ${BASE_IMAGE} + +# Install the notebook package +RUN python3 -m pip install notebook + +# Create a demo user +RUN useradd --create-home demo +RUN chown demo . 
+ +USER demo diff --git a/demo-image/README.md b/demo-image/README.md new file mode 100644 index 00000000..b8ecefb7 --- /dev/null +++ b/demo-image/README.md @@ -0,0 +1,26 @@ +## Demo Dockerfile + +This is a demo JupyterHub Docker image to help you get a quick overview of what +JupyterHub is and how it works. + +It uses the SimpleLocalProcessSpawner to spawn new user servers and +DummyAuthenticator for authentication. +The DummyAuthenticator allows you to log in with any username & password and the +SimpleLocalProcessSpawner allows starting servers without having to create a +local user for each JupyterHub user. + +### Important! + +This should only be used for demo or testing purposes! +It shouldn't be used as a base image to build on. + +### Try it + +1. `cd` to the root of your jupyterhub repo. + +2. Build the demo image with `docker build -t jupyterhub-demo demo-image`. + +3. Run the demo image with `docker run -d -p 8000:8000 jupyterhub-demo`. + +4. Visit http://localhost:8000 and login with any username and password +5. Happy demo-ing :tada:! 
diff --git a/demo-image/jupyterhub_config.py b/demo-image/jupyterhub_config.py new file mode 100644 index 00000000..312df765 --- /dev/null +++ b/demo-image/jupyterhub_config.py @@ -0,0 +1,7 @@ +# Configuration file for jupyterhub-demo + +c = get_config() + +# Use DummyAuthenticator and SimpleSpawner +c.JupyterHub.spawner_class = "simple" +c.JupyterHub.authenticator_class = "dummy" diff --git a/dev-requirements.txt b/dev-requirements.txt deleted file mode 100644 index 54a64481..00000000 --- a/dev-requirements.txt +++ /dev/null @@ -1,17 +0,0 @@ --r requirements.txt -# temporary pin of attrs for jsonschema 0.3.0a1 -# seems to be a pip bug -attrs>=17.4.0 -beautifulsoup4 -codecov -coverage -cryptography -html5lib # needed for beautifulsoup -mock -notebook -pre-commit -pytest-asyncio -pytest-cov -pytest>=3.3 -requests-mock -virtualenv diff --git a/dockerfiles/Dockerfile.alpine b/dockerfiles/Dockerfile.alpine index be7f64b0..79ccd774 100644 --- a/dockerfiles/Dockerfile.alpine +++ b/dockerfiles/Dockerfile.alpine @@ -1,9 +1,14 @@ -FROM python:3.6.3-alpine3.6 - -ARG JUPYTERHUB_VERSION=0.8.1 - -RUN pip3 install --no-cache jupyterhub==${JUPYTERHUB_VERSION} +FROM alpine:3.13 ENV LANG=en_US.UTF-8 +RUN apk add --no-cache \ + python3 \ + py3-pip \ + py3-ruamel.yaml \ + py3-cryptography \ + py3-sqlalchemy + +ARG JUPYTERHUB_VERSION=1.3.0 +RUN pip3 install --no-cache jupyterhub==${JUPYTERHUB_VERSION} USER nobody CMD ["jupyterhub"] diff --git a/dockerfiles/README.md b/dockerfiles/README.md index b17546bd..2ea2db4b 100644 --- a/dockerfiles/README.md +++ b/dockerfiles/README.md @@ -1,20 +1,20 @@ ## What is Dockerfile.alpine -Dockerfile.alpine contains base image for jupyterhub. It does not work independently, but only as part of a full jupyterhub cluster + +Dockerfile.alpine contains base image for jupyterhub. It does not work independently, but only as part of a full jupyterhub cluster ## How to use it? -1. A running configurable-http-proxy, whose API is accessible. +1. 
A running configurable-http-proxy, whose API is accessible. 2. A jupyterhub_config file. 3. Authentication and other libraries required by the specific jupyterhub_config file. - ## Steps to test it outside a cluster -* start configurable-http-proxy in another container -* specify CONFIGPROXY_AUTH_TOKEN env in both containers -* put both containers on the same network (e.g. docker network create jupyterhub; docker run ... --net jupyterhub) -* tell jupyterhub where CHP is (e.g. c.ConfigurableHTTPProxy.api_url = 'http://chp:8001') -* tell jupyterhub not to start the proxy itself (c.ConfigurableHTTPProxy.should_start = False) -* Use dummy authenticator for ease of testing. Update following in jupyterhub_config file - - c.JupyterHub.authenticator_class = 'dummyauthenticator.DummyAuthenticator' - - c.DummyAuthenticator.password = "your strong password" +- start configurable-http-proxy in another container +- specify CONFIGPROXY_AUTH_TOKEN env in both containers +- put both containers on the same network (e.g. docker network create jupyterhub; docker run ... --net jupyterhub) +- tell jupyterhub where CHP is (e.g. c.ConfigurableHTTPProxy.api_url = 'http://chp:8001') +- tell jupyterhub not to start the proxy itself (c.ConfigurableHTTPProxy.should_start = False) +- Use dummy authenticator for ease of testing. 
Update following in jupyterhub_config file + - c.JupyterHub.authenticator_class = 'dummyauthenticator.DummyAuthenticator' + - c.DummyAuthenticator.password = "your strong password" diff --git a/dockerfiles/test.py b/dockerfiles/test.py new file mode 100644 index 00000000..4ce86ebf --- /dev/null +++ b/dockerfiles/test.py @@ -0,0 +1,14 @@ +import os + +from jupyterhub._data import DATA_FILES_PATH + +print(f"DATA_FILES_PATH={DATA_FILES_PATH}") + +for sub_path in ( + "templates", + "static/components", + "static/css/style.min.css", + "static/js/admin-react.js", +): + path = os.path.join(DATA_FILES_PATH, sub_path) + assert os.path.exists(path), path diff --git a/docs/Makefile b/docs/Makefile index b20dc238..5aa434a9 100644 --- a/docs/Makefile +++ b/docs/Makefile @@ -48,19 +48,25 @@ help: @echo " doctest to run all doctests embedded in the documentation (if enabled)" @echo " coverage to run coverage check of the documentation (if enabled)" @echo " spelling to run spell check on documentation" + @echo " metrics to generate documentation for metrics by inspecting the source code" clean: rm -rf $(BUILDDIR)/* -node_modules: package.json - npm install && touch node_modules +metrics: source/reference/metrics.rst -rest-api: source/_static/rest-api/index.html +source/reference/metrics.rst: generate-metrics.py + python3 generate-metrics.py -source/_static/rest-api/index.html: rest-api.yml node_modules - npm run rest-api +scopes: source/rbac/scope-table.md -html: rest-api +source/rbac/scope-table.md: source/rbac/generate-scope-table.py + python3 source/rbac/generate-scope-table.py + +# If the pre-requisites for the html target is updated, also update the Read The +# Docs section in docs/source/conf.py. +# +html: metrics scopes $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." 
diff --git a/docs/environment.yml b/docs/environment.yml deleted file mode 100644 index 2dbfd535..00000000 --- a/docs/environment.yml +++ /dev/null @@ -1,26 +0,0 @@ -# ReadTheDocs uses the `environment.yaml` so make sure to update that as well -# if you change the dependencies of JupyterHub in the various `requirements.txt` -name: jhub_docs -channels: - - conda-forge -dependencies: -- pip -- nodejs -- python=3.6 -- alembic -- jinja2 -- pamela -- requests -- sqlalchemy>=1 -- tornado>=5.0 -- traitlets>=4.1 -- sphinx>=1.7 -- pip: - - entrypoints - - oauthlib>=2.0 - - recommonmark==0.5.0 - - async_generator - - prometheus_client - - attrs>=17.4.0 - - sphinx-copybutton - - alabaster_jupyterhub diff --git a/docs/generate-metrics.py b/docs/generate-metrics.py new file mode 100644 index 00000000..05cc6c35 --- /dev/null +++ b/docs/generate-metrics.py @@ -0,0 +1,56 @@ +import os + +from pytablewriter import RstSimpleTableWriter +from pytablewriter.style import Style + +import jupyterhub.metrics + +HERE = os.path.abspath(os.path.dirname(__file__)) + + +class Generator: + @classmethod + def create_writer(cls, table_name, headers, values): + writer = RstSimpleTableWriter() + writer.table_name = table_name + writer.headers = headers + writer.value_matrix = values + writer.margin = 1 + [writer.set_style(header, Style(align="center")) for header in headers] + return writer + + def _parse_metrics(self): + table_rows = [] + for name in dir(jupyterhub.metrics): + obj = getattr(jupyterhub.metrics, name) + if obj.__class__.__module__.startswith('prometheus_client.'): + for metric in obj.describe(): + table_rows.append([metric.type, metric.name, metric.documentation]) + return table_rows + + def prometheus_metrics(self): + generated_directory = f"{HERE}/source/reference" + if not os.path.exists(generated_directory): + os.makedirs(generated_directory) + + filename = f"{generated_directory}/metrics.rst" + table_name = "" + headers = ["Type", "Name", "Description"] + values = 
self._parse_metrics() + writer = self.create_writer(table_name, headers, values) + + title = "List of Prometheus Metrics" + underline = "============================" + content = f"{title}\n{underline}\n{writer.dumps()}" + with open(filename, 'w') as f: + f.write(content) + print(f"Generated {filename}.") + + +def main(): + doc_generator = Generator() + doc_generator.prometheus_metrics() + + +if __name__ == "__main__": + main() diff --git a/docs/package.json b/docs/package.json deleted file mode 100644 index 5ba8b2b2..00000000 --- a/docs/package.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "name": "jupyterhub-docs-build", - "version": "0.8.0", - "description": "build JupyterHub swagger docs", - "scripts": { - "rest-api": "bootprint openapi ./rest-api.yml source/_static/rest-api" - }, - "author": "", - "license": "BSD-3-Clause", - "devDependencies": { - "bootprint": "^1.0.0", - "bootprint-openapi": "^1.0.0" - } -} diff --git a/docs/requirements.txt b/docs/requirements.txt index 5f8b447a..6748578b 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -1,7 +1,21 @@ -# ReadTheDocs uses the `environment.yaml` so make sure to update that as well -# if you change this file --r ../requirements.txt -alabaster_jupyterhub -recommonmark==0.5.0 +# We install the jupyterhub package to help autodoc-traits inspect it and +# generate documentation. +# +# FIXME: If there is a way for this requirements.txt file to pass a flag that +# the build system can intercept to not build the javascript artifacts, +# then do so. That would mean that installing the documentation can +# avoid needing node/npm installed. +# +--editable . 
+ +autodoc-traits +myst-parser +pre-commit +pydata-sphinx-theme +pytablewriter>=0.56 +ruamel.yaml +sphinx>=4 sphinx-copybutton -sphinx>=1.7 +sphinx-jsonschema +sphinxext-opengraph +sphinxext-rediraffe diff --git a/docs/rest-api.yml b/docs/rest-api.yml deleted file mode 100644 index 0835686e..00000000 --- a/docs/rest-api.yml +++ /dev/null @@ -1,868 +0,0 @@ -# see me at: http://petstore.swagger.io/?url=https://raw.githubusercontent.com/jupyterhub/jupyterhub/master/docs/rest-api.yml#/default -swagger: '2.0' -info: - title: JupyterHub - description: The REST API for JupyterHub - version: 0.9.0dev - license: - name: BSD-3-Clause -schemes: - [http, https] -securityDefinitions: - token: - type: apiKey - name: Authorization - in: header -security: - - token: [] -basePath: /hub/api -produces: - - application/json -consumes: - - application/json -paths: - /: - get: - summary: Get JupyterHub version - description: | - This endpoint is not authenticated for the purpose of clients and user - to identify the JupyterHub version before setting up authentication. - responses: - '200': - description: The JupyterHub version - schema: - type: object - properties: - version: - type: string - description: The version of JupyterHub itself - /info: - get: - summary: Get detailed info about JupyterHub - description: | - Detailed JupyterHub information, including Python version, - JupyterHub's version and executable path, - and which Authenticator and Spawner are active. 
- responses: - '200': - description: Detailed JupyterHub info - schema: - type: object - properties: - version: - type: string - description: The version of JupyterHub itself - python: - type: string - description: The Python version, as returned by sys.version - sys_executable: - type: string - description: The path to sys.executable running JupyterHub - authenticator: - type: object - properties: - class: - type: string - description: The Python class currently active for JupyterHub Authentication - version: - type: string - description: The version of the currently active Authenticator - spawner: - type: object - properties: - class: - type: string - description: The Python class currently active for spawning single-user notebook servers - version: - type: string - description: The version of the currently active Spawner - /users: - get: - summary: List users - responses: - '200': - description: The Hub's user list - schema: - type: array - items: - $ref: '#/definitions/User' - post: - summary: Create multiple users - parameters: - - name: body - in: body - required: true - schema: - type: object - properties: - usernames: - type: array - description: list of usernames to create on the Hub - items: - type: string - admin: - description: whether the created users should be admins - type: boolean - responses: - '201': - description: The users have been created - schema: - type: array - description: The created users - items: - $ref: '#/definitions/User' - /users/{name}: - get: - summary: Get a user by name - parameters: - - name: name - description: username - in: path - required: true - type: string - responses: - '200': - description: The User model - schema: - $ref: '#/definitions/User' - post: - summary: Create a single user - parameters: - - name: name - description: username - in: path - required: true - type: string - responses: - '201': - description: The user has been created - schema: - $ref: '#/definitions/User' - patch: - summary: Modify a user - 
description: Change a user's name or admin status - parameters: - - name: name - description: username - in: path - required: true - type: string - - name: body - in: body - required: true - description: Updated user info. At least one key to be updated (name or admin) is required. - schema: - type: object - properties: - name: - type: string - description: the new name (optional, if another key is updated i.e. admin) - admin: - type: boolean - description: update admin (optional, if another key is updated i.e. name) - responses: - '200': - description: The updated user info - schema: - $ref: '#/definitions/User' - delete: - summary: Delete a user - parameters: - - name: name - description: username - in: path - required: true - type: string - responses: - '204': - description: The user has been deleted - /users/{name}/activity: - post: - summary: - Notify Hub of activity for a given user. - description: - Notify the Hub of activity by the user, - e.g. accessing a service or (more likely) - actively using a server. - parameters: - - name: name - description: username - in: path - required: true - type: string - - name: body - in: body - schema: - type: object - properties: - last_activity: - type: string - format: date-time - description: | - Timestamp of last-seen activity for this user. - Only needed if this is not activity associated - with using a given server. - servers: - description: | - Register activity for specific servers by name. - The keys of this dict are the names of servers. - The default server has an empty name (''). - type: object - properties: - '': - description: | - Activity for a single server. - type: object - required: - - last_activity - properties: - last_activity: - type: string - format: date-time - description: | - Timestamp of last-seen activity on this server. 
- example: - last_activity: '2019-02-06T12:54:14Z' - servers: - '': - last_activity: '2019-02-06T12:54:14Z' - gpu: - last_activity: '2019-02-06T12:54:14Z' - responses: - '401': - $ref: '#/responses/Unauthorized' - '404': - description: No such user - /users/{name}/server: - post: - summary: Start a user's single-user notebook server - parameters: - - name: name - description: username - in: path - required: true - type: string - - name: options - description: | - Spawn options can be passed as a JSON body - when spawning via the API instead of spawn form. - The structure of the options - will depend on the Spawner's configuration. - in: body - required: false - schema: - type: object - responses: - '201': - description: The user's notebook server has started - '202': - description: The user's notebook server has not yet started, but has been requested - delete: - summary: Stop a user's server - parameters: - - name: name - description: username - in: path - required: true - type: string - responses: - '204': - description: The user's notebook server has stopped - '202': - description: The user's notebook server has not yet stopped as it is taking a while to stop - /users/{name}/servers/{server_name}: - post: - summary: Start a user's single-user named-server notebook server - parameters: - - name: name - description: username - in: path - required: true - type: string - - name: server_name - description: name given to a named-server - in: path - required: true - type: string - - name: options - description: | - Spawn options can be passed as a JSON body - when spawning via the API instead of spawn form. - The structure of the options - will depend on the Spawner's configuration. 
- in: body - required: false - schema: - type: object - responses: - '201': - description: The user's notebook named-server has started - '202': - description: The user's notebook named-server has not yet started, but has been requested - delete: - summary: Stop a user's named-server - parameters: - - name: name - description: username - in: path - required: true - type: string - - name: server_name - description: name given to a named-server - in: path - required: true - type: string - - name: remove - description: | - Whether to fully remove the server, rather than just stop it. - Removing a server deletes things like the state of the stopped server. - in: body - required: false - schema: - type: boolean - responses: - '204': - description: The user's notebook named-server has stopped - '202': - description: The user's notebook named-server has not yet stopped as it is taking a while to stop - /users/{name}/tokens: - parameters: - - name: name - description: username - in: path - required: true - type: string - get: - summary: List tokens for the user - responses: - '200': - description: The list of tokens - schema: - type: array - items: - $ref: '#/definitions/Token' - '401': - $ref: '#/responses/Unauthorized' - '404': - description: No such user - post: - summary: Create a new token for the user - parameters: - - name: token_params - in: body - required: false - schema: - type: object - properties: - expires_in: - type: number - description: lifetime (in seconds) after which the requested token will expire. 
- note: - type: string - description: A note attached to the token for future bookkeeping - responses: - '201': - description: The newly created token - schema: - $ref: '#/definitions/Token' - '400': - description: Body must be a JSON dict or empty - /users/{name}/tokens/{token_id}: - parameters: - - name: name - description: username - in: path - required: true - type: string - - name: token_id - in: path - required: true - type: string - get: - summary: Get the model for a token by id - responses: - '200': - description: The info for the new token - schema: - $ref: '#/definitions/Token' - delete: - summary: Delete (revoke) a token by id - responses: - '204': - description: The token has been deleted - /user: - get: - summary: Return authenticated user's model - responses: - '200': - description: The authenticated user's model is returned. - schema: - $ref: '#/definitions/User' - /groups: - get: - summary: List groups - responses: - '200': - description: The list of groups - schema: - type: array - items: - $ref: '#/definitions/Group' - /groups/{name}: - get: - summary: Get a group by name - parameters: - - name: name - description: group name - in: path - required: true - type: string - responses: - '200': - description: The group model - schema: - $ref: '#/definitions/Group' - post: - summary: Create a group - parameters: - - name: name - description: group name - in: path - required: true - type: string - responses: - '201': - description: The group has been created - schema: - $ref: '#/definitions/Group' - delete: - summary: Delete a group - parameters: - - name: name - description: group name - in: path - required: true - type: string - responses: - '204': - description: The group has been deleted - /groups/{name}/users: - post: - summary: Add users to a group - parameters: - - name: name - description: group name - in: path - required: true - type: string - - name: body - in: body - required: true - description: The users to add to the group - schema: - 
type: object - properties: - users: - type: array - description: List of usernames to add to the group - items: - type: string - responses: - '200': - description: The users have been added to the group - schema: - $ref: '#/definitions/Group' - delete: - summary: Remove users from a group - parameters: - - name: name - description: group name - in: path - required: true - type: string - - name: body - in: body - required: true - description: The users to remove from the group - schema: - type: object - properties: - users: - type: array - description: List of usernames to remove from the group - items: - type: string - responses: - '200': - description: The users have been removed from the group - /services: - get: - summary: List services - responses: - '200': - description: The service list - schema: - type: array - items: - $ref: '#/definitions/Service' - /services/{name}: - get: - summary: Get a service by name - parameters: - - name: name - description: service name - in: path - required: true - type: string - responses: - '200': - description: The Service model - schema: - $ref: '#/definitions/Service' - /proxy: - get: - summary: Get the proxy's routing table - description: A convenience alias for getting the routing table directly from the proxy - responses: - '200': - description: Routing table - schema: - type: object - description: configurable-http-proxy routing table (see configurable-http-proxy docs for details) - post: - summary: Force the Hub to sync with the proxy - responses: - '200': - description: Success - patch: - summary: Notify the Hub about a new proxy - description: Notifies the Hub of a new proxy to use. - parameters: - - name: body - in: body - required: true - description: Any values that have changed for the new proxy. All keys are optional. 
- schema: - type: object - properties: - ip: - type: string - description: IP address of the new proxy - port: - type: string - description: Port of the new proxy - protocol: - type: string - description: Protocol of new proxy, if changed - auth_token: - type: string - description: CONFIGPROXY_AUTH_TOKEN for the new proxy - responses: - '200': - description: Success - /authorizations/token: - post: - summary: Request a new API token - description: | - Request a new API token to use with the JupyterHub REST API. - If not already authenticated, username and password can be sent - in the JSON request body. - Logging in via this method is only available when the active Authenticator - accepts passwords (e.g. not OAuth). - parameters: - - name: credentials - in: body - schema: - type: object - properties: - username: - type: string - password: - type: string - responses: - '200': - description: The new API token - schema: - type: object - properties: - token: - type: string - description: The new API token. - '403': - description: The user can not be authenticated. - /authorizations/token/{token}: - get: - summary: Identify a user or service from an API token - parameters: - - name: token - in: path - required: true - type: string - responses: - '200': - description: The user or service identified by the API token - '404': - description: A user or service is not found. - /authorizations/cookie/{cookie_name}/{cookie_value}: - get: - summary: Identify a user from a cookie - description: Used by single-user notebook servers to hand off cookie authentication to the Hub - parameters: - - name: cookie_name - in: path - required: true - type: string - - name: cookie_value - in: path - required: true - type: string - responses: - '200': - description: The user identified by the cookie - schema: - $ref: '#/definitions/User' - '404': - description: A user is not found. 
- /oauth2/authorize: - get: - summary: 'OAuth 2.0 authorize endpoint' - description: | - Redirect users to this URL to begin the OAuth process. - It is not an API endpoint. - parameters: - - name: client_id - description: The client id - in: query - required: true - type: string - - name: response_type - description: The response type (always 'code') - in: query - required: true - type: string - - name: state - description: A state string - in: query - required: false - type: string - - name: redirect_uri - description: The redirect url - in: query - required: true - type: string - responses: - '200': - description: Success - '400': - description: OAuth2Error - /oauth2/token: - post: - summary: Request an OAuth2 token - description: | - Request an OAuth2 token from an authorization code. - This request completes the OAuth process. - consumes: - - application/x-www-form-urlencoded - parameters: - - name: client_id - description: The client id - in: formData - required: true - type: string - - name: client_secret - description: The client secret - in: formData - required: true - type: string - - name: grant_type - description: The grant type (always 'authorization_code') - in: formData - required: true - type: string - - name: code - description: The code provided by the authorization redirect - in: formData - required: true - type: string - - name: redirect_uri - description: The redirect url - in: formData - required: true - type: string - responses: - '200': - description: JSON response including the token - schema: - type: object - properties: - access_token: - type: string - description: The new API token for the user - token_type: - type: string - description: Will always be 'Bearer' - /shutdown: - post: - summary: Shutdown the Hub - parameters: - - name: body - in: body - schema: - type: object - properties: - proxy: - type: boolean - description: Whether the proxy should be shutdown as well (default from Hub config) - servers: - type: boolean - description: 
Whether users' notebook servers should be shutdown as well (default from Hub config) - responses: - '202': - description: Shutdown successful - '400': - description: Unexpeced value for proxy or servers -# Descriptions of common responses -responses: - NotFound: - description: The specified resource was not found - Unauthorized: - description: Authentication/Authorization error -definitions: - User: - type: object - properties: - name: - type: string - description: The user's name - admin: - type: boolean - description: Whether the user is an admin - groups: - type: array - description: The names of groups where this user is a member - items: - type: string - server: - type: string - description: The user's notebook server's base URL, if running; null if not. - pending: - type: string - enum: ["spawn", "stop", null] - description: The currently pending action, if any - last_activity: - type: string - format: date-time - description: Timestamp of last-seen activity from the user - servers: - type: array - description: The active servers for this user. - items: - $ref: '#/definitions/Server' - Server: - type: object - properties: - name: - type: string - description: The server's name. The user's default server has an empty name ('') - ready: - type: boolean - description: | - Whether the server is ready for traffic. - Will always be false when any transition is pending. - pending: - type: string - enum: ["spawn", "stop", null] - description: | - The currently pending action, if any. - A server is not ready if an action is pending. - url: - type: string - description: | - The URL where the server can be accessed - (typically /user/:name/:server.name/). - progress_url: - type: string - description: | - The URL for an event-stream to retrieve events during a spawn. - started: - type: string - format: date-time - description: UTC timestamp when the server was last started. 
- last_activity: - type: string - format: date-time - description: UTC timestamp last-seen activity on this server. - state: - type: object - description: Arbitrary internal state from this server's spawner. Only available on the hub's users list or get-user-by-name method, and only if a hub admin. None otherwise. - Group: - type: object - properties: - name: - type: string - description: The group's name - users: - type: array - description: The names of users who are members of this group - items: - type: string - Service: - type: object - properties: - name: - type: string - description: The service's name - admin: - type: boolean - description: Whether the service is an admin - url: - type: string - description: The internal url where the service is running - prefix: - type: string - description: The proxied URL prefix to the service's url - pid: - type: number - description: The PID of the service process (if managed) - command: - type: array - description: The command used to start the service (if managed) - items: - type: string - info: - type: object - description: | - Additional information a deployment can attach to a service. - JupyterHub does not use this field. - Token: - type: object - properties: - token: - type: string - description: The token itself. Only present in responses to requests for a new token. - id: - type: string - description: The id of the API token. Used for modifying or deleting the token. - user: - type: string - description: The user that owns a token (undefined if owned by a service) - service: - type: string - description: The service that owns the token (undefined of owned by a user) - note: - type: string - description: A note about the token, typically describing what it was created for. - created: - type: string - format: date-time - description: Timestamp when this token was created - expires_at: - type: string - format: date-time - description: Timestamp when this token expires. Null if there is no expiry. 
- last_activity: - type: string - format: date-time - description: | - Timestamp of last-seen activity using this token. - Can be null if token has never been used. diff --git a/docs/source/_static/custom.css b/docs/source/_static/custom.css index 3c232768..56c21625 100644 --- a/docs/source/_static/custom.css +++ b/docs/source/_static/custom.css @@ -1,106 +1,10 @@ -div#helm-chart-schema h2, -div#helm-chart-schema h3, -div#helm-chart-schema h4, -div#helm-chart-schema h5, -div#helm-chart-schema h6 { - font-family: courier new; -} - -h3, h3 ~ * { - margin-left: 3% !important; -} - -h4, h4 ~ * { - margin-left: 6% !important; -} - -h5, h5 ~ * { - margin-left: 9% !important; -} - -h6, h6 ~ * { - margin-left: 12% !important; -} - -h7, h7 ~ * { - margin-left: 15% !important; -} - -img.logo { - width:100% -} - -.right-next { - float: right; - max-width: 45%; - overflow: auto; - text-overflow: ellipsis; - white-space: nowrap; -} - -.right-next::after{ - content: ' »'; -} - -.left-prev { - float: left; - max-width: 45%; - overflow: auto; - text-overflow: ellipsis; - white-space: nowrap; -} - -.left-prev::before{ - content: '« '; -} - -.prev-next-bottom { - margin-top: 3em; -} - -.prev-next-top { - margin-bottom: 1em; -} - -/* Sidebar TOC and headers */ - -div.sphinxsidebarwrapper div { - margin-bottom: .8em; -} -div.sphinxsidebar h3 { - font-size: 1.3em; - padding-top: 0px; - font-weight: 800; - margin-left: 0px !important; -} - -div.sphinxsidebar p.caption { - font-size: 1.2em; - margin-bottom: 0px; - margin-left: 0px !important; - font-weight: 900; - color: #767676; -} - -div.sphinxsidebar ul { - font-size: .8em; - margin-top: 0px; - padding-left: 3%; - margin-left: 0px !important; -} - -div.relations ul { - font-size: 1em; - margin-left: 0px !important; -} - -div#searchbox form { - margin-left: 0px !important; -} - -/* body elements */ -.toctree-wrapper span.caption-text { - color: #767676; - font-style: italic; - font-weight: 300; -} +/* Added to avoid logo being too 
squeezed */ +.navbar-brand { + height: 4rem !important; +} + +/* hide redundant funky-formatted swagger-ui version */ + +.swagger-ui .info .title small { + display: none !important; +} diff --git a/docs/source/_static/images/logo/logo.png b/docs/source/_static/images/logo/logo.png index 7cdfc55b..a7a4e61b 100644 Binary files a/docs/source/_static/images/logo/logo.png and b/docs/source/_static/images/logo/logo.png differ diff --git a/docs/source/_static/rest-api.yml b/docs/source/_static/rest-api.yml new file mode 100644 index 00000000..5b02cdaa --- /dev/null +++ b/docs/source/_static/rest-api.yml @@ -0,0 +1,1469 @@ +openapi: 3.0.3 +# note: 3.1.0 required for requestBody on DELETE +# which we should maybe move away from +info: + title: JupyterHub + description: The REST API for JupyterHub + license: + name: BSD-3-Clause + version: 3.1.0.dev +servers: + - url: /hub/api +security: + - token: [] + - oauth2: + - self +paths: + /: + get: + summary: Get JupyterHub version + description: | + This endpoint is not authenticated for the purpose of clients and user + to identify the JupyterHub version before setting up authentication. + responses: + 200: + description: The JupyterHub version + content: + application/json: + schema: + type: object + properties: + version: + type: string + description: The version of JupyterHub itself + /info: + get: + summary: Get detailed info about JupyterHub + description: | + Detailed JupyterHub information, including Python version, + JupyterHub's version and executable path, + and which Authenticator and Spawner are active. 
+ responses: + 200: + description: Detailed JupyterHub info + content: + application/json: + schema: + type: object + properties: + version: + type: string + description: The version of JupyterHub itself + python: + type: string + description: The Python version, as returned by sys.version + sys_executable: + type: string + description: The path to sys.executable running JupyterHub + authenticator: + type: object + properties: + class: + type: string + description: + The Python class currently active for JupyterHub + Authentication + version: + type: string + description: The version of the currently active Authenticator + spawner: + type: object + properties: + class: + type: string + description: + The Python class currently active for spawning + single-user notebook servers + version: + type: string + description: The version of the currently active Spawner + security: + - oauth2: + - read:hub + /user: + get: + summary: Return authenticated user's model + responses: + 200: + description: | + The authenticated user or service's model is returned + with additional information about the permissions associated with the request token. + content: + application/json: + schema: + $ref: "#/components/schemas/RequestIdentity" + security: + - oauth2: + - read:users + - read:users:name + - read:users:groups + - read:users:activity + - read:servers + - read:roles:users + - admin:auth_state + - admin:server_state + /users: + get: + summary: List users + parameters: + - name: state + in: query + description: | + Return only users who have servers in the given state. + If unspecified, return all users. 
+ + active: all users with any active servers (ready OR pending) + ready: all users who have any ready servers (running, not pending) + inactive: all users who have *no* active servers (complement of active) + + Added in JupyterHub 1.3 + schema: + type: string + enum: + - inactive + - active + - ready + - name: offset + in: query + description: | + Return a number users starting at the given offset. + Can be used with limit to paginate. + If unspecified, return all users. + schema: + type: number + - name: limit + in: query + description: | + Return a finite number of users. + Can be used with offset to paginate. + If unspecified, use api_page_default_limit. + schema: + type: number + - name: include_stopped_servers + in: query + description: | + Include stopped servers in user model(s). + Added in JupyterHub 3.0. + Allows retrieval of information about stopped servers, + such as activity and state fields. + schema: + type: boolean + allowEmptyValue: true + responses: + 200: + description: The Hub's user list + content: + application/json: + schema: + type: array + items: + $ref: "#/components/schemas/User" + security: + - oauth2: + - read:users + - read:users:name + - read:users:groups + - read:users:activity + - read:servers + - read:roles:users + - admin:auth_state + - admin:server_state + post: + summary: Create multiple users + requestBody: + content: + application/json: + schema: + type: object + properties: + usernames: + type: array + description: list of usernames to create on the Hub + items: + type: string + admin: + type: boolean + description: whether the created users should be admins + required: true + responses: + 201: + description: The users have been created + content: + application/json: + schema: + type: array + description: The created users + items: + $ref: "#/components/schemas/User" + security: + - oauth2: + - admin:users + x-codegen-request-body-name: body + /users/{name}: + get: + summary: Get a user by name + parameters: + - name: name + 
in: path + description: username + required: true + schema: + type: string + responses: + 200: + description: The User model + content: + application/json: + schema: + $ref: "#/components/schemas/User" + security: + - oauth2: + - read:users + - read:users:name + - read:users:groups + - read:users:activity + - read:servers + - read:roles:users + - admin:auth_state + - admin:server_state + post: + summary: Create a single user + parameters: + - name: name + in: path + description: username + required: true + schema: + type: string + responses: + 201: + description: The user has been created + content: + application/json: + schema: + $ref: "#/components/schemas/User" + security: + - oauth2: + - admin:users + delete: + summary: Delete a user + parameters: + - name: name + in: path + description: username + required: true + schema: + type: string + responses: + 204: + description: The user has been deleted + content: {} + security: + - oauth2: + - admin:users + patch: + summary: Modify a user + description: Change a user's name or admin status + parameters: + - name: name + in: path + description: username + required: true + schema: + type: string + requestBody: + description: + Updated user info. At least one key to be updated (name or admin) + is required. + content: + application/json: + schema: + type: object + properties: + name: + type: string + description: + the new name (optional, if another key is updated i.e. + admin) + admin: + type: boolean + description: + update admin (optional, if another key is updated i.e. + name) + required: true + responses: + 200: + description: The updated user info + content: + application/json: + schema: + $ref: "#/components/schemas/User" + security: + - oauth2: + - admin:users + x-codegen-request-body-name: body + /users/{name}/activity: + post: + summary: Notify Hub of activity for a given user. + description: + Notify the Hub of activity by the user, e.g. accessing a service + or (more likely) actively using a server. 
+ parameters: + - name: name + in: path + description: username + required: true + schema: + type: string + requestBody: + content: + application/json: + schema: + type: object + properties: + last_activity: + type: string + description: | + Timestamp of last-seen activity for this user. + Only needed if this is not activity associated + with using a given server. + format: date-time + servers: + type: object + properties: + : + required: + - last_activity + type: object + properties: + last_activity: + type: string + description: | + Timestamp of last-seen activity on this server. + format: date-time + description: | + Activity for a single server. + description: | + Register activity for specific servers by name. + The keys of this dict are the names of servers. + The default server has an empty name (''). + example: + last_activity: 2019-02-06T12:54:14Z + servers: + "": + last_activity: 2019-02-06T12:54:14Z + gpu: + last_activity: 2019-02-06T12:54:14Z + required: false + responses: + 401: + description: Authentication/Authorization error + content: {} + 404: + description: No such user + content: {} + security: + - oauth2: + - users:activity + x-codegen-request-body-name: body + /users/{name}/server: + post: + summary: Start a user's single-user notebook server + parameters: + - name: name + in: path + description: username + required: true + schema: + type: string + requestBody: + description: | + Spawn options can be passed as a JSON body + when spawning via the API instead of spawn form. + The structure of the options + will depend on the Spawner's configuration. + The body itself will be available as `user_options` for the + Spawner. 
+ content: + application/json: + schema: + type: object + required: false + responses: + 201: + description: The user's notebook server has started + content: {} + 202: + description: + The user's notebook server has not yet started, but has been + requested + content: {} + security: + - oauth2: + - servers + x-codegen-request-body-name: options + delete: + summary: Stop a user's server + parameters: + - name: name + in: path + description: username + required: true + schema: + type: string + responses: + 202: + description: + The user's notebook server has not yet stopped as it is taking + a while to stop + content: {} + 204: + description: The user's notebook server has stopped + content: {} + security: + - oauth2: + - servers + /users/{name}/servers/{server_name}: + post: + summary: Start a user's single-user named-server notebook server + parameters: + - name: name + in: path + description: username + required: true + schema: + type: string + - name: server_name + in: path + description: | + name given to a named-server. + + Note that depending on your JupyterHub infrastructure there are character size limitations on `server_name`. Default spawner with K8s pod will not allow Jupyter Notebooks to be spawned with a name that contains more than 253 characters (keep in mind that the pod will be spawned with extra characters to identify the user and hub). + required: true + schema: + type: string + requestBody: + description: | + Spawn options can be passed as a JSON body + when spawning via the API instead of spawn form. + The structure of the options + will depend on the Spawner's configuration.
+ content: + application/json: + schema: + type: object + required: false + responses: + 201: + description: The user's notebook named-server has started + content: {} + 202: + description: + The user's notebook named-server has not yet started, but has + been requested + content: {} + security: + - oauth2: + - servers + x-codegen-request-body-name: options + delete: + summary: Stop a user's named server + description: | + To remove the named server in addition to deleting it, + the body may be a JSON dictionary with a boolean `remove` field: + + ```json + {"remove": true} + ``` + parameters: + - name: name + in: path + description: username + required: true + schema: + type: string + - name: server_name + in: path + description: name given to a named-server + required: true + schema: + type: string + + # FIXME: openapi 3.1 is required for requestBody on DELETE + # we probably shouldn't have request bodies on DELETE + # requestBody: + # content: + # application/json: + # schema: + # type: object + # properties: + # remove: + # type: boolean + # description: | + # Whether to fully remove the server, rather than just stop it. + # Removing a server deletes things like the state of the stopped server. + # Default: false. 
+ # required: false + responses: + 202: + description: + The user's notebook named-server has not yet stopped as it + is taking a while to stop + content: {} + 204: + description: The user's notebook named-server has stopped + content: {} + security: + - oauth2: + - servers + # x-codegen-request-body-name: body + /users/{name}/tokens: + get: + summary: List tokens for the user + parameters: + - name: name + in: path + description: username + required: true + schema: + type: string + responses: + 200: + description: The list of tokens + content: + application/json: + schema: + type: array + items: + $ref: "#/components/schemas/Token" + 401: + description: Authentication/Authorization error + content: {} + 404: + description: No such user + content: {} + security: + - oauth2: + - read:tokens + post: + summary: Create a new token for the user + parameters: + - name: name + in: path + description: username + required: true + schema: + type: string + requestBody: + content: + application/json: + schema: + type: object + properties: + expires_in: + type: number + description: + lifetime (in seconds) after which the requested token + will expire. + note: + type: string + description: A note attached to the token for future bookkeeping + roles: + type: array + description: | + A list of role names from which to derive scopes. + This is a shortcut for assigning collections of scopes; + Tokens do not retain role assignment. + (Changed in 3.0: roles are immediately resolved to scopes + instead of stored on roles.) + items: + type: string + scopes: + type: array + description: | + A list of scopes that the token should have. + (new in JupyterHub 3.0). 
+ items: + type: string + required: false + responses: + 201: + description: The newly created token + content: + application/json: + schema: + $ref: "#/components/schemas/Token" + 400: + description: Body must be a JSON dict or empty + content: {} + 403: + description: Requested role does not exist + content: {} + security: + - oauth2: + - tokens + x-codegen-request-body-name: token_params + /users/{name}/tokens/{token_id}: + get: + summary: Get the model for a token by id + parameters: + - name: name + in: path + description: username + required: true + schema: + type: string + - name: token_id + in: path + required: true + schema: + type: string + responses: + 200: + description: The info for the new token + content: + application/json: + schema: + $ref: "#/components/schemas/Token" + security: + - oauth2: + - read:tokens + delete: + summary: Delete (revoke) a token by id + parameters: + - name: name + in: path + description: username + required: true + schema: + type: string + - name: token_id + in: path + required: true + schema: + type: string + responses: + 204: + description: The token has been deleted + content: {} + security: + - oauth2: + - tokens + /groups: + get: + summary: List groups + parameters: + - name: offset + in: query + description: | + Return a number of groups starting at the specified offset. + Can be used with limit to paginate. + If unspecified, return all groups. + schema: + type: number + - name: limit + in: query + description: | + Return a finite number of groups. + Can be used with offset to paginate. + If unspecified, use api_page_default_limit. 
+ schema: + type: number + responses: + 200: + description: The list of groups + content: + application/json: + schema: + type: array + items: + $ref: "#/components/schemas/Group" + security: + - oauth2: + - read:groups + - read:groups:name + - read:roles:groups + /groups/{name}: + get: + summary: Get a group by name + parameters: + - name: name + in: path + description: group name + required: true + schema: + type: string + responses: + 200: + description: The group model + content: + application/json: + schema: + $ref: "#/components/schemas/Group" + security: + - oauth2: + - read:groups + - read:groups:name + - read:roles:groups + post: + summary: Create a group + parameters: + - name: name + in: path + description: group name + required: true + schema: + type: string + responses: + 201: + description: The group has been created + content: + application/json: + schema: + $ref: "#/components/schemas/Group" + security: + - oauth2: + - admin:groups + delete: + summary: Delete a group + parameters: + - name: name + in: path + description: group name + required: true + schema: + type: string + responses: + 204: + description: The group has been deleted + content: {} + security: + - oauth2: + - admin:groups + /groups/{name}/users: + post: + summary: Add users to a group + parameters: + - name: name + in: path + description: group name + required: true + schema: + type: string + requestBody: + description: The users to add to the group + content: + application/json: + schema: + type: object + properties: + users: + type: array + description: List of usernames to add to the group + items: + type: string + required: true + responses: + 200: + description: The users have been added to the group + content: + application/json: + schema: + $ref: "#/components/schemas/Group" + security: + - oauth2: + - groups + x-codegen-request-body-name: body + delete: + summary: | + Remove users from a group + description: | + Body should be a JSON dictionary + where `users` is a list of 
usernames to remove from the groups. + + ```json + { + "users": ["name1", "name2"] + } + ``` + + parameters: + - name: name + in: path + description: group name + required: true + schema: + type: string + # requestBody: + # description: The users to remove from the group + # content: + # application/json: + # schema: + # type: object + # properties: + # users: + # type: array + # description: List of usernames to remove from the group + # items: + # type: string + # required: true + responses: + 200: + description: The users have been removed from the group + content: {} + security: + - oauth2: + - groups + x-codegen-request-body-name: body + /services: + get: + summary: List services + responses: + 200: + description: The service list + content: + application/json: + schema: + type: array + items: + $ref: "#/components/schemas/Service" + security: + - oauth2: + - read:services + - read:services:name + - read:roles:services + /services/{name}: + get: + summary: Get a service by name + parameters: + - name: name + in: path + description: service name + required: true + schema: + type: string + responses: + 200: + description: The Service model + content: + application/json: + schema: + $ref: "#/components/schemas/Service" + security: + - oauth2: + - read:services + - read:services:name + - read:roles:services + /proxy: + get: + summary: Get the proxy's routing table + description: + A convenience alias for getting the routing table directly from + the proxy + parameters: + - name: offset + in: query + description: | + Return a number of routes starting at the given offset. + Can be used with limit to paginate. + If unspecified, return all routes. + schema: + type: number + - name: limit + in: query + description: | + Return a finite number of routes. + Can be used with offset to paginate. 
+ If unspecified, use api_page_default_limit + schema: + type: number + responses: + 200: + description: Routing table + content: + application/json: + schema: + type: object + description: + configurable-http-proxy routing table (see configurable-http-proxy + docs for details) + security: + - oauth2: + - proxy + post: + summary: Force the Hub to sync with the proxy + responses: + 200: + description: Success + content: {} + security: + - oauth2: + - proxy + patch: + summary: Notify the Hub about a new proxy + description: Notifies the Hub of a new proxy to use. + requestBody: + description: + Any values that have changed for the new proxy. All keys are + optional. + content: + application/json: + schema: + type: object + properties: + ip: + type: string + description: IP address of the new proxy + port: + type: string + description: Port of the new proxy + protocol: + type: string + description: Protocol of new proxy, if changed + auth_token: + type: string + description: CONFIGPROXY_AUTH_TOKEN for the new proxy + required: true + responses: + 200: + description: Success + content: {} + security: + - oauth2: + - proxy + x-codegen-request-body-name: body + /authorizations/token: + post: + summary: Request a new API token + description: | + Request a new API token to use with the JupyterHub REST API. + If not already authenticated, username and password can be sent + in the JSON request body. + Logging in via this method is only available when the active Authenticator + accepts passwords (e.g. not OAuth). + requestBody: + content: + application/json: + schema: + type: object + properties: + username: + type: string + password: + type: string + required: false + responses: + 200: + description: The new API token + content: + application/json: + schema: + type: object + properties: + token: + type: string + description: The new API token. + 403: + description: The user can not be authenticated. 
+ content: {} + security: + - oauth2: + - tokens + x-codegen-request-body-name: credentials + /authorizations/token/{token}: + get: + summary: Identify a user or service from an API token + parameters: + - name: token + in: path + required: true + schema: + type: string + responses: + 200: + description: The user or service identified by the API token + content: {} + 404: + description: A user or service is not found. + content: {} + security: + - oauth2: + - (no_scope) + /authorizations/cookie/{cookie_name}/{cookie_value}: + get: + summary: Identify a user from a cookie + description: + Used by single-user notebook servers to hand off cookie authentication + to the Hub + parameters: + - name: cookie_name + in: path + required: true + schema: + type: string + - name: cookie_value + in: path + required: true + schema: + type: string + responses: + 200: + description: The user identified by the cookie + content: + application/json: + schema: + $ref: "#/components/schemas/User" + 404: + description: A user is not found. + content: {} + deprecated: true + /oauth2/authorize: + get: + summary: OAuth 2.0 authorize endpoint + description: | + Redirect users to this URL to begin the OAuth process. + It is not an API endpoint. + parameters: + - name: client_id + in: query + description: The client id + required: true + schema: + type: string + - name: response_type + in: query + description: The response type (always 'code') + required: true + schema: + type: string + - name: state + in: query + description: A state string + schema: + type: string + - name: redirect_uri + in: query + description: The redirect url + required: true + schema: + type: string + responses: + 200: + description: Success + content: {} + 400: + description: OAuth2Error + content: {} + /oauth2/token: + post: + summary: Request an OAuth2 token + description: | + Request an OAuth2 token from an authorization code. + This request completes the OAuth process. 
+ requestBody: + content: + application/x-www-form-urlencoded: + schema: + required: + - client_id + - client_secret + - code + - grant_type + - redirect_uri + properties: + client_id: + type: string + description: The client id + client_secret: + type: string + description: The client secret + grant_type: + type: string + description: The grant type (always 'authorization_code') + code: + type: string + description: The code provided by the authorization redirect + redirect_uri: + type: string + description: The redirect url + required: true + responses: + 200: + description: JSON response including the token + content: + application/json: + schema: + type: object + properties: + access_token: + type: string + description: The new API token for the user + token_type: + type: string + description: Will always be 'Bearer' + /shutdown: + post: + summary: Shutdown the Hub + requestBody: + content: + application/json: + schema: + type: object + properties: + proxy: + type: boolean + description: + Whether the proxy should be shutdown as well (default + from Hub config) + servers: + type: boolean + description: + Whether users' notebook servers should be shutdown + as well (default from Hub config) + required: false + responses: + 202: + description: Shutdown successful + content: {} + 400: + description: Unexpected value for proxy or servers + content: {} + security: + - oauth2: + - shutdown + x-codegen-request-body-name: body +components: + schemas: + User: + type: object + properties: + name: + type: string + description: The user's name + admin: + type: boolean + description: Whether the user is an admin + roles: + type: array + description: The names of roles this user has + items: + type: string + groups: + type: array + description: The names of groups where this user is a member + items: + type: string + server: + type: string + description: + The user's notebook server's base URL, if running; null if + not.
+ pending: + type: string + description: The currently pending action, if any + enum: + - spawn + - stop + last_activity: + type: string + description: Timestamp of last-seen activity from the user + format: date-time + servers: + type: array + description: | + The servers for this user. + By default: only includes _active_ servers. + Changed in 3.0: if `?include_stopped_servers` parameter is specified, + stopped servers will be included as well. + items: + $ref: "#/components/schemas/Server" + auth_state: + type: object + properties: {} + description: | + Authentication state of the user. Only available with admin:users:auth_state + scope. None otherwise. + Server: + type: object + properties: + name: + type: string + description: + The server's name. The user's default server has an empty name + ('') + ready: + type: boolean + description: | + Whether the server is ready for traffic. + Will always be false when any transition is pending. + stopped: + type: boolean + description: | + Whether the server is stopped. Added in JupyterHub 3.0, + and only useful when using the `?include_stopped_servers` + request parameter. + Now that stopped servers may be included (since JupyterHub 3.0), + this is the simplest way to select stopped servers. + Always equivalent to `not (ready or pending)`. + pending: + type: string + description: | + The currently pending action, if any. + A server is not ready if an action is pending. + enum: + - spawn + - stop + url: + type: string + description: | + The URL where the server can be accessed + (typically /user/:name/:server.name/). + progress_url: + type: string + description: | + The URL for an event-stream to retrieve events during a spawn. + started: + type: string + description: UTC timestamp when the server was last started. + format: date-time + last_activity: + type: string + description: UTC timestamp last-seen activity on this server. 
+ format: date-time + state: + type: object + properties: {} + description: + Arbitrary internal state from this server's spawner. Only available + on the hub's users list or get-user-by-name method, and only with admin:users:server_state + scope. None otherwise. + user_options: + type: object + properties: {} + description: + User specified options for the user's spawned instance of a + single-user server. + RequestIdentity: + description: | + The model for the entity making the request. + Extends User or Service model to add information about the specific credentials (e.g. session or token-authorised scopes). + allOf: + - type: object + oneOf: + - $ref: "#/components/schemas/User" + - $ref: "#/components/schemas/Service" + discriminator: + propertyName: kind + mapping: + user: "#/components/schemas/User" + service: "#/components/schemas/Service" + - type: object + properties: + session_id: + type: string + nullable: true + description: | + The session id associated with the request's OAuth token, if any. + null, if the request token not associated with a session id. + + Added in 2.0. + scopes: + type: array + description: | + The list of all expanded scopes the request credentials have access to. + + Added in 2.0. 
+ items: + type: string + example: + - "read:users" + - "access:servers!user=name" + Group: + type: object + properties: + name: + type: string + description: The group's name + users: + type: array + description: The names of users who are members of this group + items: + type: string + roles: + type: array + description: The names of roles this group has + items: + type: string + Service: + type: object + properties: + name: + type: string + description: The service's name + admin: + type: boolean + description: Whether the service is an admin + roles: + type: array + description: The names of roles this service has + items: + type: string + url: + type: string + description: The internal url where the service is running + prefix: + type: string + description: The proxied URL prefix to the service's url + pid: + type: number + description: The PID of the service process (if managed) + command: + type: array + description: The command used to start the service (if managed) + items: + type: string + info: + type: object + properties: {} + description: | + Additional information a deployment can attach to a service. + JupyterHub does not use this field. + Token: + type: object + properties: + token: + type: string + description: + The token itself. Only present in responses to requests for + a new token. + id: + type: string + description: + The id of the API token. Used for modifying or deleting the + token. + user: + type: string + description: The user that owns a token (undefined if owned by a service) + service: + type: string + description: The service that owns the token (undefined if owned by a user) + roles: + type: array + description: + Deprecated in JupyterHub 3, always an empty list. Tokens have + 'scopes' starting from JupyterHub 3. + items: + type: string + scopes: + type: array + description: + List of scopes this token has been assigned. New in JupyterHub + 3. In JupyterHub 2.x, tokens were assigned 'roles' instead of scopes.
+ items: + type: string + note: + type: string + description: + A note about the token, typically describing what it was created + for. + created: + type: string + description: Timestamp when this token was created + format: date-time + expires_at: + type: string + description: Timestamp when this token expires. Null if there is no expiry. + format: date-time + last_activity: + type: string + description: | + Timestamp of last-seen activity using this token. + Can be null if token has never been used. + format: date-time + session_id: + type: string + nullable: true + description: | + The session id associated with the token, if any. + Only used for tokens set during oauth flows. + + Added in 2.0. + responses: + NotFound: + description: The specified resource was not found + content: {} + Unauthorized: + description: Authentication/Authorization error + content: {} + securitySchemes: + token: + type: apiKey + name: Authorization + in: header + oauth2: + type: oauth2 + flows: + authorizationCode: + authorizationUrl: https://hub.example/hub/api/oauth2/authorize + tokenUrl: https://hub.example/hub/api/oauth2/token + scopes: + (no_scope): Identify the owner of the requesting entity. + self: + The user’s own resources _(metascope for users, resolves to (no_scope) + for services)_ + inherit: + Everything that the token-owning entity can access _(metascope + for tokens)_ + admin-ui: + Access the admin page. Permission to take actions via the admin + page granted separately. + admin:users: + Read, write, create and delete users and their authentication + state, not including their servers or tokens. + admin:auth_state: Read a user’s authentication state. + users: + Read and write permissions to user models (excluding servers, tokens + and authentication state). + delete:users: Delete users. + list:users: List users, including at least their names. + read:users: + Read user models (excluding servers, tokens and + authentication state).
+ read:users:name: Read names of users. + read:users:groups: Read users’ group membership. + read:users:activity: Read time of last user activity. + read:roles: Read role assignments. + read:roles:users: Read user role assignments. + read:roles:services: Read service role assignments. + read:roles:groups: Read group role assignments. + users:activity: Update time of last user activity. + admin:servers: + Read, start, stop, create and delete user servers and their + state. + admin:server_state: Read and write users’ server state. + servers: Start and stop user servers. + read:servers: + Read users’ names and their server models (excluding the + server state). + delete:servers: Stop and delete users' servers. + tokens: Read, write, create and delete user tokens. + read:tokens: Read user tokens. + admin:groups: Read and write group information, create and delete groups. + groups: + Read and write group information, including adding/removing users + to/from groups. + list:groups: List groups, including at least their names. + read:groups: Read group models. + read:groups:name: Read group names. + delete:groups: Delete groups. + list:services: List services, including at least their names. + read:services: Read service models. + read:services:name: Read service names. + read:hub: Read detailed information about the Hub. + access:servers: Access user servers via API or browser. + access:services: Access services via API or browser. + proxy: + Read information about the proxy’s routing table, sync the Hub + with the proxy and notify the Hub about a new proxy. + shutdown: Shutdown the hub. + read:metrics: Read prometheus metrics. 
diff --git a/docs/source/_templates/navigation.html b/docs/source/_templates/navigation.html deleted file mode 100644 index 9fa52ff2..00000000 --- a/docs/source/_templates/navigation.html +++ /dev/null @@ -1,16 +0,0 @@ -{# Custom template for navigation.html - - alabaster theme does not provide blocks for titles to - be overridden so this custom theme handles title and - toctree for sidebar -#} -

{{ _('Table of Contents') }}

-{{ toctree(includehidden=theme_sidebar_includehidden, collapse=theme_sidebar_collapse) }} -{% if theme_extra_nav_links %} -
-
    - {% for text, uri in theme_extra_nav_links.items() %} -
  • {{ text }}
  • - {% endfor %} -
-{% endif %} diff --git a/docs/source/_templates/relations.html b/docs/source/_templates/relations.html deleted file mode 100644 index d7ace383..00000000 --- a/docs/source/_templates/relations.html +++ /dev/null @@ -1,17 +0,0 @@ -{# Custom template for relations.html - - alabaster theme does not provide previous/next page by default -#} -
-

Navigation

- -
diff --git a/docs/source/admin/log-messages.md b/docs/source/admin/log-messages.md new file mode 100644 index 00000000..cda307df --- /dev/null +++ b/docs/source/admin/log-messages.md @@ -0,0 +1,72 @@ +# Interpreting common log messages + +When debugging errors and outages, looking at the logs emitted by +JupyterHub is very helpful. This document intends to describe some common +log messages, what they mean and what are the most common causes that generated them, as well as some possible ways to fix them. + +## Failing suspected API request to not-running server + +### Example + +Your logs might be littered with lines that look scary + +``` +[W 2022-03-10 17:25:19.774 JupyterHub base:1349] Failing suspected API request to not-running server: /hub/user//api/metrics/v1 +``` + +### Cause + +This likely means that the user's server has stopped running but they +still have a browser tab open. For example, you might have 3 tabs open and you shut +the server down via one. +Another possible reason could be that you closed your laptop and the server was culled for inactivity, then reopened the laptop! +However, the client-side code (JupyterLab, Classic Notebook, etc) doesn't interpret the shut-down server and continues to make some API requests. + +JupyterHub's architecture means that the proxy routes all requests that +don't go to a running user server to the hub process itself. The hub +process then explicitly returns a failure response, so the client knows +that the server is not running anymore. This is used by JupyterLab to +inform the user that the server is not running anymore, and provide an option +to restart it. + +Most commonly, you'll see this in reference to the `/api/metrics/v1` +URL, used by [jupyter-resource-usage](https://github.com/jupyter-server/jupyter-resource-usage). + +### Actions you can take + +This log message is benign, and there is usually no action for you to take. 
+ +## JupyterHub Singleuser Version mismatch + +### Example + +``` + jupyterhub version 1.5.0 != jupyterhub-singleuser version 1.3.0. This could cause failure to authenticate and result in redirect loops! +``` + +### Cause + +JupyterHub requires the `jupyterhub` python package installed inside the image or +environment that the user server starts in. This message indicates that the version of +the `jupyterhub` package installed inside the user image or environment is not +the same as the JupyterHub server's version itself. This is not necessarily always a +problem - some version drift is mostly acceptable, and the only two known cases of +breakage are across the 0.7 and 2.0 version releases. In those cases, issues pop +up immediately after upgrading your version of JupyterHub, so **always check the JupyterHub +changelog before upgrading!**. The primary problems this _could_ cause are: + +1. Infinite redirect loops after the user server starts +2. Missing expected environment variables in the user server once it starts +3. Failure for the started user server to authenticate with the JupyterHub server - + note that this is _not_ the same as _user authentication_ failing! + +However, for the most part, unless you are seeing these specific issues, the log +message should be counted as a warning to get the `jupyterhub` package versions +aligned, rather than as an indicator of an existing problem. + +### Actions you can take + +Upgrade the version of the `jupyterhub` package in your user environment or image +so that it matches the version of JupyterHub running your JupyterHub server!
If you +are using the [zero-to-jupyterhub](https://z2jh.jupyter.org) helm chart, you can find the appropriate +version of the `jupyterhub` package to install in your user image [here](https://jupyterhub.github.io/helm-chart/) diff --git a/docs/source/admin/upgrading.rst b/docs/source/admin/upgrading.rst index 874aae97..fe793915 100644 --- a/docs/source/admin/upgrading.rst +++ b/docs/source/admin/upgrading.rst @@ -1,5 +1,3 @@ -.. _admin/upgrading: - ==================== Upgrading JupyterHub ==================== @@ -7,35 +5,36 @@ Upgrading JupyterHub JupyterHub offers easy upgrade pathways between minor versions. This document describes how to do these upgrades. -If you are using :ref:`a JupyterHub distribution `, you +If you use :ref:`a JupyterHub distribution `, you should consult the distribution's documentation on how to upgrade. This -document is if you have set up your own JupyterHub without using a +document is applicable if you have set up your own JupyterHub without using a distribution. -It is long because is pretty detailed! Most likely, upgrading +This documentation is lengthy because it is quite detailed. Most likely, upgrading JupyterHub is painless, quick and with minimal user interruption. +The steps are discussed in detail, so if you get stuck at any step you can always refer to this guide. + Read the Changelog ================== -The `changelog <../changelog.html>`_ contains information on what has -changed with the new JupyterHub release, and any deprecation warnings. +The `changelog <../changelog.md>`_ contains information on what has +changed with the new JupyterHub release and any deprecation warnings. Read these notes to familiarize yourself with the coming changes. There -might be new releases of authenticators & spawners you are using, so +might be new releases of the authenticators & spawners you use, so read the changelogs for those too! 
Notify your users ================= -If you are using the default configuration where ``configurable-http-proxy`` +If you use the default configuration where ``configurable-http-proxy`` is managed by JupyterHub, your users will see service disruption during the upgrade process. You should notify them, and pick a time to do the upgrade where they will be least disrupted. -If you are using a different proxy, or running ``configurable-http-proxy`` +If you use a different proxy or run ``configurable-http-proxy`` independent of JupyterHub, your users will be able to continue using notebook -servers they had already launched, but will not be able to launch new servers -nor sign in. +servers they had already launched, but will not be able to launch new servers or sign in. Backup database & config @@ -43,37 +42,37 @@ Backup database & config Before doing an upgrade, it is critical to back up: -#. Your JupyterHub database (sqlite by default, or MySQL / Postgres - if you used those). If you are using sqlite (the default), you - should backup the ``jupyterhub.sqlite`` file. +#. Your JupyterHub database (SQLite by default, or MySQL / Postgres if you used those). + If you use SQLite (the default), you should backup the ``jupyterhub.sqlite`` file. + #. Your ``jupyterhub_config.py`` file. -#. Your user's home directories. This is unlikely to be affected directly by - a JupyterHub upgrade, but we recommend a backup since user data is very - critical. + +#. Your users' home directories. This is unlikely to be affected directly by + a JupyterHub upgrade, but we recommend a backup since user data is critical. -Shutdown JupyterHub -=================== +Shut down JupyterHub +==================== -Shutdown the JupyterHub process. This would vary depending on how you -have set up JupyterHub to run. Most likely, it is using a process +Shut down the JupyterHub process. This would vary depending on how you +have set up JupyterHub to run. 
It is most likely using a process supervisor of some sort (``systemd`` or ``supervisord`` or even ``docker``). -Use the supervisor specific command to stop the JupyterHub process. +Use the supervisor-specific command to stop the JupyterHub process. Upgrade JupyterHub packages =========================== There are two environments where the ``jupyterhub`` package is installed: -#. The *hub environment*, which is where the JupyterHub server process +#. The *hub environment*: where the JupyterHub server process runs. This is started with the ``jupyterhub`` command, and is what people generally think of as JupyterHub. -#. The *notebook user environments*. This is where the user notebook +#. The *notebook user environments*: where the user notebook servers are launched from, and is probably custom to your own installation. This could be just one environment (different from the hub environment) that is shared by all users, one environment - per user, or same environment as the hub environment. The hub + per user, or the same environment as the hub environment. The hub launched the ``jupyterhub-singleuser`` command in this environment, which in turn starts the notebook server. @@ -94,10 +93,8 @@ with: conda install -c conda-forge jupyterhub== -Where ```` is the version of JupyterHub you are upgrading to. - You should also check for new releases of the authenticator & spawner you -are using. You might wish to upgrade those packages too along with JupyterHub, +are using. You might wish to upgrade those packages, too, along with JupyterHub or upgrade them separately. Upgrade JupyterHub database @@ -111,7 +108,7 @@ database. From the hub environment, in the same directory as your jupyterhub upgrade-db -This should find the location of your database, and run necessary upgrades +This should find the location of your database, and run the necessary upgrades for it. 
SQLite database disadvantages @@ -120,11 +117,11 @@ SQLite database disadvantages SQLite has some disadvantages when it comes to upgrading JupyterHub. These are: -- ``upgrade-db`` may not work, and you may need delete your database +- ``upgrade-db`` may not work, and you may need to delete your database and start with a fresh one. - ``downgrade-db`` **will not** work if you want to rollback to an earlier version, so backup the ``jupyterhub.sqlite`` file before - upgrading + upgrading. What happens if I delete my database? ------------------------------------- @@ -139,10 +136,10 @@ resides only in the Hub database includes: If the following conditions are true, you should be fine clearing the Hub database and starting over: -- users specified in config file, or login using an external +- users specified in the config file, or login using an external authentication provider (Google, GitHub, LDAP, etc) -- user servers are stopped during upgrade -- don't mind causing users to login again after upgrade +- user servers are stopped during the upgrade +- don't mind causing users to log in again after the upgrade Start JupyterHub ================ @@ -150,7 +147,7 @@ Start JupyterHub Once the database upgrade is completed, start the ``jupyterhub`` process again. -#. Log-in and start the server to make sure things work as +#. Log in and start the server to make sure things work as expected. #. Check the logs for any errors or deprecation warnings. You might have to update your ``jupyterhub_config.py`` file to diff --git a/docs/source/api/index.rst b/docs/source/api/index.rst index 7ce11d2a..38f1328a 100644 --- a/docs/source/api/index.rst +++ b/docs/source/api/index.rst @@ -1,8 +1,8 @@ .. 
_api-index: -################## -The JupyterHub API -################## +############## +JupyterHub API +############## :Release: |release| :Date: |today| @@ -17,11 +17,6 @@ information on: - making an API request programmatically using the requests library - learning more about JupyterHub's API -The same JupyterHub API spec, as found here, is available in an interactive form -`here (on swagger's petstore) `__. -The `OpenAPI Initiative`_ (fka Swagger™) is a project used to describe -and document RESTful APIs. - JupyterHub API Reference: .. toctree:: diff --git a/docs/source/changelog.md b/docs/source/changelog.md index 4cd14dbd..47562fb4 100644 --- a/docs/source/changelog.md +++ b/docs/source/changelog.md @@ -4,9 +4,1178 @@ For detailed changes from the prior release, click on the version number, and its link will bring up a GitHub listing of changes. Use `git log` on the command line for details. - ## [Unreleased] +## 3.0 + +### 3.0.0 - 2022-09-08 + +3.0 is a major upgrade, but a small one. + +It qualifies as a major upgrade because of two changes: + +1. It includes a database schema change (`jupyterhub --upgrade-db`). + The schema change should not be disruptive, but we've decided that + any schema change qualifies as a major version upgrade. +2. We've dropped support for Python 3.6, which reached End-of-Life in 2021. + If you are using at least Python 3.7, this change should have no effect. + +The database schema change is small and should not be disruptive, +but downgrading is always harder than upgrading after a db migration, +which makes rolling back the update more likely to be problematic. + +#### Changes in RBAC + +The biggest changes in 3.0 relate to {ref}`RBAC`, +which also means they shouldn't affect most users. +The users most affected will be JupyterHub admins using JupyterHub roles +extensively to define user permissions. + +After testing 2.0 in the wild, +we learned that we had used _roles_ in a few places that should have been _scopes_. 
+Specifically, OAuth tokens now have _scopes_ instead of _roles_ +(and token-issuing oauth clients now have `allowed_scopes` instead of `allowed_roles`). +The consequences should be fairly transparent to users, +but anyone who ran into the restrictions of roles in the oauth process +should find scopes easier to work with. +We tried not to break anything here, so any prior use of roles will still work with a deprecation, +but the role will be resolved _immediately_ at token-issue time, +rather than every time the token is used. + +This especially came up testing the new {ref}`custom-scopes` feature. +Authors of JupyterHub-authenticated services can now extend JupyterHub's RBAC functionality to define their own scopes, +and assign them to users and groups via roles. +This can be used to e.g. limit student/grader/instructor permissions in a grading service, +or grant instructors read-only access to their students' single-user servers starting with upcoming Jupyter Server 2.0. + +Further extending granular control of permissions, +we have added `!service` and `!server` filters for scopes (:ref:`self-referencing-filters`), +like we had for `!user`. + +Access to the admin UI is now governed by a dedicated `admin-ui` scope, +rather than combined `admin:servers` and `admin:users` in 2.0. +More info in `ref`{available-scopes-target}. + +#### More highlights + +- The admin UI can now show more detailed info about users and their servers in a drop-down details table: + + ![Details view in admin UI](./images/dropdown-details-3.0.png) + +- Several bugfixes and improvements in the new admin UI. +- Direct access to the Hub's database is deprecated. + We intend to change the database connection lifecycle in the future to enable scalability and high-availability (HA), + and limiting where connections and transactions can occur is an important part of making that possible. +- Lots more bugfixes and error-handling improvements. 
+ +([full changelog](https://github.com/jupyterhub/jupyterhub/compare/2.3.1...3.0.0)) + +#### New features added + +- Add ConfigurableHTTPProxy.log_level [#3962](https://github.com/jupyterhub/jupyterhub/pull/3962) ([@manics](https://github.com/manics), [@minrk](https://github.com/minrk)) +- include stopped servers in user model [#3909](https://github.com/jupyterhub/jupyterhub/pull/3909) ([@minrk](https://github.com/minrk), [@consideRatio](https://github.com/consideRatio)) +- allow HubAuth to be async [#3883](https://github.com/jupyterhub/jupyterhub/pull/3883) ([@minrk](https://github.com/minrk), [@consideRatio](https://github.com/consideRatio), [@sgibson91](https://github.com/sgibson91)) +- add 'admin-ui' scope for access to the admin ui [#3878](https://github.com/jupyterhub/jupyterhub/pull/3878) ([@minrk](https://github.com/minrk), [@GeorgianaElena](https://github.com/GeorgianaElena), [@manics](https://github.com/manics)) +- store scopes on oauth clients, too [#3877](https://github.com/jupyterhub/jupyterhub/pull/3877) ([@minrk](https://github.com/minrk), [@consideRatio](https://github.com/consideRatio), [@manics](https://github.com/manics)) +- !service and !server filters [#3851](https://github.com/jupyterhub/jupyterhub/pull/3851) ([@minrk](https://github.com/minrk), [@consideRatio](https://github.com/consideRatio)) +- allow user-defined custom scopes [#3713](https://github.com/jupyterhub/jupyterhub/pull/3713) ([@minrk](https://github.com/minrk), [@consideRatio](https://github.com/consideRatio), [@manics](https://github.com/manics)) + +#### Enhancements made + +- Integrate Pagination API into Admin JSX [#4002](https://github.com/jupyterhub/jupyterhub/pull/4002) ([@naatebarber](https://github.com/naatebarber), [@minrk](https://github.com/minrk)) +- admin: format user/server-info tables [#4001](https://github.com/jupyterhub/jupyterhub/pull/4001) ([@minrk](https://github.com/minrk)) +- add correct autocomplete fields for login form 
[#3958](https://github.com/jupyterhub/jupyterhub/pull/3958) ([@minrk](https://github.com/minrk), [@consideRatio](https://github.com/consideRatio)) +- memoize some scope functions [#3850](https://github.com/jupyterhub/jupyterhub/pull/3850) ([@minrk](https://github.com/minrk), [@manics](https://github.com/manics)) +- Tokens have scopes instead of roles [#3833](https://github.com/jupyterhub/jupyterhub/pull/3833) ([@minrk](https://github.com/minrk), [@consideRatio](https://github.com/consideRatio)) + +#### Bugs fixed + +- Use correct expiration labels in drop-down menu on token page. [#4022](https://github.com/jupyterhub/jupyterhub/pull/4022) ([@possiblyMikeB](https://github.com/possiblyMikeB), [@consideRatio](https://github.com/consideRatio)) +- avoid database error on repeated group name in sync_groups [#4019](https://github.com/jupyterhub/jupyterhub/pull/4019) ([@minrk](https://github.com/minrk), [@manics](https://github.com/manics)) +- reset offset to 0 on name filter change [#4018](https://github.com/jupyterhub/jupyterhub/pull/4018) ([@minrk](https://github.com/minrk), [@consideRatio](https://github.com/consideRatio)) +- admin: avoid redundant client-side username validation in edit-user [#4016](https://github.com/jupyterhub/jupyterhub/pull/4016) ([@minrk](https://github.com/minrk), [@consideRatio](https://github.com/consideRatio)) +- restore trimming of username input [#4011](https://github.com/jupyterhub/jupyterhub/pull/4011) ([@minrk](https://github.com/minrk), [@consideRatio](https://github.com/consideRatio)) +- nbclassic extension name has been renamed [#3971](https://github.com/jupyterhub/jupyterhub/pull/3971) ([@minrk](https://github.com/minrk), [@consideRatio](https://github.com/consideRatio)) +- Fix disabling of individual page template announcements [#3969](https://github.com/jupyterhub/jupyterhub/pull/3969) ([@consideRatio](https://github.com/consideRatio), [@manics](https://github.com/manics), [@minrk](https://github.com/minrk)) +- validate 
proxy.extra_routes [#3967](https://github.com/jupyterhub/jupyterhub/pull/3967) ([@minrk](https://github.com/minrk), [@consideRatio](https://github.com/consideRatio)) +- Fix GET /api/proxy with pagination [#3960](https://github.com/jupyterhub/jupyterhub/pull/3960) ([@cqzlxl](https://github.com/cqzlxl), [@minrk](https://github.com/minrk)) +- FreeBSD, missing -n for pw useradd [#3953](https://github.com/jupyterhub/jupyterhub/pull/3953) ([@silenius](https://github.com/silenius), [@minrk](https://github.com/minrk), [@manics](https://github.com/manics)) +- admin: Hub is responsible for username validation [#3936](https://github.com/jupyterhub/jupyterhub/pull/3936) ([@minrk](https://github.com/minrk), [@consideRatio](https://github.com/consideRatio), [@NarekA](https://github.com/NarekA), [@yuvipanda](https://github.com/yuvipanda)) +- admin: Fix spawn page link for default server [#3935](https://github.com/jupyterhub/jupyterhub/pull/3935) ([@minrk](https://github.com/minrk), [@consideRatio](https://github.com/consideRatio), [@benz0li](https://github.com/benz0li)) +- let errors raised in an auth_state_hook halt spawn [#3908](https://github.com/jupyterhub/jupyterhub/pull/3908) ([@minrk](https://github.com/minrk), [@consideRatio](https://github.com/consideRatio)) +- Escape named server name [#3904](https://github.com/jupyterhub/jupyterhub/pull/3904) ([@manics](https://github.com/manics), [@minrk](https://github.com/minrk)) + +#### Maintenance and upkeep improvements + +- Test 3.11 [#4013](https://github.com/jupyterhub/jupyterhub/pull/4013) ([@minrk](https://github.com/minrk), [@consideRatio](https://github.com/consideRatio)) +- [admin] update, clean jsx deps [#4000](https://github.com/jupyterhub/jupyterhub/pull/4000) ([@minrk](https://github.com/minrk)) +- Avoid IOLoop.current in singleuser mixins [#3992](https://github.com/jupyterhub/jupyterhub/pull/3992) ([@minrk](https://github.com/minrk), [@consideRatio](https://github.com/consideRatio)) +- Increase stacklevel for 
decorated warnings [#3978](https://github.com/jupyterhub/jupyterhub/pull/3978) ([@minrk](https://github.com/minrk), [@consideRatio](https://github.com/consideRatio)) +- Bump Dockerfile base image to 22.04 [#3975](https://github.com/jupyterhub/jupyterhub/pull/3975) ([@minrk](https://github.com/minrk), [@consideRatio](https://github.com/consideRatio), [@manics](https://github.com/manics)) +- Avoid deprecated 'IOLoop.current' method [#3974](https://github.com/jupyterhub/jupyterhub/pull/3974) ([@minrk](https://github.com/minrk), [@consideRatio](https://github.com/consideRatio), [@manics](https://github.com/manics)) +- switch to importlib_metadata for entrypoints [#3937](https://github.com/jupyterhub/jupyterhub/pull/3937) ([@minrk](https://github.com/minrk), [@consideRatio](https://github.com/consideRatio)) +- pages.py: Remove unreachable code [#3921](https://github.com/jupyterhub/jupyterhub/pull/3921) ([@manics](https://github.com/manics), [@minrk](https://github.com/minrk), [@consideRatio](https://github.com/consideRatio)) +- Build admin app in setup.py [#3914](https://github.com/jupyterhub/jupyterhub/pull/3914) ([@manics](https://github.com/manics), [@minrk](https://github.com/minrk)) +- Use isort for import formatting [#3852](https://github.com/jupyterhub/jupyterhub/pull/3852) ([@minrk](https://github.com/minrk), [@consideRatio](https://github.com/consideRatio), [@choldgraf](https://github.com/choldgraf), [@yuvipanda](https://github.com/yuvipanda)) + +#### Documentation improvements + +- document oauth_no_confirm in services [#4012](https://github.com/jupyterhub/jupyterhub/pull/4012) ([@minrk](https://github.com/minrk), [@consideRatio](https://github.com/consideRatio)) +- Remove outdated cookie-secret note in security docs [#3997](https://github.com/jupyterhub/jupyterhub/pull/3997) ([@minrk](https://github.com/minrk), [@consideRatio](https://github.com/consideRatio)) +- Update Contributing documentation [#3915](https://github.com/jupyterhub/jupyterhub/pull/3915) 
([@manics](https://github.com/manics), [@minrk](https://github.com/minrk)) +- `jupyter troubleshooting` ➡️ `jupyter troubleshoot` [#3903](https://github.com/jupyterhub/jupyterhub/pull/3903) ([@manics](https://github.com/manics), [@minrk](https://github.com/minrk), [@consideRatio](https://github.com/consideRatio)) +- `admin_access` no longer works as it is overridden by RBAC scopes [#3899](https://github.com/jupyterhub/jupyterhub/pull/3899) ([@manics](https://github.com/manics), [@minrk](https://github.com/minrk)) +- Document the 'display' attribute of services [#3895](https://github.com/jupyterhub/jupyterhub/pull/3895) ([@yuvipanda](https://github.com/yuvipanda), [@minrk](https://github.com/minrk), [@sgibson91](https://github.com/sgibson91)) +- remove apache NE flag as it prevents opening folders and renaming fil… [#3891](https://github.com/jupyterhub/jupyterhub/pull/3891) ([@bbrauns](https://github.com/bbrauns), [@minrk](https://github.com/minrk)) + +#### API and Breaking Changes + +- Require Python 3.7 [#3976](https://github.com/jupyterhub/jupyterhub/pull/3976) ([@minrk](https://github.com/minrk), [@consideRatio](https://github.com/consideRatio), [@manics](https://github.com/manics)) +- Deprecate Authenticator.db, Spawner.db [#3885](https://github.com/jupyterhub/jupyterhub/pull/3885) ([@minrk](https://github.com/minrk), [@manics](https://github.com/manics)) + +#### Contributors to this release + +([GitHub contributors page for this release](https://github.com/jupyterhub/jupyterhub/graphs/contributors?from=2022-03-14&to=2022-09-07&type=c)) + +[@ajcollett](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aajcollett+updated%3A2022-08-02..2022-09-07&type=Issues) | [@bbrauns](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Abbrauns+updated%3A2022-03-14..2022-08-02&type=Issues) | [@benz0li](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Abenz0li+updated%3A2022-03-14..2022-08-02&type=Issues) | 
[@betatim](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Abetatim+updated%3A2022-03-14..2022-08-02&type=Issues) | [@blink1073](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Ablink1073+updated%3A2022-03-14..2022-08-02&type=Issues) | [@brospars](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Abrospars+updated%3A2022-03-14..2022-08-02&type=Issues) | [@Carreau](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3ACarreau+updated%3A2022-03-14..2022-08-02&type=Issues) | [@choldgraf](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Acholdgraf+updated%3A2022-03-14..2022-08-02&type=Issues) | [@cmd-ntrf](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Acmd-ntrf+updated%3A2022-03-14..2022-08-02&type=Issues) | [@code-review-doctor](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Acode-review-doctor+updated%3A2022-03-14..2022-08-02&type=Issues) | [@consideRatio](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3AconsideRatio+updated%3A2022-03-14..2022-08-02&type=Issues) | [@cqzlxl](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Acqzlxl+updated%3A2022-03-14..2022-08-02&type=Issues) | [@dependabot](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Adependabot+updated%3A2022-03-14..2022-08-02&type=Issues) | [@fabianbaier](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Afabianbaier+updated%3A2022-03-14..2022-08-02&type=Issues) | [@GeorgianaElena](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3AGeorgianaElena+updated%3A2022-03-14..2022-08-02&type=Issues) | [@github-actions](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Agithub-actions+updated%3A2022-03-14..2022-08-02&type=Issues) | 
[@hansen-m](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Ahansen-m+updated%3A2022-03-14..2022-08-02&type=Issues) | [@huage1994](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Ahuage1994+updated%3A2022-03-14..2022-08-02&type=Issues) | [@jbaksta](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Ajbaksta+updated%3A2022-03-14..2022-08-02&type=Issues) | [@jgwerner](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Ajgwerner+updated%3A2022-03-14..2022-08-02&type=Issues) | [@jhermann](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Ajhermann+updated%3A2022-03-14..2022-08-02&type=Issues) | [@johnkpark](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Ajohnkpark+updated%3A2022-03-14..2022-08-02&type=Issues) | [@jwclark](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Ajwclark+updated%3A2022-03-14..2022-08-02&type=Issues) | [@maluhoss](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Amaluhoss+updated%3A2022-03-14..2022-08-02&type=Issues) | [@manics](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Amanics+updated%3A2022-03-14..2022-08-02&type=Issues) | [@mathematicalmichael](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Amathematicalmichael+updated%3A2022-03-14..2022-08-02&type=Issues) | [@meeseeksdev](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Ameeseeksdev+updated%3A2022-03-14..2022-08-02&type=Issues) | [@minrk](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aminrk+updated%3A2022-03-14..2022-08-02&type=Issues) | [@mriedem](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Amriedem+updated%3A2022-03-14..2022-08-02&type=Issues) | [@naatebarber](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Anaatebarber+updated%3A2022-03-14..2022-08-02&type=Issues) | 
[@NarekA](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3ANarekA+updated%3A2022-03-14..2022-08-02&type=Issues) | [@naveensrinivasan](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Anaveensrinivasan+updated%3A2022-03-14..2022-08-02&type=Issues) | [@nicorikken](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Anicorikken+updated%3A2022-03-14..2022-08-02&type=Issues) | [@nsshah1288](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Ansshah1288+updated%3A2022-03-14..2022-08-02&type=Issues) | [@panruipr](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Apanruipr+updated%3A2022-03-14..2022-08-02&type=Issues) | [@paulkerry1](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Apaulkerry1+updated%3A2022-03-14..2022-08-02&type=Issues) | [@possiblyMikeB](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3ApossiblyMikeB+updated%3A2022-08-02..2022-09-07&type=Issues) | [@pre-commit-ci](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Apre-commit-ci+updated%3A2022-03-14..2022-08-02&type=Issues) | [@rcthomas](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Arcthomas+updated%3A2022-03-14..2022-08-02&type=Issues) | [@robnagler](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Arobnagler+updated%3A2022-03-14..2022-08-02&type=Issues) | [@rpwagner](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Arpwagner+updated%3A2022-03-14..2022-08-02&type=Issues) | [@ryogesh](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aryogesh+updated%3A2022-03-14..2022-08-02&type=Issues) | [@sgibson91](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Asgibson91+updated%3A2022-03-14..2022-08-02&type=Issues) | [@silenius](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Asilenius+updated%3A2022-03-14..2022-08-02&type=Issues) | 
[@SonakshiGrover](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3ASonakshiGrover+updated%3A2022-03-14..2022-08-02&type=Issues) | [@superfive666](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Asuperfive666+updated%3A2022-08-02..2022-09-07&type=Issues) | [@tharwan](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Atharwan+updated%3A2022-03-14..2022-08-02&type=Issues) | [@vpavlin](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Avpavlin+updated%3A2022-03-14..2022-08-02&type=Issues) | [@willingc](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Awillingc+updated%3A2022-03-14..2022-08-02&type=Issues) | [@ykazakov](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aykazakov+updated%3A2022-03-14..2022-08-02&type=Issues) | [@yuvipanda](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Ayuvipanda+updated%3A2022-03-14..2022-08-02&type=Issues) | [@zoltan-fedor](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Azoltan-fedor+updated%3A2022-03-14..2022-08-02&type=Issues) + +## 2.3 + +### 2.3.1 - 2022-06-06 + +This release includes a selection of bugfixes. 
+ +#### Bugs fixed + +- use equality to filter token prefixes [#3910](https://github.com/jupyterhub/jupyterhub/pull/3910) ([@minrk](https://github.com/minrk), [@yuvipanda](https://github.com/yuvipanda)) +- ensure custom template is loaded with jupyter-server notebook extension [#3919](https://github.com/jupyterhub/jupyterhub/pull/3919) ([@minrk](https://github.com/minrk), [@yuvipanda](https://github.com/yuvipanda)) +- set default_url via config [#3918](https://github.com/jupyterhub/jupyterhub/pull/3918) ([@minrk](https://github.com/minrk), [@yuvipanda](https://github.com/yuvipanda)) +- Force add existing certificates [#3906](https://github.com/jupyterhub/jupyterhub/pull/3906) ([@fabianbaier](https://github.com/fabianbaier), [@minrk](https://github.com/minrk)) +- admin: make user-info table selectable [#3889](https://github.com/jupyterhub/jupyterhub/pull/3889) ([@johnkpark](https://github.com/johnkpark), [@minrk](https://github.com/minrk), [@naatebarber](https://github.com/naatebarber), [@NarekA](https://github.com/NarekA)) +- ensure \_import_error is set when JUPYTERHUB_SINGLEUSER_APP is unavailable [#3837](https://github.com/jupyterhub/jupyterhub/pull/3837) ([@minrk](https://github.com/minrk), [@consideRatio](https://github.com/consideRatio)) + +#### Contributors to this release + +([GitHub contributors page for this release](https://github.com/jupyterhub/jupyterhub/graphs/contributors?from=2022-05-06&to=2022-06-06&type=c)) + +[@bbrauns](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Abbrauns+updated%3A2022-05-06..2022-06-06&type=Issues) | [@betatim](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Abetatim+updated%3A2022-05-06..2022-06-06&type=Issues) | [@blink1073](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Ablink1073+updated%3A2022-05-06..2022-06-06&type=Issues) | 
[@brospars](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Abrospars+updated%3A2022-05-06..2022-06-06&type=Issues) | [@Carreau](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3ACarreau+updated%3A2022-05-06..2022-06-06&type=Issues) | [@choldgraf](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Acholdgraf+updated%3A2022-05-06..2022-06-06&type=Issues) | [@consideRatio](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3AconsideRatio+updated%3A2022-05-06..2022-06-06&type=Issues) | [@fabianbaier](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Afabianbaier+updated%3A2022-05-06..2022-06-06&type=Issues) | [@GeorgianaElena](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3AGeorgianaElena+updated%3A2022-05-06..2022-06-06&type=Issues) | [@github-actions](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Agithub-actions+updated%3A2022-05-06..2022-06-06&type=Issues) | [@hansen-m](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Ahansen-m+updated%3A2022-05-06..2022-06-06&type=Issues) | [@jbaksta](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Ajbaksta+updated%3A2022-05-06..2022-06-06&type=Issues) | [@jgwerner](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Ajgwerner+updated%3A2022-05-06..2022-06-06&type=Issues) | [@jhermann](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Ajhermann+updated%3A2022-05-06..2022-06-06&type=Issues) | [@johnkpark](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Ajohnkpark+updated%3A2022-05-06..2022-06-06&type=Issues) | [@maluhoss](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Amaluhoss+updated%3A2022-05-06..2022-06-06&type=Issues) | [@manics](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Amanics+updated%3A2022-05-06..2022-06-06&type=Issues) | 
[@mathematicalmichael](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Amathematicalmichael+updated%3A2022-05-06..2022-06-06&type=Issues) | [@meeseeksdev](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Ameeseeksdev+updated%3A2022-05-06..2022-06-06&type=Issues) | [@minrk](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aminrk+updated%3A2022-05-06..2022-06-06&type=Issues) | [@mriedem](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Amriedem+updated%3A2022-05-06..2022-06-06&type=Issues) | [@naatebarber](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Anaatebarber+updated%3A2022-05-06..2022-06-06&type=Issues) | [@NarekA](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3ANarekA+updated%3A2022-05-06..2022-06-06&type=Issues) | [@nicorikken](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Anicorikken+updated%3A2022-05-06..2022-06-06&type=Issues) | [@nsshah1288](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Ansshah1288+updated%3A2022-05-06..2022-06-06&type=Issues) | [@panruipr](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Apanruipr+updated%3A2022-05-06..2022-06-06&type=Issues) | [@paulkerry1](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Apaulkerry1+updated%3A2022-05-06..2022-06-06&type=Issues) | [@rcthomas](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Arcthomas+updated%3A2022-05-06..2022-06-06&type=Issues) | [@robnagler](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Arobnagler+updated%3A2022-05-06..2022-06-06&type=Issues) | [@ryogesh](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aryogesh+updated%3A2022-05-06..2022-06-06&type=Issues) | [@sgibson91](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Asgibson91+updated%3A2022-05-06..2022-06-06&type=Issues) | 
[@SonakshiGrover](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3ASonakshiGrover+updated%3A2022-05-06..2022-06-06&type=Issues) | [@tharwan](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Atharwan+updated%3A2022-05-06..2022-06-06&type=Issues) | [@vpavlin](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Avpavlin+updated%3A2022-05-06..2022-06-06&type=Issues) | [@welcome](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Awelcome+updated%3A2022-05-06..2022-06-06&type=Issues) | [@willingc](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Awillingc+updated%3A2022-05-06..2022-06-06&type=Issues) | [@yuvipanda](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Ayuvipanda+updated%3A2022-05-06..2022-06-06&type=Issues) | [@zoltan-fedor](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Azoltan-fedor+updated%3A2022-05-06..2022-06-06&type=Issues) + +### 2.3.0 - 2022-05-06 + +#### Enhancements made + +- Admin Dashboard - Collapsible Details View [#3834](https://github.com/jupyterhub/jupyterhub/pull/3834) ([@NarekA](https://github.com/NarekA), [@minrk](https://github.com/minrk), [@ykazakov](https://github.com/ykazakov), [@johnkpark](https://github.com/johnkpark)) +- Admin Dashboard - Add search bar for user name [#3827](https://github.com/jupyterhub/jupyterhub/pull/3827) ([@NarekA](https://github.com/NarekA), [@minrk](https://github.com/minrk), [@ykazakov](https://github.com/ykazakov)) + +#### Bugs fixed + +- Cleanup everything on API shutdown [#3886](https://github.com/jupyterhub/jupyterhub/pull/3886) ([@minrk](https://github.com/minrk), [@manics](https://github.com/manics)) +- don't confuse :// in next_url query params for a redirect hostname [#3876](https://github.com/jupyterhub/jupyterhub/pull/3876) ([@minrk](https://github.com/minrk), [@GeorgianaElena](https://github.com/GeorgianaElena)) +- Search bar disabled on admin dashboard 
[#3863](https://github.com/jupyterhub/jupyterhub/pull/3863) ([@NarekA](https://github.com/NarekA), [@minrk](https://github.com/minrk)) +- Do not store Spawner.ip/port on spawner.server during get_env [#3859](https://github.com/jupyterhub/jupyterhub/pull/3859) ([@minrk](https://github.com/minrk), [@manics](https://github.com/manics), [@consideRatio](https://github.com/consideRatio)) +- Fix xsrf_cookie_kwargs ValueError [#3853](https://github.com/jupyterhub/jupyterhub/pull/3853) ([@jwclark](https://github.com/jwclark), [@minrk](https://github.com/minrk)) +- ensure \_import_error is set when JUPYTERHUB_SINGLEUSER_APP is unavailable [#3837](https://github.com/jupyterhub/jupyterhub/pull/3837) ([@minrk](https://github.com/minrk), [@consideRatio](https://github.com/consideRatio)) + +#### Maintenance and upkeep improvements + +- Use log.exception when logging exceptions [#3882](https://github.com/jupyterhub/jupyterhub/pull/3882) ([@yuvipanda](https://github.com/yuvipanda), [@minrk](https://github.com/minrk), [@sgibson91](https://github.com/sgibson91)) +- Missing `f` prefix on f-strings fix [#3874](https://github.com/jupyterhub/jupyterhub/pull/3874) ([@code-review-doctor](https://github.com/code-review-doctor), [@minrk](https://github.com/minrk), [@consideRatio](https://github.com/consideRatio)) +- adopt pytest-asyncio asyncio_mode='auto' [#3841](https://github.com/jupyterhub/jupyterhub/pull/3841) ([@minrk](https://github.com/minrk), [@consideRatio](https://github.com/consideRatio), [@manics](https://github.com/manics)) +- remove lingering reference to distutils [#3835](https://github.com/jupyterhub/jupyterhub/pull/3835) ([@minrk](https://github.com/minrk), [@consideRatio](https://github.com/consideRatio)) + +#### Documentation improvements + +- Fix typo in REST API link in README.md [#3862](https://github.com/jupyterhub/jupyterhub/pull/3862) ([@cmd-ntrf](https://github.com/cmd-ntrf), [@consideRatio](https://github.com/consideRatio)) +- The word `used` is duplicated in 
upgrade.md [#3849](https://github.com/jupyterhub/jupyterhub/pull/3849) ([@huage1994](https://github.com/huage1994), [@consideRatio](https://github.com/consideRatio)) +- Some typos in docs [#3843](https://github.com/jupyterhub/jupyterhub/pull/3843) ([@minrk](https://github.com/minrk), [@consideRatio](https://github.com/consideRatio)) +- Document version mismatch log message [#3839](https://github.com/jupyterhub/jupyterhub/pull/3839) ([@yuvipanda](https://github.com/yuvipanda), [@consideRatio](https://github.com/consideRatio), [@minrk](https://github.com/minrk)) + +#### Contributors to this release + +([GitHub contributors page for this release](https://github.com/jupyterhub/jupyterhub/graphs/contributors?from=2022-03-14&to=2022-05-05&type=c)) + +[@choldgraf](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Acholdgraf+updated%3A2022-03-14..2022-05-05&type=Issues) | [@cmd-ntrf](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Acmd-ntrf+updated%3A2022-03-14..2022-05-05&type=Issues) | [@code-review-doctor](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Acode-review-doctor+updated%3A2022-03-14..2022-05-05&type=Issues) | [@consideRatio](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3AconsideRatio+updated%3A2022-03-14..2022-05-05&type=Issues) | [@dependabot](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Adependabot+updated%3A2022-03-14..2022-05-05&type=Issues) | [@GeorgianaElena](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3AGeorgianaElena+updated%3A2022-03-14..2022-05-05&type=Issues) | [@github-actions](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Agithub-actions+updated%3A2022-03-14..2022-05-05&type=Issues) | [@huage1994](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Ahuage1994+updated%3A2022-03-14..2022-05-05&type=Issues) | 
[@johnkpark](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Ajohnkpark+updated%3A2022-03-14..2022-05-05&type=Issues) | [@jwclark](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Ajwclark+updated%3A2022-03-14..2022-05-05&type=Issues) | [@manics](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Amanics+updated%3A2022-03-14..2022-05-05&type=Issues) | [@minrk](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aminrk+updated%3A2022-03-14..2022-05-05&type=Issues) | [@NarekA](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3ANarekA+updated%3A2022-03-14..2022-05-05&type=Issues) | [@pre-commit-ci](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Apre-commit-ci+updated%3A2022-03-14..2022-05-05&type=Issues) | [@sgibson91](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Asgibson91+updated%3A2022-03-14..2022-05-05&type=Issues) | [@ykazakov](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aykazakov+updated%3A2022-03-14..2022-05-05&type=Issues) | [@yuvipanda](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Ayuvipanda+updated%3A2022-03-14..2022-05-05&type=Issues) + +## 2.2 + +### 2.2.2 2022-03-14 + +2.2.2 fixes a small regression in 2.2.1. 
+ +([full changelog](https://github.com/jupyterhub/jupyterhub/compare/2.2.1...6c5e5452bc734dfd5c5a9482e4980b988ddd304e)) + +#### Bugs fixed + +- Fix failure to update admin-react.js by re-compiling from our source [#3825](https://github.com/jupyterhub/jupyterhub/pull/3825) ([@NarekA](https://github.com/NarekA), [@consideRatio](https://github.com/consideRatio), [@minrk](https://github.com/minrk), [@manics](https://github.com/manics)) + +#### Continuous integration improvements + +- ci: standalone jsx workflow and verify compiled asset matches source code [#3826](https://github.com/jupyterhub/jupyterhub/pull/3826) ([@consideRatio](https://github.com/consideRatio), [@NarekA](https://github.com/NarekA)) + +#### Contributors to this release + +([GitHub contributors page for this release](https://github.com/jupyterhub/jupyterhub/graphs/contributors?from=2022-03-11&to=2022-03-14&type=c)) + +[@consideRatio](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3AconsideRatio+updated%3A2022-03-11..2022-03-14&type=Issues) | [@manics](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Amanics+updated%3A2022-03-11..2022-03-14&type=Issues) | [@minrk](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aminrk+updated%3A2022-03-11..2022-03-14&type=Issues) | [@NarekA](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3ANarekA+updated%3A2022-03-11..2022-03-14&type=Issues) + +### 2.2.1 2022-03-11 + +2.2.1 fixes a few small regressions in 2.2.0. 
+ +([full changelog](https://github.com/jupyterhub/jupyterhub/compare/2.2.0...2.2.1)) + +#### Bugs fixed + +- Fix clearing cookie with custom xsrf cookie options [#3823](https://github.com/jupyterhub/jupyterhub/pull/3823) ([@minrk](https://github.com/minrk), [@consideRatio](https://github.com/consideRatio)) +- Fix admin dashboard table sorting [#3822](https://github.com/jupyterhub/jupyterhub/pull/3822) ([@NarekA](https://github.com/NarekA), [@minrk](https://github.com/minrk), [@consideRatio](https://github.com/consideRatio)) + +#### Maintenance and upkeep improvements + +- allow Spawner.server to be mocked without underlying orm_spawner [#3819](https://github.com/jupyterhub/jupyterhub/pull/3819) ([@minrk](https://github.com/minrk), [@yuvipanda](https://github.com/yuvipanda), [@consideRatio](https://github.com/consideRatio)) + +#### Documentation + +- Add some docs on common log messages [#3820](https://github.com/jupyterhub/jupyterhub/pull/3820) ([@yuvipanda](https://github.com/yuvipanda), [@choldgraf](https://github.com/choldgraf), [@consideRatio](https://github.com/consideRatio)) + +#### Contributors to this release + +([GitHub contributors page for this release](https://github.com/jupyterhub/jupyterhub/graphs/contributors?from=2022-03-07&to=2022-03-11&type=c)) + +[@choldgraf](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Acholdgraf+updated%3A2022-03-07..2022-03-11&type=Issues) | [@consideRatio](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3AconsideRatio+updated%3A2022-03-07..2022-03-11&type=Issues) | [@minrk](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aminrk+updated%3A2022-03-07..2022-03-11&type=Issues) | [@NarekA](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3ANarekA+updated%3A2022-03-07..2022-03-11&type=Issues) | [@yuvipanda](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Ayuvipanda+updated%3A2022-03-07..2022-03-11&type=Issues) + +### 2.2.0 
2022-03-07 + +JupyterHub 2.2.0 is a small release. +The main new feature is the ability of Authenticators to [manage group membership](authenticator-groups), +e.g. when the identity provider has its own concept of groups that should be preserved +in JupyterHub. + +The links to access user servers from the admin page have been restored. + +([full changelog](https://github.com/jupyterhub/jupyterhub/compare/2.1.1...2.2.0)) + +#### New features added + +- Enable `options_from_form(spawner, form_data)` signature from configuration file [#3791](https://github.com/jupyterhub/jupyterhub/pull/3791) ([@rcthomas](https://github.com/rcthomas), [@minrk](https://github.com/minrk)) +- Authenticator user group management [#3548](https://github.com/jupyterhub/jupyterhub/pull/3548) ([@thomafred](https://github.com/thomafred), [@minrk](https://github.com/minrk)) + +#### Enhancements made + +- Add user token to JupyterLab PageConfig [#3809](https://github.com/jupyterhub/jupyterhub/pull/3809) ([@minrk](https://github.com/minrk), [@manics](https://github.com/manics), [@consideRatio](https://github.com/consideRatio)) +- show insecure-login-warning for all authenticators [#3793](https://github.com/jupyterhub/jupyterhub/pull/3793) ([@satra](https://github.com/satra), [@minrk](https://github.com/minrk)) +- short-circuit token permission check if token and owner share role [#3792](https://github.com/jupyterhub/jupyterhub/pull/3792) ([@minrk](https://github.com/minrk), [@consideRatio](https://github.com/consideRatio)) +- Named server support, access links in admin page [#3790](https://github.com/jupyterhub/jupyterhub/pull/3790) ([@NarekA](https://github.com/NarekA), [@minrk](https://github.com/minrk), [@ykazakov](https://github.com/ykazakov), [@manics](https://github.com/manics)) + +#### Bugs fixed + +- Keep Spawner.server in sync with underlying orm_spawner.server [#3810](https://github.com/jupyterhub/jupyterhub/pull/3810) ([@minrk](https://github.com/minrk), 
[@manics](https://github.com/manics), [@GeorgianaElena](https://github.com/GeorgianaElena), [@consideRatio](https://github.com/consideRatio)) +- Replace failed spawners when starting new launch [#3802](https://github.com/jupyterhub/jupyterhub/pull/3802) ([@minrk](https://github.com/minrk), [@consideRatio](https://github.com/consideRatio)) +- Log proxy's public_url only when started by JupyterHub [#3781](https://github.com/jupyterhub/jupyterhub/pull/3781) ([@cqzlxl](https://github.com/cqzlxl), [@consideRatio](https://github.com/consideRatio), [@minrk](https://github.com/minrk)) + +#### Documentation improvements + +- Apache2 Documentation: Updates Reverse Proxy Configuration (TLS/SSL, Protocols, Headers) [#3813](https://github.com/jupyterhub/jupyterhub/pull/3813) ([@rzo1](https://github.com/rzo1), [@minrk](https://github.com/minrk)) +- Update example to not reference an undefined scope [#3812](https://github.com/jupyterhub/jupyterhub/pull/3812) ([@ktaletsk](https://github.com/ktaletsk), [@minrk](https://github.com/minrk)) +- Apache: set X-Forwarded-Proto header [#3808](https://github.com/jupyterhub/jupyterhub/pull/3808) ([@manics](https://github.com/manics), [@consideRatio](https://github.com/consideRatio), [@rzo1](https://github.com/rzo1), [@tobi45](https://github.com/tobi45)) +- idle-culler example config missing closing bracket [#3803](https://github.com/jupyterhub/jupyterhub/pull/3803) ([@tmtabor](https://github.com/tmtabor), [@consideRatio](https://github.com/consideRatio)) + +#### Behavior Changes + +- Stop opening PAM sessions by default [#3787](https://github.com/jupyterhub/jupyterhub/pull/3787) ([@minrk](https://github.com/minrk), [@consideRatio](https://github.com/consideRatio)) + +#### Contributors to this release + +([GitHub contributors page for this release](https://github.com/jupyterhub/jupyterhub/graphs/contributors?from=2022-01-25&to=2022-03-07&type=c)) + 
+[@blink1073](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Ablink1073+updated%3A2022-01-25..2022-03-07&type=Issues) | [@clkao](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aclkao+updated%3A2022-01-25..2022-03-07&type=Issues) | [@consideRatio](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3AconsideRatio+updated%3A2022-01-25..2022-03-07&type=Issues) | [@cqzlxl](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Acqzlxl+updated%3A2022-01-25..2022-03-07&type=Issues) | [@dependabot](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Adependabot+updated%3A2022-01-25..2022-03-07&type=Issues) | [@dtaniwaki](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Adtaniwaki+updated%3A2022-01-25..2022-03-07&type=Issues) | [@fcollonval](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Afcollonval+updated%3A2022-01-25..2022-03-07&type=Issues) | [@GeorgianaElena](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3AGeorgianaElena+updated%3A2022-01-25..2022-03-07&type=Issues) | [@github-actions](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Agithub-actions+updated%3A2022-01-25..2022-03-07&type=Issues) | [@kshitija08](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Akshitija08+updated%3A2022-01-25..2022-03-07&type=Issues) | [@ktaletsk](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aktaletsk+updated%3A2022-01-25..2022-03-07&type=Issues) | [@manics](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Amanics+updated%3A2022-01-25..2022-03-07&type=Issues) | [@minrk](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aminrk+updated%3A2022-01-25..2022-03-07&type=Issues) | [@NarekA](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3ANarekA+updated%3A2022-01-25..2022-03-07&type=Issues) | 
[@pre-commit-ci](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Apre-commit-ci+updated%3A2022-01-25..2022-03-07&type=Issues) | [@rajat404](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Arajat404+updated%3A2022-01-25..2022-03-07&type=Issues) | [@rcthomas](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Arcthomas+updated%3A2022-01-25..2022-03-07&type=Issues) | [@ryogesh](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aryogesh+updated%3A2022-01-25..2022-03-07&type=Issues) | [@rzo1](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Arzo1+updated%3A2022-01-25..2022-03-07&type=Issues) | [@satra](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Asatra+updated%3A2022-01-25..2022-03-07&type=Issues) | [@thomafred](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Athomafred+updated%3A2022-01-25..2022-03-07&type=Issues) | [@tmtabor](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Atmtabor+updated%3A2022-01-25..2022-03-07&type=Issues) | [@tobi45](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Atobi45+updated%3A2022-01-25..2022-03-07&type=Issues) | [@ykazakov](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aykazakov+updated%3A2022-01-25..2022-03-07&type=Issues) + +## 2.1 + +### 2.1.1 2022-01-25 + +2.1.1 is a tiny bugfix release, +fixing an issue where admins did not receive the new `read:metrics` permission. 
+ +([full changelog](https://github.com/jupyterhub/jupyterhub/compare/2.1.0...2.1.1)) + +#### Bugs fixed + +- add missing read:metrics scope to admin role [#3778](https://github.com/jupyterhub/jupyterhub/pull/3778) ([@minrk](https://github.com/minrk), [@consideRatio](https://github.com/consideRatio)) + +#### Contributors to this release + +([GitHub contributors page for this release](https://github.com/jupyterhub/jupyterhub/graphs/contributors?from=2022-01-21&to=2022-01-25&type=c)) + +[@consideRatio](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3AconsideRatio+updated%3A2022-01-21..2022-01-25&type=Issues) | [@dependabot](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Adependabot+updated%3A2022-01-21..2022-01-25&type=Issues) | [@manics](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Amanics+updated%3A2022-01-21..2022-01-25&type=Issues) | [@minrk](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aminrk+updated%3A2022-01-21..2022-01-25&type=Issues) + +### 2.1.0 2022-01-21 + +2.1.0 is a small bugfix release, resolving regressions in 2.0 and adding further refinements. +In particular, the authenticated prometheus metrics endpoint did not work in 2.0 because it lacked a scope. +To access the authenticated metrics endpoint with a token, +upgrade to 2.1 and make sure the token/owner has the `read:metrics` scope. + +Custom error messages for failed spawns are now handled more consistently on the spawn-progress API and the spawn-failed HTML page. +Previously, spawn-progress did not relay the custom message provided by `exception.jupyterhub_message`, +and full HTML messages in `exception.jupyterhub_html_message` can now be displayed in both contexts. + +The long-deprecated, inconsistent behavior when users visited a URL for another user's server, +where they could sometimes be redirected back to their own server, +has been removed in favor of consistent behavior based on the user's permissions. 
+To share a URL that will take any user to their own server, use `https://my.hub/hub/user-redirect/path/...`. + +([full changelog](https://github.com/jupyterhub/jupyterhub/compare/2.0.2...2.1.0)) + +#### Enhancements made + +- relay custom messages in exception.jupyterhub_message in progress API [#3764](https://github.com/jupyterhub/jupyterhub/pull/3764) ([@minrk](https://github.com/minrk)) +- Add the capability to inform a connection to Alembic Migration Script [#3762](https://github.com/jupyterhub/jupyterhub/pull/3762) ([@DougTrajano](https://github.com/DougTrajano)) + +#### Bugs fixed + +- Fix loading Spawner.user_options from db [#3773](https://github.com/jupyterhub/jupyterhub/pull/3773) ([@IgorBerman](https://github.com/IgorBerman)) +- Add missing `read:metrics` scope for authenticated metrics endpoint [#3770](https://github.com/jupyterhub/jupyterhub/pull/3770) ([@minrk](https://github.com/minrk)) +- apply scope checks to some admin-or-self situations [#3763](https://github.com/jupyterhub/jupyterhub/pull/3763) ([@minrk](https://github.com/minrk)) + +#### Maintenance and upkeep improvements + +- DOCS: Add github metadata for edit button [#3775](https://github.com/jupyterhub/jupyterhub/pull/3775) ([@minrk](https://github.com/minrk)) + +#### Documentation improvements + +- Improve documentation about spawner exception handling [#3765](https://github.com/jupyterhub/jupyterhub/pull/3765) ([@twalcari](https://github.com/twalcari)) + +#### Contributors to this release + +([GitHub contributors page for this release](https://github.com/jupyterhub/jupyterhub/graphs/contributors?from=2022-01-10&to=2022-01-21&type=c)) + +[@consideRatio](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3AconsideRatio+updated%3A2022-01-10..2022-01-21&type=Issues) | [@dependabot](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Adependabot+updated%3A2022-01-10..2022-01-21&type=Issues) | 
[@DougTrajano](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3ADougTrajano+updated%3A2022-01-10..2022-01-21&type=Issues) | [@IgorBerman](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3AIgorBerman+updated%3A2022-01-10..2022-01-21&type=Issues) | [@minrk](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aminrk+updated%3A2022-01-10..2022-01-21&type=Issues) | [@twalcari](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Atwalcari+updated%3A2022-01-10..2022-01-21&type=Issues) | [@welcome](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Awelcome+updated%3A2022-01-10..2022-01-21&type=Issues) + +## 2.0 + +### [2.0.2] 2022-01-10 + +2.0.2 fixes a regression in 2.0.1 causing false positives +rejecting valid requests as cross-origin, +mostly when JupyterHub is behind additional proxies. + +([full changelog](https://github.com/jupyterhub/jupyterhub/compare/2.0.1...2.0.2)) + +#### Bugs fixed + +- use outermost proxied entry when looking up browser protocol [#3757](https://github.com/jupyterhub/jupyterhub/pull/3757) ([@minrk](https://github.com/minrk)) + +#### Maintenance and upkeep improvements + +- remove unused macro with missing references [#3760](https://github.com/jupyterhub/jupyterhub/pull/3760) ([@minrk](https://github.com/minrk)) +- ci: refactor to avoid triggering all tests on changes to docs [#3750](https://github.com/jupyterhub/jupyterhub/pull/3750) ([@consideRatio](https://github.com/consideRatio)) +- Extra test_cors_check tests [#3746](https://github.com/jupyterhub/jupyterhub/pull/3746) ([@manics](https://github.com/manics)) + +#### Documentation improvements + +- DOCS: Update theme configuration [#3754](https://github.com/jupyterhub/jupyterhub/pull/3754) ([@choldgraf](https://github.com/choldgraf)) +- DOC: Add note about allowed_users not being set [#3748](https://github.com/jupyterhub/jupyterhub/pull/3748) ([@choldgraf](https://github.com/choldgraf)) +- localhost 
URL is http, not https [#3747](https://github.com/jupyterhub/jupyterhub/pull/3747) ([@minrk](https://github.com/minrk)) + +#### Contributors to this release + +([GitHub contributors page for this release](https://github.com/jupyterhub/jupyterhub/graphs/contributors?from=2021-12-22&to=2022-01-10&type=c)) + +[@choldgraf](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Acholdgraf+updated%3A2021-12-22..2022-01-10&type=Issues) | [@consideRatio](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3AconsideRatio+updated%3A2021-12-22..2022-01-10&type=Issues) | [@github-actions](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Agithub-actions+updated%3A2021-12-22..2022-01-10&type=Issues) | [@jakob-keller](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Ajakob-keller+updated%3A2021-12-22..2022-01-10&type=Issues) | [@manics](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Amanics+updated%3A2021-12-22..2022-01-10&type=Issues) | [@meeseeksmachine](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Ameeseeksmachine+updated%3A2021-12-22..2022-01-10&type=Issues) | [@minrk](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aminrk+updated%3A2021-12-22..2022-01-10&type=Issues) | [@pre-commit-ci](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Apre-commit-ci+updated%3A2021-12-22..2022-01-10&type=Issues) | [@welcome](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Awelcome+updated%3A2021-12-22..2022-01-10&type=Issues) + +### [2.0.1] + +([full changelog](https://github.com/jupyterhub/jupyterhub/compare/2.0.0...2.0.1)) + +2.0.1 is a bugfix release, with some additional small improvements, +especially in the new RBAC handling and admin page. + +Several issues are fixed where users might not have the +default 'user' role as expected. 
+ +#### Enhancements made + +- Use URL from authenticator on default login form [#3723](https://github.com/jupyterhub/jupyterhub/pull/3723) ([@sgaist](https://github.com/sgaist)) +- always assign default roles on login [#3722](https://github.com/jupyterhub/jupyterhub/pull/3722) ([@minrk](https://github.com/minrk)) +- use intersect_scopes utility to check token permissions [#3705](https://github.com/jupyterhub/jupyterhub/pull/3705) ([@minrk](https://github.com/minrk)) +- React Error Handling [#3697](https://github.com/jupyterhub/jupyterhub/pull/3697) ([@naatebarber](https://github.com/naatebarber)) +- add option to use a different Host header for referer checks [#3195](https://github.com/jupyterhub/jupyterhub/pull/3195) ([@kylewm](https://github.com/kylewm)) + +#### Bugs fixed + +- initialize new admin users with default roles [#3735](https://github.com/jupyterhub/jupyterhub/pull/3735) ([@minrk](https://github.com/minrk)) +- Fix missing f-string modifier [#3733](https://github.com/jupyterhub/jupyterhub/pull/3733) ([@manics](https://github.com/manics)) +- accept token auth on `/hub/user/...` [#3731](https://github.com/jupyterhub/jupyterhub/pull/3731) ([@minrk](https://github.com/minrk)) +- simplify default role assignment [#3720](https://github.com/jupyterhub/jupyterhub/pull/3720) ([@minrk](https://github.com/minrk)) +- fix Spawner.oauth_roles config [#3717](https://github.com/jupyterhub/jupyterhub/pull/3717) ([@minrk](https://github.com/minrk)) +- Fix error message about Authenticator.pre_spawn_start [#3716](https://github.com/jupyterhub/jupyterhub/pull/3716) ([@minrk](https://github.com/minrk)) +- admin: Pass Base Url [#3715](https://github.com/jupyterhub/jupyterhub/pull/3715) ([@naatebarber](https://github.com/naatebarber)) +- Grant role after user creation during config load [#3714](https://github.com/jupyterhub/jupyterhub/pull/3714) ([@a3626a](https://github.com/a3626a)) +- Avoid clearing user role membership when defining custom user scopes 
[#3708](https://github.com/jupyterhub/jupyterhub/pull/3708) ([@minrk](https://github.com/minrk)) +- cors: handle mismatched implicit/explicit ports in host header [#3701](https://github.com/jupyterhub/jupyterhub/pull/3701) ([@minrk](https://github.com/minrk)) + +#### Maintenance and upkeep improvements + +- clarify `role` argument in grant/strip_role [#3727](https://github.com/jupyterhub/jupyterhub/pull/3727) ([@minrk](https://github.com/minrk)) +- check for db clients before requesting install [#3719](https://github.com/jupyterhub/jupyterhub/pull/3719) ([@minrk](https://github.com/minrk)) +- run jsx tests in their own job [#3698](https://github.com/jupyterhub/jupyterhub/pull/3698) ([@minrk](https://github.com/minrk)) + +#### Documentation improvements + +- update service-whoami example [#3726](https://github.com/jupyterhub/jupyterhub/pull/3726) ([@minrk](https://github.com/minrk)) + +#### Contributors to this release + +([GitHub contributors page for this release](https://github.com/jupyterhub/jupyterhub/graphs/contributors?from=2021-12-01&to=2021-12-22&type=c)) + +[@a3626a](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aa3626a+updated%3A2021-12-01..2021-12-22&type=Issues) | [@betatim](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Abetatim+updated%3A2021-12-01..2021-12-22&type=Issues) | [@consideRatio](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3AconsideRatio+updated%3A2021-12-01..2021-12-22&type=Issues) | [@github-actions](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Agithub-actions+updated%3A2021-12-01..2021-12-22&type=Issues) | [@kylewm](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Akylewm+updated%3A2021-12-01..2021-12-22&type=Issues) | [@manics](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Amanics+updated%3A2021-12-01..2021-12-22&type=Issues) | 
[@minrk](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aminrk+updated%3A2021-12-01..2021-12-22&type=Issues) | [@naatebarber](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Anaatebarber+updated%3A2021-12-01..2021-12-22&type=Issues) | [@pre-commit-ci](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Apre-commit-ci+updated%3A2021-12-01..2021-12-22&type=Issues) | [@sgaist](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Asgaist+updated%3A2021-12-01..2021-12-22&type=Issues) | [@welcome](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Awelcome+updated%3A2021-12-01..2021-12-22&type=Issues) + +### [2.0.0] + +JupyterHub 2.0 is a big release! + +The most significant change is the addition of [roles and scopes][rbac] +to the JupyterHub permissions model, +allowing more fine-grained access control. +Read more about it [in the docs][rbac]. + +In particular, the 'admin' level of permissions should not be needed anymore, +and you can now grant users and services only the permissions they need, not more. +We encourage you to review permissions, especially any service or user with `admin: true` +and consider assigning only the necessary roles and scopes. + +[rbac]: ./rbac/index.md + +JupyterHub 2.0 requires an update to the database schema, +so **make sure to [read the upgrade documentation and back up your database](admin/upgrading) +before upgrading**. + +:::{admonition} stop all servers before upgrading +Upgrading JupyterHub to 2.0 revokes all tokens issued before the upgrade, +which means that single-user servers started before the upgrade +will become inaccessible after the upgrade until they have been stopped and started again. +To avoid this, it is best to shut down all servers prior to the upgrade. 
+::: + +Other major changes that may require updates to your deployment, +depending on what features you use: + +- List endpoints now support [pagination][], and have a max page size, + which means API consumers must be updated to make paginated requests + if you have a lot of users and/or groups. +- Spawners have stopped specifying _any_ command-line options to single-user servers by default. + Previously, `--ip` and `--port` could be specified on the command-line. + From 2.0 forward, JupyterHub will only communicate options to Spawners via environment variables, + and the command to be launched is configured exclusively via `Spawner.cmd` and `Spawner.args`. + +[pagination]: api-pagination + +Other new features: + +- new Admin page, written in React. + With RBAC, it should now be fully possible to implement a custom admin panel + as a service via the REST API. +- JupyterLab is the default UI for single-user servers, + if available in the user environment. + See [more info](classic-notebook-ui) + in the docs about switching back to the classic notebook, + if you are not ready to switch to JupyterLab. +- NullAuthenticator is now bundled with JupyterHub, + so you no longer need to install the `nullauthenticator` package to disable login, + you can set `c.JupyterHub.authenticator_class = 'null'`. +- Support `jupyterhub --show-config` option to see your current jupyterhub configuration. +- Add expiration date dropdown to Token page + +and major bug fixes: + +- Improve database rollback recovery on broken connections + +and other changes: + +- Requests to a not-running server (e.g. visiting `/user/someuser/`) + will return an HTTP 424 error instead of 503, + making it easier to monitor for real deployment problems. + JupyterLab in the user environment should be at least version 3.1.16 + to recognize this error code as a stopped server. + You can temporarily opt-in to the older behavior (e.g. 
if older JupyterLab is required) + by setting `c.JupyterHub.use_legacy_stopped_server_status_code = True`. + +Plus lots of little fixes along the way. + +### 2.0.0 - 2021-12-01 + +([full changelog](https://github.com/jupyterhub/jupyterhub/compare/1.5.0...2.0.0)) + +#### New features added + +- Add NullAuthenticator to jupyterhub [#3619](https://github.com/jupyterhub/jupyterhub/pull/3619) ([@manics](https://github.com/manics)) +- 2.0: jupyterlab by default [#3615](https://github.com/jupyterhub/jupyterhub/pull/3615) ([@minrk](https://github.com/minrk)) +- support inherited `--show-config` flags from base Application [#3559](https://github.com/jupyterhub/jupyterhub/pull/3559) ([@minrk](https://github.com/minrk)) +- Add expiration date dropdown to Token page [#3552](https://github.com/jupyterhub/jupyterhub/pull/3552) ([@dolfinus](https://github.com/dolfinus)) +- add opt-in model for paginated list results [#3535](https://github.com/jupyterhub/jupyterhub/pull/3535) ([@minrk](https://github.com/minrk)) +- Support auto login when used as a OAuth2 provider [#3488](https://github.com/jupyterhub/jupyterhub/pull/3488) ([@yuvipanda](https://github.com/yuvipanda)) +- Roles and Scopes (RBAC) [#3438](https://github.com/jupyterhub/jupyterhub/pull/3438) ([@minrk](https://github.com/minrk)) +- Make JupyterHub Admin page into a React app [#3398](https://github.com/jupyterhub/jupyterhub/pull/3398) ([@naatebarber](https://github.com/naatebarber)) +- Stop specifying `--ip` and `--port` on the command-line [#3381](https://github.com/jupyterhub/jupyterhub/pull/3381) ([@minrk](https://github.com/minrk)) + +#### Enhancements made + +- Add Session id to token/identify models [#3685](https://github.com/jupyterhub/jupyterhub/pull/3685) ([@minrk](https://github.com/minrk)) +- Log single-user app versions at startup [#3681](https://github.com/jupyterhub/jupyterhub/pull/3681) ([@minrk](https://github.com/minrk)) +- create groups declared in roles 
[#3664](https://github.com/jupyterhub/jupyterhub/pull/3664) ([@minrk](https://github.com/minrk)) +- Fail suspected API requests with 424, not 503 [#3636](https://github.com/jupyterhub/jupyterhub/pull/3636) ([@yuvipanda](https://github.com/yuvipanda)) +- add delete scopes for users, groups, servers [#3616](https://github.com/jupyterhub/jupyterhub/pull/3616) ([@minrk](https://github.com/minrk)) +- Reduce logging verbosity of 'checking routes' [#3604](https://github.com/jupyterhub/jupyterhub/pull/3604) ([@yuvipanda](https://github.com/yuvipanda)) +- Remove a couple every-request debug statements [#3582](https://github.com/jupyterhub/jupyterhub/pull/3582) ([@minrk](https://github.com/minrk)) +- Validate Content-Type Header for api POST requests [#3575](https://github.com/jupyterhub/jupyterhub/pull/3575) ([@VaishnaviHire](https://github.com/VaishnaviHire)) +- Improved Grammar for the Documentation [#3572](https://github.com/jupyterhub/jupyterhub/pull/3572) ([@eruditehassan](https://github.com/eruditehassan)) + +#### Bugs fixed + +- Hub: only accept tokens in API requests [#3686](https://github.com/jupyterhub/jupyterhub/pull/3686) ([@minrk](https://github.com/minrk)) +- Forward-port fixes from 1.5.0 security release [#3679](https://github.com/jupyterhub/jupyterhub/pull/3679) ([@minrk](https://github.com/minrk)) +- raise 404 on admin attempt to spawn nonexistent user [#3653](https://github.com/jupyterhub/jupyterhub/pull/3653) ([@minrk](https://github.com/minrk)) +- new user token returns 200 instead of 201 [#3646](https://github.com/jupyterhub/jupyterhub/pull/3646) ([@joegasewicz](https://github.com/joegasewicz)) +- Added base_url to path for jupyterhub-session-id cookie [#3625](https://github.com/jupyterhub/jupyterhub/pull/3625) ([@albertmichaelj](https://github.com/albertmichaelj)) +- Fix wrong name of auth_state_hook in the exception log [#3569](https://github.com/jupyterhub/jupyterhub/pull/3569) ([@dolfinus](https://github.com/dolfinus)) +- Stop injecting statsd 
parameters into the configurable HTTP proxy [#3568](https://github.com/jupyterhub/jupyterhub/pull/3568) ([@paccorsi](https://github.com/paccorsi)) +- explicit DB rollback for 500 errors [#3566](https://github.com/jupyterhub/jupyterhub/pull/3566) ([@nsshah1288](https://github.com/nsshah1288)) +- don't omit server model if it's empty [#3564](https://github.com/jupyterhub/jupyterhub/pull/3564) ([@minrk](https://github.com/minrk)) +- ensure admin requests for missing users 404 [#3563](https://github.com/jupyterhub/jupyterhub/pull/3563) ([@minrk](https://github.com/minrk)) +- Avoid zombie processes in case of using LocalProcessSpawner [#3543](https://github.com/jupyterhub/jupyterhub/pull/3543) ([@dolfinus](https://github.com/dolfinus)) +- Fix regression where external services api_token became required [#3531](https://github.com/jupyterhub/jupyterhub/pull/3531) ([@consideRatio](https://github.com/consideRatio)) +- Fix allow_all check when only allow_admin is set [#3526](https://github.com/jupyterhub/jupyterhub/pull/3526) ([@dolfinus](https://github.com/dolfinus)) +- Bug: save_bearer_token (provider.py) passes a float value to the expires_at field (int) [#3484](https://github.com/jupyterhub/jupyterhub/pull/3484) ([@weisdd](https://github.com/weisdd)) + +#### Maintenance and upkeep improvements + +- build jupyterhub/singleuser along with other images [#3690](https://github.com/jupyterhub/jupyterhub/pull/3690) ([@minrk](https://github.com/minrk)) +- always use relative paths in data_files [#3682](https://github.com/jupyterhub/jupyterhub/pull/3682) ([@minrk](https://github.com/minrk)) +- Forward-port fixes from 1.5.0 security release [#3679](https://github.com/jupyterhub/jupyterhub/pull/3679) ([@minrk](https://github.com/minrk)) +- verify that successful login assigns default role [#3674](https://github.com/jupyterhub/jupyterhub/pull/3674) ([@minrk](https://github.com/minrk)) +- more calculators [#3673](https://github.com/jupyterhub/jupyterhub/pull/3673) 
([@minrk](https://github.com/minrk)) +- use v2 of jupyterhub/action-major-minor-tag-calculator [#3672](https://github.com/jupyterhub/jupyterhub/pull/3672) ([@minrk](https://github.com/minrk)) +- Add support-bot [#3670](https://github.com/jupyterhub/jupyterhub/pull/3670) ([@manics](https://github.com/manics)) +- use tbump to tag versions [#3669](https://github.com/jupyterhub/jupyterhub/pull/3669) ([@minrk](https://github.com/minrk)) +- use stable autodoc-traits [#3667](https://github.com/jupyterhub/jupyterhub/pull/3667) ([@minrk](https://github.com/minrk)) +- Tests for our openapi spec [#3665](https://github.com/jupyterhub/jupyterhub/pull/3665) ([@minrk](https://github.com/minrk)) +- clarify some log messages during role assignment [#3663](https://github.com/jupyterhub/jupyterhub/pull/3663) ([@minrk](https://github.com/minrk)) +- Rename 'all' metascope to more descriptive 'inherit' [#3661](https://github.com/jupyterhub/jupyterhub/pull/3661) ([@minrk](https://github.com/minrk)) +- minor refinement of excessive scopes error message [#3660](https://github.com/jupyterhub/jupyterhub/pull/3660) ([@minrk](https://github.com/minrk)) +- deprecate instead of remove `@admin_only` auth decorator [#3659](https://github.com/jupyterhub/jupyterhub/pull/3659) ([@minrk](https://github.com/minrk)) +- improve timeout handling and messages [#3658](https://github.com/jupyterhub/jupyterhub/pull/3658) ([@minrk](https://github.com/minrk)) +- add api-only doc [#3640](https://github.com/jupyterhub/jupyterhub/pull/3640) ([@minrk](https://github.com/minrk)) +- Add pyupgrade --py36-plus to pre-commit config [#3586](https://github.com/jupyterhub/jupyterhub/pull/3586) ([@consideRatio](https://github.com/consideRatio)) +- pyupgrade: run pyupgrade --py36-plus and black on all but tests [#3585](https://github.com/jupyterhub/jupyterhub/pull/3585) ([@consideRatio](https://github.com/consideRatio)) +- pyupgrade: run pyupgrade --py36-plus and black on jupyterhub/tests 
[#3584](https://github.com/jupyterhub/jupyterhub/pull/3584) ([@consideRatio](https://github.com/consideRatio)) +- remove use of deprecated distutils [#3562](https://github.com/jupyterhub/jupyterhub/pull/3562) ([@minrk](https://github.com/minrk)) +- remove old, unused tasks.py [#3561](https://github.com/jupyterhub/jupyterhub/pull/3561) ([@minrk](https://github.com/minrk)) +- remove very old backward-compat for LocalProcess subclasses [#3558](https://github.com/jupyterhub/jupyterhub/pull/3558) ([@minrk](https://github.com/minrk)) +- Remove pre-commit from GHA [#3524](https://github.com/jupyterhub/jupyterhub/pull/3524) ([@minrk](https://github.com/minrk)) +- bump autodoc-traits [#3510](https://github.com/jupyterhub/jupyterhub/pull/3510) ([@minrk](https://github.com/minrk)) +- release docker workflow: 'branchRegex: ^\w[\w-.]\*$' [#3509](https://github.com/jupyterhub/jupyterhub/pull/3509) ([@manics](https://github.com/manics)) +- exclude dependabot push events from release workflow [#3505](https://github.com/jupyterhub/jupyterhub/pull/3505) ([@minrk](https://github.com/minrk)) +- prepare to rename default branch to main [#3462](https://github.com/jupyterhub/jupyterhub/pull/3462) ([@minrk](https://github.com/minrk)) + +#### Documentation improvements + +- Service auth doc [#3695](https://github.com/jupyterhub/jupyterhub/pull/3695) ([@minrk](https://github.com/minrk)) +- changelog for 2.0.0rc5 [#3692](https://github.com/jupyterhub/jupyterhub/pull/3692) ([@minrk](https://github.com/minrk)) +- update 2.0 changelog [#3687](https://github.com/jupyterhub/jupyterhub/pull/3687) ([@minrk](https://github.com/minrk)) +- changelog for 2.0 release candidate [#3662](https://github.com/jupyterhub/jupyterhub/pull/3662) ([@minrk](https://github.com/minrk)) +- docs: fix typo in proxy config example [#3657](https://github.com/jupyterhub/jupyterhub/pull/3657) ([@edgarcosta](https://github.com/edgarcosta)) +- add 424 status code change to changelog 
[#3649](https://github.com/jupyterhub/jupyterhub/pull/3649) ([@minrk](https://github.com/minrk)) +- add latest changes to 2.0 changelog [#3628](https://github.com/jupyterhub/jupyterhub/pull/3628) ([@minrk](https://github.com/minrk)) +- server-api example typo: trim space in token file [#3626](https://github.com/jupyterhub/jupyterhub/pull/3626) ([@minrk](https://github.com/minrk)) +- Fix heading level in changelog [#3610](https://github.com/jupyterhub/jupyterhub/pull/3610) ([@mriedem](https://github.com/mriedem)) +- update quickstart requirements [#3607](https://github.com/jupyterhub/jupyterhub/pull/3607) ([@minrk](https://github.com/minrk)) +- 2.0 changelog [#3602](https://github.com/jupyterhub/jupyterhub/pull/3602) ([@minrk](https://github.com/minrk)) +- Update/cleanup README [#3601](https://github.com/jupyterhub/jupyterhub/pull/3601) ([@manics](https://github.com/manics)) +- mailto link typo [#3593](https://github.com/jupyterhub/jupyterhub/pull/3593) ([@minrk](https://github.com/minrk)) +- [doc] add example specifying scopes for a default role [#3581](https://github.com/jupyterhub/jupyterhub/pull/3581) ([@minrk](https://github.com/minrk)) +- Add detailed doc for starting/waiting for servers via api [#3565](https://github.com/jupyterhub/jupyterhub/pull/3565) ([@minrk](https://github.com/minrk)) +- doc: Mention a list of known proxies available [#3546](https://github.com/jupyterhub/jupyterhub/pull/3546) ([@AbdealiJK](https://github.com/AbdealiJK)) +- Update changelog for 1.4.2 in main branch [#3539](https://github.com/jupyterhub/jupyterhub/pull/3539) ([@consideRatio](https://github.com/consideRatio)) +- Retrospectively update changelog for 1.4.1 in main branch [#3537](https://github.com/jupyterhub/jupyterhub/pull/3537) ([@consideRatio](https://github.com/consideRatio)) +- Fix contributor documentation's link [#3521](https://github.com/jupyterhub/jupyterhub/pull/3521) ([@icankeep](https://github.com/icankeep)) +- Add research study participation notice to readme 
[#3506](https://github.com/jupyterhub/jupyterhub/pull/3506) ([@sgibson91](https://github.com/sgibson91)) +- Fix typo [#3494](https://github.com/jupyterhub/jupyterhub/pull/3494) ([@davidbrochart](https://github.com/davidbrochart)) +- Add Chameleon to JupyterHub deployment gallery [#3482](https://github.com/jupyterhub/jupyterhub/pull/3482) ([@diurnalist](https://github.com/diurnalist)) +- Initial SECURITY.md [#3445](https://github.com/jupyterhub/jupyterhub/pull/3445) ([@rpwagner](https://github.com/rpwagner)) + +#### Contributors to this release + +([GitHub contributors page for this release](https://github.com/jupyterhub/jupyterhub/graphs/contributors?from=2021-04-19&to=2021-11-30&type=c)) + +[@0mar](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3A0mar+updated%3A2021-04-19..2021-11-30&type=Issues) | [@AbdealiJK](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3AAbdealiJK+updated%3A2021-04-19..2021-11-30&type=Issues) | [@albertmichaelj](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aalbertmichaelj+updated%3A2021-04-19..2021-11-30&type=Issues) | [@betatim](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Abetatim+updated%3A2021-04-19..2021-11-30&type=Issues) | [@bollwyvl](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Abollwyvl+updated%3A2021-04-19..2021-11-30&type=Issues) | [@choldgraf](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Acholdgraf+updated%3A2021-04-19..2021-11-30&type=Issues) | [@consideRatio](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3AconsideRatio+updated%3A2021-04-19..2021-11-30&type=Issues) | [@cslocum](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Acslocum+updated%3A2021-04-19..2021-11-30&type=Issues) | [@danlester](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Adanlester+updated%3A2021-04-19..2021-11-30&type=Issues) | 
[@davidbrochart](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Adavidbrochart+updated%3A2021-04-19..2021-11-30&type=Issues) | [@dependabot](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Adependabot+updated%3A2021-04-19..2021-11-30&type=Issues) | [@diurnalist](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Adiurnalist+updated%3A2021-04-19..2021-11-30&type=Issues) | [@dolfinus](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Adolfinus+updated%3A2021-04-19..2021-11-30&type=Issues) | [@echarles](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aecharles+updated%3A2021-04-19..2021-11-30&type=Issues) | [@edgarcosta](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aedgarcosta+updated%3A2021-04-19..2021-11-30&type=Issues) | [@ellisonbg](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aellisonbg+updated%3A2021-04-19..2021-11-30&type=Issues) | [@eruditehassan](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aeruditehassan+updated%3A2021-04-19..2021-11-30&type=Issues) | [@icankeep](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aicankeep+updated%3A2021-04-19..2021-11-30&type=Issues) | [@IvanaH8](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3AIvanaH8+updated%3A2021-04-19..2021-11-30&type=Issues) | [@joegasewicz](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Ajoegasewicz+updated%3A2021-04-19..2021-11-30&type=Issues) | [@manics](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Amanics+updated%3A2021-04-19..2021-11-30&type=Issues) | [@meeseeksmachine](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Ameeseeksmachine+updated%3A2021-04-19..2021-11-30&type=Issues) | [@minrk](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aminrk+updated%3A2021-04-19..2021-11-30&type=Issues) | 
[@mriedem](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Amriedem+updated%3A2021-04-19..2021-11-30&type=Issues) | [@naatebarber](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Anaatebarber+updated%3A2021-04-19..2021-11-30&type=Issues) | [@nsshah1288](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Ansshah1288+updated%3A2021-04-19..2021-11-30&type=Issues) | [@octavd](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aoctavd+updated%3A2021-04-19..2021-11-30&type=Issues) | [@OrnithOrtion](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3AOrnithOrtion+updated%3A2021-04-19..2021-11-30&type=Issues) | [@paccorsi](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Apaccorsi+updated%3A2021-04-19..2021-11-30&type=Issues) | [@panruipr](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Apanruipr+updated%3A2021-04-19..2021-11-30&type=Issues) | [@pre-commit-ci](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Apre-commit-ci+updated%3A2021-04-19..2021-11-30&type=Issues) | [@rpwagner](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Arpwagner+updated%3A2021-04-19..2021-11-30&type=Issues) | [@sgibson91](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Asgibson91+updated%3A2021-04-19..2021-11-30&type=Issues) | [@support](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Asupport+updated%3A2021-04-19..2021-11-30&type=Issues) | [@twalcari](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Atwalcari+updated%3A2021-04-19..2021-11-30&type=Issues) | [@VaishnaviHire](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3AVaishnaviHire+updated%3A2021-04-19..2021-11-30&type=Issues) | [@warwing](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Awarwing+updated%3A2021-04-19..2021-11-30&type=Issues) | 
[@weisdd](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aweisdd+updated%3A2021-04-19..2021-11-30&type=Issues) | [@welcome](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Awelcome+updated%3A2021-04-19..2021-11-30&type=Issues) | [@willingc](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Awillingc+updated%3A2021-04-19..2021-11-30&type=Issues) | [@ykazakov](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aykazakov+updated%3A2021-04-19..2021-11-30&type=Issues) | [@yuvipanda](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Ayuvipanda+updated%3A2021-04-19..2021-11-30&type=Issues) + +## 1.5 + +JupyterHub 1.5 is a **security release**, +fixing a vulnerability [ghsa-cw7p-q79f-m2v7][] where JupyterLab users +with multiple tabs open could fail to log out completely, +leaving their browser with valid credentials until they log out again. + +A few fully backward-compatible features have been backported from 2.0. 
+ +[ghsa-cw7p-q79f-m2v7]: https://github.com/jupyterhub/jupyterhub/security/advisories/GHSA-cw7p-q79f-m2v7 + +### [1.5.0] 2021-11-04 + +([full changelog](https://github.com/jupyterhub/jupyterhub/compare/1.4.2...1.5.0)) + +#### New features added + +- Backport #3636 to 1.4.x (opt-in support for JupyterHub.use_legacy_stopped_server_status_code) [#3639](https://github.com/jupyterhub/jupyterhub/pull/3639) ([@yuvipanda](https://github.com/yuvipanda)) +- Backport PR #3552 on branch 1.4.x (Add expiration date dropdown to Token page) [#3580](https://github.com/jupyterhub/jupyterhub/pull/3580) ([@meeseeksmachine](https://github.com/meeseeksmachine)) +- Backport PR #3488 on branch 1.4.x (Support auto login when used as a OAuth2 provider) [#3579](https://github.com/jupyterhub/jupyterhub/pull/3579) ([@meeseeksmachine](https://github.com/meeseeksmachine)) + +#### Maintenance and upkeep improvements + +- 1.4.x: update doc requirements [#3677](https://github.com/jupyterhub/jupyterhub/pull/3677) ([@minrk](https://github.com/minrk)) + +#### Documentation improvements + +- use_legacy_stopped_server_status_code: use 1.\* language [#3676](https://github.com/jupyterhub/jupyterhub/pull/3676) ([@manics](https://github.com/manics)) + +#### Contributors to this release + +([GitHub contributors page for this release](https://github.com/jupyterhub/jupyterhub/graphs/contributors?from=2021-07-16&to=2021-11-03&type=c)) + +[@choldgraf](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Acholdgraf+updated%3A2021-07-16..2021-11-03&type=Issues) | [@consideRatio](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3AconsideRatio+updated%3A2021-07-16..2021-11-03&type=Issues) | [@manics](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Amanics+updated%3A2021-07-16..2021-11-03&type=Issues) | [@meeseeksmachine](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Ameeseeksmachine+updated%3A2021-07-16..2021-11-03&type=Issues) | 
[@minrk](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aminrk+updated%3A2021-07-16..2021-11-03&type=Issues) | [@support](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Asupport+updated%3A2021-07-16..2021-11-03&type=Issues) | [@welcome](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Awelcome+updated%3A2021-07-16..2021-11-03&type=Issues) | [@yuvipanda](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Ayuvipanda+updated%3A2021-07-16..2021-11-03&type=Issues) + +## 1.4 + +JupyterHub 1.4 is a small release, with several enhancements, bug fixes, +and new configuration options. + +There are no database schema changes requiring migration from 1.3 to 1.4. + +1.4 is also the first version to start publishing docker images for arm64. + +In particular, OAuth tokens stored in user cookies, +used for accessing single-user servers and hub-authenticated services, +have changed their expiration from one hour to the expiry of the cookie +in which they are stored (default: two weeks). +This is now also configurable via `JupyterHub.oauth_token_expires_in`. + +The result is that it should be much less likely for auth tokens stored in cookies +to expire during the lifetime of a server. + +### [1.4.2] 2021-06-15 + +1.4.2 is a small bugfix release for 1.4. 
+ +([full changelog](https://github.com/jupyterhub/jupyterhub/compare/1.4.1...d9860aa98cc537cf685022f81b8f725bfef41304)) + +#### Bugs fixed + +- Fix regression where external services api_token became required [#3531](https://github.com/jupyterhub/jupyterhub/pull/3531) ([@consideRatio](https://github.com/consideRatio)) +- Bug: save_bearer_token (provider.py) passes a float value to the expires_at field (int) [#3484](https://github.com/jupyterhub/jupyterhub/pull/3484) ([@weisdd](https://github.com/weisdd)) + +#### Maintenance and upkeep improvements + +- bump autodoc-traits [#3510](https://github.com/jupyterhub/jupyterhub/pull/3510) ([@minrk](https://github.com/minrk)) + +#### Documentation improvements + +- Fix contributor documentation's link [#3521](https://github.com/jupyterhub/jupyterhub/pull/3521) ([@icankeep](https://github.com/icankeep)) +- Fix typo [#3494](https://github.com/jupyterhub/jupyterhub/pull/3494) ([@davidbrochart](https://github.com/davidbrochart)) + +#### Contributors to this release + +([GitHub contributors page for this release](https://github.com/jupyterhub/jupyterhub/graphs/contributors?from=2021-05-12&to=2021-07-15&type=c)) + +[@consideRatio](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3AconsideRatio+updated%3A2021-05-12..2021-07-15&type=Issues) | [@davidbrochart](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Adavidbrochart+updated%3A2021-05-12..2021-07-15&type=Issues) | [@icankeep](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aicankeep+updated%3A2021-05-12..2021-07-15&type=Issues) | [@minrk](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aminrk+updated%3A2021-05-12..2021-07-15&type=Issues) | [@weisdd](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aweisdd+updated%3A2021-05-12..2021-07-15&type=Issues) + +### [1.4.1] 2021-05-12 + +1.4.1 is a small bugfix release for 1.4. 
+ +([full changelog](https://github.com/jupyterhub/jupyterhub/compare/1.4.0...1.4.1)) + +#### Enhancements made + +#### Bugs fixed + +- define Spawner.delete_forever on base Spawner [#3454](https://github.com/jupyterhub/jupyterhub/pull/3454) ([@minrk](https://github.com/minrk)) +- patch base handlers from both jupyter_server and notebook [#3437](https://github.com/jupyterhub/jupyterhub/pull/3437) ([@minrk](https://github.com/minrk)) + +#### Maintenance and upkeep improvements + +- ci: fix typo in environment variable [#3457](https://github.com/jupyterhub/jupyterhub/pull/3457) ([@consideRatio](https://github.com/consideRatio)) +- avoid re-using asyncio.Locks across event loops [#3456](https://github.com/jupyterhub/jupyterhub/pull/3456) ([@minrk](https://github.com/minrk)) +- ci: github workflow security, pin action to sha etc [#3436](https://github.com/jupyterhub/jupyterhub/pull/3436) ([@consideRatio](https://github.com/consideRatio)) + +#### Documentation improvements + +- Fix documentation [#3452](https://github.com/jupyterhub/jupyterhub/pull/3452) ([@davidbrochart](https://github.com/davidbrochart)) + +#### Contributors to this release + +([GitHub contributors page for this release](https://github.com/jupyterhub/jupyterhub/graphs/contributors?from=2021-04-19&to=2021-05-12&type=c)) + +[@0mar](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3A0mar+updated%3A2021-04-19..2021-05-12&type=Issues) | [@betatim](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Abetatim+updated%3A2021-04-19..2021-05-12&type=Issues) | [@consideRatio](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3AconsideRatio+updated%3A2021-04-19..2021-05-12&type=Issues) | [@danlester](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Adanlester+updated%3A2021-04-19..2021-05-12&type=Issues) | 
[@davidbrochart](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Adavidbrochart+updated%3A2021-04-19..2021-05-12&type=Issues) | [@IvanaH8](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3AIvanaH8+updated%3A2021-04-19..2021-05-12&type=Issues) | [@manics](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Amanics+updated%3A2021-04-19..2021-05-12&type=Issues) | [@minrk](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aminrk+updated%3A2021-04-19..2021-05-12&type=Issues) | [@naatebarber](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Anaatebarber+updated%3A2021-04-19..2021-05-12&type=Issues) | [@OrnithOrtion](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3AOrnithOrtion+updated%3A2021-04-19..2021-05-12&type=Issues) | [@support](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Asupport+updated%3A2021-04-19..2021-05-12&type=Issues) | [@welcome](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Awelcome+updated%3A2021-04-19..2021-05-12&type=Issues) + +### [1.4.0] 2021-04-19 + +([full changelog](https://github.com/jupyterhub/jupyterhub/compare/1.3.0...1.4.0)) + +#### New features added + +- Support Proxy.extra_routes [#3430](https://github.com/jupyterhub/jupyterhub/pull/3430) ([@yuvipanda](https://github.com/yuvipanda)) +- login-template: Add a "login_container" block inside the div-container. 
[#3422](https://github.com/jupyterhub/jupyterhub/pull/3422) ([@olifre](https://github.com/olifre)) +- Docker arm64 builds [#3421](https://github.com/jupyterhub/jupyterhub/pull/3421) ([@manics](https://github.com/manics)) +- make oauth token expiry configurable [#3411](https://github.com/jupyterhub/jupyterhub/pull/3411) ([@minrk](https://github.com/minrk)) +- allow the hub to not be the default route [#3373](https://github.com/jupyterhub/jupyterhub/pull/3373) ([@minrk](https://github.com/minrk)) +- Allow customization of service menu via templates [#3345](https://github.com/jupyterhub/jupyterhub/pull/3345) ([@stv0g](https://github.com/stv0g)) +- Add Spawner.delete_forever [#3337](https://github.com/jupyterhub/jupyterhub/pull/3337) ([@nsshah1288](https://github.com/nsshah1288)) +- Allow to set spawner-specific hub connect URL [#3326](https://github.com/jupyterhub/jupyterhub/pull/3326) ([@dtaniwaki](https://github.com/dtaniwaki)) +- Make Authenticator Custom HTML Flexible [#3315](https://github.com/jupyterhub/jupyterhub/pull/3315) ([@dtaniwaki](https://github.com/dtaniwaki)) + +#### Enhancements made + +- Log the exception raised in Spawner.post_stop_hook instead of raising it [#3418](https://github.com/jupyterhub/jupyterhub/pull/3418) ([@jiajunjie](https://github.com/jiajunjie)) +- Don't delete all oauth clients on startup [#3407](https://github.com/jupyterhub/jupyterhub/pull/3407) ([@yuvipanda](https://github.com/yuvipanda)) +- Use 'secrets' module to generate secrets [#3394](https://github.com/jupyterhub/jupyterhub/pull/3394) ([@yuvipanda](https://github.com/yuvipanda)) +- Allow cookie_secret to be set to a hexadecimal string [#3343](https://github.com/jupyterhub/jupyterhub/pull/3343) ([@consideRatio](https://github.com/consideRatio)) +- Clear tornado xsrf cookie on logout [#3341](https://github.com/jupyterhub/jupyterhub/pull/3341) ([@dtaniwaki](https://github.com/dtaniwaki)) +- always log slow requests at least at info-level 
[#3338](https://github.com/jupyterhub/jupyterhub/pull/3338) ([@minrk](https://github.com/minrk)) + +#### Bugs fixed + +- always start redirect count at 1 when redirecting /hub/user/:name -> /user/:name [#3377](https://github.com/jupyterhub/jupyterhub/pull/3377) ([@minrk](https://github.com/minrk)) +- Always raise on failed token creation [#3370](https://github.com/jupyterhub/jupyterhub/pull/3370) ([@minrk](https://github.com/minrk)) +- make_singleuser_app: patch-in HubAuthenticatedHandler at lower priority [#3347](https://github.com/jupyterhub/jupyterhub/pull/3347) ([@minrk](https://github.com/minrk)) +- Fix pagination with named servers [#3335](https://github.com/jupyterhub/jupyterhub/pull/3335) ([@rcthomas](https://github.com/rcthomas)) + +#### Maintenance and upkeep improvements + +- typos in onbuild, demo images for push [#3429](https://github.com/jupyterhub/jupyterhub/pull/3429) ([@minrk](https://github.com/minrk)) +- Disable docker jupyterhub-demo arm64 build [#3425](https://github.com/jupyterhub/jupyterhub/pull/3425) ([@manics](https://github.com/manics)) +- Docker arm64 builds [#3421](https://github.com/jupyterhub/jupyterhub/pull/3421) ([@manics](https://github.com/manics)) +- avoid deprecated engine.table_names [#3392](https://github.com/jupyterhub/jupyterhub/pull/3392) ([@minrk](https://github.com/minrk)) +- alpine dockerfile: avoid compilation by getting some deps from apk [#3386](https://github.com/jupyterhub/jupyterhub/pull/3386) ([@minrk](https://github.com/minrk)) +- Fix sqlachemy.interfaces.PoolListener deprecation for tests [#3383](https://github.com/jupyterhub/jupyterhub/pull/3383) ([@IvanaH8](https://github.com/IvanaH8)) +- Update pre-commit hooks versions [#3362](https://github.com/jupyterhub/jupyterhub/pull/3362) ([@consideRatio](https://github.com/consideRatio)) +- add (and run) prettier pre-commit hook [#3360](https://github.com/jupyterhub/jupyterhub/pull/3360) ([@minrk](https://github.com/minrk)) +- move get_custom_html to base Authenticator 
class [#3359](https://github.com/jupyterhub/jupyterhub/pull/3359) ([@minrk](https://github.com/minrk)) +- publish release outputs as artifacts [#3349](https://github.com/jupyterhub/jupyterhub/pull/3349) ([@minrk](https://github.com/minrk)) +- [TST] Do not implicitly create users in auth_header [#3344](https://github.com/jupyterhub/jupyterhub/pull/3344) ([@minrk](https://github.com/minrk)) +- specify minimum alembic 1.4 [#3339](https://github.com/jupyterhub/jupyterhub/pull/3339) ([@minrk](https://github.com/minrk)) +- ci: github actions, allow for manual test runs and fix badge in readme [#3324](https://github.com/jupyterhub/jupyterhub/pull/3324) ([@consideRatio](https://github.com/consideRatio)) +- publish releases from github actions [#3305](https://github.com/jupyterhub/jupyterhub/pull/3305) ([@minrk](https://github.com/minrk)) + +#### Documentation improvements + +- DOC: Conform to numpydoc. [#3428](https://github.com/jupyterhub/jupyterhub/pull/3428) ([@Carreau](https://github.com/Carreau)) +- Fix link to jupyterhub/jupyterhub-the-hard-way [#3417](https://github.com/jupyterhub/jupyterhub/pull/3417) ([@manics](https://github.com/manics)) +- Changelog for 1.4 [#3415](https://github.com/jupyterhub/jupyterhub/pull/3415) ([@minrk](https://github.com/minrk)) +- Fastapi example [#3403](https://github.com/jupyterhub/jupyterhub/pull/3403) ([@kafonek](https://github.com/kafonek)) +- Added Azure AD as a supported authenticator. 
[#3401](https://github.com/jupyterhub/jupyterhub/pull/3401) ([@maxshowarth](https://github.com/maxshowarth)) +- Remove the hard way guide [#3375](https://github.com/jupyterhub/jupyterhub/pull/3375) ([@manics](https://github.com/manics)) +- :memo: Fix telemetry section [#3333](https://github.com/jupyterhub/jupyterhub/pull/3333) ([@trallard](https://github.com/trallard)) +- Fix the help related to the proxy check [#3332](https://github.com/jupyterhub/jupyterhub/pull/3332) ([@jiajunjie](https://github.com/jiajunjie)) +- Mention Jupyter Server as optional single-user backend in documentation [#3329](https://github.com/jupyterhub/jupyterhub/pull/3329) ([@Zsailer](https://github.com/Zsailer)) +- Fix mixup in comment regarding the sync parameter [#3325](https://github.com/jupyterhub/jupyterhub/pull/3325) ([@andrewisplinghoff](https://github.com/andrewisplinghoff)) +- docs: fix simple typo, funciton -> function [#3314](https://github.com/jupyterhub/jupyterhub/pull/3314) ([@timgates42](https://github.com/timgates42)) + +#### Contributors to this release + +([GitHub contributors page for this release](https://github.com/jupyterhub/jupyterhub/graphs/contributors?from=2020-12-11&to=2021-04-19&type=c)) + +[@00Kai0](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3A00Kai0+updated%3A2020-12-11..2021-04-19&type=Issues) | [@8rV1n](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3A8rV1n+updated%3A2020-12-11..2021-04-19&type=Issues) | [@akhilputhiry](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aakhilputhiry+updated%3A2020-12-11..2021-04-19&type=Issues) | [@alexal](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aalexal+updated%3A2020-12-11..2021-04-19&type=Issues) | [@analytically](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aanalytically+updated%3A2020-12-11..2021-04-19&type=Issues) | 
[@andreamazzoni](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aandreamazzoni+updated%3A2020-12-11..2021-04-19&type=Issues) | [@andrewisplinghoff](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aandrewisplinghoff+updated%3A2020-12-11..2021-04-19&type=Issues) | [@BertR](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3ABertR+updated%3A2020-12-11..2021-04-19&type=Issues) | [@betatim](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Abetatim+updated%3A2020-12-11..2021-04-19&type=Issues) | [@bitnik](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Abitnik+updated%3A2020-12-11..2021-04-19&type=Issues) | [@bollwyvl](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Abollwyvl+updated%3A2020-12-11..2021-04-19&type=Issues) | [@carluri](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Acarluri+updated%3A2020-12-11..2021-04-19&type=Issues) | [@Carreau](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3ACarreau+updated%3A2020-12-11..2021-04-19&type=Issues) | [@consideRatio](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3AconsideRatio+updated%3A2020-12-11..2021-04-19&type=Issues) | [@davidedelvento](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Adavidedelvento+updated%3A2020-12-11..2021-04-19&type=Issues) | [@dhirschfeld](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Adhirschfeld+updated%3A2020-12-11..2021-04-19&type=Issues) | [@dmpe](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Admpe+updated%3A2020-12-11..2021-04-19&type=Issues) | [@dsblank](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Adsblank+updated%3A2020-12-11..2021-04-19&type=Issues) | [@dtaniwaki](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Adtaniwaki+updated%3A2020-12-11..2021-04-19&type=Issues) | 
[@echarles](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aecharles+updated%3A2020-12-11..2021-04-19&type=Issues) | [@elgalu](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aelgalu+updated%3A2020-12-11..2021-04-19&type=Issues) | [@eran-pinhas](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aeran-pinhas+updated%3A2020-12-11..2021-04-19&type=Issues) | [@gaebor](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Agaebor+updated%3A2020-12-11..2021-04-19&type=Issues) | [@GeorgianaElena](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3AGeorgianaElena+updated%3A2020-12-11..2021-04-19&type=Issues) | [@gsemet](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Agsemet+updated%3A2020-12-11..2021-04-19&type=Issues) | [@gweis](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Agweis+updated%3A2020-12-11..2021-04-19&type=Issues) | [@hynek2001](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Ahynek2001+updated%3A2020-12-11..2021-04-19&type=Issues) | [@ianabc](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aianabc+updated%3A2020-12-11..2021-04-19&type=Issues) | [@ibre5041](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aibre5041+updated%3A2020-12-11..2021-04-19&type=Issues) | [@IvanaH8](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3AIvanaH8+updated%3A2020-12-11..2021-04-19&type=Issues) | [@jhegedus42](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Ajhegedus42+updated%3A2020-12-11..2021-04-19&type=Issues) | [@jhermann](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Ajhermann+updated%3A2020-12-11..2021-04-19&type=Issues) | [@jiajunjie](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Ajiajunjie+updated%3A2020-12-11..2021-04-19&type=Issues) | 
[@jtlz2](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Ajtlz2+updated%3A2020-12-11..2021-04-19&type=Issues) | [@kafonek](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Akafonek+updated%3A2020-12-11..2021-04-19&type=Issues) | [@katsar0v](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Akatsar0v+updated%3A2020-12-11..2021-04-19&type=Issues) | [@kinow](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Akinow+updated%3A2020-12-11..2021-04-19&type=Issues) | [@krinsman](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Akrinsman+updated%3A2020-12-11..2021-04-19&type=Issues) | [@laurensdv](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Alaurensdv+updated%3A2020-12-11..2021-04-19&type=Issues) | [@lits789](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Alits789+updated%3A2020-12-11..2021-04-19&type=Issues) | [@m-alekseev](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Am-alekseev+updated%3A2020-12-11..2021-04-19&type=Issues) | [@mabbasi90](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Amabbasi90+updated%3A2020-12-11..2021-04-19&type=Issues) | [@manics](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Amanics+updated%3A2020-12-11..2021-04-19&type=Issues) | [@manniche](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Amanniche+updated%3A2020-12-11..2021-04-19&type=Issues) | [@maxshowarth](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Amaxshowarth+updated%3A2020-12-11..2021-04-19&type=Issues) | [@mdivk](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Amdivk+updated%3A2020-12-11..2021-04-19&type=Issues) | [@meeseeksmachine](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Ameeseeksmachine+updated%3A2020-12-11..2021-04-19&type=Issues) | 
[@minrk](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aminrk+updated%3A2020-12-11..2021-04-19&type=Issues) | [@mogthesprog](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Amogthesprog+updated%3A2020-12-11..2021-04-19&type=Issues) | [@mriedem](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Amriedem+updated%3A2020-12-11..2021-04-19&type=Issues) | [@nsshah1288](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Ansshah1288+updated%3A2020-12-11..2021-04-19&type=Issues) | [@olifre](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aolifre+updated%3A2020-12-11..2021-04-19&type=Issues) | [@PandaWhoCodes](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3APandaWhoCodes+updated%3A2020-12-11..2021-04-19&type=Issues) | [@pawsaw](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Apawsaw+updated%3A2020-12-11..2021-04-19&type=Issues) | [@phozzy](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aphozzy+updated%3A2020-12-11..2021-04-19&type=Issues) | [@playermanny2](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aplayermanny2+updated%3A2020-12-11..2021-04-19&type=Issues) | [@rabsr](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Arabsr+updated%3A2020-12-11..2021-04-19&type=Issues) | [@randy3k](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Arandy3k+updated%3A2020-12-11..2021-04-19&type=Issues) | [@rawrgulmuffins](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Arawrgulmuffins+updated%3A2020-12-11..2021-04-19&type=Issues) | [@rcthomas](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Arcthomas+updated%3A2020-12-11..2021-04-19&type=Issues) | [@rebeca-maia](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Arebeca-maia+updated%3A2020-12-11..2021-04-19&type=Issues) | 
[@rebenkoy](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Arebenkoy+updated%3A2020-12-11..2021-04-19&type=Issues) | [@rkdarst](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Arkdarst+updated%3A2020-12-11..2021-04-19&type=Issues) | [@robnagler](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Arobnagler+updated%3A2020-12-11..2021-04-19&type=Issues) | [@ronaldpetty](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aronaldpetty+updated%3A2020-12-11..2021-04-19&type=Issues) | [@ryanlovett](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aryanlovett+updated%3A2020-12-11..2021-04-19&type=Issues) | [@ryogesh](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aryogesh+updated%3A2020-12-11..2021-04-19&type=Issues) | [@sbailey-auro](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Asbailey-auro+updated%3A2020-12-11..2021-04-19&type=Issues) | [@sigurdurb](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Asigurdurb+updated%3A2020-12-11..2021-04-19&type=Issues) | [@SivaAccionLabs](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3ASivaAccionLabs+updated%3A2020-12-11..2021-04-19&type=Issues) | [@sougou](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Asougou+updated%3A2020-12-11..2021-04-19&type=Issues) | [@stv0g](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Astv0g+updated%3A2020-12-11..2021-04-19&type=Issues) | [@sudi007](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Asudi007+updated%3A2020-12-11..2021-04-19&type=Issues) | [@support](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Asupport+updated%3A2020-12-11..2021-04-19&type=Issues) | [@tathagata](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Atathagata+updated%3A2020-12-11..2021-04-19&type=Issues) | 
[@timgates42](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Atimgates42+updated%3A2020-12-11..2021-04-19&type=Issues) | [@trallard](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Atrallard+updated%3A2020-12-11..2021-04-19&type=Issues) | [@vlizanae](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Avlizanae+updated%3A2020-12-11..2021-04-19&type=Issues) | [@welcome](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Awelcome+updated%3A2020-12-11..2021-04-19&type=Issues) | [@whitespaceninja](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Awhitespaceninja+updated%3A2020-12-11..2021-04-19&type=Issues) | [@whlteXbread](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3AwhlteXbread+updated%3A2020-12-11..2021-04-19&type=Issues) | [@willingc](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Awillingc+updated%3A2020-12-11..2021-04-19&type=Issues) | [@yuvipanda](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Ayuvipanda+updated%3A2020-12-11..2021-04-19&type=Issues) | [@Zsailer](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3AZsailer+updated%3A2020-12-11..2021-04-19&type=Issues) + +## 1.3 + +JupyterHub 1.3 is a small feature release. Highlights include: + +- Require Python >=3.6 (jupyterhub 1.2 is the last release to support 3.5) +- Add a `?state=` filter for getting user list, allowing much quicker responses + when retrieving a small fraction of users. + `state` can be `active`, `inactive`, or `ready`. +- prometheus metrics now include a `jupyterhub_` prefix, + so deployments may need to update their grafana charts to match. +- page templates can now be [async](https://jinja.palletsprojects.com/en/2.11.x/api/#async-support)! 
+ +### [1.3.0] + +([full changelog](https://github.com/jupyterhub/jupyterhub/compare/1.2.1...1.3.0)) + +#### Enhancements made + +- allow services to call /api/user to identify themselves [#3293](https://github.com/jupyterhub/jupyterhub/pull/3293) ([@minrk](https://github.com/minrk)) +- Add optional user agreement to login screen [#3264](https://github.com/jupyterhub/jupyterhub/pull/3264) ([@tlvu](https://github.com/tlvu)) +- [Metrics] Add prefix to prometheus metrics to group all jupyterhub metrics [#3243](https://github.com/jupyterhub/jupyterhub/pull/3243) ([@agp8x](https://github.com/agp8x)) +- Allow options_from_form to be configurable [#3225](https://github.com/jupyterhub/jupyterhub/pull/3225) ([@cbanek](https://github.com/cbanek)) +- add ?state= filter for GET /users [#3177](https://github.com/jupyterhub/jupyterhub/pull/3177) ([@minrk](https://github.com/minrk)) +- Enable async support in jinja2 templates [#3176](https://github.com/jupyterhub/jupyterhub/pull/3176) ([@yuvipanda](https://github.com/yuvipanda)) + +#### Bugs fixed + +- fix increasing pagination limits [#3294](https://github.com/jupyterhub/jupyterhub/pull/3294) ([@minrk](https://github.com/minrk)) +- fix and test TOTAL_USERS count [#3289](https://github.com/jupyterhub/jupyterhub/pull/3289) ([@minrk](https://github.com/minrk)) +- Fix asyncio deprecation asyncio.Task.all_tasks [#3298](https://github.com/jupyterhub/jupyterhub/pull/3298) ([@coffeebenzene](https://github.com/coffeebenzene)) + +#### Maintenance and upkeep improvements + +- bump oldest-required prometheus-client [#3292](https://github.com/jupyterhub/jupyterhub/pull/3292) ([@minrk](https://github.com/minrk)) +- bump black pre-commit hook to 20.8 [#3287](https://github.com/jupyterhub/jupyterhub/pull/3287) ([@minrk](https://github.com/minrk)) +- Test internal_ssl separately [#3266](https://github.com/jupyterhub/jupyterhub/pull/3266) ([@0mar](https://github.com/0mar)) +- wait for pending spawns in spawn_form_admin_access 
[#3253](https://github.com/jupyterhub/jupyterhub/pull/3253) ([@minrk](https://github.com/minrk)) +- Assume py36 and remove @gen.coroutine etc. [#3242](https://github.com/jupyterhub/jupyterhub/pull/3242) ([@consideRatio](https://github.com/consideRatio)) + +#### Documentation improvements + +- Fix curl in jupyter announcements [#3286](https://github.com/jupyterhub/jupyterhub/pull/3286) ([@Sangarshanan](https://github.com/Sangarshanan)) +- CONTRIBUTING: Fix contributor guide URL [#3281](https://github.com/jupyterhub/jupyterhub/pull/3281) ([@olifre](https://github.com/olifre)) +- Update services.md [#3267](https://github.com/jupyterhub/jupyterhub/pull/3267) ([@slemonide](https://github.com/slemonide)) +- [Docs] Fix https reverse proxy redirect issues [#3244](https://github.com/jupyterhub/jupyterhub/pull/3244) ([@mhwasil](https://github.com/mhwasil)) +- Fixed idle-culler references. [#3300](https://github.com/jupyterhub/jupyterhub/pull/3300) ([@mxjeff](https://github.com/mxjeff)) +- Remove the extra parenthesis in service.md [#3303](https://github.com/jupyterhub/jupyterhub/pull/3303) ([@Sangarshanan](https://github.com/Sangarshanan)) + +#### Contributors to this release + +([GitHub contributors page for this release](https://github.com/jupyterhub/jupyterhub/graphs/contributors?from=2020-10-30&to=2020-12-11&type=c)) + +[@0mar](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3A0mar+updated%3A2020-10-30..2020-12-11&type=Issues) | [@agp8x](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aagp8x+updated%3A2020-10-30..2020-12-11&type=Issues) | [@alexweav](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aalexweav+updated%3A2020-10-30..2020-12-11&type=Issues) | [@belfhi](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Abelfhi+updated%3A2020-10-30..2020-12-11&type=Issues) | 
[@betatim](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Abetatim+updated%3A2020-10-30..2020-12-11&type=Issues) | [@cbanek](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Acbanek+updated%3A2020-10-30..2020-12-11&type=Issues) | [@cmd-ntrf](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Acmd-ntrf+updated%3A2020-10-30..2020-12-11&type=Issues) | [@coffeebenzene](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Acoffeebenzene+updated%3A2020-10-30..2020-12-11&type=Issues) | [@consideRatio](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3AconsideRatio+updated%3A2020-10-30..2020-12-11&type=Issues) | [@danlester](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Adanlester+updated%3A2020-10-30..2020-12-11&type=Issues) | [@fcollonval](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Afcollonval+updated%3A2020-10-30..2020-12-11&type=Issues) | [@GeorgianaElena](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3AGeorgianaElena+updated%3A2020-10-30..2020-12-11&type=Issues) | [@ianabc](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aianabc+updated%3A2020-10-30..2020-12-11&type=Issues) | [@IvanaH8](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3AIvanaH8+updated%3A2020-10-30..2020-12-11&type=Issues) | [@manics](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Amanics+updated%3A2020-10-30..2020-12-11&type=Issues) | [@meeseeksmachine](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Ameeseeksmachine+updated%3A2020-10-30..2020-12-11&type=Issues) | [@mhwasil](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Amhwasil+updated%3A2020-10-30..2020-12-11&type=Issues) | [@minrk](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aminrk+updated%3A2020-10-30..2020-12-11&type=Issues) | 
[@mriedem](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Amriedem+updated%3A2020-10-30..2020-12-11&type=Issues) | [@mxjeff](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Amxjeff+updated%3A2020-10-30..2020-12-11&type=Issues) | [@olifre](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aolifre+updated%3A2020-10-30..2020-12-11&type=Issues) | [@rcthomas](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Arcthomas+updated%3A2020-10-30..2020-12-11&type=Issues) | [@rgbkrk](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Argbkrk+updated%3A2020-10-30..2020-12-11&type=Issues) | [@rkdarst](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Arkdarst+updated%3A2020-10-30..2020-12-11&type=Issues) | [@Sangarshanan](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3ASangarshanan+updated%3A2020-10-30..2020-12-11&type=Issues) | [@slemonide](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aslemonide+updated%3A2020-10-30..2020-12-11&type=Issues) | [@support](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Asupport+updated%3A2020-10-30..2020-12-11&type=Issues) | [@tlvu](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Atlvu+updated%3A2020-10-30..2020-12-11&type=Issues) | [@welcome](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Awelcome+updated%3A2020-10-30..2020-12-11&type=Issues) | [@yuvipanda](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Ayuvipanda+updated%3A2020-10-30..2020-12-11&type=Issues) + +## 1.2 + +### [1.2.2] 2020-11-27 + +([full changelog](https://github.com/jupyterhub/jupyterhub/compare/1.2.1...41f291c0c973223c33a6aa1fa86d5d57f297be78)) + +#### Enhancements made + +- Standardize "Sign in" capitalization on the login page [#3252](https://github.com/jupyterhub/jupyterhub/pull/3252) ([@cmd-ntrf](https://github.com/cmd-ntrf)) + 
+#### Bugs fixed + +- Fix RootHandler when default_url is a callable [#3265](https://github.com/jupyterhub/jupyterhub/pull/3265) ([@danlester](https://github.com/danlester)) +- Only preserve params when ?next= is unspecified [#3261](https://github.com/jupyterhub/jupyterhub/pull/3261) ([@minrk](https://github.com/minrk)) +- \[Windows\] Improve robustness when detecting and closing existing proxy processes [#3237](https://github.com/jupyterhub/jupyterhub/pull/3237) ([@alexweav](https://github.com/alexweav)) + +#### Maintenance and upkeep improvements + +- Environment marker on pamela [#3255](https://github.com/jupyterhub/jupyterhub/pull/3255) ([@fcollonval](https://github.com/fcollonval)) +- remove push-branch conditions for CI [#3250](https://github.com/jupyterhub/jupyterhub/pull/3250) ([@minrk](https://github.com/minrk)) +- Migrate from travis to GitHub actions [#3246](https://github.com/jupyterhub/jupyterhub/pull/3246) ([@consideRatio](https://github.com/consideRatio)) + +#### Documentation improvements + +- Update services-basics.md to use jupyterhub_idle_culler [#3257](https://github.com/jupyterhub/jupyterhub/pull/3257) ([@manics](https://github.com/manics)) + +#### Contributors to this release + +([GitHub contributors page for this release](https://github.com/jupyterhub/jupyterhub/graphs/contributors?from=2020-10-30&to=2020-11-27&type=c)) + +[@alexweav](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aalexweav+updated%3A2020-10-30..2020-11-27&type=Issues) | [@belfhi](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Abelfhi+updated%3A2020-10-30..2020-11-27&type=Issues) | [@betatim](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Abetatim+updated%3A2020-10-30..2020-11-27&type=Issues) | [@cmd-ntrf](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Acmd-ntrf+updated%3A2020-10-30..2020-11-27&type=Issues) | 
[@consideRatio](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3AconsideRatio+updated%3A2020-10-30..2020-11-27&type=Issues) | [@danlester](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Adanlester+updated%3A2020-10-30..2020-11-27&type=Issues) | [@fcollonval](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Afcollonval+updated%3A2020-10-30..2020-11-27&type=Issues) | [@GeorgianaElena](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3AGeorgianaElena+updated%3A2020-10-30..2020-11-27&type=Issues) | [@ianabc](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aianabc+updated%3A2020-10-30..2020-11-27&type=Issues) | [@IvanaH8](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3AIvanaH8+updated%3A2020-10-30..2020-11-27&type=Issues) | [@manics](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Amanics+updated%3A2020-10-30..2020-11-27&type=Issues) | [@meeseeksmachine](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Ameeseeksmachine+updated%3A2020-10-30..2020-11-27&type=Issues) | [@minrk](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aminrk+updated%3A2020-10-30..2020-11-27&type=Issues) | [@mriedem](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Amriedem+updated%3A2020-10-30..2020-11-27&type=Issues) | [@olifre](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aolifre+updated%3A2020-10-30..2020-11-27&type=Issues) | [@rcthomas](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Arcthomas+updated%3A2020-10-30..2020-11-27&type=Issues) | [@rgbkrk](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Argbkrk+updated%3A2020-10-30..2020-11-27&type=Issues) | [@rkdarst](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Arkdarst+updated%3A2020-10-30..2020-11-27&type=Issues) | 
[@slemonide](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aslemonide+updated%3A2020-10-30..2020-11-27&type=Issues) | [@support](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Asupport+updated%3A2020-10-30..2020-11-27&type=Issues) | [@welcome](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Awelcome+updated%3A2020-10-30..2020-11-27&type=Issues) | [@yuvipanda](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Ayuvipanda+updated%3A2020-10-30..2020-11-27&type=Issues) + +### [1.2.1] 2020-10-30 + +([full changelog](https://github.com/jupyterhub/jupyterhub/compare/1.2.0...1.2.1)) + +#### Bugs fixed + +- JupyterHub services' oauth_no_confirm configuration regression in 1.2.0 [#3234](https://github.com/jupyterhub/jupyterhub/pull/3234) ([@bitnik](https://github.com/bitnik)) + +#### Contributors to this release + +([GitHub contributors page for this release](https://github.com/jupyterhub/jupyterhub/graphs/contributors?from=2020-10-29&to=2020-10-30&type=c)) + +[@bitnik](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Abitnik+updated%3A2020-10-29..2020-10-30&type=Issues) + +### [1.2.0] 2020-10-29 + +JupyterHub 1.2 is an incremental release with lots of small improvements. +It is unlikely that users will have to change much to upgrade, +but lots of new things are possible and/or better! + +There are no database schema changes requiring migration from 1.1 to 1.2. + +Highlights: + +- Deprecate black/whitelist configuration fields in favor of more inclusive blocked/allowed language. 
For example: `c.Authenticator.allowed_users = {'user', ...}` +- More configuration of page templates and service display +- Pagination of the admin page improving performance with large numbers of users +- Improved control of user redirect +- Support for [jupyter-server](https://jupyter-server.readthedocs.io/en/latest/)-based single-user servers, such as [Voilà](https://voila-gallery.org) and latest JupyterLab. +- Lots more improvements to documentation, HTML pages, and customizations + +([full changelog](https://github.com/jupyterhub/jupyterhub/compare/1.1.0...1.2.0)) + +#### Enhancements made + +- make pagination configurable [#3229](https://github.com/jupyterhub/jupyterhub/pull/3229) ([@minrk](https://github.com/minrk)) +- Make api_request to CHP's REST API more reliable [#3223](https://github.com/jupyterhub/jupyterhub/pull/3223) ([@consideRatio](https://github.com/consideRatio)) +- Control service display [#3160](https://github.com/jupyterhub/jupyterhub/pull/3160) ([@rcthomas](https://github.com/rcthomas)) +- Add a footer block + wrap the admin footer in this block [#3136](https://github.com/jupyterhub/jupyterhub/pull/3136) ([@pabepadu](https://github.com/pabepadu)) +- Allow JupyterHub.default_url to be a callable [#3133](https://github.com/jupyterhub/jupyterhub/pull/3133) ([@danlester](https://github.com/danlester)) +- Allow head requests for the health endpoint [#3131](https://github.com/jupyterhub/jupyterhub/pull/3131) ([@rkevin-arch](https://github.com/rkevin-arch)) +- Hide hamburger button menu in mobile/responsive mode and fix other minor issues [#3103](https://github.com/jupyterhub/jupyterhub/pull/3103) ([@kinow](https://github.com/kinow)) +- build jupyterhub/jupyterhub-demo image on docker hub [#3083](https://github.com/jupyterhub/jupyterhub/pull/3083) ([@minrk](https://github.com/minrk)) +- Add JupyterHub Demo docker image [#3059](https://github.com/jupyterhub/jupyterhub/pull/3059) ([@GeorgianaElena](https://github.com/GeorgianaElena)) +- Warn if both 
bind_url and ip/port/base_url are set [#3057](https://github.com/jupyterhub/jupyterhub/pull/3057) ([@GeorgianaElena](https://github.com/GeorgianaElena)) +- UI Feedback on Submit [#3028](https://github.com/jupyterhub/jupyterhub/pull/3028) ([@possiblyMikeB](https://github.com/possiblyMikeB)) +- Support kubespawner running on a IPv6 only cluster [#3020](https://github.com/jupyterhub/jupyterhub/pull/3020) ([@stv0g](https://github.com/stv0g)) +- Spawn with options passed in query arguments to /spawn [#3013](https://github.com/jupyterhub/jupyterhub/pull/3013) ([@twalcari](https://github.com/twalcari)) +- SpawnHandler POST with user form options displays the spawn-pending page [#2978](https://github.com/jupyterhub/jupyterhub/pull/2978) ([@danlester](https://github.com/danlester)) +- Start named servers by pressing the Enter key [#2960](https://github.com/jupyterhub/jupyterhub/pull/2960) ([@jtpio](https://github.com/jtpio)) +- Keep the URL fragments after spawning an application [#2952](https://github.com/jupyterhub/jupyterhub/pull/2952) ([@kinow](https://github.com/kinow)) +- Allow implicit spawn via javascript redirect [#2941](https://github.com/jupyterhub/jupyterhub/pull/2941) ([@minrk](https://github.com/minrk)) +- make init_spawners check O(running servers) not O(total users) [#2936](https://github.com/jupyterhub/jupyterhub/pull/2936) ([@minrk](https://github.com/minrk)) +- Add favicon to the base page template [#2930](https://github.com/jupyterhub/jupyterhub/pull/2930) ([@JohnPaton](https://github.com/JohnPaton)) +- Adding pagination in the admin panel [#2929](https://github.com/jupyterhub/jupyterhub/pull/2929) ([@cbjuan](https://github.com/cbjuan)) +- Generate prometheus metrics docs [#2891](https://github.com/jupyterhub/jupyterhub/pull/2891) ([@rajat404](https://github.com/rajat404)) +- Add support for Jupyter Server [#2601](https://github.com/jupyterhub/jupyterhub/pull/2601) ([@yuvipanda](https://github.com/yuvipanda)) + +#### Bugs fixed + +- Fix #2284 must be 
sent from authorization page [#3219](https://github.com/jupyterhub/jupyterhub/pull/3219) ([@elgalu](https://github.com/elgalu)) +- avoid specifying default_value=None in Command traits [#3208](https://github.com/jupyterhub/jupyterhub/pull/3208) ([@minrk](https://github.com/minrk)) +- Prevent OverflowErrors in exponential_backoff() [#3204](https://github.com/jupyterhub/jupyterhub/pull/3204) ([@kreuzert](https://github.com/kreuzert)) +- update prometheus metrics for server spawn when it fails with exception [#3150](https://github.com/jupyterhub/jupyterhub/pull/3150) ([@yhal-nesi](https://github.com/yhal-nesi)) +- jupyterhub/utils: Load system default CA certificates in make_ssl_context [#3140](https://github.com/jupyterhub/jupyterhub/pull/3140) ([@chancez](https://github.com/chancez)) +- admin page sorts on spawner last_activity instead of user last_activity [#3137](https://github.com/jupyterhub/jupyterhub/pull/3137) ([@lydian](https://github.com/lydian)) +- Fix the services dropdown on the admin page [#3132](https://github.com/jupyterhub/jupyterhub/pull/3132) ([@pabepadu](https://github.com/pabepadu)) +- Don't log a warning when slow_spawn_timeout is disabled [#3127](https://github.com/jupyterhub/jupyterhub/pull/3127) ([@mriedem](https://github.com/mriedem)) +- app.py: Work around incompatibility between Tornado 6 and asyncio proactor event loop in python 3.8 on Windows [#3123](https://github.com/jupyterhub/jupyterhub/pull/3123) ([@alexweav](https://github.com/alexweav)) +- jupyterhub/user: clear spawner state after post_stop_hook [#3121](https://github.com/jupyterhub/jupyterhub/pull/3121) ([@rkdarst](https://github.com/rkdarst)) +- fix for stopping named server deleting default server and tests [#3109](https://github.com/jupyterhub/jupyterhub/pull/3109) ([@kxiao-fn](https://github.com/kxiao-fn)) +- Hide hamburger button menu in mobile/responsive mode and fix other minor issues [#3103](https://github.com/jupyterhub/jupyterhub/pull/3103) 
([@kinow](https://github.com/kinow)) +- Rename Authenticator.white/blacklist to allowed/blocked [#3090](https://github.com/jupyterhub/jupyterhub/pull/3090) ([@minrk](https://github.com/minrk)) +- Include the query string parameters when redirecting to a new URL [#3089](https://github.com/jupyterhub/jupyterhub/pull/3089) ([@kinow](https://github.com/kinow)) +- Make `delete_invalid_users` configurable [#3087](https://github.com/jupyterhub/jupyterhub/pull/3087) ([@fcollonval](https://github.com/fcollonval)) +- Ensure client dependencies build before wheel [#3082](https://github.com/jupyterhub/jupyterhub/pull/3082) ([@diurnalist](https://github.com/diurnalist)) +- make Spawner.environment config highest priority [#3081](https://github.com/jupyterhub/jupyterhub/pull/3081) ([@minrk](https://github.com/minrk)) +- Changing start my server button link to spawn url once server is stopped [#3042](https://github.com/jupyterhub/jupyterhub/pull/3042) ([@rabsr](https://github.com/rabsr)) +- Fix CSS on admin page version listing [#3035](https://github.com/jupyterhub/jupyterhub/pull/3035) ([@vilhelmen](https://github.com/vilhelmen)) +- Fix user_row endblock in admin template [#3015](https://github.com/jupyterhub/jupyterhub/pull/3015) ([@jtpio](https://github.com/jtpio)) +- Fix --generate-config bug when specifying a filename [#2907](https://github.com/jupyterhub/jupyterhub/pull/2907) ([@consideRatio](https://github.com/consideRatio)) +- Handle the protocol when ssl is enabled and log the right URL [#2773](https://github.com/jupyterhub/jupyterhub/pull/2773) ([@kinow](https://github.com/kinow)) + +#### Maintenance and upkeep improvements + +- Update travis-ci badge in README.md [#3232](https://github.com/jupyterhub/jupyterhub/pull/3232) ([@consideRatio](https://github.com/consideRatio)) +- stop building docs on circleci [#3209](https://github.com/jupyterhub/jupyterhub/pull/3209) ([@minrk](https://github.com/minrk)) +- Upgraded Jquery dep 
[#3174](https://github.com/jupyterhub/jupyterhub/pull/3174) ([@AngelOnFira](https://github.com/AngelOnFira)) +- Don't allow 'python:3.8 + master dependencies' to fail [#3157](https://github.com/jupyterhub/jupyterhub/pull/3157) ([@manics](https://github.com/manics)) +- Update Dockerfile to ubuntu:focal (Python 3.8) [#3156](https://github.com/jupyterhub/jupyterhub/pull/3156) ([@manics](https://github.com/manics)) +- Simplify code of the health check handler [#3149](https://github.com/jupyterhub/jupyterhub/pull/3149) ([@betatim](https://github.com/betatim)) +- Get error description from error key vs error_description key [#3147](https://github.com/jupyterhub/jupyterhub/pull/3147) ([@jgwerner](https://github.com/jgwerner)) +- Implement singleuser with mixins [#3128](https://github.com/jupyterhub/jupyterhub/pull/3128) ([@minrk](https://github.com/minrk)) +- only build tagged versions on docker tags [#3118](https://github.com/jupyterhub/jupyterhub/pull/3118) ([@minrk](https://github.com/minrk)) +- Log slow_stop_timeout when hit like slow_spawn_timeout [#3111](https://github.com/jupyterhub/jupyterhub/pull/3111) ([@mriedem](https://github.com/mriedem)) +- loosen jupyter-telemetry pin [#3102](https://github.com/jupyterhub/jupyterhub/pull/3102) ([@minrk](https://github.com/minrk)) +- Remove old context-less print statement [#3100](https://github.com/jupyterhub/jupyterhub/pull/3100) ([@mriedem](https://github.com/mriedem)) +- Allow `python:3.8 + master dependencies` to fail [#3079](https://github.com/jupyterhub/jupyterhub/pull/3079) ([@manics](https://github.com/manics)) +- Test with some master dependencies. 
[#3076](https://github.com/jupyterhub/jupyterhub/pull/3076) ([@Carreau](https://github.com/Carreau)) +- synchronize implementation of expiring values [#3072](https://github.com/jupyterhub/jupyterhub/pull/3072) ([@minrk](https://github.com/minrk)) +- More consistent behavior for UserDict.get and `key in UserDict` [#3071](https://github.com/jupyterhub/jupyterhub/pull/3071) ([@minrk](https://github.com/minrk)) +- pin jupyter_telemetry dependency [#3067](https://github.com/jupyterhub/jupyterhub/pull/3067) ([@Zsailer](https://github.com/Zsailer)) +- Use the issue templates from the central repo [#3056](https://github.com/jupyterhub/jupyterhub/pull/3056) ([@GeorgianaElena](https://github.com/GeorgianaElena)) +- Update links to the black GitHub repository [#3054](https://github.com/jupyterhub/jupyterhub/pull/3054) ([@jtpio](https://github.com/jtpio)) +- Log successful /health requests as debug level [#3047](https://github.com/jupyterhub/jupyterhub/pull/3047) ([@consideRatio](https://github.com/consideRatio)) +- Fix broken test due to BeautifulSoup 4.9.0 behavior change [#3025](https://github.com/jupyterhub/jupyterhub/pull/3025) ([@twalcari](https://github.com/twalcari)) +- Remove unused imports [#3019](https://github.com/jupyterhub/jupyterhub/pull/3019) ([@stv0g](https://github.com/stv0g)) +- Use pip instead of conda for building the docs on RTD [#3010](https://github.com/jupyterhub/jupyterhub/pull/3010) ([@GeorgianaElena](https://github.com/GeorgianaElena)) +- Avoid redundant logging of jupyterhub version mismatches [#2971](https://github.com/jupyterhub/jupyterhub/pull/2971) ([@mriedem](https://github.com/mriedem)) +- Add .vscode to gitignore [#2959](https://github.com/jupyterhub/jupyterhub/pull/2959) ([@jtpio](https://github.com/jtpio)) +- preserve auth type when logging obfuscated auth header [#2953](https://github.com/jupyterhub/jupyterhub/pull/2953) ([@minrk](https://github.com/minrk)) +- make spawner:server relationship explicitly one to one 
[#2944](https://github.com/jupyterhub/jupyterhub/pull/2944) ([@minrk](https://github.com/minrk)) +- Add what we need with some margin to Dockerfile's build stage [#2905](https://github.com/jupyterhub/jupyterhub/pull/2905) ([@consideRatio](https://github.com/consideRatio)) +- bump reorder-imports hook [#2899](https://github.com/jupyterhub/jupyterhub/pull/2899) ([@minrk](https://github.com/minrk)) + +#### Documentation improvements + +- Fix typo in documentation [#3226](https://github.com/jupyterhub/jupyterhub/pull/3226) ([@xlotlu](https://github.com/xlotlu)) +- [docs] Remove duplicate line in changelog for 1.1.0 [#3207](https://github.com/jupyterhub/jupyterhub/pull/3207) ([@kinow](https://github.com/kinow)) +- changelog for 1.2.0b1 [#3192](https://github.com/jupyterhub/jupyterhub/pull/3192) ([@consideRatio](https://github.com/consideRatio)) +- Add SELinux configuration for nginx [#3185](https://github.com/jupyterhub/jupyterhub/pull/3185) ([@rainwoodman](https://github.com/rainwoodman)) +- Mention the PAM pitfall on fedora. [#3184](https://github.com/jupyterhub/jupyterhub/pull/3184) ([@rainwoodman](https://github.com/rainwoodman)) +- Added extra documentation for endpoint /users/{name}/servers/{server_name}. 
[#3159](https://github.com/jupyterhub/jupyterhub/pull/3159) ([@synchronizing](https://github.com/synchronizing)) +- docs: please docs linter (move_cert docstring) [#3151](https://github.com/jupyterhub/jupyterhub/pull/3151) ([@consideRatio](https://github.com/consideRatio)) +- Needed NoEsacpe (NE) option for apache [#3143](https://github.com/jupyterhub/jupyterhub/pull/3143) ([@basvandervlies](https://github.com/basvandervlies)) +- Document external service api_tokens better [#3142](https://github.com/jupyterhub/jupyterhub/pull/3142) ([@snickell](https://github.com/snickell)) +- Remove idle culler example [#3114](https://github.com/jupyterhub/jupyterhub/pull/3114) ([@yuvipanda](https://github.com/yuvipanda)) +- docs: unsqueeze logo, remove unused CSS and templates [#3107](https://github.com/jupyterhub/jupyterhub/pull/3107) ([@consideRatio](https://github.com/consideRatio)) +- Update version in docs/rest-api.yaml [#3104](https://github.com/jupyterhub/jupyterhub/pull/3104) ([@cmd-ntrf](https://github.com/cmd-ntrf)) +- Replace zonca/remotespawner with NERSC/sshspawner [#3086](https://github.com/jupyterhub/jupyterhub/pull/3086) ([@manics](https://github.com/manics)) +- Remove already done named servers from roadmap [#3084](https://github.com/jupyterhub/jupyterhub/pull/3084) ([@elgalu](https://github.com/elgalu)) +- proxy settings might cause authentication errors [#3078](https://github.com/jupyterhub/jupyterhub/pull/3078) ([@gatoniel](https://github.com/gatoniel)) +- Add Configuration Reference section to docs [#3077](https://github.com/jupyterhub/jupyterhub/pull/3077) ([@kinow](https://github.com/kinow)) +- document upgrading from api_tokens to services config [#3055](https://github.com/jupyterhub/jupyterhub/pull/3055) ([@minrk](https://github.com/minrk)) +- [Docs] Disable proxy_buffering when using nginx reverse proxy [#3048](https://github.com/jupyterhub/jupyterhub/pull/3048) ([@mhwasil](https://github.com/mhwasil)) +- docs: add proxy_http_version 1.1 
[#3046](https://github.com/jupyterhub/jupyterhub/pull/3046) ([@ceocoder](https://github.com/ceocoder)) +- #1018 PAM added in prerequisites [#3040](https://github.com/jupyterhub/jupyterhub/pull/3040) ([@romainx](https://github.com/romainx)) +- Fix use of auxiliary verb on index.rst [#3022](https://github.com/jupyterhub/jupyterhub/pull/3022) ([@joshmeek](https://github.com/joshmeek)) +- Fix docs CI test failure: duplicate object description [#3021](https://github.com/jupyterhub/jupyterhub/pull/3021) ([@rkdarst](https://github.com/rkdarst)) +- Update issue templates [#3001](https://github.com/jupyterhub/jupyterhub/pull/3001) ([@GeorgianaElena](https://github.com/GeorgianaElena)) +- fix wrong name on firewall [#2997](https://github.com/jupyterhub/jupyterhub/pull/2997) ([@thuvh](https://github.com/thuvh)) +- updating docs theme [#2995](https://github.com/jupyterhub/jupyterhub/pull/2995) ([@choldgraf](https://github.com/choldgraf)) +- Update contributor docs [#2972](https://github.com/jupyterhub/jupyterhub/pull/2972) ([@mriedem](https://github.com/mriedem)) +- Server.user_options rest-api documented [#2966](https://github.com/jupyterhub/jupyterhub/pull/2966) ([@mriedem](https://github.com/mriedem)) +- Pin sphinx theme [#2956](https://github.com/jupyterhub/jupyterhub/pull/2956) ([@manics](https://github.com/manics)) +- [doc] Fix couple typos in the documentation [#2951](https://github.com/jupyterhub/jupyterhub/pull/2951) ([@kinow](https://github.com/kinow)) +- Docs: Fixed grammar on landing page [#2950](https://github.com/jupyterhub/jupyterhub/pull/2950) ([@alexdriedger](https://github.com/alexdriedger)) +- add general faq [#2946](https://github.com/jupyterhub/jupyterhub/pull/2946) ([@minrk](https://github.com/minrk)) +- docs: use metachannel for faster environment solve [#2943](https://github.com/jupyterhub/jupyterhub/pull/2943) ([@minrk](https://github.com/minrk)) +- update docs environments [#2942](https://github.com/jupyterhub/jupyterhub/pull/2942) 
([@minrk](https://github.com/minrk)) +- [doc] Add more docs about Cookies used for authentication in JupyterHub [#2940](https://github.com/jupyterhub/jupyterhub/pull/2940) ([@kinow](https://github.com/kinow)) +- [doc] Use fixed commit plus line number in github link [#2939](https://github.com/jupyterhub/jupyterhub/pull/2939) ([@kinow](https://github.com/kinow)) +- [doc] Fix link to SSL encryption from troubleshooting page [#2938](https://github.com/jupyterhub/jupyterhub/pull/2938) ([@kinow](https://github.com/kinow)) +- rest api: fix schema for remove parameter in rest api [#2917](https://github.com/jupyterhub/jupyterhub/pull/2917) ([@minrk](https://github.com/minrk)) +- Add troubleshooting topics [#2914](https://github.com/jupyterhub/jupyterhub/pull/2914) ([@jgwerner](https://github.com/jgwerner)) +- Several fixes to the doc [#2904](https://github.com/jupyterhub/jupyterhub/pull/2904) ([@reneluria](https://github.com/reneluria)) +- fix: 'Non-ASCII character '\xc3' [#2901](https://github.com/jupyterhub/jupyterhub/pull/2901) ([@jgwerner](https://github.com/jgwerner)) +- Generate prometheus metrics docs [#2891](https://github.com/jupyterhub/jupyterhub/pull/2891) ([@rajat404](https://github.com/rajat404)) + +#### Contributors to this release + +([GitHub contributors page for this release](https://github.com/jupyterhub/jupyterhub/graphs/contributors?from=2020-01-17&to=2020-10-29&type=c)) + +[@0nebody](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3A0nebody+updated%3A2020-01-17..2020-10-29&type=Issues) | [@1kastner](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3A1kastner+updated%3A2020-01-17..2020-10-29&type=Issues) | [@ahkui](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aahkui+updated%3A2020-01-17..2020-10-29&type=Issues) | [@alexdriedger](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aalexdriedger+updated%3A2020-01-17..2020-10-29&type=Issues) | 
[@alexweav](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aalexweav+updated%3A2020-01-17..2020-10-29&type=Issues) | [@AlJohri](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3AAlJohri+updated%3A2020-01-17..2020-10-29&type=Issues) | [@Analect](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3AAnalect+updated%3A2020-01-17..2020-10-29&type=Issues) | [@analytically](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aanalytically+updated%3A2020-01-17..2020-10-29&type=Issues) | [@aneagoe](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aaneagoe+updated%3A2020-01-17..2020-10-29&type=Issues) | [@AngelOnFira](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3AAngelOnFira+updated%3A2020-01-17..2020-10-29&type=Issues) | [@barrachri](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Abarrachri+updated%3A2020-01-17..2020-10-29&type=Issues) | [@basvandervlies](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Abasvandervlies+updated%3A2020-01-17..2020-10-29&type=Issues) | [@betatim](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Abetatim+updated%3A2020-01-17..2020-10-29&type=Issues) | [@bigbosst](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Abigbosst+updated%3A2020-01-17..2020-10-29&type=Issues) | [@blink1073](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Ablink1073+updated%3A2020-01-17..2020-10-29&type=Issues) | [@Cadair](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3ACadair+updated%3A2020-01-17..2020-10-29&type=Issues) | [@Carreau](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3ACarreau+updated%3A2020-01-17..2020-10-29&type=Issues) | [@cbjuan](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Acbjuan+updated%3A2020-01-17..2020-10-29&type=Issues) | 
[@ceocoder](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aceocoder+updated%3A2020-01-17..2020-10-29&type=Issues) | [@chancez](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Achancez+updated%3A2020-01-17..2020-10-29&type=Issues) | [@choldgraf](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Acholdgraf+updated%3A2020-01-17..2020-10-29&type=Issues) | [@Chrisjw42](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3AChrisjw42+updated%3A2020-01-17..2020-10-29&type=Issues) | [@cmd-ntrf](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Acmd-ntrf+updated%3A2020-01-17..2020-10-29&type=Issues) | [@consideRatio](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3AconsideRatio+updated%3A2020-01-17..2020-10-29&type=Issues) | [@danlester](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Adanlester+updated%3A2020-01-17..2020-10-29&type=Issues) | [@diurnalist](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Adiurnalist+updated%3A2020-01-17..2020-10-29&type=Issues) | [@Dmitry1987](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3ADmitry1987+updated%3A2020-01-17..2020-10-29&type=Issues) | [@dsblank](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Adsblank+updated%3A2020-01-17..2020-10-29&type=Issues) | [@dylex](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Adylex+updated%3A2020-01-17..2020-10-29&type=Issues) | [@echarles](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aecharles+updated%3A2020-01-17..2020-10-29&type=Issues) | [@elgalu](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aelgalu+updated%3A2020-01-17..2020-10-29&type=Issues) | [@fcollonval](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Afcollonval+updated%3A2020-01-17..2020-10-29&type=Issues) | 
[@gatoniel](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Agatoniel+updated%3A2020-01-17..2020-10-29&type=Issues) | [@GeorgianaElena](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3AGeorgianaElena+updated%3A2020-01-17..2020-10-29&type=Issues) | [@hnykda](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Ahnykda+updated%3A2020-01-17..2020-10-29&type=Issues) | [@itssimon](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aitssimon+updated%3A2020-01-17..2020-10-29&type=Issues) | [@jgwerner](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Ajgwerner+updated%3A2020-01-17..2020-10-29&type=Issues) | [@JohnPaton](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3AJohnPaton+updated%3A2020-01-17..2020-10-29&type=Issues) | [@joshmeek](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Ajoshmeek+updated%3A2020-01-17..2020-10-29&type=Issues) | [@jtpio](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Ajtpio+updated%3A2020-01-17..2020-10-29&type=Issues) | [@kinow](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Akinow+updated%3A2020-01-17..2020-10-29&type=Issues) | [@kreuzert](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Akreuzert+updated%3A2020-01-17..2020-10-29&type=Issues) | [@kxiao-fn](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Akxiao-fn+updated%3A2020-01-17..2020-10-29&type=Issues) | [@lesiano](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Alesiano+updated%3A2020-01-17..2020-10-29&type=Issues) | [@limimiking](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Alimimiking+updated%3A2020-01-17..2020-10-29&type=Issues) | [@lydian](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Alydian+updated%3A2020-01-17..2020-10-29&type=Issues) | 
[@mabbasi90](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Amabbasi90+updated%3A2020-01-17..2020-10-29&type=Issues) | [@maluhoss](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Amaluhoss+updated%3A2020-01-17..2020-10-29&type=Issues) | [@manics](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Amanics+updated%3A2020-01-17..2020-10-29&type=Issues) | [@matteoipri](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Amatteoipri+updated%3A2020-01-17..2020-10-29&type=Issues) | [@mbmilligan](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Ambmilligan+updated%3A2020-01-17..2020-10-29&type=Issues) | [@meeseeksmachine](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Ameeseeksmachine+updated%3A2020-01-17..2020-10-29&type=Issues) | [@mhwasil](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Amhwasil+updated%3A2020-01-17..2020-10-29&type=Issues) | [@minrk](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aminrk+updated%3A2020-01-17..2020-10-29&type=Issues) | [@mriedem](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Amriedem+updated%3A2020-01-17..2020-10-29&type=Issues) | [@nscozzaro](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Anscozzaro+updated%3A2020-01-17..2020-10-29&type=Issues) | [@pabepadu](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Apabepadu+updated%3A2020-01-17..2020-10-29&type=Issues) | [@possiblyMikeB](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3ApossiblyMikeB+updated%3A2020-01-17..2020-10-29&type=Issues) | [@psyvision](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Apsyvision+updated%3A2020-01-17..2020-10-29&type=Issues) | [@rabsr](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Arabsr+updated%3A2020-01-17..2020-10-29&type=Issues) | 
[@rainwoodman](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Arainwoodman+updated%3A2020-01-17..2020-10-29&type=Issues) | [@rajat404](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Arajat404+updated%3A2020-01-17..2020-10-29&type=Issues) | [@rcthomas](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Arcthomas+updated%3A2020-01-17..2020-10-29&type=Issues) | [@reneluria](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Areneluria+updated%3A2020-01-17..2020-10-29&type=Issues) | [@rgbkrk](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Argbkrk+updated%3A2020-01-17..2020-10-29&type=Issues) | [@rkdarst](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Arkdarst+updated%3A2020-01-17..2020-10-29&type=Issues) | [@rkevin-arch](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Arkevin-arch+updated%3A2020-01-17..2020-10-29&type=Issues) | [@romainx](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aromainx+updated%3A2020-01-17..2020-10-29&type=Issues) | [@ryanlovett](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aryanlovett+updated%3A2020-01-17..2020-10-29&type=Issues) | [@ryogesh](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aryogesh+updated%3A2020-01-17..2020-10-29&type=Issues) | [@sdague](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Asdague+updated%3A2020-01-17..2020-10-29&type=Issues) | [@snickell](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Asnickell+updated%3A2020-01-17..2020-10-29&type=Issues) | [@SonakshiGrover](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3ASonakshiGrover+updated%3A2020-01-17..2020-10-29&type=Issues) | [@ssanderson](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Assanderson+updated%3A2020-01-17..2020-10-29&type=Issues) | 
[@stefanvangastel](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Astefanvangastel+updated%3A2020-01-17..2020-10-29&type=Issues) | [@steinad](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Asteinad+updated%3A2020-01-17..2020-10-29&type=Issues) | [@stephen-a2z](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Astephen-a2z+updated%3A2020-01-17..2020-10-29&type=Issues) | [@stevegore](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Astevegore+updated%3A2020-01-17..2020-10-29&type=Issues) | [@stv0g](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Astv0g+updated%3A2020-01-17..2020-10-29&type=Issues) | [@subgero](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Asubgero+updated%3A2020-01-17..2020-10-29&type=Issues) | [@sudi007](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Asudi007+updated%3A2020-01-17..2020-10-29&type=Issues) | [@summerswallow](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Asummerswallow+updated%3A2020-01-17..2020-10-29&type=Issues) | [@support](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Asupport+updated%3A2020-01-17..2020-10-29&type=Issues) | [@synchronizing](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Asynchronizing+updated%3A2020-01-17..2020-10-29&type=Issues) | [@thuvh](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Athuvh+updated%3A2020-01-17..2020-10-29&type=Issues) | [@tritemio](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Atritemio+updated%3A2020-01-17..2020-10-29&type=Issues) | [@twalcari](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Atwalcari+updated%3A2020-01-17..2020-10-29&type=Issues) | [@vchandvankar](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Avchandvankar+updated%3A2020-01-17..2020-10-29&type=Issues) | 
[@vilhelmen](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Avilhelmen+updated%3A2020-01-17..2020-10-29&type=Issues) | [@vlizanae](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Avlizanae+updated%3A2020-01-17..2020-10-29&type=Issues) | [@weimin](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aweimin+updated%3A2020-01-17..2020-10-29&type=Issues) | [@welcome](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Awelcome+updated%3A2020-01-17..2020-10-29&type=Issues) | [@willingc](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Awillingc+updated%3A2020-01-17..2020-10-29&type=Issues) | [@xlotlu](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Axlotlu+updated%3A2020-01-17..2020-10-29&type=Issues) | [@yhal-nesi](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Ayhal-nesi+updated%3A2020-01-17..2020-10-29&type=Issues) | [@ynnelson](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Aynnelson+updated%3A2020-01-17..2020-10-29&type=Issues) | [@yuvipanda](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Ayuvipanda+updated%3A2020-01-17..2020-10-29&type=Issues) | [@zonca](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3Azonca+updated%3A2020-01-17..2020-10-29&type=Issues) | [@Zsailer](https://github.com/search?q=repo%3Ajupyterhub%2Fjupyterhub+involves%3AZsailer+updated%3A2020-01-17..2020-10-29&type=Issues) + +## 1.1 + +### [1.1.0] 2020-01-17 + +1.1 is a release with lots of accumulated fixes and improvements, +especially in performance, metrics, and customization. +There are no database changes in 1.1, so no database upgrade is required +when upgrading from 1.0 to 1.1. 
+ +Of particular interest to deployments with automatic health checking and/or large numbers of users is that the slow startup time +introduced in 1.0 by additional spawner validation can now be mitigated by `JupyterHub.init_spawners_timeout`, +allowing the Hub to become responsive before the spawners may have finished validating. + +Several new Prometheus metrics are added (and others fixed!) +to measure sources of common performance issues, +such as proxy interactions and startup. + +1.1 also begins adoption of the Jupyter telemetry project in JupyterHub. +See [The Jupyter Telemetry docs](https://jupyter-telemetry.readthedocs.io) +for more info. The only events so far are starting and stopping servers, +but more will be added in future releases. + +There are many more fixes and improvements listed below. +Thanks to everyone who has contributed to this release! + +#### New + +- LocalProcessSpawner should work on windows by using psutil.pid_exists [#2882](https://github.com/jupyterhub/jupyterhub/pull/2882) ([@ociule](https://github.com/ociule)) +- trigger auth_state_hook prior to options form, add auth_state to template namespace [#2881](https://github.com/jupyterhub/jupyterhub/pull/2881) ([@minrk](https://github.com/minrk)) +- Added guide 'install jupyterlab the hard way' #2110 [#2842](https://github.com/jupyterhub/jupyterhub/pull/2842) ([@mangecoeur](https://github.com/mangecoeur)) +- Add prometheus metric to measure hub startup time [#2799](https://github.com/jupyterhub/jupyterhub/pull/2799) ([@rajat404](https://github.com/rajat404)) +- Add Spawner.auth_state_hook [#2555](https://github.com/jupyterhub/jupyterhub/pull/2555) ([@rcthomas](https://github.com/rcthomas)) +- Link services from jupyterhub pages [#2763](https://github.com/jupyterhub/jupyterhub/pull/2763) ([@rcthomas](https://github.com/rcthomas)) +- `JupyterHub.user_redirect_hook` is added to allow admins to customize /user-redirect/ behavior [#2790](https://github.com/jupyterhub/jupyterhub/pull/2790) 
([@yuvipanda](https://github.com/yuvipanda)) +- Add prometheus metric to measure hub startup time [#2799](https://github.com/jupyterhub/jupyterhub/pull/2799) ([@rajat404](https://github.com/rajat404)) +- Add prometheus metric to measure proxy route poll times [#2798](https://github.com/jupyterhub/jupyterhub/pull/2798) ([@rajat404](https://github.com/rajat404)) +- `PROXY_DELETE_DURATION_SECONDS` prometheus metric is added, to measure proxy route deletion times [#2788](https://github.com/jupyterhub/jupyterhub/pull/2788) ([@rajat404](https://github.com/rajat404)) +- `Service.oauth_no_confirm` is added, it is useful for admin-managed services that are considered part of the Hub and shouldn't need to prompt the user for access [#2767](https://github.com/jupyterhub/jupyterhub/pull/2767) ([@minrk](https://github.com/minrk)) +- `JupyterHub.default_server_name` is added to make the default server be a named server with provided name [#2735](https://github.com/jupyterhub/jupyterhub/pull/2735) ([@krinsman](https://github.com/krinsman)) +- `JupyterHub.init_spawners_timeout` is introduced to combat slow startups on large JupyterHub deployments [#2721](https://github.com/jupyterhub/jupyterhub/pull/2721) ([@minrk](https://github.com/minrk)) +- The configuration `uids` for local authenticators is added to consistently assign users UNIX id's between installations [#2687](https://github.com/jupyterhub/jupyterhub/pull/2687) ([@rgerkin](https://github.com/rgerkin)) +- `JupyterHub.activity_resolution` is introduced with a default value of 30s improving performance by not updating the database with user activity too often [#2605](https://github.com/jupyterhub/jupyterhub/pull/2605) ([@minrk](https://github.com/minrk)) +- [HubAuth](https://jupyterhub.readthedocs.io/en/stable/api/services.auth.html#jupyterhub.services.auth.HubAuth)'s SSL configuration can now be set through environment variables [#2588](https://github.com/jupyterhub/jupyterhub/pull/2588) 
([@cmd-ntrf](https://github.com/cmd-ntrf)) +- Expose spawner.user_options in REST API. [#2755](https://github.com/jupyterhub/jupyterhub/pull/2755) ([@danielballan](https://github.com/danielballan)) +- add block for scripts included in head [#2828](https://github.com/jupyterhub/jupyterhub/pull/2828) ([@bitnik](https://github.com/bitnik)) +- Instrument JupyterHub to record events with jupyter_telemetry [Part II] [#2698](https://github.com/jupyterhub/jupyterhub/pull/2698) ([@Zsailer](https://github.com/Zsailer)) +- Make announcements visible without custom HTML [#2570](https://github.com/jupyterhub/jupyterhub/pull/2570) ([@consideRatio](https://github.com/consideRatio)) +- Display server version on admin page [#2776](https://github.com/jupyterhub/jupyterhub/pull/2776) ([@vilhelmen](https://github.com/vilhelmen)) + +#### Fixes + +- Bugfix: pam_normalize_username didn't return username [#2876](https://github.com/jupyterhub/jupyterhub/pull/2876) ([@rkdarst](https://github.com/rkdarst)) +- Cleanup if spawner stop fails [#2849](https://github.com/jupyterhub/jupyterhub/pull/2849) ([@gabber12](https://github.com/gabber12)) +- Fix an issue occurring with the default spawner and `internal_ssl` enabled [#2785](https://github.com/jupyterhub/jupyterhub/pull/2785) ([@rpwagner](https://github.com/rpwagner)) +- Fix named servers to not be spawnable unless activated [#2772](https://github.com/jupyterhub/jupyterhub/pull/2772) ([@bitnik](https://github.com/bitnik)) +- JupyterHub now awaits proxy availability before accepting web requests [#2750](https://github.com/jupyterhub/jupyterhub/pull/2750) ([@minrk](https://github.com/minrk)) +- Fix a no longer valid assumption that MySQL and MariaDB need to have `innodb_file_format` and `innodb_large_prefix` configured [#2712](https://github.com/jupyterhub/jupyterhub/pull/2712) ([@chicocvenancio](https://github.com/chicocvenancio)) +- Login/Logout button now updates to Login on logout [#2705](https://github.com/jupyterhub/jupyterhub/pull/2705) 
([@aar0nTw](https://github.com/aar0nTw)) +- Fix handling of exceptions within `pre_spawn_start` hooks [#2684](https://github.com/jupyterhub/jupyterhub/pull/2684) ([@GeorgianaElena](https://github.com/GeorgianaElena)) +- Fix an issue where a user could end up spawning a default server instead of a named server as intended [#2682](https://github.com/jupyterhub/jupyterhub/pull/2682) ([@rcthomas](https://github.com/rcthomas)) +- /hub/admin now redirects to login if unauthenticated [#2670](https://github.com/jupyterhub/jupyterhub/pull/2670) ([@GeorgianaElena](https://github.com/GeorgianaElena)) +- Fix spawning of users with names containing characters that needs to be escaped [#2648](https://github.com/jupyterhub/jupyterhub/pull/2648) ([@nicorikken](https://github.com/nicorikken)) +- Fix `TOTAL_USERS` prometheus metric [#2637](https://github.com/jupyterhub/jupyterhub/pull/2637) ([@GeorgianaElena](https://github.com/GeorgianaElena)) +- Fix `RUNNING_SERVERS` prometheus metric [#2629](https://github.com/jupyterhub/jupyterhub/pull/2629) ([@GeorgianaElena](https://github.com/GeorgianaElena)) +- Fix faulty redirects to 404 that could occur with the use of named servers [#2594](https://github.com/jupyterhub/jupyterhub/pull/2594) ([@vilhelmen](https://github.com/vilhelmen)) +- JupyterHub API spec is now a valid OpenAPI spec [#2590](https://github.com/jupyterhub/jupyterhub/pull/2590) ([@sbrunk](https://github.com/sbrunk)) +- Use of `--help` or `--version` previously could output unrelated errors [#2584](https://github.com/jupyterhub/jupyterhub/pull/2584) ([@minrk](https://github.com/minrk)) +- No longer crash on startup in Windows [#2560](https://github.com/jupyterhub/jupyterhub/pull/2560) ([@adelcast](https://github.com/adelcast)) +- Escape usernames in the frontend [#2640](https://github.com/jupyterhub/jupyterhub/pull/2640) ([@nicorikken](https://github.com/nicorikken)) + +#### Maintenance + +- Optimize CI jobs and default to bionic 
[#2897](https://github.com/jupyterhub/jupyterhub/pull/2897) ([@consideRatio](https://github.com/consideRatio)) +- catch connection error for ssl failures [#2889](https://github.com/jupyterhub/jupyterhub/pull/2889) ([@minrk](https://github.com/minrk)) +- Fix implementation of default server name [#2887](https://github.com/jupyterhub/jupyterhub/pull/2887) ([@krinsman](https://github.com/krinsman)) +- fixup allow_failures [#2880](https://github.com/jupyterhub/jupyterhub/pull/2880) ([@minrk](https://github.com/minrk)) +- Pass tests on Python 3.8 [#2879](https://github.com/jupyterhub/jupyterhub/pull/2879) ([@minrk](https://github.com/minrk)) +- Fixup .travis.yml [#2868](https://github.com/jupyterhub/jupyterhub/pull/2868) ([@consideRatio](https://github.com/consideRatio)) +- Update README's badges [#2867](https://github.com/jupyterhub/jupyterhub/pull/2867) ([@consideRatio](https://github.com/consideRatio)) +- Dockerfile: add build-essential to builder image [#2866](https://github.com/jupyterhub/jupyterhub/pull/2866) ([@rkdarst](https://github.com/rkdarst)) +- Dockerfile: Copy share/ to the final image [#2864](https://github.com/jupyterhub/jupyterhub/pull/2864) ([@rkdarst](https://github.com/rkdarst)) +- chore: Dockerfile updates [#2853](https://github.com/jupyterhub/jupyterhub/pull/2853) ([@jgwerner](https://github.com/jgwerner)) +- simplify Dockerfile [#2840](https://github.com/jupyterhub/jupyterhub/pull/2840) ([@minrk](https://github.com/minrk)) +- docker: fix onbuild image arg [#2839](https://github.com/jupyterhub/jupyterhub/pull/2839) ([@minrk](https://github.com/minrk)) +- remove redundant pip package list in docs environment.yml [#2838](https://github.com/jupyterhub/jupyterhub/pull/2838) ([@minrk](https://github.com/minrk)) +- docs: Update docs to run tests [#2812](https://github.com/jupyterhub/jupyterhub/pull/2812) ([@jgwerner](https://github.com/jgwerner)) +- remove redundant pip package list in docs environment.yml 
[#2838](https://github.com/jupyterhub/jupyterhub/pull/2838) ([@minrk](https://github.com/minrk)) +- updating to pandas docs theme [#2820](https://github.com/jupyterhub/jupyterhub/pull/2820) ([@choldgraf](https://github.com/choldgraf)) +- Adding institutional faq [#2800](https://github.com/jupyterhub/jupyterhub/pull/2800) ([@choldgraf](https://github.com/choldgraf)) +- Add inline comment to test [#2826](https://github.com/jupyterhub/jupyterhub/pull/2826) ([@consideRatio](https://github.com/consideRatio)) +- Raise error on missing specified config [#2824](https://github.com/jupyterhub/jupyterhub/pull/2824) ([@consideRatio](https://github.com/consideRatio)) +- chore: Refactor Dockerfile [#2816](https://github.com/jupyterhub/jupyterhub/pull/2816) ([@jgwerner](https://github.com/jgwerner)) +- chore: Update python versions in travis matrix [#2811](https://github.com/jupyterhub/jupyterhub/pull/2811) ([@jgwerner](https://github.com/jgwerner)) +- chore: Bump package versions used in pre-commit config [#2810](https://github.com/jupyterhub/jupyterhub/pull/2810) ([@jgwerner](https://github.com/jgwerner)) +- adding docs preview to circleci [#2803](https://github.com/jupyterhub/jupyterhub/pull/2803) ([@choldgraf](https://github.com/choldgraf)) +- adding institutional faq [#2800](https://github.com/jupyterhub/jupyterhub/pull/2800) ([@choldgraf](https://github.com/choldgraf)) +- The proxy's REST API listens on port `8001` [#2795](https://github.com/jupyterhub/jupyterhub/pull/2795) ([@bnuhero](https://github.com/bnuhero)) +- cull_idle_servers.py: rebind max_age and inactive_limit locally [#2794](https://github.com/jupyterhub/jupyterhub/pull/2794) ([@rkdarst](https://github.com/rkdarst)) +- Fix deprecation warnings [#2789](https://github.com/jupyterhub/jupyterhub/pull/2789) ([@tirkarthi](https://github.com/tirkarthi)) +- Log proxy class [#2783](https://github.com/jupyterhub/jupyterhub/pull/2783) ([@GeorgianaElena](https://github.com/GeorgianaElena)) +- Add docs for fixtures in 
CONTRIBUTING.md [#2782](https://github.com/jupyterhub/jupyterhub/pull/2782) ([@kinow](https://github.com/kinow)) +- Fix header project name typo [#2775](https://github.com/jupyterhub/jupyterhub/pull/2775) ([@kinow](https://github.com/kinow)) +- Remove unused setupegg.py [#2774](https://github.com/jupyterhub/jupyterhub/pull/2774) ([@kinow](https://github.com/kinow)) +- Log JupyterHub version on startup [#2752](https://github.com/jupyterhub/jupyterhub/pull/2752) ([@consideRatio](https://github.com/consideRatio)) +- Reduce verbosity for "Failing suspected API request to not-running server" (new) [#2751](https://github.com/jupyterhub/jupyterhub/pull/2751) ([@rkdarst](https://github.com/rkdarst)) +- Add missing package for json schema doc build [#2744](https://github.com/jupyterhub/jupyterhub/pull/2744) ([@willingc](https://github.com/willingc)) +- block urllib3 versions with encoding bug [#2743](https://github.com/jupyterhub/jupyterhub/pull/2743) ([@minrk](https://github.com/minrk)) +- Remove tornado deprecated/unnecessary AsyncIOMainLoop().install() call [#2740](https://github.com/jupyterhub/jupyterhub/pull/2740) ([@kinow](https://github.com/kinow)) +- Fix deprecated call [#2739](https://github.com/jupyterhub/jupyterhub/pull/2739) ([@kinow](https://github.com/kinow)) +- Remove duplicate hub and authenticator traitlets from Spawner [#2736](https://github.com/jupyterhub/jupyterhub/pull/2736) ([@eslavich](https://github.com/eslavich)) +- Update issue template [#2725](https://github.com/jupyterhub/jupyterhub/pull/2725) ([@willingc](https://github.com/willingc)) +- Use autodoc-traits sphinx extension [#2723](https://github.com/jupyterhub/jupyterhub/pull/2723) ([@willingc](https://github.com/willingc)) +- Add New Server: change redirecting to relative to home page in js [#2714](https://github.com/jupyterhub/jupyterhub/pull/2714) ([@bitnik](https://github.com/bitnik)) +- Create a warning when creating a service implicitly from service_tokens 
[#2704](https://github.com/jupyterhub/jupyterhub/pull/2704) ([@katsar0v](https://github.com/katsar0v)) +- Fix mistypos [#2702](https://github.com/jupyterhub/jupyterhub/pull/2702) ([@rlukin](https://github.com/rlukin)) +- Add Jupyter community link [#2696](https://github.com/jupyterhub/jupyterhub/pull/2696) ([@mattjshannon](https://github.com/mattjshannon)) +- Fix failing travis tests [#2695](https://github.com/jupyterhub/jupyterhub/pull/2695) ([@GeorgianaElena](https://github.com/GeorgianaElena)) +- Documentation update: hint for using services instead of service tokens. [#2679](https://github.com/jupyterhub/jupyterhub/pull/2679) ([@katsar0v](https://github.com/katsar0v)) +- Replace header logo: jupyter -> jupyterhub [#2672](https://github.com/jupyterhub/jupyterhub/pull/2672) ([@consideRatio](https://github.com/consideRatio)) +- Update spawn-form example [#2662](https://github.com/jupyterhub/jupyterhub/pull/2662) ([@kinow](https://github.com/kinow)) +- Update flask hub authentication services example in doc [#2658](https://github.com/jupyterhub/jupyterhub/pull/2658) ([@cmd-ntrf](https://github.com/cmd-ntrf)) +- close `
` tag in home.html [#2649](https://github.com/jupyterhub/jupyterhub/pull/2649) ([@bitnik](https://github.com/bitnik)) +- Some theme updates; no double NEXT/PREV buttons. [#2647](https://github.com/jupyterhub/jupyterhub/pull/2647) ([@Carreau](https://github.com/Carreau)) +- fix typos on technical reference documentation [#2646](https://github.com/jupyterhub/jupyterhub/pull/2646) ([@ilee38](https://github.com/ilee38)) +- Update links for Hadoop-related subprojects [#2645](https://github.com/jupyterhub/jupyterhub/pull/2645) ([@jcrist](https://github.com/jcrist)) +- corrected docker network create instructions in dockerfiles README [#2632](https://github.com/jupyterhub/jupyterhub/pull/2632) ([@bartolone](https://github.com/bartolone)) +- Fixed docs and testing code to use refactored SimpleLocalProcessSpawner [#2631](https://github.com/jupyterhub/jupyterhub/pull/2631) ([@danlester](https://github.com/danlester)) +- Update the config used for testing [#2628](https://github.com/jupyterhub/jupyterhub/pull/2628) ([@jtpio](https://github.com/jtpio)) +- Update doc: do not suggest depricated config key [#2626](https://github.com/jupyterhub/jupyterhub/pull/2626) ([@lumbric](https://github.com/lumbric)) +- Add missing words [#2625](https://github.com/jupyterhub/jupyterhub/pull/2625) ([@remram44](https://github.com/remram44)) +- cull-idle: Include a hint on how to add custom culling logic [#2613](https://github.com/jupyterhub/jupyterhub/pull/2613) ([@rkdarst](https://github.com/rkdarst)) +- Replace existing redirect code by Tornado's addslash decorator [#2609](https://github.com/jupyterhub/jupyterhub/pull/2609) ([@kinow](https://github.com/kinow)) +- Hide Stop My Server red button after server stopped. 
[#2577](https://github.com/jupyterhub/jupyterhub/pull/2577) ([@aar0nTw](https://github.com/aar0nTw)) +- Update link of `changelog` [#2565](https://github.com/jupyterhub/jupyterhub/pull/2565) ([@iblis17](https://github.com/iblis17)) +- typo [#2564](https://github.com/jupyterhub/jupyterhub/pull/2564) ([@julienchastang](https://github.com/julienchastang)) +- Update to simplify the language related to spawner options [#2558](https://github.com/jupyterhub/jupyterhub/pull/2558) ([@NikeNano](https://github.com/NikeNano)) +- Adding the use case of the Elucidata: How Jupyter Notebook is used in… [#2548](https://github.com/jupyterhub/jupyterhub/pull/2548) ([@IamViditAgarwal](https://github.com/IamViditAgarwal)) +- Dict rewritten as literal [#2546](https://github.com/jupyterhub/jupyterhub/pull/2546) ([@remyleone](https://github.com/remyleone)) + ## 1.0 ### [1.0.0] 2019-05-03 @@ -24,20 +1193,20 @@ whether it was through discussion, testing, documentation, or development. With named servers, each jupyterhub user may have access to more than one named server. For example, a professor may access a server named `research` and another named `teaching`. ![named servers on the home page](./images/named-servers-home.png) + - Authenticators can now expire and refresh authentication data by implementing `Authenticator.refresh_user(user)`. This allows things like OAuth data and access tokens to be refreshed. When used together with `Authenticator.refresh_pre_spawn = True`, auth refresh can be forced prior to Spawn, - allowing the Authenticator to *require* that authentication data is fresh + allowing the Authenticator to _require_ that authentication data is fresh immediately before the user's server is launched. -```eval_rst -.. 
seealso:: +```{seealso} - - :meth:`.Authenticator.refresh_user` - - :meth:`.Spawner.create_certs` - - :meth:`.Spawner.move_certs` + - {meth}`.Authenticator.refresh_user` + - {meth}`.Spawner.create_certs` + - {meth}`.Spawner.move_certs` ``` #### New features @@ -49,6 +1218,7 @@ whether it was through discussion, testing, documentation, or development. c.JupyterHub.spawner_class = 'docker' c.JupyterHub.proxy_class = 'traefik_etcd' ``` + - Spawners are passed the tornado Handler object that requested their spawn (as `self.handler`), so they can do things like make decisions based on query arguments in the request. - SimpleSpawner and DummyAuthenticator, which are useful for testing, have been merged into JupyterHub itself: @@ -60,6 +1230,7 @@ whether it was through discussion, testing, documentation, or development. ``` These classes are **not** appropriate for production use. Only testing. + - Add health check endpoint at `/hub/health` - Several prometheus metrics have been added (thanks to [Outreachy](https://www.outreachy.org/) applicants!) - A new API for registering user activity. @@ -89,15 +1260,14 @@ whether it was through discussion, testing, documentation, or development. This hook may transform the return value of `Authenticator.authenticate()` and return a new authentication dictionary, e.g. specifying admin privileges, group membership, - or custom white/blacklisting logic. - This hook is called *after* existing normalization and whitelist checking. + or custom allowed/blocked logic. + This hook is called _after_ existing normalization and allowed-username checking. - `Spawner.options_from_form` may now be async - Added `JupyterHub.shutdown_on_logout` option to trigger shutdown of a user's servers when they log out. - When `Spawner.start` raises an Exception, a message can be passed on to the user if the exception has a `.jupyterhub_message` attribute. 
- #### Changes - Authentication methods such as `check_whitelist` should now take an additional @@ -112,6 +1282,7 @@ whether it was through discussion, testing, documentation, or development. `authentication` should have a default value of None for backward-compatibility with jupyterhub < 1.0. + - Prometheus metrics page is now authenticated. Any authenticated user may see the prometheus metrics. To disable prometheus authentication, @@ -124,7 +1295,6 @@ whether it was through discussion, testing, documentation, or development. so that users can choose to cancel authentication with the single-user server. Confirmation is still skipped when accessing your own server. - #### Fixed - Various fixes to improve Windows compatibility @@ -143,12 +1313,11 @@ In general, see `CONTRIBUTING.md` for contribution info or ask if you have quest - JupyterHub has adopted `black` as a code autoformatter and `pre-commit` as a tool for automatically running code formatting on commit. - This is meant to make it *easier* to contribute to JupyterHub, + This is meant to make it _easier_ to contribute to JupyterHub, so let us know if it's having the opposite effect. - JupyterHub has switched its test suite to using `pytest-asyncio` from `pytest-tornado`. - OAuth is now implemented internally using `oauthlib` instead of `python-oauth2`. This should have no effect on behavior. - ## 0.9 ### [0.9.6] 2019-04-01 @@ -163,7 +1332,7 @@ JupyterHub 0.9.5 included a partial fix for this issue. JupyterHub 0.9.4 is a small bugfix release. -- Fixes an issue that required all running user servers to be restarted +- Fixes an issue that required all running user servers to be restarted when performing an upgrade from 0.8 to 0.9. - Fixes content-type for API endpoints back to `application/json`. It was `text/html` in 0.9.0-0.9.3. @@ -192,7 +1361,6 @@ JupyterHub 0.9.2 contains small bugfixes and improvements. 
- Fix for handling SIGTERM when run with asyncio (tornado 5) - Windows compatibility fixes - ### [0.9.1] 2018-07-04 JupyterHub 0.9.1 contains a number of small bugfixes on top of 0.9. @@ -222,7 +1390,6 @@ free to use tornado coroutines for async methods, as they will continue to work. As part of this upgrade, JupyterHub 0.9 drops support for Python < 3.5 and tornado < 5.0. - #### Changed - Require Python >= 3.5 @@ -276,14 +1443,14 @@ and tornado < 5.0. launching an IPython session connected to your JupyterHub database. - Include `User.auth_state` in user model on single-user REST endpoints for admins only. - Include `Server.state` in server model on REST endpoints for admins only. -- Add `Authenticator.blacklist` for blacklisting users instead of whitelisting. +- Add `Authenticator.blacklist` for blocking users instead of allowing. - Pass `c.JupyterHub.tornado_settings['cookie_options']` down to Spawners so that cookie options (e.g. `expires_days`) can be set globally for the whole application. - SIGINFO (`ctrl-t`) handler showing the current status of all running threads, coroutines, and CPU/memory/FD consumption. - Add async `Spawner.get_options_form` alternative to `.options_form`, so it can be a coroutine. - Add `JupyterHub.redirect_to_server` config to govern whether - users should be sent to their server on login or the JuptyerHub home page. + users should be sent to their server on login or the JupyterHub home page. - html page templates can be more easily customized and extended. - Allow registering external OAuth clients for using the Hub as an OAuth provider. - Add basic prometheus metrics at `/hub/metrics` endpoint. @@ -297,7 +1464,6 @@ and tornado < 5.0. Expiry is available in the REST model as `expires_at`, and settable when creating API tokens by specifying `expires_in`. - #### Fixed - Remove green from theme to improve accessibility @@ -310,7 +1476,7 @@ and tornado < 5.0. instead relying on subsequent request for `/user/:name` to trigger spawn. 
- Fixed several inconsistencies for initial redirects, depending on whether server is running or not and whether the user is logged in or not. -- Admin requests for `/user/:name` (when admin-access is enabled) launch the right server if it's not running instead of redirecting to their own. +- Admin requests for `/user/:name` (when admin-access is enabled) launch the right server if it's not running instead of redirecting to their own. - Major performance improvement starting up JupyterHub with many users, especially when most are inactive. - Various fixes in race conditions and performance improvements with the default proxy. @@ -322,7 +1488,6 @@ and tornado < 5.0. - Fix jupyterhub startup when `getpass.getuser()` would fail, e.g. due to missing entry in passwd file in containers. - ## 0.8 ### [0.8.1] 2017-11-07 @@ -347,7 +1512,6 @@ JupyterHub 0.8.1 is a collection of bugfixes and small improvements on 0.8. - Fix ever-growing traceback when re-raising Exceptions from spawn failures. - Remove use of deprecated `bower` for javascript client dependencies. - ### [0.8.0] 2017-10-03 JupyterHub 0.8 is a big release! @@ -387,6 +1551,7 @@ in your Dockerfile is sufficient. This data will be encrypted and requires `JUPYTERHUB_CRYPT_KEY` environment variable to be set and the `Authenticator.enable_auth_state` flag to be True. If these are not set, auth_state returned by the Authenticator will not be stored. + - There is preliminary support for multiple (named) servers per user in the REST API. Named servers can be created via API requests, but there is currently no UI for managing them. - Add `LocalProcessSpawner.popen_kwargs` and `LocalProcessSpawner.shell_cmd` @@ -401,7 +1566,6 @@ in your Dockerfile is sufficient. - Add `JupyterHub.active_server_limit` and `JupyterHub.concurrent_spawn_limit` for limiting the total number of running user servers and the number of pending spawns, respectively. 
- #### Changed - more arguments to spawners are now passed via environment variables (`.get_env()`) @@ -436,7 +1600,7 @@ So many things fixed! #### Removed -- End support for Python 3.3 +- End support for Python 3.3 ## 0.7 @@ -461,7 +1625,7 @@ So many things fixed! This is needed for cases like `DockerSpawner.remove_containers = False`, where the first API token is re-used for subsequent spawns. - Warning on startup about single-character usernames, - caused by common `set('string')` typo in config. + caused by common `set('string')` typo in config. #### Fixed @@ -475,13 +1639,12 @@ So many things fixed! - Add `/api/` and `/api/info` endpoints [\#675](https://github.com/jupyterhub/jupyterhub/pull/675) - Add documentation for JupyterLab, pySpark configuration, troubleshooting, and more. -- Add logging of error if adding users already in database. [\#689](https://github.com/jupyterhub/jupyterhub/pull/689) +- Add logging of error if adding users already in database. [\#689](https://github.com/jupyterhub/jupyterhub/pull/689) - Add HubAuth class for authenticating with JupyterHub. This class can be used by any application, even outside tornado. - Add user groups. - Add `/hub/user-redirect/...` URL for redirecting users to a file on their own server. - #### Changed - Always install with setuptools but not eggs (effectively require @@ -495,13 +1658,12 @@ So many things fixed! - Fix swagger spec conformance and timestamp type in API spec - Various redirect-loop-causing bugs have been fixed. - #### Removed - Deprecate `--no-ssl` command line option. It has no meaning and warns if used. [\#789](https://github.com/jupyterhub/jupyterhub/pull/789) - Deprecate `%U` username substitution in favor of `{username}`. [\#748](https://github.com/jupyterhub/jupyterhub/pull/748) -- Removed deprecated SwarmSpawner link. [\#699](https://github.com/jupyterhub/jupyterhub/pull/699) +- Removed deprecated SwarmSpawner link. 
[\#699](https://github.com/jupyterhub/jupyterhub/pull/699) ## 0.6 @@ -516,7 +1678,7 @@ Bugfixes on 0.6: ### [0.6.0] - 2016-04-25 -- JupyterHub has moved to a new `jupyterhub` namespace on GitHub and Docker. What was `juptyer/jupyterhub` is now `jupyterhub/jupyterhub`, etc. +- JupyterHub has moved to a new `jupyterhub` namespace on GitHub and Docker. What was `jupyter/jupyterhub` is now `jupyterhub/jupyterhub`, etc. - `jupyterhub/jupyterhub` image on DockerHub no longer loads the jupyterhub_config.py in an ONBUILD step. A new `jupyterhub/jupyterhub-onbuild` image does this - Add statsd support, via `c.JupyterHub.statsd_{host,port,prefix}` - Update to traitlets 4.1 `@default`, `@observe` APIs for traits @@ -527,10 +1689,8 @@ Bugfixes on 0.6: This can only be used if the Authenticator has a username and password. - Various fixes for user URLs and redirects - ## [0.5] - 2016-03-07 - - Single-user server must be run with Jupyter Notebook ≥ 4.0 - Require `--no-ssl` confirmation to allow the Hub to be run without SSL (e.g. behind SSL termination in nginx) - Add lengths to text fields for MySQL support @@ -556,7 +1716,6 @@ Fix removal of `/login` page in 0.4.0, breaking some OAuth providers. These methods are typically used with custom Authenticator+Spawner pairs. - 0.4 will be the last JupyterHub release where single-user servers running IPython 3 is supported instead of Notebook ≥ 4.0. - ## [0.3] - 2015-11-04 - No longer make the user starting the Hub an admin @@ -575,8 +1734,27 @@ Fix removal of `/login` page in 0.4.0, breaking some OAuth providers. 
First preview release - -[Unreleased]: https://github.com/jupyterhub/jupyterhub/compare/1.0.0...HEAD +[unreleased]: https://github.com/jupyterhub/jupyterhub/compare/3.0.0...HEAD +[3.0.0]: https://github.com/jupyterhub/jupyterhub/compare/2.3.1...3.0.0 +[2.3.1]: https://github.com/jupyterhub/jupyterhub/compare/2.3.0...2.3.1 +[2.3.0]: https://github.com/jupyterhub/jupyterhub/compare/2.2.2...2.3.0 +[2.2.2]: https://github.com/jupyterhub/jupyterhub/compare/2.2.1...2.2.2 +[2.2.1]: https://github.com/jupyterhub/jupyterhub/compare/2.2.0...2.2.1 +[2.2.0]: https://github.com/jupyterhub/jupyterhub/compare/2.1.1...2.2.0 +[2.1.1]: https://github.com/jupyterhub/jupyterhub/compare/2.1.0...2.1.1 +[2.1.0]: https://github.com/jupyterhub/jupyterhub/compare/2.0.2...2.1.0 +[2.0.2]: https://github.com/jupyterhub/jupyterhub/compare/2.0.1...2.0.2 +[2.0.1]: https://github.com/jupyterhub/jupyterhub/compare/2.0.0...2.0.1 +[2.0.0]: https://github.com/jupyterhub/jupyterhub/compare/1.5.0...2.0.0 +[1.5.0]: https://github.com/jupyterhub/jupyterhub/compare/1.4.2...1.5.0 +[1.4.2]: https://github.com/jupyterhub/jupyterhub/compare/1.4.1...1.4.2 +[1.4.1]: https://github.com/jupyterhub/jupyterhub/compare/1.4.0...1.4.1 +[1.4.0]: https://github.com/jupyterhub/jupyterhub/compare/1.3.0...1.4.0 +[1.3.0]: https://github.com/jupyterhub/jupyterhub/compare/1.2.1...1.3.0 +[1.2.2]: https://github.com/jupyterhub/jupyterhub/compare/1.2.1...1.2.2 +[1.2.1]: https://github.com/jupyterhub/jupyterhub/compare/1.2.0...1.2.1 +[1.2.0]: https://github.com/jupyterhub/jupyterhub/compare/1.1.0...1.2.0 +[1.1.0]: https://github.com/jupyterhub/jupyterhub/compare/1.0.0...1.1.0 [1.0.0]: https://github.com/jupyterhub/jupyterhub/compare/0.9.6...1.0.0 [0.9.6]: https://github.com/jupyterhub/jupyterhub/compare/0.9.4...0.9.6 [0.9.4]: https://github.com/jupyterhub/jupyterhub/compare/0.9.3...0.9.4 diff --git a/docs/source/conf.py b/docs/source/conf.py index 6e83f379..736a0acb 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ 
-1,211 +1,215 @@ -# -*- coding: utf-8 -*- +# Configuration file for Sphinx to build our documentation to HTML. # +# Configuration reference: https://www.sphinx-doc.org/en/master/usage/configuration.html +# +import contextlib +import datetime +import io import os -import shlex -import sys +import subprocess -# Set paths -sys.path.insert(0, os.path.abspath('.')) - -# -- General configuration ------------------------------------------------ - -# Minimal Sphinx version -needs_sphinx = '1.4' - -# Sphinx extension modules -extensions = [ - 'sphinx.ext.autodoc', - 'sphinx.ext.intersphinx', - 'sphinx.ext.napoleon', - 'autodoc_traits', - 'sphinx_copybutton', -] - -templates_path = ['_templates'] - -# The master toctree document. -master_doc = 'index' - -# General information about the project. -project = u'JupyterHub' -copyright = u'2016, Project Jupyter team' -author = u'Project Jupyter team' - -# Autopopulate version -from os.path import dirname - -docs = dirname(dirname(__file__)) -root = dirname(docs) -sys.path.insert(0, root) -sys.path.insert(0, os.path.join(docs, 'sphinxext')) +from docutils import nodes +from sphinx.directives.other import SphinxDirective import jupyterhub +from jupyterhub.app import JupyterHub -# The short X.Y version. -version = '%i.%i' % jupyterhub.version_info[:2] -# The full version, including alpha/beta/rc tags. 
+# -- Project information ----------------------------------------------------- +# ref: https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information +# +project = "JupyterHub" +author = "Project Jupyter Contributors" +copyright = f"{datetime.date.today().year}, {author}" +version = "%i.%i" % jupyterhub.version_info[:2] release = jupyterhub.__version__ -language = None -exclude_patterns = [] -pygments_style = 'sphinx' -todo_include_todos = False -# Set the default role so we can use `foo` instead of ``foo`` -default_role = 'literal' +# -- General Sphinx configuration -------------------------------------------- +# ref: https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration +# +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.intersphinx", + "sphinx.ext.napoleon", + "autodoc_traits", + "sphinx_copybutton", + "sphinx-jsonschema", + "sphinxext.opengraph", + "sphinxext.rediraffe", + "myst_parser", +] +root_doc = "index" +source_suffix = [".md", ".rst"] +# default_role let's use use `foo` instead of ``foo`` in rST +default_role = "literal" -# -- Source ------------------------------------------------------------- -import recommonmark -from recommonmark.transform import AutoStructify +# -- MyST configuration ------------------------------------------------------ +# ref: https://myst-parser.readthedocs.io/en/latest/configuration.html +# +myst_heading_anchors = 2 +myst_enable_extensions = [ + "colon_fence", + "deflist", +] + + +# -- Custom directives to generate documentation ----------------------------- +# ref: https://myst-parser.readthedocs.io/en/latest/syntax/roles-and-directives.html +# +# We define custom directives to help us generate documentation using Python on +# demand when referenced from our documentation files. +# + +# Create a temp instance of JupyterHub for use by two separate directive classes +# to get the output from using the "--generate-config" and "--help-all" CLI +# flags respectively. 
+# +jupyterhub_app = JupyterHub() + + +class ConfigDirective(SphinxDirective): + """Generate the configuration file output for use in the documentation.""" + + has_content = False + required_arguments = 0 + optional_arguments = 0 + final_argument_whitespace = False + option_spec = {} + + def run(self): + # The generated configuration file for this version + generated_config = jupyterhub_app.generate_config_file() + # post-process output + home_dir = os.environ["HOME"] + generated_config = generated_config.replace(home_dir, "$HOME", 1) + par = nodes.literal_block(text=generated_config) + return [par] + + +class HelpAllDirective(SphinxDirective): + """Print the output of jupyterhub help --all for use in the documentation.""" + + has_content = False + required_arguments = 0 + optional_arguments = 0 + final_argument_whitespace = False + option_spec = {} + + def run(self): + # The output of the help command for this version + buffer = io.StringIO() + with contextlib.redirect_stdout(buffer): + jupyterhub_app.print_help("--help-all") + all_help = buffer.getvalue() + # post-process output + home_dir = os.environ["HOME"] + all_help = all_help.replace(home_dir, "$HOME", 1) + par = nodes.literal_block(text=all_help) + return [par] def setup(app): - app.add_config_value('recommonmark_config', {'enable_eval_rst': True}, True) - app.add_stylesheet('custom.css') - app.add_transform(AutoStructify) + app.add_css_file("custom.css") + app.add_directive("jupyterhub-generate-config", ConfigDirective) + app.add_directive("jupyterhub-help-all", HelpAllDirective) -source_parsers = {'.md': 'recommonmark.parser.CommonMarkParser'} - -source_suffix = ['.rst', '.md'] -# source_encoding = 'utf-8-sig' - -# -- Options for HTML output ---------------------------------------------- - -# The theme to use for HTML and HTML Help pages. 
-import alabaster_jupyterhub - -html_theme = 'alabaster_jupyterhub' -html_theme_path = [alabaster_jupyterhub.get_html_theme_path()] - -html_logo = '_static/images/logo/logo.png' -html_favicon = '_static/images/logo/favicon.ico' - -# Paths that contain custom static files (such as style sheets) -html_static_path = ['_static'] - -html_theme_options = { - 'show_related': True, - 'description': 'Documentation for JupyterHub', - 'github_user': 'jupyterhub', - 'github_repo': 'jupyterhub', - 'github_banner': False, - 'github_button': True, - 'github_type': 'star', - 'show_powered_by': False, - 'extra_nav_links': { - 'GitHub Repo': 'http://github.com/jupyterhub/jupyterhub', - 'Issue Tracker': 'http://github.com/jupyterhub/jupyterhub/issues', - }, -} - -html_sidebars = { - '**': [ - 'about.html', - 'searchbox.html', - 'navigation.html', - 'relations.html', - 'sourcelink.html', - ] -} - -htmlhelp_basename = 'JupyterHubdoc' - -# -- Options for LaTeX output --------------------------------------------- - -latex_elements = { - # 'papersize': 'letterpaper', - # 'pointsize': '10pt', - # 'preamble': '', - # 'figure_align': 'htbp', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - ( - master_doc, - 'JupyterHub.tex', - u'JupyterHub Documentation', - u'Project Jupyter team', - 'manual', - ) -] - -# latex_logo = None -# latex_use_parts = False -# latex_show_pagerefs = False -# latex_show_urls = False -# latex_appendices = [] -# latex_domain_indices = True +# -- Read The Docs ----------------------------------------------------------- +# +# Since RTD runs sphinx-build directly without running "make html", we run the +# pre-requisite steps for "make html" from here if needed. 
+# +if os.environ.get("READTHEDOCS"): + docs = os.path.dirname(os.path.dirname(__file__)) + subprocess.check_call(["make", "metrics", "scopes"], cwd=docs) -# -- manual page output ------------------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [(master_doc, 'jupyterhub', u'JupyterHub Documentation', [author], 1)] - -# man_show_urls = False - - -# -- Texinfo output ----------------------------------------------------- - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ( - master_doc, - 'JupyterHub', - u'JupyterHub Documentation', - author, - 'JupyterHub', - 'One line description of project.', - 'Miscellaneous', - ) -] - -# texinfo_appendices = [] -# texinfo_domain_indices = True -# texinfo_show_urls = 'footnote' -# texinfo_no_detailmenu = False - - -# -- Epub output -------------------------------------------------------- - -# Bibliographic Dublin Core info. -epub_title = project -epub_author = author -epub_publisher = author -epub_copyright = copyright - -# A list of files that should not be packed into the epub file. 
-epub_exclude_files = ['search.html'] - -# -- Intersphinx ---------------------------------------------------------- - -intersphinx_mapping = {'https://docs.python.org/3/': None} - -# -- Read The Docs -------------------------------------------------------- - -on_rtd = os.environ.get('READTHEDOCS', None) == 'True' -if on_rtd: - # readthedocs.org uses their theme by default, so no need to specify it - # build rest-api, since RTD doesn't run make - from subprocess import check_call as sh - - sh(['make', 'rest-api'], cwd=docs) - -# -- Spell checking ------------------------------------------------------- - +# -- Spell checking ---------------------------------------------------------- +# ref: https://sphinxcontrib-spelling.readthedocs.io/en/latest/customize.html#configuration-options +# +# The "sphinxcontrib.spelling" extension is optionally enabled if its available. +# try: - import sphinxcontrib.spelling + import sphinxcontrib.spelling # noqa except ImportError: pass else: extensions.append("sphinxcontrib.spelling") +spelling_word_list_filename = "spelling_wordlist.txt" -spelling_word_list_filename = 'spelling_wordlist.txt' + +# -- Options for HTML output ------------------------------------------------- +# ref: https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output +# +html_logo = "_static/images/logo/logo.png" +html_favicon = "_static/images/logo/favicon.ico" +html_static_path = ["_static"] + +html_theme = "pydata_sphinx_theme" +html_theme_options = { + "icon_links": [ + { + "name": "GitHub", + "url": "https://github.com/jupyterhub/jupyterhub", + "icon": "fab fa-github-square", + }, + { + "name": "Discourse", + "url": "https://discourse.jupyter.org/c/jupyterhub/10", + "icon": "fab fa-discourse", + }, + ], + "use_edit_page_button": True, + "navbar_align": "left", +} +html_context = { + "github_user": "jupyterhub", + "github_repo": "jupyterhub", + "github_version": "main", + "doc_path": "docs/source", +} + + +# -- Options for linkcheck 
builder ------------------------------------------- +# ref: https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-the-linkcheck-builder +# +linkcheck_ignore = [ + r"(.*)github\.com(.*)#", # javascript based anchors + r"(.*)/#%21(.*)/(.*)", # /#!forum/jupyter - encoded anchor edge case + r"https://github.com/[^/]*$", # too many github usernames / searches in changelog + "https://github.com/jupyterhub/jupyterhub/pull/", # too many PRs in changelog + "https://github.com/jupyterhub/jupyterhub/compare/", # too many comparisons in changelog +] +linkcheck_anchors_ignore = [ + "/#!", + "/#%21", +] + + +# -- Intersphinx ------------------------------------------------------------- +# ref: https://www.sphinx-doc.org/en/master/usage/extensions/intersphinx.html#configuration +# +intersphinx_mapping = { + "python": ("https://docs.python.org/3/", None), + "tornado": ("https://www.tornadoweb.org/en/stable/", None), +} +# -- Options for the opengraph extension ------------------------------------- +# ref: https://github.com/wpilibsuite/sphinxext-opengraph#options +# +# ogp_site_url is set automatically by RTD +ogp_image = "_static/logo.png" +ogp_use_first_image = True + + +# -- Options for the rediraffe extension ------------------------------------- +# ref: https://github.com/wpilibsuite/sphinxext-rediraffe#readme +# +# This extensions help us relocated content without breaking links. If a +# document is moved internally, a redirect like should be configured below to +# help us not break links. +# +rediraffe_branch = "main" +rediraffe_redirects = { + # "old-file": "new-folder/new-file-name", +} diff --git a/docs/source/contributing/community.md b/docs/source/contributing/community.md new file mode 100644 index 00000000..f7273ad1 --- /dev/null +++ b/docs/source/contributing/community.md @@ -0,0 +1,27 @@ +# Community communication channels + +We use different channels of communication for different purposes. 
Whichever one you use will depend on what kind of communication you want to engage in. + +## Discourse (recommended) + +We use [Discourse](https://discourse.jupyter.org) for online discussions and support questions. +You can ask questions here if you are a first-time contributor to the JupyterHub project. +Everyone in the Jupyter community is welcome to bring ideas and questions there. + +We recommend that you first use our Discourse as all past and current discussions on it are archived and searchable. Thus, all discussions remain useful and accessible to the whole community. + +## Gitter + +We use [our Gitter channel](https://gitter.im/jupyterhub/jupyterhub) for online, real-time text chat; a place for more ephemeral discussions. When you're not on Discourse, you can stop here to have other discussions on the fly. + +## Github Issues + +[Github issues](https://docs.github.com/en/issues/tracking-your-work-with-issues/about-issues) are used for most long-form project discussions, bug reports and feature requests. + +- Issues related to a specific authenticator or spawner should be opened in the appropriate repository for the authenticator or spawner. +- If you are using a specific JupyterHub distribution (such as [Zero to JupyterHub on Kubernetes](http://github.com/jupyterhub/zero-to-jupyterhub-k8s) or [The Littlest JupyterHub](http://github.com/jupyterhub/the-littlest-jupyterhub/)), you should open issues directly in their repository. +- If you cannot find a repository to open your issue in, do not worry! Open the issue in the [main JupyterHub repository](https://github.com/jupyterhub/jupyterhub/) and our community will help you figure it out. + +```{note} +Our community is distributed across the world in various timezones, so please be patient if you do not get a response immediately! 
+``` diff --git a/docs/source/contributing/community.rst b/docs/source/contributing/community.rst deleted file mode 100644 index 359d2472..00000000 --- a/docs/source/contributing/community.rst +++ /dev/null @@ -1,30 +0,0 @@ -.. _contributing/community: - -================================ -Community communication channels -================================ - -We use `Discourse ` for online discussion. -Everyone in the Jupyter community is welcome to bring ideas and questions there. -In addition, we use `Gitter `_ for online, real-time text chat, -a place for more ephemeral discussions. -The primary Gitter channel for JupyterHub is `jupyterhub/jupyterhub `_. -Gitter isn't archived or searchable, so we recommend going to discourse first -to make sure that discussions are most useful and accessible to the community. -Remember that our community is distributed across the world in various -timezones, so be patient if you do not get an answer immediately! - -GitHub issues are used for most long-form project discussions, bug reports -and feature requests. Issues related to a specific authenticator or -spawner should be directed to the appropriate repository for the -authenticator or spawner. If you are using a specific JupyterHub -distribution (such as `Zero to JupyterHub on Kubernetes `_ -or `The Littlest JupyterHub `_), -you should open issues directly in their repository. If you can not -find a repository to open your issue in, do not worry! Create it in the `main -JupyterHub repository `_ and our -community will help you figure it out. - -A `mailing list `_ for all -of Project Jupyter exists, along with one for `teaching with Jupyter -`_. diff --git a/docs/source/contributing/docs.rst b/docs/source/contributing/docs.rst index 3e954224..33b39ae3 100644 --- a/docs/source/contributing/docs.rst +++ b/docs/source/contributing/docs.rst @@ -5,7 +5,7 @@ Contributing Documentation ========================== Documentation is often more important than code. 
This page helps -you get set up on how to contribute documentation to JupyterHub. +you get set up on how to contribute to JupyterHub's documentation. Building documentation locally ============================== @@ -13,12 +13,12 @@ Building documentation locally We use `sphinx `_ to build our documentation. It takes our documentation source files (written in `markdown `_ or `reStructuredText -`_ & +`_ & stored under the ``docs/source`` directory) and converts it into various formats for people to read. To make sure the documentation you write or change renders correctly, it is good practice to test it locally. -#. Make sure you have successfuly completed :ref:`contributing/setup`. +#. Make sure you have successfully completed :ref:`contributing/setup`. #. Install the packages required to build the docs. @@ -39,13 +39,17 @@ change renders correctly, it is good practice to test it locally. along with the filename / line number in which they occurred. Fix them, and re-run the ``make html`` command to re-render the documentation. -#. View the rendered documentation by opening ``build/html/index.html`` in - a web browser. +#. View the rendered documentation by opening ``build/html/index.html`` in + a web browser. .. tip:: - On macOS, you can open a file from the terminal with ``open ``. - On Linux, you can do the same with ``xdg-open ``. + **On Windows**, you can open a file from the terminal with ``start ``. + + **On macOS**, you can do the same with ``open ``. + + **On Linux**, you can do the same with ``xdg-open ``. + After opening index.html in your browser you can just refresh the page whenever you rebuild the docs via ``make html`` diff --git a/docs/source/contributing/index.rst b/docs/source/contributing/index.rst new file mode 100644 index 00000000..8eec5bd0 --- /dev/null +++ b/docs/source/contributing/index.rst @@ -0,0 +1,21 @@ +============ +Contributing +============ + +We want you to contribute to JupyterHub in ways that are most exciting +& useful to you. 
We value documentation, testing, bug reporting & code equally, +and are glad to have your contributions in whatever form you wish :) + +Our `Code of Conduct `_ +(`reporting guidelines `_) +helps keep our community welcoming to as many people as possible. + +.. toctree:: + :maxdepth: 2 + + community + setup + docs + tests + roadmap + security diff --git a/docs/source/contributing/roadmap.md b/docs/source/contributing/roadmap.md index 8ad33fbf..5f2bc49c 100644 --- a/docs/source/contributing/roadmap.md +++ b/docs/source/contributing/roadmap.md @@ -4,10 +4,10 @@ This roadmap collects "next steps" for JupyterHub. It is about creating a shared understanding of the project's vision and direction amongst the community of users, contributors, and maintainers. The goal is to communicate priorities and upcoming release plans. -It is not a aimed at limiting contributions to what is listed here. - +It is not aimed at limiting contributions to what is listed here. ## Using the roadmap + ### Sharing Feedback on the Roadmap All of the community is encouraged to provide feedback as well as share new @@ -22,17 +22,17 @@ maintainers will help identify what a good next step is for the issue. When submitting an issue, think about what "next step" category best describes your issue: -* **now**, concrete/actionable step that is ready for someone to start work on. -These might be items that have a link to an issue or more abstract like -"decrease typos and dead links in the documentation" -* **soon**, less concrete/actionable step that is going to happen soon, -discussions around the topic are coming close to an end at which point it can -move into the "now" category -* **later**, abstract ideas or tasks, need a lot of discussion or -experimentation to shape the idea so that it can be executed. 
Can also -contain concrete/actionable steps that have been postponed on purpose -(these are steps that could be in "now" but the decision was taken to work on -them later) +- **now**, concrete/actionable step that is ready for someone to start work on. + These might be items that have a link to an issue or more abstract like + "decrease typos and dead links in the documentation" +- **soon**, less concrete/actionable step that is going to happen soon, + discussions around the topic are coming close to an end at which point it can + move into the "now" category +- **later**, abstract ideas or tasks, need a lot of discussion or + experimentation to shape the idea so that it can be executed. Can also + contain concrete/actionable steps that have been postponed on purpose + (these are steps that could be in "now" but the decision was taken to work on + them later) ### Reviewing and Updating the Roadmap @@ -47,8 +47,8 @@ For those please create a The roadmap should give the reader an idea of what is happening next, what needs input and discussion before it can happen and what has been postponed. - ## The roadmap proper + ### Project vision JupyterHub is a dependable tool used by humans that reduces the complexity of @@ -58,20 +58,19 @@ creating the environment in which a piece of software can be executed. These "Now" items are considered active areas of focus for the project: -* HubShare - a sharing service for use with JupyterHub. - * Users should be able to: - - Push a project to other users. - - Get a checkout of a project from other users. - - Push updates to a published project. - - Pull updates from a published project. - - Manage conflicts/merges by simply picking a version (our/theirs) - - Get a checkout of a project from the internet. These steps are completely different from saving notebooks/files. - - Have directories that are managed by git completely separately from our stuff. - - Look at pushed content that they have access to without an explicit pull. 
- - Define and manage teams of users. - - Adding/removing a user to/from a team gives/removes them access to all projects that team has access to. - - Build other services, such as static HTML publishing and dashboarding on top of these things. - +- HubShare - a sharing service for use with JupyterHub. + - Users should be able to: + - Push a project to other users. + - Get a checkout of a project from other users. + - Push updates to a published project. + - Pull updates from a published project. + - Manage conflicts/merges by simply picking a version (our/theirs) + - Get a checkout of a project from the internet. These steps are completely different from saving notebooks/files. + - Have directories that are managed by git completely separately from our stuff. + - Look at pushed content that they have access to without an explicit pull. + - Define and manage teams of users. + - Adding/removing a user to/from a team gives/removes them access to all projects that team has access to. + - Build other services, such as static HTML publishing and dashboarding on top of these things. ### Soon @@ -79,12 +78,10 @@ These "Soon" items are under discussion. Once an item reaches the point of an actionable plan, the item will be moved to the "Now" section. Typically, these will be moved at a future review of the roadmap. -* resource monitoring and management: - - (prometheus?) API for resource monitoring - - tracking activity on single-user servers instead of the proxy - - notes and activity tracking per API token - - UI for managing named servers - +- resource monitoring and management: + - (prometheus?) API for resource monitoring + - tracking activity on single-user servers instead of the proxy + - notes and activity tracking per API token ### Later @@ -93,6 +90,6 @@ time there is no active plan for an item. The project would like to find the resources and time to discuss these ideas. 
- real-time collaboration - - Enter into real-time collaboration mode for a project that starts a shared execution context. - - Once the single-user notebook package supports realtime collaboration, - implement sharing mechanism integrated into the Hub. + - Enter into real-time collaboration mode for a project that starts a shared execution context. + - Once the single-user notebook package supports realtime collaboration, + implement sharing mechanism integrated into the Hub. diff --git a/docs/source/contributing/setup.rst b/docs/source/contributing/setup.rst index 4ef70b08..1f601960 100644 --- a/docs/source/contributing/setup.rst +++ b/docs/source/contributing/setup.rst @@ -7,28 +7,27 @@ Setting up a development install System requirements =================== -JupyterHub can only run on MacOS or Linux operating systems. If you are -using Windows, we recommend using `VirtualBox `_ +JupyterHub can only run on macOS or Linux operating systems. If you are +using Windows, we recommend using `VirtualBox `_ or a similar system to run `Ubuntu Linux `_ for development. Install Python -------------- -JupyterHub is written in the `Python `_ programming language, and -requires you have at least version 3.5 installed locally. If you haven’t +JupyterHub is written in the `Python `_ programming language and +requires you have at least version 3.6 installed locally. If you haven’t installed Python before, the recommended way to install it is to use -`miniconda `_. Remember to get the ‘Python 3’ version, +`Miniconda `_. Remember to get the ‘Python 3’ version, and **not** the ‘Python 2’ version! Install nodejs -------------- -``configurable-http-proxy``, the default proxy implementation for -JupyterHub, is written in Javascript to run on `NodeJS -`_. If you have not installed nodejs before, we -recommend installing it in the ``miniconda`` environment you set up for -Python. You can do so with ``conda install nodejs``. 
+`NodeJS 12+ `_ is required for building some JavaScript components. +``configurable-http-proxy``, the default proxy implementation for JupyterHub, is written in Javascript. +If you have not installed NodeJS before, we recommend installing it in the ``miniconda`` environment you set up for Python. +You can do so with ``conda install nodejs``. Many in the Jupyter community use [``nvm``](https://github.com/nvm-sh/nvm) to managing node dependencies. @@ -36,7 +35,7 @@ managing node dependencies. Install git ----------- -JupyterHub uses `git `_ & `GitHub `_ +JupyterHub uses `Git `_ & `GitHub `_ for development & collaboration. You need to `install git `_ to work on JupyterHub. We also recommend getting a free account on GitHub.com. @@ -44,11 +43,15 @@ JupyterHub. We also recommend getting a free account on GitHub.com. Setting up a development install ================================ -When developing JupyterHub, you need to make changes to the code & see -their effects quickly. You need to do a developer install to make that -happen. +When developing JupyterHub, you would need to make changes and be able to instantly view the results of the changes. To achieve that, a developer install is required. -1. Clone the `JupyterHub git repository `_ +.. note:: This guide does not attempt to dictate *how* development + environments should be isolated since that is a personal preference and can + be achieved in many ways, for example, `tox`, `conda`, `docker`, etc. See this + `forum thread `_ for + a more detailed discussion. + +1. Clone the `JupyterHub git repository `_ to your computer. .. code:: bash @@ -63,7 +66,7 @@ happen. python -V - This should return a version number greater than or equal to 3.5. + This should return a version number greater than or equal to 3.6. .. code:: bash @@ -71,12 +74,11 @@ happen. This should return a version number greater than or equal to 5.0. -3. Install ``configurable-http-proxy``. This is required to run - JupyterHub. +3. 
Install ``configurable-http-proxy`` (required to run and test the default JupyterHub configuration) and ``yarn`` (required to build some components): .. code:: bash - npm install -g configurable-http-proxy + npm install -g configurable-http-proxy yarn If you get an error that says ``Error: EACCES: permission denied``, you might need to prefix the command with ``sudo``. ``sudo`` may be required to perform a system-wide install. @@ -84,25 +86,31 @@ happen. .. code:: bash - npm install configurable-http-proxy + npm install configurable-http-proxy yarn export PATH=$PATH:$(pwd)/node_modules/.bin The second line needs to be run every time you open a new terminal. -4. Install the python packages required for JupyterHub development. + If you are using conda you can instead run: .. code:: bash - python3 -m pip install -r dev-requirements.txt - python3 -m pip install -r requirements.txt + conda install configurable-http-proxy yarn -5. Install the development version of JupyterHub. This lets you edit - JupyterHub code in a text editor & restart the JupyterHub process to - see your code changes immediately. +4. Install an editable version of JupyterHub and its requirements for + development and testing. This lets you edit JupyterHub code in a text editor + & restart the JupyterHub process to see your code changes immediately. .. code:: bash - python3 -m pip install --editable . + python3 -m pip install --editable ".[test]" + +5. Set up a database. + + The default database engine is ``sqlite`` so if you are just trying + to get up and running quickly for local development that should be + available via `Python `__. + See :doc:`/reference/database` for details on other supported databases. 6. You are now ready to start JupyterHub! @@ -123,7 +131,7 @@ To simplify testing of JupyterHub, it’s helpful to use authenticator and SimpleLocalProcessSpawner instead of the default spawner. There is a sample configuration file that does this in -``testing/jupyterhub_config.py``. 
To launch jupyterhub with this +``testing/jupyterhub_config.py``. To launch JupyterHub with this configuration: .. code:: bash @@ -139,14 +147,14 @@ JupyterHub as. DummyAuthenticator allows you to log in with any username & password, while SimpleLocalProcessSpawner allows you to start servers without having to -create a unix user for each JupyterHub user. Together, these make it +create a Unix user for each JupyterHub user. Together, these make it much easier to test JupyterHub. Tip: If you are working on parts of JupyterHub that are common to all authenticators & spawners, we recommend using both DummyAuthenticator & -SimpleLocalProcessSpawner. If you are working on just authenticator related +SimpleLocalProcessSpawner. If you are working on just authenticator-related parts, use only SimpleLocalProcessSpawner. Similarly, if you are working on -just spawner related parts, use only DummyAuthenticator. +just spawner-related parts, use only DummyAuthenticator. Troubleshooting =============== @@ -176,3 +184,4 @@ development updates, with: python3 setup.py js # fetch updated client-side js python3 setup.py css # recompile CSS from LESS sources + python3 setup.py jsx # build React admin app diff --git a/docs/source/contributing/tests.rst b/docs/source/contributing/tests.rst index 27022156..ad9b1da7 100644 --- a/docs/source/contributing/tests.rst +++ b/docs/source/contributing/tests.rst @@ -1,49 +1,49 @@ .. _contributing/tests: -================== -Testing JupyterHub -================== +=================================== +Testing JupyterHub and linting code +=================================== Unit testing helps to validate that JupyterHub works the way we think it does, and continues to do so when changes occur. They also help communicate precisely what we expect our code to do. -JupyterHub uses `pytest `_ for all our tests. You -can find them under ``jupyterhub/tests`` directory in the git repository. +JupyterHub uses `pytest `_ for all the tests. 
You
+can find them under the `jupyterhub/tests `_ directory in the git repository.

Running the tests
==================

-#. Make sure you have completed :ref:`contributing/setup`. You should be able
-   to start ``jupyterhub`` from the commandline & access it from your
-   web browser. This ensures that the dev environment is properly set
+#. Make sure you have completed :ref:`contributing/setup`. Once completed, you should be able
+   to run ``jupyterhub`` on your command line and access JupyterHub from your browser at http://localhost:8000. Being able to run and access `jupyterhub` should mean that the dev environment is properly set
   up for tests to run.

#. You can run all tests in JupyterHub

   .. code-block:: bash

-      pytest --async-test-timeout 15 -v jupyterhub/tests
+      pytest -v jupyterhub/tests

   This should display progress as it runs all the tests, printing
   information about any test failures as they occur.
+
+   If you wish to confirm test coverage, run the tests with the `--cov` flag:

-   The ``--async-test-timeout`` parameter is used by `pytest-tornado
-   `_ to set the
-   asynchronous test timeout to 15 seconds rather than the default 5,
-   since some of our tests take longer than 5s to execute.
+   .. code-block:: bash
+
+      pytest -v --cov=jupyterhub jupyterhub/tests

#. You can also run tests in just a specific file:

   .. code-block:: bash

-      pytest --async-test-timeout 15 -v jupyterhub/tests/
+      pytest -v jupyterhub/tests/

#. To run a specific test only, you can do:

   .. code-block:: bash

-      pytest --async-test-timeout 15 -v jupyterhub/tests/::
+      pytest -v jupyterhub/tests/::

   This runs the test with function name ````
   defined in ````. This is very useful when you are iteratively
@@ -56,6 +56,49 @@ Running the tests

      pytest -v jupyterhub/tests/test_api.py::test_shutdown

+   For more information, refer to the `pytest usage documentation `_.
+
+Test organisation
+=================
+
+The tests live in ``jupyterhub/tests`` and are organized roughly into:
+
+#. 
``test_api.py`` tests the REST API
+#. ``test_pages.py`` tests loading the HTML pages
+
+and other collections of tests for different components.
+When writing a new test, there should usually be a test of
+similar functionality already written and related tests should
+be added nearby.
+
+The fixtures live in ``jupyterhub/tests/conftest.py``. There are
+fixtures that can be used for JupyterHub components, such as:
+
+- ``app``: an instance of JupyterHub with mocked parts
+- ``auth_state_enabled``: enables persisting auth_state (like authentication tokens)
+- ``db``: a sqlite in-memory DB session
+- ``io_loop``: a Tornado event loop
+- ``event_loop``: a new asyncio event loop
+- ``user``: creates a new temporary user
+- ``admin_user``: creates a new temporary admin user
+- single user servers
+  - ``cleanup_after``: allows cleanup of single user servers between tests
+- mocked service
+  - ``MockServiceSpawner``: a spawner that mocks services for testing with a short poll interval
+  - ``mockservice``: mocked service with no external service url
+  - ``mockservice_url``: mocked service with a url to test external services
+
+And fixtures to add functionality or spawning behavior:
+
+- ``admin_access``: grants admin access
+- ``no_patience``: sets slow-spawning timeouts to zero
+- ``slow_spawn``: enables the SlowSpawner (a spawner that takes a few seconds to start)
+- ``never_spawn``: enables the NeverSpawner (a spawner that will never start)
+- ``bad_spawn``: enables the BadSpawner (a spawner that fails immediately)
+- ``slow_bad_spawn``: enables the SlowBadSpawner (a spawner that fails after a short delay)
+
+For information on using the existing fixtures and creating new ones, refer to the `pytest fixtures documentation `_

Troubleshooting Test Failures
=============================

@@ -63,16 +106,34 @@ All the tests are failing
-------------------------

-Make sure you have completed all the steps in :ref:`contributing/setup` 
sucessfully, and -can launch ``jupyterhub`` from the terminal. +Make sure you have completed all the steps in :ref:`contributing/setup` successfully, and are able to access JupyterHub from your browser at http://localhost:8000 after starting ``jupyterhub`` in your command line. -Tests are timing out --------------------- -The ``--async-test-timeout`` parameter to ``pytest`` is used by -`pytest-tornado `_ to set -the asynchronous test timeout to a higher value than the default of 5s, -since some of our tests take longer than 5s to execute. If the tests -are still timing out, try increasing that value even more. You can -also set an environment variable ``ASYNC_TEST_TIMEOUT`` instead of -passing ``--async-test-timeout`` to each invocation of pytest. +Code formatting and linting +=========================== + +JupyterHub automatically enforces code formatting. This means that pull requests +with changes breaking this formatting will receive a commit from pre-commit.ci +automatically. + +To automatically format code locally, you can install pre-commit and register a +*git hook* to automatically check with pre-commit before you make a commit if +the formatting is okay. + +.. code:: bash + + pip install pre-commit + pre-commit install --install-hooks + +To run pre-commit manually you would do: + +.. code:: bash + + # check for changes to code not yet committed + pre-commit run + + # check for changes also in already committed code + pre-commit run --all-files + +You may also install `black integration `_ +into your text editor to format code automatically. 
diff --git a/docs/source/contributor-list.md b/docs/source/contributor-list.md
index 12555c22..f2673671 100644
--- a/docs/source/contributor-list.md
+++ b/docs/source/contributor-list.md
@@ -120,3 +120,4 @@ contribution on JupyterHub:
 - yuvipanda
 - zoltan-fedor
 - zonca
+- Neeraj Natu
diff --git a/docs/source/events/index.rst b/docs/source/events/index.rst
new file mode 100644
index 00000000..bbbf680f
--- /dev/null
+++ b/docs/source/events/index.rst
@@ -0,0 +1,46 @@
+Event logging and telemetry
+===========================
+
+JupyterHub can be configured to record structured events from a running server using Jupyter's `Telemetry System`_. The types of events that JupyterHub emits are defined by `JSON schemas`_ listed at the bottom of this page_.
+
+.. _logging: https://docs.python.org/3/library/logging.html
+.. _`Telemetry System`: https://github.com/jupyter/telemetry
+.. _`JSON schemas`: https://json-schema.org/
+
+How to emit events
+------------------
+
+Event logging is handled by JupyterHub's ``EventLog`` object. This leverages Python's standard logging_ library to emit, filter, and collect event data.
+
+
+To begin recording events, you'll need to set two configurations:
+
+ 1. ``handlers``: tells the EventLog *where* to route your events. This trait is a list of Python logging handlers that route events to the event log file.
+ 2. ``allowed_schemas``: tells the EventLog *which* events should be recorded. No events are emitted by default; all recorded events must be listed here.
+
+Here's a basic example:
+
+.. code-block::
+
+   import logging
+
+   c.EventLog.handlers = [
+       logging.FileHandler('event.log'),
+   ]
+
+   c.EventLog.allowed_schemas = [
+       'hub.jupyter.org/server-action'
+   ]
+
+The output is a file, ``"event.log"``, with events recorded as JSON data.
+
+
+.. _page:
+
+Event schemas
+-------------
+
+.. 
toctree:: + :maxdepth: 2 + + server-actions.rst diff --git a/docs/source/events/server-actions.rst b/docs/source/events/server-actions.rst new file mode 100644 index 00000000..12018713 --- /dev/null +++ b/docs/source/events/server-actions.rst @@ -0,0 +1 @@ +.. jsonschema:: ../../../jupyterhub/event-schemas/server-actions/v1.yaml diff --git a/docs/source/gallery-jhub-deployments.md b/docs/source/gallery-jhub-deployments.md index 55a3dff6..89780e65 100644 --- a/docs/source/gallery-jhub-deployments.md +++ b/docs/source/gallery-jhub-deployments.md @@ -8,27 +8,29 @@ high performance computing. Please submit pull requests to update information or to add new institutions or uses. - ## Academic Institutions, Research Labs, and Supercomputer Centers ### University of California Berkeley - [BIDS - Berkeley Institute for Data Science](https://bids.berkeley.edu/) - - [Teaching with Jupyter notebooks and JupyterHub](https://bids.berkeley.edu/resources/videos/teaching-ipythonjupyter-notebooks-and-jupyterhub) + + - [Teaching with Jupyter notebooks and JupyterHub](https://bids.berkeley.edu/resources/videos/teaching-ipythonjupyter-notebooks-and-jupyterhub) - [Data 8](http://data8.org/) - - [GitHub organization](https://github.com/data-8) + + - [GitHub organization](https://github.com/data-8) - [NERSC](http://www.nersc.gov/) - - [Press release on Jupyter and Cori](http://www.nersc.gov/news-publications/nersc-news/nersc-center-news/2016/jupyter-notebooks-will-open-up-new-possibilities-on-nerscs-cori-supercomputer/) - - [Moving and sharing data](https://www.nersc.gov/assets/Uploads/03-MovingAndSharingData-Cholia.pdf) + + - [Press release on Jupyter and Cori](http://www.nersc.gov/news-publications/nersc-news/nersc-center-news/2016/jupyter-notebooks-will-open-up-new-possibilities-on-nerscs-cori-supercomputer/) + - [Moving and sharing data](https://www.nersc.gov/assets/Uploads/03-MovingAndSharingData-Cholia.pdf) - [Research IT](http://research-it.berkeley.edu) - - [JupyterHub server 
supports campus research computation](http://research-it.berkeley.edu/blog/17/01/24/free-fully-loaded-jupyterhub-server-supports-campus-research-computation) + - [JupyterHub server supports campus research computation](http://research-it.berkeley.edu/blog/17/01/24/free-fully-loaded-jupyterhub-server-supports-campus-research-computation) ### University of California Davis -- [Spinning up multiple Jupyter Notebooks on AWS for a tutorial](https://github.com/mblmicdiv/course2017/blob/master/exercises/sourmash-setup.md) +- [Spinning up multiple Jupyter Notebooks on AWS for a tutorial](https://github.com/mblmicdiv/course2017/blob/HEAD/exercises/sourmash-setup.md) Although not technically a JupyterHub deployment, this tutorial setup may be helpful to others in the Jupyter community. @@ -59,23 +61,31 @@ easy to do with RStudio too. - [jupyterhub-deploy-teaching](https://github.com/jupyterhub/jupyterhub-deploy-teaching) based on work by Brian Granger for Cal Poly's Data Science 301 Course +### Chameleon + +[Chameleon](https://www.chameleoncloud.org) is an NSF-funded configurable experimental environment for large-scale computer science systems research with [bare metal reconfigurability](https://chameleoncloud.readthedocs.io/en/latest/technical/baremetal.html). Chameleon users utilize JupyterHub to document and reproduce their complex CISE and networking experiments. + +- [Shared JupyterHub](https://jupyter.chameleoncloud.org): provides a common "workbench" environment for any Chameleon user. +- [Trovi](https://www.chameleoncloud.org/experiment/share): a sharing portal of experiments, tutorials, and examples, which users can launch as dedicated, isolated environments on Chameleon's JupyterHub.
+ ### Clemson University - Advanced Computing - - [Palmetto cluster and JupyterHub](http://citi.sites.clemson.edu/2016/08/18/JupyterHub-for-Palmetto-Cluster.html) + - [Palmetto cluster and JupyterHub](http://citi.sites.clemson.edu/2016/08/18/JupyterHub-for-Palmetto-Cluster.html) ### University of Colorado Boulder - (CU Research Computing) CURC - - [JupyterHub User Guide](https://www.rc.colorado.edu/support/user-guide/jupyterhub.html) - - Slurm job dispatched on Crestone compute cluster - - log troubleshooting - - Profiles in IPython Clusters tab - - [Parallel Processing with JupyterHub tutorial](https://www.rc.colorado.edu/support/examples-and-tutorials/parallel-processing-with-jupyterhub.html) - - [Parallel Programming with JupyterHub document](https://www.rc.colorado.edu/book/export/html/833) + + - [JupyterHub User Guide](https://www.rc.colorado.edu/support/user-guide/jupyterhub.html) + - Slurm job dispatched on Crestone compute cluster + - log troubleshooting + - Profiles in IPython Clusters tab + - [Parallel Processing with JupyterHub tutorial](https://www.rc.colorado.edu/support/examples-and-tutorials/parallel-processing-with-jupyterhub.html) + - [Parallel Programming with JupyterHub document](https://www.rc.colorado.edu/book/export/html/833) - Earth Lab at CU - - [Tutorial on Parallel R on JupyterHub](https://earthdatascience.org/tutorials/parallel-r-on-jupyterhub/) + - [Tutorial on Parallel R on JupyterHub](https://earthdatascience.org/tutorials/parallel-r-on-jupyterhub/) ### George Washington University @@ -87,7 +97,7 @@ easy to do with RStudio too. ### University of Illinois -- https://datascience.business.illinois.edu (currently down; checked 04/26/19) +- https://datascience.business.illinois.edu (currently down; checked 10/26/22) ### IllustrisTNG Simulation Project @@ -112,11 +122,11 @@ easy to do with RStudio too. 
### Paderborn University - [Data Science (DICE) group](https://dice.cs.uni-paderborn.de/) - - [nbgraderutils](https://github.com/dice-group/nbgraderutils): Use JupyterHub + nbgrader + iJava kernel for online Java exercises. Used in lecture Statistical Natural Language Processing. + - [nbgraderutils](https://github.com/dice-group/nbgraderutils): Use JupyterHub + nbgrader + iJava kernel for online Java exercises. Used in lecture Statistical Natural Language Processing. ### Penn State University -- [Press release](https://news.psu.edu/story/523093/2018/05/24/new-open-source-web-apps-available-students-and-faculty): "New open-source web apps available for students and faculty" (but Hub is currently down; checked 04/26/19) +- [Press release](https://news.psu.edu/story/523093/2018/05/24/new-open-source-web-apps-available-students-and-faculty): "New open-source web apps available for students and faculty" ### University of Rochester CIRC @@ -125,33 +135,34 @@ easy to do with RStudio too. ### University of California San Diego - San Diego Supercomputer Center - Andrea Zonca - - [Deploy JupyterHub on a Supercomputer with SSH](https://zonca.github.io/2017/05/jupyterhub-hpc-batchspawner-ssh.html) - - [Run Jupyterhub on a Supercomputer](https://zonca.github.io/2015/04/jupyterhub-hpc.html) - - [Deploy JupyterHub on a VM for a Workshop](https://zonca.github.io/2016/04/jupyterhub-sdsc-cloud.html) - - [Customize your Python environment in Jupyterhub](https://zonca.github.io/2017/02/customize-python-environment-jupyterhub.html) - - [Jupyterhub deployment on multiple nodes with Docker Swarm](https://zonca.github.io/2016/05/jupyterhub-docker-swarm.html) - - [Sample deployment of Jupyterhub in HPC on SDSC Comet](https://zonca.github.io/2017/02/sample-deployment-jupyterhub-hpc.html) + + - [Deploy JupyterHub on a Supercomputer with SSH](https://zonca.github.io/2017/05/jupyterhub-hpc-batchspawner-ssh.html) + - [Run Jupyterhub on a 
Supercomputer](https://zonca.github.io/2015/04/jupyterhub-hpc.html) + - [Deploy JupyterHub on a VM for a Workshop](https://zonca.github.io/2016/04/jupyterhub-sdsc-cloud.html) + - [Customize your Python environment in Jupyterhub](https://zonca.github.io/2017/02/customize-python-environment-jupyterhub.html) + - [Jupyterhub deployment on multiple nodes with Docker Swarm](https://zonca.github.io/2016/05/jupyterhub-docker-swarm.html) + - [Sample deployment of Jupyterhub in HPC on SDSC Comet](https://zonca.github.io/2017/02/sample-deployment-jupyterhub-hpc.html) - Educational Technology Services - Paul Jamason - - [jupyterhub.ucsd.edu](https://jupyterhub.ucsd.edu) + - [jupyterhub.ucsd.edu](https://jupyterhub.ucsd.edu) ### TACC University of Texas ### Texas A&M - Kristen Thyng - Oceanography - - [Teaching with JupyterHub and nbgrader](http://kristenthyng.com/blog/2016/09/07/jupyterhub+nbgrader/) + - [Teaching with JupyterHub and nbgrader](http://kristenthyng.com/blog/2016/09/07/jupyterhub+nbgrader/) ### Elucidata - - What's new in Jupyter Notebooks @[Elucidata](https://elucidata.io/): - - Using Jupyter Notebooks with Jupyterhub on GCP, managed by GKE - - https://medium.com/elucidata/why-you-should-be-using-a-jupyter-notebook-8385a4ccd93d + +- What's new in Jupyter Notebooks @[Elucidata](https://elucidata.io/): + - [Using Jupyter Notebooks with Jupyterhub on GCP, managed by GKE](https://medium.com/elucidata/why-you-should-be-using-a-jupyter-notebook-8385a4ccd93d) ## Service Providers ### AWS -- [running-jupyter-notebook-and-jupyterhub-on-amazon-emr](https://aws.amazon.com/blogs/big-data/running-jupyter-notebook-and-jupyterhub-on-amazon-emr/) +- [Run Jupyter Notebook and JupyterHub on Amazon EMR](https://aws.amazon.com/blogs/big-data/running-jupyter-notebook-and-jupyterhub-on-amazon-emr/) ### Google Cloud Platform @@ -164,28 +175,28 @@ easy to do with RStudio too. 
### Microsoft Azure -- https://docs.microsoft.com/en-us/azure/machine-learning/machine-learning-data-science-linux-dsvm-intro +- [Azure Data Science Virtual Machine release notes](https://docs.microsoft.com/en-us/azure/machine-learning/machine-learning-data-science-linux-dsvm-intro) ### Rackspace Carina - https://getcarina.com/blog/learning-how-to-whale/ -- http://carolynvanslyck.com/talk/carina/jupyterhub/#/ +- http://carolynvanslyck.com/talk/carina/jupyterhub/#/ (but carolynvanslyck is currently down; checked 10/26/22) ### Hadoop - [Deploying JupyterHub on Hadoop](https://jupyterhub-on-hadoop.readthedocs.io) - ## Miscellaneous - https://medium.com/@ybarraud/setting-up-jupyterhub-with-sudospawner-and-anaconda-844628c0dbee#.rm3yt87e1 -- https://groups.google.com/forum/#!topic/jupyter/nkPSEeMr8c0 Mailing list UT deployment -- JupyterHub setup on Centos https://gist.github.com/johnrc/604971f7d41ebf12370bf5729bf3e0a4 -- Deploy JupyterHub to Docker Swarm https://jupyterhub.surge.sh/#/welcome +- [Mailing list UT deployment](https://groups.google.com/forum/#!topic/jupyter/nkPSEeMr8c0) +- [JupyterHub setup on Centos](https://gist.github.com/johnrc/604971f7d41ebf12370bf5729bf3e0a4) +- [Deploy JupyterHub to Docker Swarm](https://jupyterhub.surge.sh/#/welcome) - http://www.laketide.com/building-your-lab-part-3/ - http://estrellita.hatenablog.com/entry/2015/07/31/083202 - http://www.walkingrandomly.com/?p=5734 - https://wrdrd.com/docs/consulting/education-technology - https://bitbucket.org/jackhale/fenics-jupyter - [LinuxCluster blog](https://linuxcluster.wordpress.com/category/application/jupyterhub/) -- [Network Technology](https://arnesund.com/tag/jupyterhub/) [Spark Cluster on OpenStack with Multi-User Jupyter Notebook](https://arnesund.com/2015/09/21/spark-cluster-on-openstack-with-multi-user-jupyter-notebook/) +- [Network Technology](https://arnesund.com/tag/jupyterhub/) +- [Spark Cluster on OpenStack with Multi-User Jupyter 
Notebook](https://arnesund.com/2015/09/21/spark-cluster-on-openstack-with-multi-user-jupyter-notebook/) diff --git a/docs/source/getting-started/authenticators-users-basics.md b/docs/source/getting-started/authenticators-users-basics.md index f4b9dff4..460579c9 100644 --- a/docs/source/getting-started/authenticators-users-basics.md +++ b/docs/source/getting-started/authenticators-users-basics.md @@ -1,40 +1,51 @@ # Authentication and User Basics -The default Authenticator uses [PAM][] to authenticate system users with +The default Authenticator uses [PAM][] (Pluggable Authentication Module) to authenticate system users with their username and password. With the default Authenticator, any user with an account and password on the system will be allowed to login. -## Create a whitelist of users - -You can restrict which users are allowed to login with a whitelist, -`Authenticator.whitelist`: +## Create a set of allowed users (`allowed_users`) +You can restrict which users are allowed to login with a set, +`Authenticator.allowed_users`: ```python -c.Authenticator.whitelist = {'mal', 'zoe', 'inara', 'kaylee'} +c.Authenticator.allowed_users = {'mal', 'zoe', 'inara', 'kaylee'} ``` -Users in the whitelist are added to the Hub database when the Hub is +Users in the `allowed_users` set are added to the Hub database when the Hub is started. +```{warning} +If this configuration value is not set, then **all authenticated users will be allowed into your hub**. +``` + ## Configure admins (`admin_users`) +```{note} +As of JupyterHub 2.0, the full permissions of `admin_users` +should not be required. +Instead, you can assign [roles](define-role-target) to users or groups +with only the scopes they require. +``` + Admin users of JupyterHub, `admin_users`, can add and remove users from -the user `whitelist`. `admin_users` can take actions on other users' +the user `allowed_users` set. 
`admin_users` can take actions on other users' behalf, such as stopping and restarting their servers. -A set of initial admin users, `admin_users` can configured be as follows: +A set of initial admin users, `admin_users` can be configured as follows: ```python c.Authenticator.admin_users = {'mal', 'zoe'} ``` -Users in the admin list are automatically added to the user `whitelist`, + +Users in the admin set are automatically added to the user `allowed_users` set, if they are not already present. -Each authenticator may have different ways of determining whether a user is an -administrator. By default JupyterHub use the PAMAuthenticator which provide the -`admin_groups` option and can determine administrator status base on a user -groups. For example we can let any users in the `wheel` group be admin: +Each Authenticator may have different ways of determining whether a user is an +administrator. By default, JupyterHub uses the PAMAuthenticator which provides the +`admin_groups` option and can set administrator status based on a user +group. For example, we can let any user in the `wheel` group be an admin: ```python c.PAMAuthenticator.admin_groups = {'wheel'} @@ -42,35 +53,35 @@ c.PAMAuthenticator.admin_groups = {'wheel'} ## Give admin access to other users' notebook servers (`admin_access`) -Since the default `JupyterHub.admin_access` setting is False, the admins +Since the default `JupyterHub.admin_access` setting is `False`, the admins do not have permission to log in to the single user notebook servers -owned by *other users*. If `JupyterHub.admin_access` is set to True, -then admins have permission to log in *as other users* on their -respective machines, for debugging. **As a courtesy, you should make +owned by _other users_. If `JupyterHub.admin_access` is set to `True`, +then admins have permission to log in _as other users_ on their +respective machines for debugging. 
**As a courtesy, you should make sure your users know if admin_access is enabled.** ## Add or remove users from the Hub -Users can be added to and removed from the Hub via either the admin +Users can be added to and removed from the Hub via the admin panel or the REST API. When a user is **added**, the user will be -automatically added to the whitelist and database. Restarting the Hub -will not require manually updating the whitelist in your config file, +automatically added to the `allowed_users` set and database. Restarting the Hub +will not require manually updating the `allowed_users` set in your config file, as the users will be loaded from the database. After starting the Hub once, it is not sufficient to **remove** a user -from the whitelist in your config file. You must also remove the user +from the allowed users set in your config file. You must also remove the user from the Hub's database, either by deleting the user from JupyterHub's admin page, or you can clear the `jupyterhub.sqlite` database and start fresh. ## Use LocalAuthenticator to create system users -The `LocalAuthenticator` is a special kind of authenticator that has +The `LocalAuthenticator` is a special kind of Authenticator that has the ability to manage users on the local system. When you try to add a new user to the Hub, a `LocalAuthenticator` will check if the user already exists. If you set the configuration value, `create_system_users`, to `True` in the configuration file, the `LocalAuthenticator` has -the privileges to add users to the system. The setting in the config +the ability to add users to the system. The setting in the config file is: ```python @@ -80,7 +91,7 @@ c.LocalAuthenticator.create_system_users = True Adding a user to the Hub that doesn't already exist on the system will result in the Hub creating that user via the system `adduser` command line tool. 
This option is typically used on hosted deployments of -JupyterHub, to avoid the need to manually create all your users before +JupyterHub to avoid the need to manually create all your users before launching the service. This approach is not recommended when running JupyterHub in situations where JupyterHub users map directly onto the system's UNIX users. @@ -90,27 +101,25 @@ system's UNIX users. JupyterHub's [OAuthenticator][] currently supports the following popular services: -- Auth0 -- Bitbucket -- CILogon -- GitHub -- GitLab -- Globus -- Google -- MediaWiki -- Okpy -- OpenShift +- [Auth0](https://oauthenticator.readthedocs.io/en/latest/api/gen/oauthenticator.auth0.html#module-oauthenticator.auth0) +- [Azure AD](https://oauthenticator.readthedocs.io/en/latest/api/gen/oauthenticator.azuread.html#module-oauthenticator.azuread) +- [Bitbucket](https://oauthenticator.readthedocs.io/en/latest/api/gen/oauthenticator.bitbucket.html#module-oauthenticator.bitbucket) +- [CILogon](https://oauthenticator.readthedocs.io/en/latest/api/gen/oauthenticator.cilogon.html#module-oauthenticator.cilogon) +- [GitHub](https://oauthenticator.readthedocs.io/en/latest/api/gen/oauthenticator.github.html#module-oauthenticator.github) +- [GitLab](https://oauthenticator.readthedocs.io/en/latest/api/gen/oauthenticator.gitlab.html#module-oauthenticator.gitlab) +- [Globus](https://oauthenticator.readthedocs.io/en/latest/api/gen/oauthenticator.globus.html#module-oauthenticator.globus) +- [Google](https://oauthenticator.readthedocs.io/en/latest/api/gen/oauthenticator.google.html#module-oauthenticator.google) +- [MediaWiki](https://oauthenticator.readthedocs.io/en/latest/api/gen/oauthenticator.mediawiki.html#module-oauthenticator.mediawiki) +- [Okpy](https://oauthenticator.readthedocs.io/en/latest/api/gen/oauthenticator.okpy.html#module-oauthenticator.okpy) +- [OpenShift](https://oauthenticator.readthedocs.io/en/latest/api/gen/oauthenticator.openshift.html#module-oauthenticator.openshift) -NOTE: 
Open issue asking for more details on this generic implementation. -It's not clear if this is a different implementation or if the JupyterHub OAuth -_is_ the generic implementation. -A generic implementation, which you can use for OAuth authentication +A [generic implementation](https://oauthenticator.readthedocs.io/en/latest/api/gen/oauthenticator.generic.html#module-oauthenticator.generic), which you can use for OAuth authentication with any provider, is also available. ## Use DummyAuthenticator for testing -The :class:`~jupyterhub.auth.DummyAuthenticator` is a simple authenticator that -allows for any username/password unless if a global password has been set. If +The `DummyAuthenticator` is a simple Authenticator that +allows for any username or password unless a global password has been set. If set, it will allow for any username as long as the correct password is provided. To set a global password, add this to the config file: @@ -118,5 +127,5 @@ To set a global password, add this to the config file: c.DummyAuthenticator.password = "some_password" ``` -[PAM]: https://en.wikipedia.org/wiki/Pluggable_authentication_module -[OAuthenticator]: https://github.com/jupyterhub/oauthenticator +[pam]: https://en.wikipedia.org/wiki/Pluggable_authentication_module +[oauthenticator]: https://github.com/jupyterhub/oauthenticator diff --git a/docs/source/getting-started/config-basics.md b/docs/source/getting-started/config-basics.md index f4ae9e4d..853dc3a7 100644 --- a/docs/source/getting-started/config-basics.md +++ b/docs/source/getting-started/config-basics.md @@ -1,6 +1,6 @@ # Configuration Basics -The section contains basic information about configuring settings for a JupyterHub +This section contains basic information about configuring settings for a JupyterHub deployment. The [Technical Reference](../reference/index) documentation provides additional details. 
@@ -44,30 +44,30 @@ jupyterhub -f /etc/jupyterhub/jupyterhub_config.py ``` The IPython documentation provides additional information on the -[config system](http://ipython.readthedocs.io/en/stable/development/config) +[config system](http://ipython.readthedocs.io/en/stable/development/config.html) that Jupyter uses. ## Configure using command line options -To display all command line options that are available for configuration: +To display all command line options that are available for configuration run the following command: ```bash jupyterhub --help-all ``` Configuration using the command line options is done when launching JupyterHub. -For example, to start JupyterHub on ``10.0.1.2:443`` with https, you +For example, to start JupyterHub on `10.0.1.2:443` with https, you would enter: ```bash jupyterhub --ip 10.0.1.2 --port 443 --ssl-key my_ssl.key --ssl-cert my_ssl.cert ``` -All configurable options may technically be set on the command-line, +All configurable options may technically be set on the command line, though some are inconvenient to type. To set a particular configuration parameter, `c.Class.trait`, you would use the command line option, `--Class.trait`, when starting JupyterHub. For example, to configure the -`c.Spawner.notebook_dir` trait from the command-line, use the +`c.Spawner.notebook_dir` trait from the command line, use the `--Spawner.notebook_dir` option: ```bash @@ -77,24 +77,24 @@ jupyterhub --Spawner.notebook_dir='~/assignments' ## Configure for various deployment environments The default authentication and process spawning mechanisms can be replaced, and -specific [authenticators](./authenticators-users-basics) and -[spawners](./spawners-basics) can be set in the configuration file. +specific [authenticators](authenticators-users-basics) and +[spawners](spawners-basics) can be set in the configuration file. This enables JupyterHub to be used with a variety of authentication methods or process control and deployment environments. 
[Some examples](../reference/config-examples), -meant as illustration, are: +meant as illustrations, are: - Using GitHub OAuth instead of PAM with [OAuthenticator](https://github.com/jupyterhub/oauthenticator) - Spawning single-user servers with Docker, using the [DockerSpawner](https://github.com/jupyterhub/dockerspawner) ## Run the proxy separately -This is *not* strictly necessary, but useful in many cases. If you -use a custom proxy (e.g. Traefik), this also not needed. +This is _not_ strictly necessary, but useful in many cases. If you +use a custom proxy (e.g. Traefik), this is also not needed. Connections to user servers go through the proxy, and *not* the hub itself. If the proxy stays running when the hub restarts (for maintenance, re-configuration, etc.), then user connections are not interrupted. For simplicity, by default the hub starts the proxy automatically, so if the hub restarts, the proxy restarts, and user -connections are interrupted. It is easy to run the proxy separately, +connections are interrupted. It is easy to run the proxy separately, for information see [the separate proxy page](../reference/separate-proxy). diff --git a/docs/source/getting-started/faq.md b/docs/source/getting-started/faq.md new file mode 100644 index 00000000..4b169ebf --- /dev/null +++ b/docs/source/getting-started/faq.md @@ -0,0 +1,35 @@ +# Frequently asked questions + +## How do I share links to notebooks? + +In short, where you see `/user/name/notebooks/foo.ipynb` use `/hub/user-redirect/notebooks/foo.ipynb` (replace `/user/name` with `/hub/user-redirect`). + +Sharing links to notebooks is a common activity, +and can look different based on what you mean. +Your first instinct might be to copy the URL you see in the browser, +e.g. `hub.jupyter.org/user/yourname/notebooks/coolthing.ipynb`. 
+However, let's break down what this URL means: + +`hub.jupyter.org/user/yourname/` is the URL prefix handled by _your server_, +which means that sharing this URL is asking the person you share the link with +to come to _your server_ and look at the exact same file. +In most circumstances, this is forbidden by permissions because the person you share with does not have access to your server. +What actually happens when someone visits this URL will depend on whether your server is running and other factors. + +But what is our actual goal? +A typical situation is that you have some shared or common filesystem, +such that the same path corresponds to the same document +(either the exact same document or another copy of it). +Typically, what folks want when they do sharing like this +is for each visitor to open the same file _on their own server_, +so Breq would open `/user/breq/notebooks/foo.ipynb` and +Seivarden would open `/user/seivarden/notebooks/foo.ipynb`, etc. + +JupyterHub has a special URL that does exactly this! +It's called `/hub/user-redirect/...`. +So if you replace `/user/yourname` in your URL bar +with `/hub/user-redirect` any visitor should get the same +URL on their own server, rather than visiting yours. + +In JupyterLab 2.0, this should also be the result of the "Copy Shareable Link" +action in the file browser. diff --git a/docs/source/getting-started/index.rst b/docs/source/getting-started/index.rst index b1e509d9..bae95f8f 100644 --- a/docs/source/getting-started/index.rst +++ b/docs/source/getting-started/index.rst @@ -1,5 +1,10 @@ -Getting Started -=============== +Get Started +=========== + +This section covers how to configure and customize JupyterHub for your +needs. It contains information about authentication, networking, security, and +other topics that are relevant to individuals or organizations deploying their +own JupyterHub. .. 
toctree:: :maxdepth: 2 @@ -10,3 +15,5 @@ Getting Started authenticators-users-basics spawners-basics services-basics + faq + institutional-faq diff --git a/docs/source/getting-started/institutional-faq.md b/docs/source/getting-started/institutional-faq.md new file mode 100644 index 00000000..678184dd --- /dev/null +++ b/docs/source/getting-started/institutional-faq.md @@ -0,0 +1,260 @@ +# Institutional FAQ + +This page contains common questions from users of JupyterHub, +broken down by their roles within organizations. + +## For all + +### Is it appropriate for adoption within a larger institutional context? + +Yes! JupyterHub has been used at-scale for large pools of users, as well +as complex and high-performance computing. For example, UC Berkeley uses +JupyterHub for its Data Science Education Program courses (serving over +3,000 students). The Pangeo project uses JupyterHub to provide access +to scalable cloud computing with Dask. JupyterHub is stable and customizable +to the use-cases of large organizations. + +### I keep hearing about Jupyter Notebook, JupyterLab, and now JupyterHub. What’s the difference? + +Here is a quick breakdown of these three tools: + +- **The Jupyter Notebook** is a document specification (the `.ipynb`) file that interweaves + narrative text with code cells and their outputs. It is also a graphical interface + that allows users to edit these documents. There are also several other graphical interfaces + that allow users to edit the `.ipynb` format (nteract, Jupyter Lab, Google Colab, Kaggle, etc). +- **JupyterLab** is a flexible and extendible user interface for interactive computing. It + has several extensions that are tailored for using Jupyter Notebooks, as well as extensions + for other parts of the data science stack. +- **JupyterHub** is an application that manages interactive computing sessions for **multiple users**. + It also connects them with infrastructure those users wish to access. 
It can provide + remote access to Jupyter Notebooks and JupyterLab for many people. + +## For management + +### Briefly, what problem does JupyterHub solve for us? + +JupyterHub provides a shared platform for data science and collaboration. +It allows users to utilize familiar data science workflows (such as the scientific Python stack, +the R tidyverse, and Jupyter Notebooks) on institutional infrastructure. It also allows administrators +some control over access to resources, security, environments, and authentication. + +### Is JupyterHub mature? Why should we trust it? + +Yes - the core JupyterHub application recently +reached 1.0 status, and is considered stable and performant for most institutions. +JupyterHub has also been deployed (along with other tools) to work on +scalable infrastructure, large datasets, and high-performance computing. + +### Who else uses JupyterHub? + +JupyterHub is used at a variety of institutions in academia, +industry, and government research labs. It is most-commonly used by two kinds of groups: + +- Small teams (e.g., data science teams, research labs, or collaborative projects) to provide a + shared resource for interactive computing, collaboration, and analytics. +- Large teams (e.g., a department, a large class, or a large group of remote users) to provide + access to organizational hardware, data, and analytics environments at scale. 
+ +Here is a sample of organizations that use JupyterHub: + +- **Universities and colleges**: UC Berkeley, UC San Diego, Cal Poly SLO, Harvard University, University of Chicago, + University of Oslo, University of Sheffield, Université Paris Sud, University of Versailles +- **Research laboratories**: NASA, NCAR, NOAA, the Large Synoptic Survey Telescope, Brookhaven National Lab, + Minnesota Supercomputing Institute, ALCF, CERN, Lawrence Livermore National Laboratory +- **Online communities**: Pangeo, Quantopian, mybinder.org, MathHub, Open Humans +- **Computing infrastructure providers**: NERSC, San Diego Supercomputing Center, Compute Canada +- **Companies**: Capital One, SANDVIK code, Globus + +See the [Gallery of JupyterHub deployments](../gallery-jhub-deployments.md) for +a more complete list of JupyterHub deployments at institutions. + +### How does JupyterHub compare with hosted products, like Google Colaboratory, RStudio.cloud, or Anaconda Enterprise? + +JupyterHub puts you in control of your data, infrastructure, and coding environment. +In addition, it is vendor neutral, which reduces lock-in to a particular vendor or service. +JupyterHub provides access to interactive computing environments in the cloud (similar to each of these services). +Compared with the tools above, it is more flexible, more customizable, free, and +gives administrators more control over their setup and hardware. + +Because JupyterHub is an open-source, community-driven tool, it can be extended and +modified to fit an institution's needs. It plays nicely with the open source data science +stack, and can serve a variety of computing environments, user interfaces, and +computational hardware. It can also be deployed anywhere - on enterprise cloud infrastructure, on +High-Performance-Computing machines, on local hardware, or even on a single laptop, which +is not possible with most other tools for shared interactive computing. 
+
+## For IT
+
+### How would I set up JupyterHub on institutional hardware?
+
+That depends on what kind of hardware you've got. JupyterHub is flexible enough to be deployed
+on a variety of hardware, including in-room hardware, on-prem clusters, cloud infrastructure,
+etc.
+
+The most common way to set up a JupyterHub is to use a JupyterHub distribution, these are pre-configured
+and opinionated ways to set up a JupyterHub on particular kinds of infrastructure. The two distributions
+that we currently suggest are:
+
+- [Zero to JupyterHub for Kubernetes](https://z2jh.jupyter.org) is a scalable JupyterHub deployment and
+  guide that runs on Kubernetes. Better for larger or dynamic user groups (50-10,000) or more complex
+  compute/data needs.
+- [The Littlest JupyterHub](https://tljh.jupyter.org) is a lightweight JupyterHub that runs on a
+  single machine (in the cloud or under your desk). Better for smaller user groups (4-80) or more
+  lightweight computational resources.
+
+### Does JupyterHub run well in the cloud?
+
+Yes - most deployments of JupyterHub are run via cloud infrastructure and on a variety of cloud providers.
+Depending on the distribution of JupyterHub that you'd like to use, you can also connect your JupyterHub
+deployment with a number of other cloud-native services so that users have access to other resources from
+their interactive computing sessions.
+
+For example, if you use the [Zero to JupyterHub for Kubernetes](https://z2jh.jupyter.org) distribution,
+you'll be able to utilize container-based workflows of other technologies such as the [dask-kubernetes](https://kubernetes.dask.org/en/latest/)
+project for distributed computing.
+
+The Z2JH Helm Chart also has some functionality built in for auto-scaling your cluster up and down
+as more resources are needed - allowing you to utilize the benefits of a flexible cloud-based deployment.
+
+### Is JupyterHub secure?
+
+The short answer: yes. 
JupyterHub as a standalone application has been battle-tested at an institutional +level for several years, and makes a number of "default" security decisions that are reasonable for most +users. + +- For security considerations in the base JupyterHub application, + [see the JupyterHub security page](https://jupyterhub.readthedocs.io/en/stable/reference/websecurity.html). +- For security considerations when deploying JupyterHub on Kubernetes, see the + [JupyterHub on Kubernetes security page](https://zero-to-jupyterhub.readthedocs.io/en/latest/security.html). + +The longer answer: it depends on your deployment. Because JupyterHub is very flexible, it can be used +in a variety of deployment setups. This often entails connecting your JupyterHub to **other** infrastructure +(such as a [Dask Gateway service](https://gateway.dask.org/)). There are many security decisions to be made +in these cases, and the security of your JupyterHub deployment will often depend on these decisions. + +If you are worried about security, don't hesitate to reach out to the JupyterHub community in the +[Jupyter Community Forum](https://discourse.jupyter.org/c/jupyterhub). This community of practice has many +individuals with experience running secure JupyterHub deployments. + +### Does JupyterHub provide computing or data infrastructure? + +No - JupyterHub manages user sessions and can _control_ computing infrastructure, but it does not provide these +things itself. You are expected to run JupyterHub on your own infrastructure (local or in the cloud). Moreover, +JupyterHub has no internal concept of "data", but is designed to be able to communicate with data repositories +(again, either locally or remotely) for use within interactive computing sessions. + +### How do I manage users? + +JupyterHub offers a few options for managing your users. Upon setting up a JupyterHub, you can choose what +kind of **authentication** you'd like to use. 
For example, you can have users sign up with an institutional
+email address, or choose a username / password when they first log-in, or offload authentication onto
+another service such as an organization's OAuth.
+
+The users of a JupyterHub are stored locally, and can be modified manually by an administrator of the JupyterHub.
+Moreover, the _active_ users on a JupyterHub can be found on the administrator's page. This page
+gives you the ability to stop or restart kernels, inspect user filesystems, and even take over user
+sessions to assist them with debugging.
+
+### How do I manage software environments?
+
+A key benefit of JupyterHub is the ability for an administrator to define the environment(s) that users
+have access to. There are many ways to do this, depending on what kind of infrastructure you're using for
+your JupyterHub.
+
+For example, **The Littlest JupyterHub** runs on a single VM. In this case, the administrator defines
+an environment by installing packages to a shared folder that exists on the path of all users. The
+**JupyterHub for Kubernetes** deployment uses Docker images to define environments. You can create your
+own list of Docker images that users can select from, and can also control things like the amount of
+RAM available to users, or the types of machines that their sessions will use in the cloud.
+
+### How does JupyterHub manage computational resources?
+
+For interactive computing sessions, JupyterHub controls computational resources via a **spawner**.
+Spawners define how a new user session is created, and are customized for particular kinds of
+infrastructure. For example, the KubeSpawner knows how to control a Kubernetes deployment
+to create new pods when users log in.
+
+For more sophisticated computational resources (like distributed computing), JupyterHub can
+connect with other infrastructure tools (like Dask or Spark). 
This allows users to control
+scalable or high-performance resources from within their JupyterHub sessions. The logic of
+how those resources are controlled is taken care of by the non-JupyterHub application.
+
+### Can JupyterHub be used with my high-performance computing resources?
+
+Yes - JupyterHub can provide access to many kinds of computing infrastructure.
+Especially when combined with other open-source schedulers such as Dask, you can manage fairly
+complex computing infrastructures from the interactive sessions of a JupyterHub. For example
+[see the Dask HPC page](https://docs.dask.org/en/latest/setup/hpc.html).
+
+### How many resources do user sessions take?
+
+This is highly configurable by the administrator. If you wish for your users to have simple
+data analytics environments for prototyping and light data exploring, you can restrict their
+memory and CPU based on the resources that you have available. If you'd like your JupyterHub
+to serve as a gateway to high-performance compute or data resources, you may increase the
+resources available on user machines, or connect them with computing infrastructures elsewhere.
+
+### Can I customize the look and feel of a JupyterHub?
+
+JupyterHub provides some customization of the graphics displayed to users. The most common
+modification is to add custom branding to the JupyterHub login page, loading pages, and
+various elements that persist across all pages (such as headers).
+
+## For Technical Leads
+
+### Will JupyterHub “just work” with our team's interactive computing setup?
+
+Depending on the complexity of your setup, you'll have different experiences with "out of the box"
+distributions of JupyterHub. If all of the resources you need will fit on a single VM, then
+[The Littlest JupyterHub](https://tljh.jupyter.org) should get you up-and-running within
+a half day or so. 
For more complex setups, such as scalable Kubernetes clusters or access +to high-performance computing and data, it will require more time and expertise with +the technologies your JupyterHub will use (e.g., dev-ops knowledge with cloud computing). + +In general, the base JupyterHub deployment is not the bottleneck for setup, it is connecting +your JupyterHub with the various services and tools that you wish to provide to your users. + +### How well does JupyterHub scale? What are JupyterHub's limitations? + +JupyterHub works well at both a small scale (e.g., a single VM or machine) as well as a +high scale (e.g., a scalable Kubernetes cluster). It can be used for teams as small as 2, and +for user bases as large as 10,000. The scalability of JupyterHub largely depends on the +infrastructure on which it is deployed. JupyterHub has been designed to be lightweight and +flexible, so you can tailor your JupyterHub deployment to your needs. + +### Is JupyterHub resilient? What happens when a machine goes down? + +For JupyterHubs that are deployed in a containerized environment (e.g., Kubernetes), it is +possible to configure the JupyterHub to be fairly resistant to failures in the system. +For example, if JupyterHub fails, then user sessions will not be affected (though new +users will not be able to log in). When a JupyterHub process is restarted, it should +seamlessly connect with the user database and the system will return to normal. +Again, the details of your JupyterHub deployment (e.g., whether it's deployed on a scalable cluster) +will affect the resiliency of the deployment. + +### What interfaces does JupyterHub support? + +Out of the box, JupyterHub supports a variety of popular data science interfaces for user sessions, +such as JupyterLab, Jupyter Notebooks, and RStudio. Any interface that can be served +via a web address can be served with a JupyterHub (with the right setup). + +### Does JupyterHub make it easier for our team to collaborate? 
+ +JupyterHub provides a standardized environment and access to shared resources for your teams. +This greatly reduces the cost associated with sharing analyses and content with other team +members, and makes it easier to collaborate and build off of one another's ideas. Combined with +access to high-performance computing and data, JupyterHub provides a common resource to +amplify your team's ability to prototype their analyses, scale them to larger data, and then +share their results with one another. + +JupyterHub also provides a computational framework to share computational narratives between +different levels of an organization. For example, data scientists can share Jupyter Notebooks +rendered as [Voilà dashboards](https://voila.readthedocs.io/en/stable/) with those who are not +familiar with programming, or create publicly-available interactive analyses to allow others to +interact with your work. + +### Can I use JupyterHub with R/RStudio or other languages and environments? + +Yes, Jupyter is a polyglot project, and there are over 40 community-provided kernels for a variety +of languages (the most common being Python, Julia, and R). You can also use a JupyterHub to provide +access to other interfaces, such as RStudio, that provide their own access to a language kernel. diff --git a/docs/source/getting-started/networking-basics.md b/docs/source/getting-started/networking-basics.md index 0c86e181..1439400c 100644 --- a/docs/source/getting-started/networking-basics.md +++ b/docs/source/getting-started/networking-basics.md @@ -11,8 +11,8 @@ This section will help you with basic proxy and network configuration to: The Proxy's main IP address setting determines where JupyterHub is available to users. By default, JupyterHub is configured to be available on all network interfaces -(`''`) on port 8000. *Note*: Use of `'*'` is discouraged for IP configuration; -instead, use of `'0.0.0.0'` is preferred. +(`''`) on port 8000. 
_Note_: Use of `'*'` is discouraged for IP configuration; +instead, use of `'0.0.0.0'` is preferred. Changing the Proxy's main IP address and port can be done with the following JupyterHub **command line options**: @@ -74,7 +74,7 @@ The Hub service listens only on `localhost` (port 8081) by default. The Hub needs to be accessible from both the proxy and all Spawners. When spawning local servers, an IP address setting of `localhost` is fine. -If *either* the Proxy *or* (more likely) the Spawners will be remote or +If _either_ the Proxy _or_ (more likely) the Spawners will be remote or isolated in containers, the Hub must listen on an IP that is accessible. ```python @@ -82,20 +82,20 @@ c.JupyterHub.hub_ip = '10.0.1.4' c.JupyterHub.hub_port = 54321 ``` -**Added in 0.8:** The `c.JupyterHub.hub_connect_ip` setting is the ip address or +**Added in 0.8:** The `c.JupyterHub.hub_connect_ip` setting is the IP address or hostname that other services should use to connect to the Hub. A common configuration for, e.g. docker, is: ```python c.JupyterHub.hub_ip = '0.0.0.0' # listen on all interfaces -c.JupyterHub.hub_connect_ip = '10.0.1.4' # ip as seen on the docker network. Can also be a hostname. +c.JupyterHub.hub_connect_ip = '10.0.1.4' # IP as seen on the docker network. Can also be a hostname. ``` ## Adjusting the hub's URL -The hub will most commonly be running on a hostname of its own. If it +The hub will most commonly be running on a hostname of its own. If it is not – for example, if the hub is being reverse-proxied and being exposed at a URL such as `https://proxy.example.org/jupyter/` – then -you will need to tell JupyterHub the base URL of the service. In such +you will need to tell JupyterHub the base URL of the service. In such a case, it is both necessary and sufficient to set `c.JupyterHub.base_url = '/jupyter/'` in the configuration. 
diff --git a/docs/source/getting-started/security-basics.rst b/docs/source/getting-started/security-basics.rst index 9223c362..36e7db19 100644 --- a/docs/source/getting-started/security-basics.rst +++ b/docs/source/getting-started/security-basics.rst @@ -5,17 +5,17 @@ Security settings You should not run JupyterHub without SSL encryption on a public network. -Security is the most important aspect of configuring Jupyter. Three -configuration settings are the main aspects of security configuration: +Security is the most important aspect of configuring Jupyter. +Three (3) configuration settings are the main aspects of security configuration: 1. :ref:`SSL encryption ` (to enable HTTPS) 2. :ref:`Cookie secret ` (a key for encrypting browser cookies) 3. Proxy :ref:`authentication token ` (used for the Hub and other services to authenticate to the Proxy) -The Hub hashes all secrets (e.g., auth tokens) before storing them in its +The Hub hashes all secrets (e.g. auth tokens) before storing them in its database. A loss of control over read-access to the database should have -minimal impact on your deployment; if your database has been compromised, it +minimal impact on your deployment. If your database has been compromised, it is still a good idea to revoke existing tokens. .. _ssl-encryption: @@ -31,7 +31,7 @@ Using an SSL certificate This will require you to obtain an official, trusted SSL certificate or create a self-signed certificate. Once you have obtained and installed a key and -certificate you need to specify their locations in the ``jupyterhub_config.py`` +certificate, you need to specify their locations in the ``jupyterhub_config.py`` configuration file as follows: .. 
code-block:: python @@ -72,7 +72,7 @@ would be the needed configuration: If SSL termination happens outside of the Hub ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -In certain cases, for example if the hub is running behind a reverse proxy, and +In certain cases, for example, if the hub is running behind a reverse proxy, and `SSL termination is being provided by NGINX `_, it is reasonable to run the hub without SSL. @@ -80,12 +80,55 @@ To achieve this, remove ``c.JupyterHub.ssl_key`` and ``c.JupyterHub.ssl_cert`` from your configuration (setting them to ``None`` or an empty string does not have the same effect, and will result in an error). +.. _authentication-token: + +Proxy authentication token +-------------------------- + +The Hub authenticates its requests to the Proxy using a secret token that +the Hub and Proxy agree upon. Note that this applies to the default +``ConfigurableHTTPProxy`` implementation. Not all proxy implementations +use an auth token. + +The value of this token should be a random string (for example, generated by +``openssl rand -hex 32``). You can store it in the configuration file or an +environment variable. + +Generating and storing token in the configuration file +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +You can set the value in the configuration file, ``jupyterhub_config.py``: + +.. code-block:: python + + c.ConfigurableHTTPProxy.api_token = 'abc123...' # any random string + +Generating and storing as an environment variable +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +You can pass this value of the proxy authentication token to the Hub and Proxy +using the ``CONFIGPROXY_AUTH_TOKEN`` environment variable: + +.. code-block:: bash + + export CONFIGPROXY_AUTH_TOKEN=$(openssl rand -hex 32) + +This environment variable needs to be visible to the Hub and Proxy. + +Default if token is not set +~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +If you do not set the Proxy authentication token, the Hub will generate a random +key itself. 
This means that any time you restart the Hub, you **must also +restart the Proxy**. If the proxy is a subprocess of the Hub, this should happen +automatically (this is the default configuration). + .. _cookie-secret: Cookie secret ------------- -The cookie secret is an encryption key, used to encrypt the browser cookies +The cookie secret is an encryption key, used to encrypt the browser cookies, which are used for authentication. Three common methods are described for generating and configuring the cookie secret. @@ -93,8 +136,8 @@ Generating and storing as a cookie secret file ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ The cookie secret should be 32 random bytes, encoded as hex, and is typically -stored in a ``jupyterhub_cookie_secret`` file. An example command to generate the -``jupyterhub_cookie_secret`` file is: +stored in a ``jupyterhub_cookie_secret`` file. Below, is an example command to generate the +``jupyterhub_cookie_secret`` file: .. code-block:: bash @@ -112,7 +155,7 @@ The location of the ``jupyterhub_cookie_secret`` file can be specified in the If the cookie secret file doesn't exist when the Hub starts, a new cookie secret is generated and stored in the file. The file must not be readable by -``group`` or ``other`` or the server won't start. The recommended permissions +``group`` or ``other``, otherwise the server won't start. The recommended permissions for the cookie secret file are ``600`` (owner-only rw). Generating and storing as an environment variable @@ -133,54 +176,79 @@ the Hub starts. Generating and storing as a binary string ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -You can also set the cookie secret in the configuration file -itself, ``jupyterhub_config.py``, as a binary string: +You can also set the cookie secret, as a binary string, +in the configuration file (``jupyterhub_config.py``) itself: .. code-block:: python c.JupyterHub.cookie_secret = bytes.fromhex('64 CHAR HEX STRING') +.. _cookies: -.. 
important:: +Cookies used by JupyterHub authentication +----------------------------------------- - If the cookie secret value changes for the Hub, all single-user notebook - servers must also be restarted. +The following cookies are used by the Hub for handling user authentication. +This section was created based on this post_ from Discourse. -.. _authentication-token: +.. _post: https://discourse.jupyter.org/t/how-to-force-re-login-for-users/1998/6 -Proxy authentication token --------------------------- +jupyterhub-hub-login +~~~~~~~~~~~~~~~~~~~~ -The Hub authenticates its requests to the Proxy using a secret token that -the Hub and Proxy agree upon. The value of this string should be a random -string (for example, generated by ``openssl rand -hex 32``). +This is the login token used when visiting Hub-served pages that are +protected by authentication, such as the main home, the spawn form, etc. +If this cookie is set, then the user is logged in. -Generating and storing token in the configuration file -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +Resetting the Hub cookie secret effectively revokes this cookie. -Or you can set the value in the configuration file, ``jupyterhub_config.py``: +This cookie is restricted to the path ``/hub/``. -.. code-block:: python +jupyterhub-user- +~~~~~~~~~~~~~~~~~~~~~~~~~~ - c.JupyterHub.proxy_auth_token = '0bc02bede919e99a26de1e2a7a5aadfaf6228de836ec39a05a6c6942831d8fe5' +This is the cookie used for authenticating with a single-user server. +It is set by the single-user server, after OAuth with the Hub. -Generating and storing as an environment variable -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +Effectively the same as ``jupyterhub-hub-login``, but for the +single-user server instead of the Hub. It contains an OAuth access token, +which is checked with the Hub to authenticate the browser. 
-You can pass this value of the proxy authentication token to the Hub and Proxy -using the ``CONFIGPROXY_AUTH_TOKEN`` environment variable: +Each OAuth access token is associated with a session id (see ``jupyterhub-session-id`` section +below). -.. code-block:: bash +To avoid hitting the Hub on every request, the authentication response is cached. +The cache key is comprised of both the token and session id, to avoid a stale cache. - export CONFIGPROXY_AUTH_TOKEN=$(openssl rand -hex 32) +Resetting the Hub cookie secret effectively revokes this cookie. -This environment variable needs to be visible to the Hub and Proxy. +This cookie is restricted to the path ``/user/``, +to ensure that only the user’s server receives it. -Default if token is not set -~~~~~~~~~~~~~~~~~~~~~~~~~~~ +jupyterhub-session-id +~~~~~~~~~~~~~~~~~~~~~ -If you don't set the Proxy authentication token, the Hub will generate a random -key itself, which means that any time you restart the Hub you **must also -restart the Proxy**. If the proxy is a subprocess of the Hub, this should happen -automatically (this is the default configuration). +This is a random string, meaningless in itself, and the only cookie +shared by the Hub and single-user servers. + +Its sole purpose is to coordinate logout of the multiple OAuth cookies. + +This cookie is set to ``/`` so all endpoints can receive it, clear it, etc. + +jupyterhub-user--oauth-state +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +A short-lived cookie, used solely to store and validate OAuth state. +It is only set while OAuth between the single-user server and the Hub +is processing. + +If you use your browser development tools, you should see this cookie +for a very brief moment before you are logged in, +with an expiration date shorter than ``jupyterhub-hub-login`` or +``jupyterhub-user-``. + +This cookie should not exist after you have successfully logged in. 
+ +This cookie is restricted to the path ``/user/``, so that only +the user’s server receives it. diff --git a/docs/source/getting-started/services-basics.md b/docs/source/getting-started/services-basics.md index e6562520..5ec3f779 100644 --- a/docs/source/getting-started/services-basics.md +++ b/docs/source/getting-started/services-basics.md @@ -2,10 +2,10 @@ When working with JupyterHub, a **Service** is defined as a process that interacts with the Hub's REST API. A Service may perform a specific -or action or task. For example, shutting down individuals' single user +action or task. For example, shutting down individuals' single user notebook servers that have been idle for some time is a good example of a task that could be automated by a Service. Let's look at how the -[cull_idle_servers][] script can be used as a Service. +[jupyterhub_idle_culler][] script can be used as a Service. ## Real-world example to cull idle servers @@ -15,16 +15,16 @@ document will: - explain some basic information about API tokens - clarify that API tokens can be used to authenticate to single-user servers as of [version 0.8.0](../changelog) -- show how the [cull_idle_servers][] script can be: - - used in a Hub-managed service - - run as a standalone script +- show how the [jupyterhub_idle_culler][] script can be: + - used in a Hub-managed service + - run as a standalone script -Both examples for `cull_idle_servers` will communicate tasks to the +Both examples for `jupyterhub_idle_culler` will communicate tasks to the Hub via the REST API. ## API Token basics -### Create an API token +### Step 1: Generate an API token To run such an external service, an API token must be created and provided to the service. 
@@ -43,12 +43,12 @@ generating an API token is available from the JupyterHub user interface: ![API TOKEN success page](../images/token-request-success.png) -### Pass environment variable with token to the Hub +### Step 2: Pass environment variable with token to the Hub In the case of `cull_idle_servers`, it is passed as the environment variable called `JUPYTERHUB_API_TOKEN`. -### Use API tokens for services and tasks that require external access +### Step 3: Use API tokens for services and tasks that require external access While API tokens are often associated with a specific user, API tokens can be used by services that require external access for activities @@ -62,7 +62,7 @@ c.JupyterHub.services = [ ] ``` -### Restart JupyterHub +### Step 4: Restart JupyterHub Upon restarting JupyterHub, you should see a message like below in the logs: @@ -78,44 +78,72 @@ single-user servers, and only cookies can be used for authentication. 0.8 supports using JupyterHub API tokens to authenticate to single-user servers. 
-## Configure `cull-idle` to run as a Hub-Managed Service +## How to configure the idle culler to run as a Hub-Managed Service -In `jupyterhub_config.py`, add the following dictionary for the -`cull-idle` Service to the `c.JupyterHub.services` list: +### Step 1: Install the idle culler: + +``` +pip install jupyterhub-idle-culler +``` + +### Step 2: In `jupyterhub_config.py`, add the following dictionary for the `idle-culler` Service to the `c.JupyterHub.services` list: ```python c.JupyterHub.services = [ { - 'name': 'cull-idle', - 'admin': True, - 'command': [sys.executable, 'cull_idle_servers.py', '--timeout=3600'], + 'name': 'idle-culler', + 'command': [sys.executable, '-m', 'jupyterhub_idle_culler', '--timeout=3600'], + } +] + +c.JupyterHub.load_roles = [ + { + "name": "list-and-cull", # name the role + "services": [ + "idle-culler", # assign the service to this role + ], + "scopes": [ + # declare what permissions the service should have + "list:users", # list users + "read:users:activity", # read user last-activity + "admin:servers", # start/stop servers + ], } ] ``` where: -- `'admin': True` indicates that the Service has 'admin' permissions, and -- `'command'` indicates that the Service will be launched as a +- `command` indicates that the Service will be launched as a subprocess, managed by the Hub. -## Run `cull-idle` manually as a standalone script +```{versionchanged} 2.0 +Prior to 2.0, the idle-culler required 'admin' permissions. +It now needs the scopes: -Now you can run your script, i.e. `cull_idle_servers`, by providing it +- `list:users` to access the user list endpoint +- `read:users:activity` to read activity info +- `admin:servers` to start/stop servers +``` + +## How to run `cull-idle` manually as a standalone script + +Now you can run your script by providing it the API token and it will authenticate through the REST API to interact with it. -This will run `cull-idle` manually. 
`cull-idle` can be run as a standalone +This will run the idle culler service manually. It can be run as a standalone script anywhere with access to the Hub, and will periodically check for idle servers and shut them down via the Hub's REST API. In order to shutdown the -servers, the token given to cull-idle must have admin privileges. +servers, the token given to `cull-idle` must have permission to list users +and admin their servers. Generate an API token and store it in the `JUPYTERHUB_API_TOKEN` environment -variable. Run `cull_idle_servers.py` manually. +variable. Run `jupyterhub_idle_culler` manually. ```bash export JUPYTERHUB_API_TOKEN='token' - python3 cull_idle_servers.py [--timeout=900] [--url=http://127.0.0.1:8081/hub/api] + python -m jupyterhub_idle_culler [--timeout=900] [--url=http://127.0.0.1:8081/hub/api] ``` -[cull_idle_servers]: https://github.com/jupyterhub/jupyterhub/blob/master/examples/cull-idle/cull_idle_servers.py +[jupyterhub_idle_culler]: https://github.com/jupyterhub/jupyterhub-idle-culler diff --git a/docs/source/getting-started/spawners-basics.md b/docs/source/getting-started/spawners-basics.md index c30d89f6..9cc02631 100644 --- a/docs/source/getting-started/spawners-basics.md +++ b/docs/source/getting-started/spawners-basics.md @@ -1,12 +1,12 @@ # Spawners and single-user notebook servers -Since the single-user server is an instance of `jupyter notebook`, an entire separate -multi-process application, there are many aspect of that server can configure, and a lot of ways -to express that configuration. +A Spawner starts each single-user notebook server. Since the single-user server is an instance of `jupyter notebook`, an entire separate +multi-process application, many aspects of that server can be configured and there are a lot +of ways to express that configuration. At the JupyterHub level, you can set some values on the Spawner. 
The simplest of these is `Spawner.notebook_dir`, which lets you set the root directory for a user's server. This root -notebook directory is the highest level directory users will be able to access in the notebook +notebook directory is the highest-level directory users will be able to access in the notebook dashboard. In this example, the root notebook directory is set to `~/notebooks`, where `~` is expanded to the user's home directory. @@ -14,13 +14,13 @@ expanded to the user's home directory. c.Spawner.notebook_dir = '~/notebooks' ``` -You can also specify extra command-line arguments to the notebook server with: +You can also specify extra command line arguments to the notebook server with: ```python c.Spawner.args = ['--debug', '--profile=PHYS131'] ``` -This could be used to set the users default page for the single user server: +This could be used to set the user's default page for the single-user server: ```python c.Spawner.args = ['--NotebookApp.default_url=/notebooks/Welcome.ipynb'] diff --git a/docs/source/images/binder-404.png b/docs/source/images/binder-404.png new file mode 100644 index 00000000..32dd45c1 Binary files /dev/null and b/docs/source/images/binder-404.png differ diff --git a/docs/source/images/binderhub-form.png b/docs/source/images/binderhub-form.png new file mode 100644 index 00000000..260aebd5 Binary files /dev/null and b/docs/source/images/binderhub-form.png differ diff --git a/docs/source/images/chp-404.png b/docs/source/images/chp-404.png new file mode 100644 index 00000000..69126a38 Binary files /dev/null and b/docs/source/images/chp-404.png differ diff --git a/docs/source/images/dropdown-details-3.0.png b/docs/source/images/dropdown-details-3.0.png new file mode 100644 index 00000000..522828f9 Binary files /dev/null and b/docs/source/images/dropdown-details-3.0.png differ diff --git a/docs/source/images/rbac-api-request-chart.png b/docs/source/images/rbac-api-request-chart.png new file mode 100644 index 00000000..b70e3a0c Binary 
files /dev/null and b/docs/source/images/rbac-api-request-chart.png differ diff --git a/docs/source/images/rbac-token-request-chart.png b/docs/source/images/rbac-token-request-chart.png new file mode 100644 index 00000000..21bf470f Binary files /dev/null and b/docs/source/images/rbac-token-request-chart.png differ diff --git a/docs/source/images/server-not-running.png b/docs/source/images/server-not-running.png new file mode 100644 index 00000000..384e0f7d Binary files /dev/null and b/docs/source/images/server-not-running.png differ diff --git a/docs/source/index-about.rst b/docs/source/index-about.rst new file mode 100644 index 00000000..f0771c60 --- /dev/null +++ b/docs/source/index-about.rst @@ -0,0 +1,15 @@ +===== +About +===== + +JupyterHub is an open source project and community. It is a part of the +`Jupyter Project `_. JupyterHub is an open and inclusive +community, and invites contributions from anyone. This section covers information +about our community, as well as ways that you can connect and get involved. + +.. toctree:: + :maxdepth: 1 + + contributor-list + changelog + gallery-jhub-deployments diff --git a/docs/source/index-admin.rst b/docs/source/index-admin.rst new file mode 100644 index 00000000..fe39b5a8 --- /dev/null +++ b/docs/source/index-admin.rst @@ -0,0 +1,14 @@ +===================== +Administrator's Guide +===================== + +This guide covers best-practices, tips, common questions and operations, as +well as other information relevant to running your own JupyterHub over time. + +.. toctree:: + :maxdepth: 2 + + troubleshooting + admin/upgrading + admin/log-messages + changelog diff --git a/docs/source/index.rst b/docs/source/index.rst index db23641a..98eaefb2 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -3,30 +3,28 @@ JupyterHub ========== `JupyterHub`_ is the best way to serve `Jupyter notebook`_ for multiple users. 
-It can be used in a classes of students, a corporate data science group or scientific +Because JupyterHub manages a separate Jupyter environment for each user, +it can be used in a class of students, a corporate data science group, or a scientific research group. It is a multi-user **Hub** that spawns, manages, and proxies multiple instances of the single-user `Jupyter notebook`_ server. -To make life easier, JupyterHub have distributions. Be sure to -take a look at them before continuing with the configuration of the broad -original system of `JupyterHub`_. Today, you can find two main cases: +JupyterHub offers distributions for different use cases. Be sure to +take a look at them before continuing with the configuration of the broad +original system of `JupyterHub`_. As of now, you can find two main cases: -1. If you need a simple case for a small amount of users (0-100) and single server - take a look at - `The Littlest JupyterHub `__ distribution. -2. If you need to allow for even more users, a dynamic amount of servers can be used on a cloud, - take a look at the `Zero to JupyterHub with Kubernetes `__ . +1. `The Littlest JupyterHub `__ distribution is suitable if you need a small number of users (1-100) and a single server with a simple environment. +2. `Zero to JupyterHub with Kubernetes `__ allows you to deploy dynamic servers on the cloud if you need even more users. 
Four subsystems make up JupyterHub: * a **Hub** (tornado process) that is the heart of JupyterHub -* a **configurable http proxy** (node-http-proxy) that receives the requests from the client's browser -* multiple **single-user Jupyter notebook servers** (Python/IPython/tornado) that are monitored by Spawners -* an **authentication class** that manages how users can access the system +* a **Configurable HTTP Proxy** (node-http-proxy) that receives the requests from the client's browser +* multiple **Single-User Jupyter Notebook Servers** (Python/IPython/tornado) that are monitored by Spawners +* an **Authentication Class** that manages how users can access the system -Besides these central pieces, you can add optional configurations through a `config.py` file and manage users kernels on an admin panel. A simplification of the whole system can be seen in the figure below: +Besides these central pieces, you can add optional configurations through a `config.py` file and manage users' environments through an admin panel. A simplification of the whole system can be seen in the figure below: .. image:: images/jhub-fluxogram.jpeg :alt: JupyterHub subsystems @@ -43,7 +41,7 @@ JupyterHub performs the following functions: notebook servers For convenient administration of the Hub, its users, and services, -JupyterHub also provides a `REST API`_. +JupyterHub also provides a :doc:`REST API `. The JupyterHub team and Project Jupyter value our community, and JupyterHub follows the Jupyter `Community Guides `_. @@ -56,120 +54,89 @@ Contents Distributions ------------- -A JupyterHub **distribution** is tailored towards a particular set of +Each JupyterHub **distribution** is tailored toward a particular set of use cases. These are generally easier to set up than setting up JupyterHub from scratch, assuming they fit your use case. The two popular ones are: -* `Zero to JupyterHub on Kubernetes `_, for - running JupyterHub on top of `Kubernetes `_. 
This - can scale to large number of machines & users. * `The Littlest JupyterHub `_, for an easy to set up & run JupyterHub supporting 1-100 users on a single machine. +* `Zero to JupyterHub on Kubernetes `_, for + running JupyterHub on top of `Kubernetes `_. This + can scale to a large number of machines & users. Installation Guide ------------------ .. toctree:: - :maxdepth: 1 + :maxdepth: 2 installation-guide - quickstart - quickstart-docker - installation-basics Getting Started --------------- .. toctree:: - :maxdepth: 1 + :maxdepth: 2 getting-started/index - getting-started/config-basics - getting-started/networking-basics - getting-started/security-basics - getting-started/authenticators-users-basics - getting-started/spawners-basics - getting-started/services-basics Technical Reference ------------------- .. toctree:: - :maxdepth: 1 + :maxdepth: 2 reference/index - reference/technical-overview - reference/websecurity - reference/authenticators - reference/spawners - reference/services - reference/rest - reference/templates - reference/config-user-env - reference/config-examples - reference/config-ghoauth - reference/config-proxy - reference/config-sudo -Contributing ------------- - -We want you to contribute to JupyterHub in ways that are most exciting -& useful to you. We value documentation, testing, bug reporting & code equally, -and are glad to have your contributions in whatever form you wish :) - -Our `Code of Conduct `_ -(`reporting guidelines `_) -helps keep our community welcoming to as many people as possible. - -.. toctree:: - :maxdepth: 1 - - contributing/community - contributing/setup - contributing/docs - contributing/tests - contributing/roadmap - contributing/security - -Upgrading JupyterHub +Administrators guide -------------------- -We try to make upgrades between minor versions as painless as possible. - .. toctree:: - :maxdepth: 1 + :maxdepth: 2 - admin/upgrading - changelog + index-admin API Reference ------------- .. 
toctree:: - :maxdepth: 1 + :maxdepth: 2 api/index -Troubleshooting ---------------- +RBAC Reference +-------------- .. toctree:: - :maxdepth: 1 + :maxdepth: 2 - troubleshooting + rbac/index + +Contributing +------------ + +We welcome you to contribute to JupyterHub in ways that are most exciting +& useful to you. We value documentation, testing, bug reporting & code equally +and are glad to have your contributions in whatever form you wish :) + +Our `Code of Conduct `_ +(`reporting guidelines `_) +helps keep our community welcoming to as many people as possible. + +.. toctree:: + :maxdepth: 2 + + contributing/index About JupyterHub ---------------- .. toctree:: - :maxdepth: 1 + :maxdepth: 2 - contributor-list - changelog - gallery-jhub-deployments + index-about Indices and tables ================== @@ -184,24 +151,5 @@ Questions? Suggestions? - `Jupyter mailing list `_ - `Jupyter website `_ -.. _contents: - -Full Table of Contents -====================== - -.. toctree:: - :maxdepth: 2 - - installation-guide - getting-started/index - reference/index - api/index - troubleshooting - contributor-list - gallery-jhub-deployments - changelog - - .. _JupyterHub: https://github.com/jupyterhub/jupyterhub .. _Jupyter notebook: https://jupyter-notebook.readthedocs.io/en/latest/ -.. 
_REST API: http://petstore.swagger.io/?url=https://raw.githubusercontent.com/jupyterhub/jupyterhub/master/docs/rest-api.yml#!/default diff --git a/docs/source/installation-guide-hard.rst b/docs/source/installation-guide-hard.rst new file mode 100644 index 00000000..b24cea08 --- /dev/null +++ b/docs/source/installation-guide-hard.rst @@ -0,0 +1,6 @@ +:orphan: + +JupyterHub the hard way +======================= + +This guide has moved to https://github.com/jupyterhub/jupyterhub-the-hard-way/blob/HEAD/docs/installation-guide-hard.md diff --git a/docs/source/installation-guide.rst b/docs/source/installation-guide.rst index ccda8667..b2415fcc 100644 --- a/docs/source/installation-guide.rst +++ b/docs/source/installation-guide.rst @@ -1,5 +1,9 @@ -Installation Guide -================== +Installation +============ + +These sections cover how to get up-and-running with JupyterHub. They cover +some basics of the tools needed to deploy JupyterHub as well as how to get it +running on your own infrastructure. .. toctree:: :maxdepth: 3 diff --git a/docs/source/quickstart-docker.rst b/docs/source/quickstart-docker.rst index bded4ac7..e298f21a 100644 --- a/docs/source/quickstart-docker.rst +++ b/docs/source/quickstart-docker.rst @@ -1,49 +1,69 @@ -Using Docker -============ +Install JupyterHub with Docker +============================== -.. important:: - We highly recommend following the `Zero to JupyterHub`_ tutorial for - installing JupyterHub. - -Alternate installation using Docker ------------------------------------ - -A ready to go `docker image `_ -gives a straightforward deployment of JupyterHub. +The JupyterHub `docker image `_ is the fastest way to set up Jupyterhub in your local development environment. .. note:: - This ``jupyterhub/jupyterhub`` docker image is only an image for running the Hub service itself. It does not provide the other Jupyter components, such as Notebook installation, which are needed by the single-user servers. 
To run the single-user servers, which may be on the same system as the Hub or - not, Jupyter Notebook version 4 or greater must be installed. + not, `JupyterLab `_ or Jupyter Notebook must be installed. -Starting JupyterHub with docker -------------------------------- -The JupyterHub docker image can be started with the following command:: +.. important:: + We strongly recommend that you follow the `Zero to JupyterHub`_ tutorial to + install JupyterHub. + + +Prerequisites +------------- +You should have `Docker`_ installed on a Linux/Unix based system. + +Run the Docker Image +-------------------- + +To pull the latest JupyterHub image and start the `jupyterhub` container, run this command in your terminal. +:: + docker run -d -p 8000:8000 --name jupyterhub jupyterhub/jupyterhub jupyterhub -This command will create a container named ``jupyterhub`` that you can -**stop and resume** with ``docker stop/start``. -The Hub service will be listening on all interfaces at port 8000, which makes -this a good choice for **testing JupyterHub on your desktop or laptop**. +This command exposes the Jupyter container on port:8000. Navigate to `http://localhost:8000` in a web browser to access the JupyterHub console. + +You can stop and resume the container by running `docker stop` and `docker start` respectively. +:: + + # find the container id + docker ps + + # stop the running container + docker stop + + # resume the paused container + docker start + If you want to run docker on a computer that has a public IP then you should (as in MUST) **secure it with ssl** by adding ssl options to your docker configuration or using an ssl enabled proxy. -`Mounting volumes `_ -will allow you to store data outside the docker image (host system) so it will -be persistent, even when you start a new image. +`Mounting volumes `_ +enables you to persist and store the data generated by the docker container, even when you stop the container. 
+The persistent data can be stored on the host system, outside the container. -The command ``docker exec -it jupyterhub bash`` will spawn a root shell in your -docker container. You can use the root shell to **create system users in the container**. -These accounts will be used for authentication in JupyterHub's default + +Create System Users +------------------- + +Spawn a root shell in your docker container by running this command in the terminal.:: + + docker exec -it jupyterhub bash + +The created accounts will be used for authentication in JupyterHub's default configuration. .. _Zero to JupyterHub: https://zero-to-jupyterhub.readthedocs.io/en/latest/ +.. _Docker: https://www.docker.com/ diff --git a/docs/source/quickstart.md b/docs/source/quickstart.md index f9c4beaa..9789a6b0 100644 --- a/docs/source/quickstart.md +++ b/docs/source/quickstart.md @@ -5,35 +5,45 @@ Before installing JupyterHub, you will need: - a Linux/Unix based system -- [Python](https://www.python.org/downloads/) 3.5 or greater. An understanding - of using [`pip`](https://pip.pypa.io/en/stable/) or +- [Python](https://www.python.org/downloads/) 3.6 or greater. An understanding + of using [`pip`](https://pip.pypa.io) or [`conda`](https://conda.io/docs/get-started.html) for installing Python packages is helpful. - [nodejs/npm](https://www.npmjs.com/). [Install nodejs/npm](https://docs.npmjs.com/getting-started/installing-node), using your operating system's package manager. - * If you are using **`conda`**, the nodejs and npm dependencies will be installed for + - If you are using **`conda`**, the nodejs and npm dependencies will be installed for you by conda. - * If you are using **`pip`**, install a recent version of + - If you are using **`pip`**, install a recent version of [nodejs/npm](https://docs.npmjs.com/getting-started/installing-node). 
For example, install it on Linux (Debian/Ubuntu) using: ``` - sudo apt-get install npm nodejs-legacy + sudo apt-get install nodejs npm ``` - - The `nodejs-legacy` package installs the `node` executable and is currently - required for npm to work on Debian/Ubuntu. + [nodesource][] is a great resource to get more recent versions of the nodejs runtime, + if your system package manager only has an old version of Node.js (e.g. 10 or older). + +- A [pluggable authentication module (PAM)](https://en.wikipedia.org/wiki/Pluggable_authentication_module) + to use the [default Authenticator](./getting-started/authenticators-users-basics.md). + PAM is often available by default on most distributions, if this is not the case it can be installed by + using the operating system's package manager. - TLS certificate and key for HTTPS communication - Domain name +[nodesource]: https://github.com/nodesource/distributions#table-of-contents + Before running the single-user notebook servers (which may be on the same system as the Hub or not), you will need: -- [Jupyter Notebook](https://jupyter.readthedocs.io/en/latest/install.html) - version 4 or greater +- [JupyterLab][] version 3 or greater, + or [Jupyter Notebook][] + 4 or greater. 
+ +[jupyterlab]: https://jupyterlab.readthedocs.io +[jupyter notebook]: https://jupyter.readthedocs.io/en/latest/install.html ## Installation @@ -44,14 +54,14 @@ JupyterHub can be installed with `pip` (and the proxy with `npm`) or `conda`: ```bash python3 -m pip install jupyterhub npm install -g configurable-http-proxy -python3 -m pip install notebook # needed if running the notebook servers locally +python3 -m pip install jupyterlab notebook # needed if running the notebook servers in the same environment ``` **conda** (one command installs jupyterhub and proxy): ```bash conda install -c conda-forge jupyterhub # installs jupyterhub and proxy -conda install notebook # needed if running the notebook servers locally +conda install jupyterlab notebook # needed if running the notebook servers in the same environment ``` Test your installation. If installed, these commands should return the packages' @@ -70,16 +80,16 @@ To start the Hub server, run the command: jupyterhub ``` -Visit `https://localhost:8000` in your browser, and sign in with your unix +Visit `http://localhost:8000` in your browser, and sign in with your unix credentials. To **allow multiple users to sign in** to the Hub server, you must start -`jupyterhub` as a *privileged user*, such as root: +`jupyterhub` as a _privileged user_, such as root: ```bash sudo jupyterhub ``` The [wiki](https://github.com/jupyterhub/jupyterhub/wiki/Using-sudo-to-run-JupyterHub-without-root-privileges) -describes how to run the server as a *less privileged user*. This requires +describes how to run the server as a _less privileged user_. This requires additional configuration of the system. diff --git a/docs/source/rbac/generate-scope-table.py b/docs/source/rbac/generate-scope-table.py new file mode 100644 index 00000000..53057a4a --- /dev/null +++ b/docs/source/rbac/generate-scope-table.py @@ -0,0 +1,161 @@ +""" +This script updates two files with the RBAC scope descriptions found in +`scopes.py`. + +The files are: + + 1. 
scope-table.md + + This file is git ignored and referenced by the documentation. + + 2. rest-api.yml + + This file is JupyterHub's REST API schema. Both a version and the RBAC + scopes descriptions are updated in it. +""" +import os +from collections import defaultdict +from pathlib import Path +from subprocess import run + +from pytablewriter import MarkdownTableWriter +from ruamel.yaml import YAML + +from jupyterhub import __version__ +from jupyterhub.scopes import scope_definitions + +HERE = os.path.abspath(os.path.dirname(__file__)) +DOCS = Path(HERE).parent.parent.absolute() +REST_API_YAML = DOCS.joinpath("source", "_static", "rest-api.yml") +SCOPE_TABLE_MD = Path(HERE).joinpath("scope-table.md") + + +class ScopeTableGenerator: + def __init__(self): + self.scopes = scope_definitions + + @classmethod + def create_writer(cls, table_name, headers, values): + writer = MarkdownTableWriter() + writer.table_name = table_name + writer.headers = headers + writer.value_matrix = values + writer.margin = 1 + return writer + + def _get_scope_relationships(self): + """Returns a tuple of dictionary of all scope-subscope pairs and a list of just subscopes: + + ({scope: subscope}, [subscopes]) + + used for creating hierarchical scope table in _parse_scopes() + """ + pairs = [] + for scope, data in self.scopes.items(): + subscopes = data.get('subscopes') + if subscopes is not None: + for subscope in subscopes: + pairs.append((scope, subscope)) + else: + pairs.append((scope, None)) + subscopes = [pair[1] for pair in pairs] + pairs_dict = defaultdict(list) + for scope, subscope in pairs: + pairs_dict[scope].append(subscope) + return pairs_dict, subscopes + + def _get_top_scopes(self, subscopes): + """Returns a list of highest level scopes + (not a subscope of any other scopes)""" + top_scopes = [] + for scope in self.scopes.keys(): + if scope not in subscopes: + top_scopes.append(scope) + return top_scopes + + def _parse_scopes(self): + """Returns a list of table rows where row: 
+ [indented scopename string, scope description string]""" + scope_pairs, subscopes = self._get_scope_relationships() + top_scopes = self._get_top_scopes(subscopes) + + table_rows = [] + md_indent = "   " + + def _add_subscopes(table_rows, scopename, depth=0): + description = self.scopes[scopename]['description'] + doc_description = self.scopes[scopename].get('doc_description', '') + if doc_description: + description = doc_description + table_row = [f"{md_indent * depth}`{scopename}`", description] + table_rows.append(table_row) + for subscope in scope_pairs[scopename]: + if subscope: + _add_subscopes(table_rows, subscope, depth + 1) + + for scope in top_scopes: + _add_subscopes(table_rows, scope) + + return table_rows + + def write_table(self): + """Generates the RBAC scopes reference documentation as a markdown table + and writes it to the .gitignored `scope-table.md`.""" + filename = SCOPE_TABLE_MD + table_name = "" + headers = ["Scope", "Grants permission to:"] + values = self._parse_scopes() + writer = self.create_writer(table_name, headers, values) + + title = "Table 1. Available scopes and their hierarchy" + content = f"{title}\n{writer.dumps()}" + with open(filename, 'w') as f: + f.write(content) + print(f"Generated {filename}.") + print( + "Run 'make clean' before 'make html' to ensure the built scopes.html contains latest scope table changes."
+ ) + + def write_api(self): + """Loads `rest-api.yml` and writes it back with a dynamically set + JupyterHub version field and list of RBAC scopes descriptions from + `scopes.py`.""" + filename = REST_API_YAML + + yaml = YAML(typ="rt") + yaml.preserve_quotes = True + yaml.indent(mapping=2, offset=2, sequence=4) + + scope_dict = {} + with open(filename) as f: + content = yaml.load(f.read()) + + content["info"]["version"] = __version__ + for scope in self.scopes: + description = self.scopes[scope]['description'] + doc_description = self.scopes[scope].get('doc_description', '') + if doc_description: + description = doc_description + scope_dict[scope] = description + content['components']['securitySchemes']['oauth2']['flows'][ + 'authorizationCode' + ]['scopes'] = scope_dict + + with open(filename, 'w') as f: + yaml.dump(content, f) + + run( + ['pre-commit', 'run', 'prettier', '--files', filename], + cwd=HERE, + check=False, + ) + + +def main(): + table_generator = ScopeTableGenerator() + table_generator.write_table() + table_generator.write_api() + + +if __name__ == "__main__": + main() diff --git a/docs/source/rbac/index.md b/docs/source/rbac/index.md new file mode 100644 index 00000000..3be61c54 --- /dev/null +++ b/docs/source/rbac/index.md @@ -0,0 +1,39 @@ +(RBAC)= + +# JupyterHub RBAC + +Role Based Access Control (RBAC) in JupyterHub serves to provide fine grained control of access to Jupyterhub's API resources. + +RBAC is new in JupyterHub 2.0. + +## Motivation + +The JupyterHub API requires authorization to access its APIs. +This ensures that an arbitrary user, or even an unauthenticated third party, are not allowed to perform such actions. +For instance, the behaviour prior to adoption of RBAC is that creating or deleting users requires _admin rights_. + +The prior system is functional, but lacks flexibility. If your Hub serves a number of users in different groups, you might want to delegate permissions to other users or automate certain processes. 
+Prior to RBAC, appointing a 'group-only admin' or a bot that culls idle servers, requires granting full admin rights to all actions. This poses a risk of the user or service intentionally or unintentionally accessing and modifying any data within the Hub and violates the [principle of least privilege](https://en.wikipedia.org/wiki/Principle_of_least_privilege). + +To remedy situations like this, JupyterHub is transitioning to an RBAC system. By equipping users, groups and services with _roles_ that supply them with a collection of permissions (_scopes_), administrators are able to fine-tune which parties are granted access to which resources. + +## Definitions + +**Scopes** are specific permissions used to evaluate API requests. For example: the API endpoint `users/servers`, which enables starting or stopping user servers, is guarded by the scope `servers`. + +Scopes are not directly assigned to requesters. Rather, when a client performs an API call, their access will be evaluated based on their assigned roles. + +**Roles** are collections of scopes that specify the level of what a client is allowed to do. For example, a group administrator may be granted permission to control the servers of group members, but not to create, modify or delete group members themselves. +Within the RBAC framework, this is achieved by assigning a role to the administrator that covers exactly those privileges. + +## Technical Overview + +```{toctree} +:maxdepth: 2 + +roles +scopes +use-cases +tech-implementation +upgrade +``` diff --git a/docs/source/rbac/roles.md b/docs/source/rbac/roles.md new file mode 100644 index 00000000..7828c8cd --- /dev/null +++ b/docs/source/rbac/roles.md @@ -0,0 +1,159 @@ +# Roles + +JupyterHub provides four (4) roles that are available by default: + +```{admonition} **Default roles** +- `user` role provides a {ref}`default user scope ` `self` that grants access to the user's own resources. 
+- `admin` role contains all available scopes and grants full rights to all actions. This role **cannot be edited**. +- `token` role provides a {ref}`default token scope ` `inherit` that resolves to the same permissions as the owner of the token has. +- `server` role allows for posting activity of "itself" only. + +**These roles cannot be deleted.** +``` + +We call these 'default' roles because they are available by default and have a default collection of scopes. +However, you can define the scopes associated with each role (excluding the admin role) to suit your needs, +as seen [below](overriding-default-roles). + +The `user`, `admin`, and `token` roles, by default, all preserve the permissions prior to Role-based Access Control (RBAC). +Only the `server` role is changed from pre-2.0, to reduce its permissions to activity-only +instead of the default of a full access token. + +Additional custom roles can also be defined (see {ref}`define-role-target`). +Roles can be assigned to the following entities: + +- Users +- Services +- Groups + +An entity can have zero, one, or multiple roles, and there are no restrictions on which roles can be assigned to which entity. Roles can be added to or removed from entities at any time. + +**Users** \ +When a new user gets created, they are assigned their default role, `user`. Additionally, if the user is created with admin privileges (via `c.Authenticator.admin_users` in `jupyterhub_config.py` or `admin: true` via API), they will be also granted `admin` role. If existing user's admin status changes via API or `jupyterhub_config.py`, their default role will be updated accordingly (after next startup for the latter). + +**Services** \ +Services do not have a default role. Services without roles have no access to the guarded API end-points. So, most services will require assignment of a role in order to function. + +**Groups** \ +A group does not require any role, and has no roles by default. 
If a user is a member of a group, they automatically inherit any of the group's permissions (see {ref}`resolving-roles-scopes-target` for more details). This is useful for assigning a set of common permissions to several users. + +**Tokens** \ +A token’s permissions are evaluated based on their owning entity. Since a token is always issued for a user or service, it can never have more permissions than its owner. If no specific scopes are requested for a new token, the token is assigned the scopes of the `token` role. + +(define-role-target)= + +## Defining Roles + +Roles can be defined or modified in the configuration file as a list of dictionaries. An example: + +% TODO: think about loading users into roles if membership has been changed via API. +% What should be the result? + +```python +# in jupyterhub_config.py + +c.JupyterHub.load_roles = [ + { +   'name': 'server-rights', +   'description': 'Allows parties to start and stop user servers', +   'scopes': ['servers'], +   'users': ['alice', 'bob'], +   'services': ['idle-culler'], +   'groups': ['admin-group'], + } +] +``` + +The role `server-rights` now allows the starting and stopping of servers by any of the following: + +- users `alice` and `bob` +- the service `idle-culler` +- any member of the `admin-group`. + +```{attention} +Tokens cannot be assigned roles through role definition but may be assigned specific roles when requested via API (see {ref}`requesting-api-token-target`). +``` + +Another example: + +```python +# in jupyterhub_config.py + +c.JupyterHub.load_roles = [ + { + 'description': 'Read-only user models', + 'name': 'reader', + 'scopes': ['read:users'], + 'services': ['external'], + 'users': ['maria', 'joe'] + } +] +``` + +The role `reader` allows users `maria` and `joe` and service `external` to read (but not modify) any user’s model. 
+ +```{admonition} Requirements +:class: warning +In a role definition, the `name` field is required, while all other fields are optional.\ +**Role names must:** +- be 3 - 255 characters +- use ascii lowercase, numbers, 'unreserved' URL punctuation `-_.~` +- start with a letter +- end with letter or number. + +`users`, `services`, and `groups` only accept objects that already exist in the database or are defined previously in the file. +It is not possible to implicitly add a new user to the database by defining a new role. +``` + +If no scopes are defined for _new role_, JupyterHub will raise a warning. Providing non-existing scopes will result in an error. + +In case the role with a certain name already exists in the database, its definition and scopes will be overwritten. This holds true for all roles except the `admin` role, which cannot be overwritten; an error will be raised if trying to do so. All the role bearers permissions present in the definition will change accordingly. + +(overriding-default-roles)= + +### Overriding Default Roles + +Role definitions can include those of the "default" roles listed above (admin excluded), +if the default scopes associated with those roles do not suit your deployment. +For example, to specify what permissions the $JUPYTERHUB_API_TOKEN issued to all single-user servers +has, +define the `server` role. + +To restore the JupyterHub 1.x behavior of servers being able to do anything their owners can do, +use the scope `inherit` (for 'inheriting' the owner's permissions): + +```python +c.JupyterHub.load_roles = [ + { + 'name': 'server', + 'scopes': ['inherit'], + } +] +``` + +or, better yet, identify the specific [scopes][] you want server environments to have access to. 
+ +[scopes]: available-scopes-target + +If you don't want to get too detailed, +one option is the `self` scope, +which will have no effect on non-admin users, +but will restrict the token issued to admin user servers to only have access to their own resources, +instead of being able to take actions on behalf of all other users. + +```python +c.JupyterHub.load_roles = [ + { + 'name': 'server', + 'scopes': ['self'], + } +] +``` + +(removing-roles-target)= + +## Removing Roles + +Only the entities present in the role definition in the `jupyterhub_config.py` remain the role bearers. If a user, service or group is removed from the role definition, they will lose the role on the next startup. + +Once a role is loaded, it remains in the database until removing it from the `jupyterhub_config.py` and restarting the Hub. All previously defined role bearers will lose the role and associated permissions. Default roles, even if previously redefined through the config file and removed, will not be deleted from the database. diff --git a/docs/source/rbac/scopes.md b/docs/source/rbac/scopes.md new file mode 100644 index 00000000..d2046dc1 --- /dev/null +++ b/docs/source/rbac/scopes.md @@ -0,0 +1,303 @@ +# Scopes in JupyterHub + +A scope has a syntax-based design that reveals which resources it provides access to. Resources are objects with a type, associated data, relationships to other resources, and a set of methods that operate on them (see [RESTful API](https://restful-api-design.readthedocs.io/en/latest/resources.html) documentation for more information). + +`` in the RBAC scope design refers to the resource name in the [JupyterHub's API](../reference/rest-api.rst) endpoints in most cases. For instance, `` equal to `users` corresponds to JupyterHub's API endpoints beginning with _/users_. 
+ +(scope-conventions-target)= + +## Scope conventions + +- `` \ + The top-level `` scopes, such as `users` or `groups`, grant read, write, and list permissions to the resource itself as well as its sub-resources. For example, the scope `users:activity` is included in the scope `users`. + +- `read:` \ + Limits permissions to read-only operations on single resources. + +- `list:` \ + Read-only access to listing endpoints. + Use `read::` to control what fields are returned. + +- `admin:` \ + Grants additional permissions such as create/delete on the corresponding resource in addition to read and write permissions. + +- `access:` \ + Grants access permissions to the `` via API or browser. + +- `:` \ + The {ref}`vertically filtered ` scopes provide access to a subset of the information granted by the `` scope. E.g., the scope `users:activity` only provides permission to post user activity. + +- `!=` \ + {ref}`horizontal-filtering-target` is implemented by the `!=`scope structure. A resource (or sub-resource) can be filtered based on `user`, `server`, `group` or `service` name. For instance, `!user=charlie` limits access to only return resources of user `charlie`. \ + Only one filter per scope is allowed, but filters for the same scope have an additive effect; a larger filter can be used by supplying the scope multiple times with different filters. + +By adding a scope to an existing role, all role bearers will gain the associated permissions. + +## Metascopes + +Metascopes do not follow the general scope syntax. Instead, a metascope resolves to a set of scopes, which can refer to different resources, based on their owning entity. In JupyterHub, there are currently two metascopes: + +1. default user scope `self`, and +2. default token scope `inherit`. + +(default-user-scope-target)= + +### Default user scope + +Access to the user's own resources and subresources is covered by metascope `self`. This metascope includes the user's model, activity, servers and tokens. 
For example, `self` for a user named "gerard" includes: + +- `users!user=gerard` where the `users` scope provides access to the full user model and activity. The filter restricts this access to the user's own resources. +- `servers!user=gerard` which grants the user access to their own servers without being able to create/delete any. +- `tokens!user=gerard` which allows the user to access, request and delete their own tokens. +- `access:servers!user=gerard` which allows the user to access their own servers via API or browser. + +The `self` scope is only valid for user entities. In other cases (e.g., for services) it resolves to an empty set of scopes. + +(default-token-scope-target)= + +### Default token scope + +The token metascope `inherit` causes the token to have the same permissions as the token's owner. For example, if a token owner has roles containing the scopes `read:groups` and `read:users`, the `inherit` scope resolves to the set of scopes `{read:groups, read:users}`. + +If the token owner has default `user` role, the `inherit` scope resolves to `self`, which will subsequently be expanded to include all the user-specific scopes (or empty set in the case of services). + +If the token owner is a member of any group with roles, the group scopes will also be included in resolving the `inherit` scope. + +(horizontal-filtering-target)= + +## Horizontal filtering + +Horizontal filtering, also called _resource filtering_, is the concept of reducing the payload of an API call to cover only the subset of the _resources_ that the scopes of the client provides them access to. +Requested resources are filtered based on the filter of the corresponding scope. For instance, if a service requests a user list (guarded with scope `read:users`) with a role that only contains scopes `read:users!user=hannah` and `read:users!user=ivan`, the returned list of user models will be an intersection of all users and the collection `{hannah, ivan}`. 
In case this intersection is empty, the API call returns an HTTP 404 error, regardless of whether any users exist outside of the client's scope filter collection.
+
+In case a user resource is being accessed, any scopes with _group_ filters will be expanded to filters for each _user_ in those groups.
+
+(self-referencing-filters)=
+
+### Self-referencing filters
+
+There are some 'shortcut' filters,
+which can be applied to all scopes,
+that filter based on the entities associated with the request.
+
+The `!user` filter is a special horizontal filter that strictly refers to the **"owner only"** scopes, where _owner_ is a user entity. The filter resolves internally into `!user=<ownerusername>` ensuring that only the owner's resources may be accessed through the associated scopes.
+
+For example, the `server` role assigned by default to server tokens contains `access:servers!user` and `users:activity!user` scopes. This allows the token to access and post activity of only the servers owned by the token owner.
+
+:::{versionadded} 3.0
+`!service` and `!server` filters.
+:::
+
+In addition to `!user`, _tokens_ may have filters `!service`
+or `!server`, which expand similarly to `!service=servicename`
+and `!server=servername`.
+This only applies to tokens issued via the OAuth flow.
+In these cases, the name is the _issuing_ entity (a service or single-user server),
+so that access can be restricted to the issuing service,
+e.g. `access:servers!server` would grant access only to the server that requested the token.
+
+These filters can be applied to any scope.
+
+(vertical-filtering-target)=
+
+## Vertical filtering
+
+Vertical filtering, also called _attribute filtering_, is the concept of reducing the payload of an API call to cover only the _attributes_ of the resources that the scopes of the client provide them access to. This occurs when the client scopes are subscopes of the API endpoint that is called. 
+For instance, if a client requests a user list with the only scope being `read:users:groups`, the returned list of user models will contain only a list of groups per user. +In case the client has multiple subscopes, the call returns the union of the data the client has access to. + +The payload of an API call can be filtered both horizontally and vertically simultaneously. For instance, performing an API call to the endpoint `/users/` with the scope `users:name!user=juliette` returns a payload of `[{name: 'juliette'}]` (provided that this name is present in the database). + +(available-scopes-target)= + +## Available scopes + +Table below lists all available scopes and illustrates their hierarchy. Indented scopes indicate subscopes of the scope(s) above them. + +There are four exceptions to the general {ref}`scope conventions `: + +- `read:users:name` is a subscope of both `read:users` and `read:servers`. \ + The `read:servers` scope requires access to the user name (server owner) due to named servers distinguished internally in the form `!server=username/servername`. + +- `read:users:activity` is a subscope of both `read:users` and `users:activity`. \ + Posting activity via the `users:activity`, which is not included in `users` scope, needs to check the last valid activity of the user. + +- `read:roles:users` is a subscope of both `read:roles` and `admin:users`. \ + Admin privileges to the _users_ resource include the information about user roles. + +- `read:roles:groups` is a subscope of both `read:roles` and `admin:groups`. \ + Similar to the `read:roles:users` above. + +```{include} scope-table.md + +``` + +:::{versionadded} 3.0 +The `admin-ui` scope is added to explicitly grant access to the admin page, +rather than combining `admin:users` and `admin:servers` permissions. +This means a deployment can enable the admin page with only a subset of functionality enabled. 
+ +Note that this means actions to take _via_ the admin UI +and access _to_ the admin UI are separated. +For example, it generally doesn't make sense to grant +`admin-ui` without at least `list:users` for at least some subset of users. + +For example: + +```python +c.JupyterHub.load_roles = [ + { + "name": "instructor-data8", + "scopes": [ + # access to the admin page + "admin-ui", + # list users in the class group + "list:users!group=students-data8", + # start/stop servers for users in the class + "admin:servers!group=students-data8", + # access servers for users in the class + "access:servers!group=students-data8", + ], + "group": ["instructors-data8"], + } +] +``` + +will grant instructors in the data8 course permission to: + +1. view the admin UI +2. see students in the class (but not all users) +3. start/stop/access servers for users in the class +4. but _not_ permission to administer the users themselves (e.g. change their permissions, etc.) + ::: + +```{Caution} +Note that only the {ref}`horizontal filtering ` can be added to scopes to customize them. \ +Metascopes `self` and `all`, ``, `:`, `read:`, `admin:`, and `access:` scopes are predefined and cannot be changed otherwise. +``` + +(custom-scopes)= + +### Custom scopes + +:::{versionadded} 3.0 +::: + +JupyterHub 3.0 introduces support for custom scopes. +Services that use JupyterHub for authentication and want to implement their own granular access may define additional _custom_ scopes and assign them to users with JupyterHub roles. + +% Note: keep in sync with pattern/description in jupyterhub/scopes.py + +Custom scope names must start with `custom:` +and contain only lowercase ascii letters, numbers, hyphen, underscore, colon, and asterisk (`-_:*`). +The part after `custom:` must start with a letter or number. +Scopes may not end with a hyphen or colon. + +The only strict requirement is that a custom scope definition must have a `description`. 
+
+It _may_ also have `subscopes` if you are defining multiple scopes that have a natural hierarchy.
+
+For example:
+
+```python
+c.JupyterHub.custom_scopes = {
+    "custom:myservice:read": {
+        "description": "read-only access to myservice",
+    },
+    "custom:myservice:write": {
+        "description": "write access to myservice",
+        # write permission implies read permission
+        "subscopes": [
+            "custom:myservice:read",
+        ],
+    },
+}
+
+c.JupyterHub.load_roles = [
+    # graders have read-only access to the service
+    {
+        "name": "service-user",
+        "groups": ["graders"],
+        "scopes": [
+            "custom:myservice:read",
+            "access:service!service=myservice",
+        ],
+    },
+    # instructors have read and write access to the service
+    {
+        "name": "service-admin",
+        "groups": ["instructors"],
+        "scopes": [
+            "custom:myservice:write",
+            "access:service!service=myservice",
+        ],
+    },
+]
+```
+
+In the above configuration, two scopes are defined:
+
+- `custom:myservice:read` grants read-only access to the service, and
+- `custom:myservice:write` grants write access to the service
+- write access _implies_ read access via the `subscope`
+
+These custom scopes are assigned to two groups via `roles`:
+
+- users in the group `graders` are granted read access to the service
+- users in the group `instructors` are granted write access to the service
+- both are granted _access_ to the service via `access:service!service=myservice`
+
+When the service completes OAuth, it will retrieve the user model from `/hub/api/user`.
+This model includes a `scopes` field which is a list of authorized scopes for the request,
+which can be used.
+
+```python
+def require_scope(scope):
+    """decorator to require a scope to perform an action"""
+    def wrapper(func):
+        @functools.wraps(func)
+        def wrapped_func(request):
+            user = fetch_hub_api_user(request.token)
+            if scope not in user["scopes"]:
+                raise HTTP403(f"Requires scope {scope}")
+            else:
+                return func()
+        return wrapped_func
+    return wrapper
+
+@require_scope("custom:myservice:read")
+async def read_something(request):
+    ... 
+ +@require_scope("custom:myservice:write") +async def write_something(request): + ... +``` + +If you use {class}`~.HubOAuthenticated`, this check is performed automatically +against the `.hub_scopes` attribute of each Handler +(the default is populated from `$JUPYTERHUB_OAUTH_ACCESS_SCOPES` and usually `access:services!service=myservice`). + +:::{versionchanged} 3.0 +The JUPYTERHUB_OAUTH_SCOPES environment variable is deprecated and renamed to JUPYTERHUB_OAUTH_ACCESS_SCOPES, +to avoid ambiguity with JUPYTERHUB_OAUTH_CLIENT_ALLOWED_SCOPES +::: + +```python +from tornado import web +from jupyterhub.services.auth import HubOAuthenticated + +class MyHandler(HubOAuthenticated, BaseHandler): + hub_scopes = ["custom:myservice:read"] + + @web.authenticated + def get(self): + ... +``` + +Existing scope filters (`!user=`, etc.) may be applied to custom scopes. +Custom scope _filters_ are NOT supported. + +### Scopes and APIs + +The scopes are also listed in the [](../reference/rest-api.rst) documentation. Each API endpoint has a list of scopes which can be used to access the API; if no scopes are listed, the API is not authenticated and can be accessed without any permissions (i.e., no scopes). + +Listed scopes by each API endpoint reflect the "lowest" permissions required to gain any access to the corresponding API. For example, posting user's activity (_POST /users/:name/activity_) needs `users:activity` scope. If scope `users` is passed during the request, the access will be granted as the required scope is a subscope of the `users` scope. If, on the other hand, `read:users:activity` scope is passed, the access will be denied. 
diff --git a/docs/source/rbac/tech-implementation.md b/docs/source/rbac/tech-implementation.md new file mode 100644 index 00000000..bc765020 --- /dev/null +++ b/docs/source/rbac/tech-implementation.md @@ -0,0 +1,99 @@ +# Technical Implementation
+
+[Roles](roles) are stored in the database, where they are associated with users, services, and groups. Roles can be added or modified as explained in the {ref}`define-role-target` section. Users, services, groups, and tokens can gain, change, and lose roles. This is currently achieved via `jupyterhub_config.py` (see {ref}`define-role-target`) and will be made available via API in the future. The latter will allow for changing a user's role, and thereby its permissions, without the need to restart JupyterHub.
+
+Roles and scopes utilities can be found in `roles.py` and `scopes.py` modules. Scope variables take on five different formats that are reflected throughout the utilities via specific nomenclature:
+
+```{admonition} **Scope variable nomenclature**
+:class: tip
+- _scopes_ \
+  List of scopes that may contain abbreviations (used in role definitions). E.g., `["users:activity!user", "self"]`.
+- _expanded scopes_ \
+  Set of fully expanded scopes without abbreviations (i.e., resolved metascopes, filters, and subscopes). E.g., `{"users:activity!user=charlie", "read:users:activity!user=charlie"}`.
+- _parsed scopes_ \
+  Dictionary representation of expanded scopes. E.g., `{"users:activity": {"user": ["charlie"]}, "read:users:activity": {"user": ["charlie"]}}`.
+- _intersection_ \
+  Set of expanded scopes as intersection of 2 expanded scope sets.
+- _identify scopes_ \
+  Set of expanded scopes needed for identity (whoami) endpoints.
+```
+
+(resolving-roles-scopes-target)=
+
+## Resolving roles and scopes
+
+**Resolving roles** involves determining which roles a user, service, or group has, extracting the list of scopes from each role and combining them into a single set of scopes. 
+
+**Resolving scopes** involves expanding scopes into all their possible subscopes (_expanded scopes_), parsing them into the format used for access evaluation (_parsed scopes_) and, if applicable, comparing two sets of scopes (_intersection_). All procedures take into account the scope hierarchy, {ref}`vertical <vertical-filtering-target>` and {ref}`horizontal filtering <horizontal-filtering-target>`, limiting or elevated permissions (`read:` or `admin:`, respectively), and metascopes.
+
+Roles and scopes are resolved on several occasions, for example when requesting an API token with specific scopes or when making an API request. The following sections provide more details.
+
+(requesting-api-token-target)=
+
+### Requesting API token with specific scopes
+
+:::{versionchanged} 3.0
+API tokens have _scopes_ instead of roles,
+so that their permissions cannot be updated.
+
+You may still request roles for a token,
+but those roles will be evaluated to the corresponding _scopes_ immediately.
+
+Prior to 3.0, tokens stored _roles_,
+which meant their scopes were resolved on each request.
+:::
+
+API tokens grant access to JupyterHub's APIs. The [RBAC framework](./index.md) allows for requesting tokens with specific permissions.
+
+RBAC is involved in several stages of the OAuth token flow.
+
+When requesting a token via the tokens API (`/users/:name/tokens`), or the token page (`/hub/token`),
+if no scopes are requested, the token is issued with the permissions stored on the default `token` role
+(provided the requester is allowed to create the token).
+
+OAuth tokens are also requested via the OAuth flow.
+
+If the token is requested with any scopes, the permissions of the requesting entity are checked against the requested permissions to ensure the token would not grant its owner additional privileges.
+
+If a token has any scopes that its owner does not possess
+at the time of making the API request, those scopes are removed. 
+The API request is resolved without additional errors using the scope _intersection_; +the Hub logs a warning in this case (see {ref}`Figure 2 `). + +Resolving a token's scope (yellow box in {ref}`Figure 1 `) corresponds to resolving all the roles of the token's owner (including the roles associated with their groups) and the token's own scopes into a set of scopes. The two sets are compared (Resolve the scopes box in orange in {ref}`Figure 1 `), taking into account the scope hierarchy. +If the token's scopes are a subset of the token owner's scopes, the token is issued with the requested scopes; if not, JupyterHub will raise an error. + +{ref}`Figure 1 ` below illustrates the steps involved. The orange rectangles highlight where in the process the roles and scopes are resolved. + +```{figure} ../images/rbac-token-request-chart.png +:align: center +:name: token-request-chart + +Figure 1. Resolving roles and scopes during API token request +``` + +### Making an API request + +With the RBAC framework, each authenticated JupyterHub API request is guarded by a scope decorator that specifies which scopes are required in order to gain the access to the API. + +When an API request is made, the requesting API token's scopes are again intersected with its owner's (yellow box in {ref}`Figure 2 `) to ensure that the token does not grant more permissions than its owner has at the request time (e.g., due to changing/losing roles). +If the owner's roles do not include some scopes of the token, only the _intersection_ of the token's and owner's scopes will be used. For example, using a token with scope `users` whose owner's role scope is `read:users:name` will result in only the `read:users:name` scope being passed on. In the case of no _intersection_, an empty set of scopes will be used. 
+ +The passed scopes are compared to the scopes required to access the API as follows: + +- if the API scopes are present within the set of passed scopes, the access is granted and the API returns its "full" response + +- if that is not the case, another check is utilized to determine if subscopes of the required API scopes can be found in the passed scope set: + + - if found, the RBAC framework employs the {ref}`filtering ` procedures to refine the API response to access only resource attributes corresponding to the passed scopes. For example, providing a scope `read:users:activity!group=class-C` for the `GET /users` API will return a list of user models from group `class-C` containing only the `last_activity` attribute for each user model + + - if not found, the access to API is denied + +{ref}`Figure 2 ` illustrates this process highlighting the steps where the role and scope resolutions as well as filtering occur in orange. + +```{figure} ../images/rbac-api-request-chart.png +:align: center +:name: api-request-chart + +Figure 2. Resolving roles and scopes when an API request is made +``` diff --git a/docs/source/rbac/upgrade.md b/docs/source/rbac/upgrade.md new file mode 100644 index 00000000..15065d07 --- /dev/null +++ b/docs/source/rbac/upgrade.md @@ -0,0 +1,54 @@ +# Upgrading JupyterHub with RBAC framework + +RBAC framework requires different database setup than any previous JupyterHub versions due to eliminating the distinction between OAuth and API tokens (see {ref}`oauth-vs-api-tokens-target` for more details). This requires merging the previously two different database tables into one. By doing so, all existing tokens created before the upgrade no longer comply with the new database version and must be replaced. + +This is achieved by the Hub deleting all existing tokens during the database upgrade and recreating the tokens loaded via the `jupyterhub_config.py` file with updated structure. 
However, any manually issued or stored tokens are not recreated automatically and must be manually re-issued after the upgrade.
+
+No other database records are affected.
+
+(rbac-upgrade-steps-target)=
+
+## Upgrade steps
+
+1. All running **servers must be stopped** before proceeding with the upgrade.
+2. To upgrade the Hub, follow the [Upgrading JupyterHub](../admin/upgrading.rst) instructions.
+   ```{attention}
+   We advise against defining any new roles in the `jupyterhub_config.py` file right after the upgrade is completed and JupyterHub restarted for the first time. This preserves the 'current' state of the Hub. You can define and assign new roles on any other following startup.
+   ```
+3. After restarting the Hub **re-issue all tokens that were previously issued manually** (i.e., not through the `jupyterhub_config.py` file).
+
+When JupyterHub is restarted for the first time after the upgrade, all users, services and tokens stored in the database or re-loaded through the configuration file will be assigned their default role. Any newly added entities after that will be assigned their default role only if no other specific role is requested for them.
+
+## Changing the permissions after the upgrade
+
+Once all the {ref}`upgrade steps <rbac-upgrade-steps-target>` above are completed, the RBAC framework will be available for utilization. You can define new roles, modify default roles (apart from `admin`) and assign them to entities as described in the {ref}`define-role-target` section.
+
+We recommend the following procedure to start with RBAC:
+
+1. Identify which admin users and services you would like to grant only the permissions they need through the new roles.
+2. Strip these users and services of their admin status via API or UI. This will change their roles from `admin` to `user`.
+   ```{note}
+   Stripping entities of their roles is currently available only via `jupyterhub_config.py` (see {ref}`removing-roles-target`).
+   ```
+3. 
Define new roles that you would like to start using with appropriate scopes and assign them to these entities in `jupyterhub_config.py`. +4. Restart the JupyterHub for the new roles to take effect. + +(oauth-vs-api-tokens-target)= + +## OAuth vs API tokens + +### Before RBAC + +Previous JupyterHub versions utilize two types of tokens, OAuth token and API token. + +OAuth token is issued by the Hub to a single-user server when the user logs in. The token is stored in the browser cookie and is used to identify the user who owns the server during the OAuth flow. This token by default expires when the cookie reaches its expiry time of 2 weeks (or after 1 hour in JupyterHub versions < 1.3.0). + +API token is issued by the Hub to a single-user server when launched and is used to communicate with the Hub's APIs such as posting activity or completing the OAuth flow. This token has no expiry by default. + +API tokens can also be issued to users via API ([_/hub/token_](../reference/urls.md) or [_POST /users/:username/tokens_](../reference/rest-api.rst)) and services via `jupyterhub_config.py` to perform API requests. + +### With RBAC + +The RBAC framework allows for granting tokens different levels of permissions via scopes attached to roles. The 'only identify' purpose of the separate OAuth tokens is no longer required. API tokens can be used for every action, including the login and authentication, for which an API token with no role (i.e., no scope in {ref}`available-scopes-target`) is used. + +OAuth tokens are therefore dropped from the Hub upgraded with the RBAC framework. diff --git a/docs/source/rbac/use-cases.md b/docs/source/rbac/use-cases.md new file mode 100644 index 00000000..874c772d --- /dev/null +++ b/docs/source/rbac/use-cases.md @@ -0,0 +1,130 @@ +# Use Cases + +To determine which scopes a role should have, one can follow these steps: + +1. Determine what actions the role holder should have/have not access to +2. 
Match the actions against the [JupyterHub's APIs](../reference/rest-api.rst) +3. Check which scopes are required to access the APIs +4. Combine scopes and subscopes if applicable +5. Customize the scopes with filters if needed +6. Define the role with required scopes and assign to users/services/groups/tokens + +Below, different use cases are presented on how to use the [RBAC framework](./index.md) + +## Service to cull idle servers + +Finding and shutting down idle servers can save a lot of computational resources. +**We can make use of [jupyterhub-idle-culler](https://github.com/jupyterhub/jupyterhub-idle-culler) to manage this for us.** +Below follows a short tutorial on how to add a cull-idle service in the RBAC system. + +1. Install the cull-idle server script with `pip install jupyterhub-idle-culler`. +2. Define a new service `idle-culler` and a new role for this service: + + ```python + # in jupyterhub_config.py + + c.JupyterHub.services = [ + { + "name": "idle-culler", + "command": [ + sys.executable, "-m", + "jupyterhub_idle_culler", + "--timeout=3600" + ], + } + ] + + c.JupyterHub.load_roles = [ + { + "name": "idle-culler", + "description": "Culls idle servers", + "scopes": ["read:users:name", "read:users:activity", "servers"], + "services": ["idle-culler"], + } + ] + ``` + + ```{important} + Note that in the RBAC system the `admin` field in the `idle-culler` service definition is omitted. Instead, the `idle-culler` role provides the service with only the permissions it needs. + + If the optional actions of deleting the idle servers and/or removing inactive users are desired, **change the following scopes** in the `idle-culler` role definition: + - `servers` to `admin:servers` for deleting servers + - `read:users:name`, `read:users:activity` to `admin:users` for deleting users. + ``` + +3. Restart JupyterHub to complete the process. 
+ +## API launcher + +A service capable of creating/removing users and launching multiple servers should have access to: + +1. _POST_ and _DELETE /users_ +2. _POST_ and _DELETE /users/:name/server_ or _/users/:name/servers/:server_name_ +3. Creating/deleting servers + +The scopes required to access the API enpoints: + +1. `admin:users` +2. `servers` +3. `admin:servers` + +From the above, the role definition is: + +```python +# in jupyterhub_config.py + +c.JupyterHub.load_roles = [ + { + "name": "api-launcher", + "description": "Manages servers", + "scopes": ["admin:users", "admin:servers"], + "services": [] + } +] +``` + +If needed, the scopes can be modified to limit the permissions to e.g. a particular group with `!group=groupname` filter. + +## Group admin roles + +Roles can be used to specify different group member privileges. + +For example, a group of students `class-A` may have a role allowing all group members to access information about their group. Teacher `johan`, who is a student of `class-A` but a teacher of another group of students `class-B`, can have additional role permitting him to access information about `class-B` students as well as start/stop their servers. 
+ +The roles can then be defined as follows: + +```python +# in jupyterhub_config.py + +c.JupyterHub.load_groups = { + 'class-A': ['johan', 'student1', 'student2'], + 'class-B': ['student3', 'student4'] +} + +c.JupyterHub.load_roles = [ + { +   'name': 'class-A-student', +   'description': 'Grants access to information about the group', +   'scopes': ['read:groups!group=class-A'], +   'groups': ['class-A'] + }, + { +   'name': 'class-B-student', +   'description': 'Grants access to information about the group', +   'scopes': ['read:groups!group=class-B'], +   'groups': ['class-B'] + }, + { +   'name': 'teacher', +   'description': 'Allows for accessing information about teacher group members and starting/stopping their servers', +   'scopes': [ 'read:users!group=class-B', 'servers!group=class-B'], +   'users': ['johan'] + } +] +``` + +In the above example, `johan` has privileges inherited from `class-A-student` role and the `teacher` role on top of those. + +```{note} +The scope filters (`!group=`) limit the privileges only to the particular groups. `johan` can access the servers and information of `class-B` group members only. +``` diff --git a/docs/source/reference/api-only.md b/docs/source/reference/api-only.md new file mode 100644 index 00000000..c265468d --- /dev/null +++ b/docs/source/reference/api-only.md @@ -0,0 +1,128 @@ +(api-only)= + +# Deploying JupyterHub in "API only mode" + +As a service for deploying and managing Jupyter servers for users, JupyterHub +exposes this functionality _primarily_ via a [REST API](rest). +For convenience, JupyterHub also ships with a _basic_ web UI built using that REST API. +The basic web UI enables users to click a button to quickly start and stop their servers, +and it lets admins perform some basic user and server management tasks. + +The REST API has always provided additional functionality beyond what is available in the basic web UI. 
+Similarly, we avoid implementing UI functionality that is also not available via the API. +With JupyterHub 2.0, the basic web UI will **always** be composed using the REST API. +In other words, no UI pages should rely on information not available via the REST API. +Previously, some admin UI functionality could only be achieved via admin pages, +such as paginated requests. + +## Limited UI customization via templates + +The JupyterHub UI is customizable via extensible HTML [templates](templates), +but this has some limited scope to what can be customized. +Adding some content and messages to existing pages is well supported, +but changing the page flow and what pages are available are beyond the scope of what is customizable. + +## Rich UI customization with REST API based apps + +Increasingly, JupyterHub is used purely as an API for managing Jupyter servers +for other Jupyter-based applications that might want to present a different user experience. +If you want a fully customized user experience, +you can now disable the Hub UI and use your own pages together with the JupyterHub REST API +to build your own web application to serve your users, +relying on the Hub only as an API for managing users and servers. + +One example of such an application is [BinderHub][], which powers https://mybinder.org, +and motivates many of these changes. + +BinderHub is distinct from a traditional JupyterHub deployment +because it uses temporary users created for each launch. +Instead of presenting a login page, +users are presented with a form to specify what environment they would like to launch: + +![Binder launch form](../images/binderhub-form.png) + +When a launch is requested: + +1. an image is built, if necessary +2. a temporary user is created, +3. a server is launched for that user, and +4. when running, users are redirected to an already running server with an auth token in the URL +5. 
after the session is over, the user is deleted + +This means that a lot of JupyterHub's UI flow doesn't make sense: + +- there is no way for users to login +- the human user doesn't map onto a JupyterHub `User` in a meaningful way +- when a server isn't running, there isn't a 'restart your server' action available because the user has been deleted +- users do not have any access to any Hub functionality, so presenting pages for those features would be confusing + +BinderHub is one of the motivating use cases for JupyterHub supporting being used _only_ via its API. +We'll use BinderHub here as an example of various configuration options. + +[binderhub]: https://binderhub.readthedocs.io + +## Disabling Hub UI + +`c.JupyterHub.hub_routespec` is a configuration option to specify which URL prefix should be routed to the Hub. +The default is `/` which means that the Hub will receive all requests not already specified to be routed somewhere else. + +There are three values that are most logical for `hub_routespec`: + +- `/` - this is the default, and used in most deployments. + It is also the only option prior to JupyterHub 1.4. +- `/hub/` - this serves only Hub pages, both UI and API +- `/hub/api` - this serves _only the Hub API_, so all Hub UI is disabled, + aside from the OAuth confirmation page, if used. + +If you choose a hub routespec other than `/`, +the main JupyterHub feature you will lose is the automatic handling of requests for `/user/:username` +when the requested server is not running. + +JupyterHub's handling of this request shows this page, +telling you that the server is not running, +with a button to launch it again: + +![screenshot of hub page for server not running](../images/server-not-running.png) + +If you set `hub_routespec` to something other than `/`, +it is likely that you also want to register another destination for `/` to handle requests to not-running servers. 
+If you don't, you will see a default 404 page from the proxy: + +![screenshot of CHP default 404](../images/chp-404.png) + +For mybinder.org, the default "start my server" page doesn't make sense, +because when a server is gone, there is no restart action. +Instead, we provide hints about how to get back to a link to start a _new_ server: + +![screenshot of mybinder.org 404](../images/binder-404.png) + +To achieve this, mybinder.org registers a route for `/` that goes to a custom endpoint +that runs nginx and only serves this static HTML error page. +This is set with + +```python +c.Proxy.extra_routes = { + "/": "http://custom-404-entpoint/", +} +``` + +You may want to use an alternate behavior, such as redirecting to a landing page, +or taking some other action based on the requested page. + +If you use `c.JupyterHub.hub_routespec = "/hub/"`, +then all the Hub pages will be available, +and only this default-page-404 issue will come up. + +If you use `c.JupyterHub.hub_routespec = "/hub/api/"`, +then only the Hub _API_ will be available, +and all UI will be up to you. +mybinder.org takes this last option, +because none of the Hub UI pages really make sense. +Binder users don't have any reason to know or care that JupyterHub happens +to be an implementation detail of how their environment is managed. +Seeing Hub error pages and messages in that situation is more likely to be confusing than helpful. + +:::{versionadded} 1.4 + +`c.JupyterHub.hub_routespec` and `c.Proxy.extra_routes` are new in JupyterHub 1.4. +::: diff --git a/docs/source/reference/authenticators.md b/docs/source/reference/authenticators.md index c7016d0d..ee8227c8 100644 --- a/docs/source/reference/authenticators.md +++ b/docs/source/reference/authenticators.md @@ -1,6 +1,6 @@ # Authenticators -The [Authenticator][] is the mechanism for authorizing users to use the +The {class}`.Authenticator` is the mechanism for authorizing users to use the Hub and single user notebook servers. 
## The default PAM Authenticator @@ -36,7 +36,7 @@ A [generic implementation](https://github.com/jupyterhub/oauthenticator/blob/mas ## The Dummy Authenticator When testing, it may be helpful to use the -:class:`~jupyterhub.auth.DummyAuthenticator`. This allows for any username and +{class}`jupyterhub.auth.DummyAuthenticator`. This allows for any username and password unless if a global password has been set. Once set, any username will still be accepted but the correct password will need to be provided. @@ -88,7 +88,6 @@ class DictionaryAuthenticator(Authenticator): return data['username'] ``` - #### Normalize usernames Since the Authenticator and Spawner both use the same username, @@ -111,11 +110,8 @@ normalize usernames using PAM (basically round-tripping them: username to uid to username), which is useful in case you use some external service that allows multiple usernames mapping to the same user (such as ActiveDirectory, yes, this really happens). When -`pam_normalize_username` is on, usernames are *not* normalized to +`pam_normalize_username` is on, usernames are _not_ normalized to lowercase. -NOTE: Earlier it says that usernames are normalized using PAM. -I guess that doesn't normalize them? - #### Validate usernames @@ -133,7 +129,6 @@ To only allow usernames that start with 'w': c.Authenticator.username_pattern = r'w.*' ``` - ### How to write a custom authenticator You can use custom Authenticator subclasses to enable authentication @@ -141,12 +136,11 @@ via other mechanisms. One such example is using [GitHub OAuth][]. Because the username is passed from the Authenticator to the Spawner, a custom Authenticator and Spawner are often used together. 
-For example, the Authenticator methods, [pre_spawn_start(user, spawner)][] -and [post_spawn_stop(user, spawner)][], are hooks that can be used to do +For example, the Authenticator methods, {meth}`.Authenticator.pre_spawn_start` +and {meth}`.Authenticator.post_spawn_stop`, are hooks that can be used to do auth-related startup (e.g. opening PAM sessions) and cleanup (e.g. closing PAM sessions). - See a list of custom Authenticators [on the wiki](https://github.com/jupyterhub/jupyterhub/wiki/Authenticators). If you are interested in writing a custom authenticator, you can read @@ -187,7 +181,6 @@ Additionally, configurable attributes for your authenticator will appear in jupyterhub help output and auto-generated configuration files via `jupyterhub --generate-config`. - ### Authentication state JupyterHub 0.8 adds the ability to persist state related to authentication, @@ -221,25 +214,22 @@ To store auth_state, two conditions must be met: export JUPYTERHUB_CRYPT_KEY=$(openssl rand -hex 32) ``` - JupyterHub uses [Fernet](https://cryptography.io/en/latest/fernet/) to encrypt auth_state. To facilitate key-rotation, `JUPYTERHUB_CRYPT_KEY` may be a semicolon-separated list of encryption keys. If there are multiple keys present, the **first** key is always used to persist any new auth_state. - #### Using auth_state Typically, if `auth_state` is persisted it is desirable to affect the Spawner environment in some way. This may mean defining environment variables, placing certificate in the user's home directory, etc. 
-The `Authenticator.pre_spawn_start` method can be used to pass information from authenticator state +The {meth}`Authenticator.pre_spawn_start` method can be used to pass information from authenticator state to Spawner environment: ```python class MyAuthenticator(Authenticator): - @gen.coroutine - def authenticate(self, handler, data=None): - username = yield identify_user(handler, data) - upstream_token = yield token_for_user(username) + async def authenticate(self, handler, data=None): + username = await identify_user(handler, data) + upstream_token = await token_for_user(username) return { 'name': username, 'auth_state': { @@ -247,21 +237,69 @@ class MyAuthenticator(Authenticator): }, } - @gen.coroutine - def pre_spawn_start(self, user, spawner): + async def pre_spawn_start(self, user, spawner): """Pass upstream_token to spawner via environment variable""" - auth_state = yield user.get_auth_state() + auth_state = await user.get_auth_state() if not auth_state: # auth_state not enabled return spawner.environment['UPSTREAM_TOKEN'] = auth_state['upstream_token'] ``` +Note that environment variable names and values are always strings, so passing multiple values means setting multiple environment variables or serializing more complex data into a single variable, e.g. as a JSON string. + +auth state can also be used to configure the spawner via _config_ without subclassing +by setting `c.Spawner.auth_state_hook`. This function will be called with `(spawner, auth_state)`, +only when auth_state is defined. + +For example: +(for KubeSpawner) + +```python +def auth_state_hook(spawner, auth_state): + spawner.volumes = auth_state['user_volumes'] + spawner.mounts = auth_state['user_mounts'] + +c.Spawner.auth_state_hook = auth_state_hook +``` + +(authenticator-groups)= + +## Authenticator-managed group membership + +:::{versionadded} 2.2 +::: + +Some identity providers may have their own concept of group membership that you would like to preserve in JupyterHub. 
This is now possible with `Authenticator.manage_groups`.
- -[Authenticator]: https://github.com/jupyterhub/jupyterhub/blob/master/jupyterhub/auth.py -[PAM]: https://en.wikipedia.org/wiki/Pluggable_authentication_module -[OAuth]: https://en.wikipedia.org/wiki/OAuth -[GitHub OAuth]: https://developer.github.com/v3/oauth/ -[OAuthenticator]: https://github.com/jupyterhub/oauthenticator -[pre_spawn_start(user, spawner)]: https://jupyterhub.readthedocs.io/en/latest/api/auth.html#jupyterhub.auth.Authenticator.pre_spawn_start -[post_spawn_stop(user, spawner)]: https://jupyterhub.readthedocs.io/en/latest/api/auth.html#jupyterhub.auth.Authenticator.post_spawn_stop +[pam]: https://en.wikipedia.org/wiki/Pluggable_authentication_module +[oauth]: https://en.wikipedia.org/wiki/OAuth +[github oauth]: https://developer.github.com/v3/oauth/ +[oauthenticator]: https://github.com/jupyterhub/oauthenticator diff --git a/docs/source/reference/config-ghoauth.md b/docs/source/reference/config-ghoauth.md index b120b3b2..bd8e290b 100644 --- a/docs/source/reference/config-ghoauth.md +++ b/docs/source/reference/config-ghoauth.md @@ -3,18 +3,17 @@ In this example, we show a configuration file for a fairly standard JupyterHub deployment with the following assumptions: -* Running JupyterHub on a single cloud server -* Using SSL on the standard HTTPS port 443 -* Using GitHub OAuth (using oauthenticator) for login -* Using the default spawner (to configure other spawners, uncomment and edit +- Running JupyterHub on a single cloud server +- Using SSL on the standard HTTPS port 443 +- Using GitHub OAuth (using [OAuthenticator](https://oauthenticator.readthedocs.io/en/latest)) for login +- Using the default spawner (to configure other spawners, uncomment and edit `spawner_class` as well as follow the instructions for your desired spawner) -* Users exist locally on the server -* Users' notebooks to be served from `~/assignments` to allow users to browse +- Users exist locally on the server +- Users' notebooks to be served from `~/assignments` to allow users 
to browse for notebooks within other users' home directories -* You want the landing page for each user to be a `Welcome.ipynb` notebook in - their assignments directory. -* All runtime files are put into `/srv/jupyterhub` and log files in `/var/log`. - +- You want the landing page for each user to be a `Welcome.ipynb` notebook in + their assignments directory +- All runtime files are put into `/srv/jupyterhub` and log files in `/var/log` The `jupyterhub_config.py` file would have these settings: @@ -52,7 +51,7 @@ c.GitHubOAuthenticator.oauth_callback_url = os.environ['OAUTH_CALLBACK_URL'] c.LocalAuthenticator.create_system_users = True # specify users and admin -c.Authenticator.whitelist = {'rgbkrk', 'minrk', 'jhamrick'} +c.Authenticator.allowed_users = {'rgbkrk', 'minrk', 'jhamrick'} c.Authenticator.admin_users = {'jhamrick', 'rgbkrk'} # uses the default spawner @@ -70,7 +69,7 @@ c.Spawner.args = ['--NotebookApp.default_url=/notebooks/Welcome.ipynb'] ``` Using the GitHub Authenticator requires a few additional -environment variable to be set prior to launching JupyterHub: +environment variables to be set prior to launching JupyterHub: ```bash export GITHUB_CLIENT_ID=github_id @@ -80,3 +79,5 @@ export CONFIGPROXY_AUTH_TOKEN=super-secret # append log output to log file /var/log/jupyterhub.log jupyterhub -f /etc/jupyterhub/jupyterhub_config.py &>> /var/log/jupyterhub.log ``` + +Visit the [Github OAuthenticator reference](https://oauthenticator.readthedocs.io/en/latest/api/gen/oauthenticator.github.html) to see the full list of options for configuring Github OAuth with JupyterHub. diff --git a/docs/source/reference/config-proxy.md b/docs/source/reference/config-proxy.md index 26ec8546..8c4dc373 100644 --- a/docs/source/reference/config-proxy.md +++ b/docs/source/reference/config-proxy.md @@ -6,15 +6,15 @@ SSL port `443`. This could be useful if the JupyterHub server machine is also hosting other domains or content on `443`. 
The goal in this example is to satisfy the following: -* JupyterHub is running on a server, accessed *only* via `HUB.DOMAIN.TLD:443` -* On the same machine, `NO_HUB.DOMAIN.TLD` strictly serves different content, +- JupyterHub is running on a server, accessed _only_ via `HUB.DOMAIN.TLD:443` +- On the same machine, `NO_HUB.DOMAIN.TLD` strictly serves different content, also on port `443` -* `nginx` or `apache` is used as the public access point (which means that - only nginx/apache will bind to `443`) -* After testing, the server in question should be able to score at least an A on the +- `nginx` or `apache` is used as the public access point (which means that + only nginx/apache will bind to `443`) +- After testing, the server in question should be able to score at least an A on the Qualys SSL Labs [SSL Server Test](https://www.ssllabs.com/ssltest/) -Let's start out with needed JupyterHub configuration in `jupyterhub_config.py`: +Let's start out with the needed JupyterHub configuration in `jupyterhub_config.py`: ```python # Force the proxy to only listen to connections to 127.0.0.1 (on port 8000) @@ -30,15 +30,15 @@ This can take a few minutes: openssl dhparam -out /etc/ssl/certs/dhparam.pem 4096 ``` -## nginx +## Nginx This **`nginx` config file** is fairly standard fare except for the two `location` blocks within the main section for HUB.DOMAIN.tld. -To create a new site for jupyterhub in your nginx config, make a new file +To create a new site for jupyterhub in your Nginx config, make a new file in `sites.enabled`, e.g. 
`/etc/nginx/sites.enabled/jupyterhub.conf`: ```bash -# top-level http config for websocket headers +# Top-level HTTP config for WebSocket headers # If Upgrade is defined, Connection = upgrade # If Upgrade is empty, Connection = close map $http_upgrade $connection_upgrade { @@ -51,7 +51,7 @@ server { listen 80; server_name HUB.DOMAIN.TLD; - # Tell all requests to port 80 to be 302 redirected to HTTPS + # Redirect the request to HTTPS return 302 https://$host$request_uri; } @@ -75,7 +75,7 @@ server { ssl_stapling_verify on; add_header Strict-Transport-Security max-age=15768000; - # Managing literal requests to the JupyterHub front end + # Managing literal requests to the JupyterHub frontend location / { proxy_pass http://127.0.0.1:8000; proxy_set_header X-Real-IP $remote_addr; @@ -83,8 +83,12 @@ server { proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; # websocket headers + proxy_http_version 1.1; proxy_set_header Upgrade $http_upgrade; proxy_set_header Connection $connection_upgrade; + proxy_set_header X-Scheme $scheme; + + proxy_buffering off; } # Managing requests to verify letsencrypt host @@ -97,10 +101,10 @@ server { If `nginx` is not running on port 443, substitute `$http_host` for `$host` on the lines setting the `Host` header. -`nginx` will now be the front facing element of JupyterHub on `443` which means +`nginx` will now be the front-facing element of JupyterHub on `443` which means it is also free to bind other servers, like `NO_HUB.DOMAIN.TLD` to the same port on the same machine and network interface. 
In fact, one can simply use the same -server blocks as above for `NO_HUB` and simply add line for the root directory +server blocks as above for `NO_HUB` and simply add a line for the root directory of the site as well as the applicable location call: ```bash @@ -108,7 +112,7 @@ server { listen 80; server_name NO_HUB.DOMAIN.TLD; - # Tell all requests to port 80 to be 302 redirected to HTTPS + # Redirect the request to HTTPS return 302 https://$host$request_uri; } @@ -139,25 +143,40 @@ Now restart `nginx`, restart the JupyterHub, and enjoy accessing `https://HUB.DOMAIN.TLD` while serving other content securely on `https://NO_HUB.DOMAIN.TLD`. +### SELinux permissions for Nginx + +On distributions with SELinux enabled (e.g. Fedora), one may encounter permission errors +when the Nginx service is started. + +We need to allow Nginx to perform network relay and connect to the JupyterHub port. The +following commands do that: + +```bash +semanage port -a -t http_port_t -p tcp 8000 +setsebool -P httpd_can_network_relay 1 +setsebool -P httpd_can_network_connect 1 +``` + +Replace 8000 with the port the JupyterHub server is running from. ## Apache -As with nginx above, you can use [Apache](https://httpd.apache.org) as the reverse proxy. -First, we will need to enable the apache modules that we are going to need: +As with Nginx above, you can use [Apache](https://httpd.apache.org) as the reverse proxy. 
+First, we will need to enable the Apache modules that we are going to need: ```bash -a2enmod ssl rewrite proxy proxy_http proxy_wstunnel +a2enmod ssl rewrite proxy headers proxy_http proxy_wstunnel ``` -Our Apache configuration is equivalent to the nginx configuration above: +Our Apache configuration is equivalent to the Nginx configuration above: - Redirect HTTP to HTTPS - Good SSL Configuration -- Support for websockets on any proxied URL +- Support for WebSocket on any proxied URL - JupyterHub is running locally at http://127.0.0.1:8000 ```bash -# redirect HTTP to HTTPS +# Redirect HTTP to HTTPS Listen 80 ServerName HUB.DOMAIN.TLD @@ -169,15 +188,26 @@ Listen 443 ServerName HUB.DOMAIN.TLD - # configure SSL + # Enable HTTP/2, if available + Protocols h2 http/1.1 + + # HTTP Strict Transport Security (mod_headers is required) (63072000 seconds) + Header always set Strict-Transport-Security "max-age=63072000" + + # Configure SSL SSLEngine on SSLCertificateFile /etc/letsencrypt/live/HUB.DOMAIN.TLD/fullchain.pem SSLCertificateKeyFile /etc/letsencrypt/live/HUB.DOMAIN.TLD/privkey.pem - SSLProtocol All -SSLv2 -SSLv3 SSLOpenSSLConfCmd DHParameters /etc/ssl/certs/dhparam.pem - SSLCipherSuite EECDH+AESGCM:EDH+AESGCM:AES256+EECDH:AES256+EDH - # Use RewriteEngine to handle websocket connection upgrades + # Intermediate configuration from SSL-config.mozilla.org (2022-03-03) + # Please note, that this configuration might be outdated - please update it accordingly using https://ssl-config.mozilla.org/ + SSLProtocol all -SSLv3 -TLSv1 -TLSv1.1 + SSLCipherSuite ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:DHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384 + SSLHonorCipherOrder off + SSLSessionTickets off + + # Use RewriteEngine to handle WebSocket connection upgrades RewriteEngine On RewriteCond %{HTTP:Connection} Upgrade [NC] RewriteCond 
%{HTTP:Upgrade} websocket [NC] @@ -189,26 +219,29 @@ Listen 443 # proxy to JupyterHub ProxyPass http://127.0.0.1:8000/ ProxyPassReverse http://127.0.0.1:8000/ + RequestHeader set "X-Forwarded-Proto" expr=%{REQUEST_SCHEME} ``` - -In case of the need to run the jupyterhub under /jhub/ or other location please use the below configurations: +In case of the need to run JupyterHub under /jhub/ or another location please use the below configurations: + - JupyterHub running locally at http://127.0.0.1:8000/jhub/ or other location httpd.conf amendments: + ```bash RewriteRule /jhub/(.*) ws://127.0.0.1:8000/jhub/$1 [P,L] RewriteRule /jhub/(.*) http://127.0.0.1:8000/jhub/$1 [P,L] - + ProxyPass /jhub/ http://127.0.0.1:8000/jhub/ ProxyPassReverse /jhub/ http://127.0.0.1:8000/jhub/ - ``` - +``` + jupyterhub_config.py amendments: + ```python # The public facing URL of the whole JupyterHub application. -# This is the address on which the proxy will bind. Sets protocol, ip, base_url +# This is the address on which the proxy will bind. Sets protocol, IP, base_url c.JupyterHub.bind_url = 'http://127.0.0.1:8000/jhub/' ``` diff --git a/docs/source/reference/config-reference.rst b/docs/source/reference/config-reference.rst new file mode 100644 index 00000000..22ee25cb --- /dev/null +++ b/docs/source/reference/config-reference.rst @@ -0,0 +1,30 @@ +============================== +Configuration Reference +============================== + +.. important:: + + Make sure the version of JupyterHub for this documentation matches your + installation version, as the output of this command may change between versions. + +JupyterHub configuration +------------------------ + +As explained in the `Configuration Basics <../getting-started/config-basics.html#generate-a-default-config-file>`_ +section, the ``jupyterhub_config.py`` can be automatically generated via + + .. code-block:: bash + + jupyterhub --generate-config + + +The following contains the output of that command for reference. + +.. 
jupyterhub-generate-config:: + +JupyterHub help command output +------------------------------ + +This section contains the output of the command ``jupyterhub --help-all``. + +.. jupyterhub-help-all:: diff --git a/docs/source/reference/config-sudo.md b/docs/source/reference/config-sudo.md index b2f32e82..9cbfe642 100644 --- a/docs/source/reference/config-sudo.md +++ b/docs/source/reference/config-sudo.md @@ -6,10 +6,10 @@ Only do this if you are very sure you must. ## Overview -There are many Authenticators and Spawners available for JupyterHub. Some, such -as DockerSpawner or OAuthenticator, do not need any elevated permissions. This +There are many [Authenticators](../getting-started/authenticators-users-basics) and [Spawners](../getting-started/spawners-basics) available for JupyterHub. Some, such +as [DockerSpawner](https://github.com/jupyterhub/dockerspawner) or [OAuthenticator](https://github.com/jupyterhub/oauthenticator), do not need any elevated permissions. This document describes how to get the full default behavior of JupyterHub while -running notebook servers as real system users on a shared system without +running notebook servers as real system users on a shared system, without running the Hub itself as root. 
Since JupyterHub needs to spawn processes as other users, the simplest way @@ -50,14 +50,13 @@ To do this we add to `/etc/sudoers` (use `visudo` for safe editing of sudoers): - specify the list of users `JUPYTER_USERS` for whom `rhea` can spawn servers - set the command `JUPYTER_CMD` that `rhea` can execute on behalf of users -- give `rhea` permission to run `JUPYTER_CMD` on behalf of `JUPYTER_USERS` +- give `rhea` permission to run `JUPYTER_CMD` on behalf of `JUPYTER_USERS` without entering a password - For example: ```bash -# comma-separated whitelist of users that can spawn single-user servers +# comma-separated list of users that can spawn single-user servers # this should include all of your Hub users Runas_Alias JUPYTER_USERS = rhea, zoe, wash @@ -92,16 +91,16 @@ $ adduser -G jupyterhub newuser Test that the new user doesn't need to enter a password to run the sudospawner command. -This should prompt for your password to switch to rhea, but *not* prompt for +This should prompt for your password to switch to `rhea`, but _not_ prompt for any password for the second switch. It should show some help output about logging options: ```bash $ sudo -u rhea sudo -n -u $USER /usr/local/bin/sudospawner --help Usage: /usr/local/bin/sudospawner [OPTIONS] - + Options: - + --help show this help information ... ``` @@ -121,6 +120,11 @@ the shadow password database. ### Shadow group (Linux) +**Note:** On [Fedora based distributions](https://fedoraproject.org/wiki/List_of_Fedora_remixes) there is no clear way to configure +the PAM database to allow sufficient access for authenticating with the target user's password +from JupyterHub. As a workaround we recommend use an +[alternative authentication method](https://github.com/jupyterhub/jupyterhub/wiki/Authenticators). 
+ ```bash $ ls -l /etc/shadow -rw-r----- 1 root shadow 2197 Jul 21 13:41 shadow @@ -147,12 +151,13 @@ We want our new user to be able to read the shadow passwords, so add it to the s $ sudo usermod -a -G shadow rhea ``` -If you want jupyterhub to serve pages on a restricted port (such as port 80 for http), +If you want jupyterhub to serve pages on a restricted port (such as port 80 for HTTP), then you will need to give `node` permission to do so: ```bash sudo setcap 'cap_net_bind_service=+ep' /usr/bin/node ``` + However, you may want to further understand the consequences of this. ([Further reading](http://man7.org/linux/man-pages/man7/capabilities.7.html)) @@ -162,7 +167,6 @@ distributions' packaging system. This can be used to keep any user's process from using too much CPU cycles. You can configure it accoring to [these instructions](http://ubuntuforums.org/showthread.php?t=992706). - ### Shadow group (FreeBSD) **NOTE:** This has not been tested on FreeBSD and may not work as expected on @@ -184,7 +188,7 @@ $ sudo chgrp shadow /etc/master.passwd $ sudo chmod g+r /etc/master.passwd ``` -We want our new user to be able to read the shadow passwords, so add it to the +We want our new user to be able to read the shadow passwords, so add it to the shadow group: ```bash @@ -218,15 +222,15 @@ Finally, start the server as our newly configured user, `rhea`: ```bash $ cd /etc/jupyterhub $ sudo -u rhea jupyterhub --JupyterHub.spawner_class=sudospawner.SudoSpawner -``` +``` And try logging in. ## Troubleshooting: SELinux If you still get a generic `Permission denied` `PermissionError`, it's possible SELinux is blocking you. -Here's how you can make a module to allow this. -First, put this in a file named `sudo_exec_selinux.te`: +Here's how you can make a module to resolve this. 
+First, put this in a file named `sudo_exec_selinux.te`: ```bash module sudo_exec_selinux 1.1; diff --git a/docs/source/reference/config-user-env.md b/docs/source/reference/config-user-env.md index 27fd9b30..204c062d 100644 --- a/docs/source/reference/config-user-env.md +++ b/docs/source/reference/config-user-env.md @@ -1,8 +1,8 @@ # Configuring user environments -Deploying JupyterHub means you are providing Jupyter notebook environments for +To deploy JupyterHub means you are providing Jupyter notebook environments for multiple users. Often, this includes a desire to configure the user -environment in some way. +environment in a custom way. Since the `jupyterhub-singleuser` server extends the standard Jupyter notebook server, most configuration and documentation that applies to Jupyter Notebook @@ -10,71 +10,56 @@ applies to the single-user environments. Configuration of user environments typically does not occur through JupyterHub itself, but rather through system-wide configuration of Jupyter, which is inherited by `jupyterhub-singleuser`. -**Tip:** When searching for configuration tips for JupyterHub user -environments, try removing JupyterHub from your search because there are a lot -more people out there configuring Jupyter than JupyterHub and the -configuration is the same. +**Tip:** When searching for configuration tips for JupyterHub user environments, you might want to remove JupyterHub from your search because there are a lot more people out there configuring Jupyter than JupyterHub and the configuration is the same. -This section will focus on user environments, including: - -- Installing packages -- Configuring Jupyter and IPython -- Installing kernelspecs -- Using containers vs. 
multi-user hosts +This section will focus on user environments, which includes the following: +- [Installing packages](#installing-packages) +- [Configuring Jupyter and IPython](#configuring-jupyter-and-ipython) +- [Installing kernelspecs](#installing-kernelspecs) +- [Using containers vs. multi-user hosts](#multi-user-hosts-vs-containers) ## Installing packages -To make packages available to users, you generally will install packages -system-wide or in a shared environment. +To make packages available to users, you will typically install packages system-wide or in a shared environment. -This installation location should always be in the same environment that -`jupyterhub-singleuser` itself is installed in, and must be *readable and -executable* by your users. If you want users to be able to install additional -packages, it must also be *writable* by your users. - -If you are using a standard system Python install, you would use: +This installation location should always be in the same environment where +`jupyterhub-singleuser` itself is installed in, and must be _readable and +executable_ by your users. If you want your users to be able to install additional +packages, the installation location must also be _writable_ by your users. +If you are using a standard Python installation on your system, use the following command: ```bash sudo python3 -m pip install numpy ``` -to install the numpy package in the default system Python 3 environment +to install the numpy package in the default Python 3 environment on your system (typically `/usr/local`). -TODO: Get a link from the conda team for a description of what "appropriate permissions for users" is - You may also use conda to install packages. If you do, you should make sure that the conda environment has appropriate permissions for users to be able to run Python code in the env. The env must be *readable and executable* by all users. 
Additionally it must be *writeable* if you want users to install additional packages. - ## Configuring Jupyter and IPython [Jupyter](https://jupyter-notebook.readthedocs.io/en/stable/config_overview.html) and [IPython](https://ipython.readthedocs.io/en/stable/development/config.html) have their own configuration systems. -As a JupyterHub administrator, you will typically want to install and configure -environments for all JupyterHub users. For example, you wish for each student in -a class to have the same user environment configuration. - -Jupyter and IPython support **"system-wide"** locations for configuration, which -is the logical place to put global configuration that you want to affect all -users. It's generally more efficient to configure user environments "system-wide", -and it's a good idea to avoid creating files in users' home directories. +As a JupyterHub administrator, you will typically want to install and configure environments for all JupyterHub users. For example, let's say you wish for each student in a class to have the same user environment configuration. +Jupyter and IPython support **"system-wide"** locations for configuration, which is the logical place to put global configuration that you want to affect all users. It's generally more efficient to configure user environments "system-wide", and it's a good practice to avoid creating files in the users' home directories. The typical locations for these config files are: + - **system-wide** in `/etc/{jupyter|ipython}` - **env-wide** (environment wide) in `{sys.prefix}/etc/{jupyter|ipython}`. 
### Example: Enable an extension system-wide -For example, to enable the `cython` IPython extension for all of your users, -create the file `/etc/ipython/ipython_config.py`: +For example, to enable the `cython` IPython extension for all of your users, create the file `/etc/ipython/ipython_config.py`: ```python c.InteractiveShellApp.extensions.append("cython") @@ -82,32 +67,39 @@ c.InteractiveShellApp.extensions.append("cython") ### Example: Enable a Jupyter notebook configuration setting for all users -To enable Jupyter notebook's internal idle-shutdown behavior (requires -notebook ≥ 5.4), set the following in the `/etc/jupyter/jupyter_notebook_config.py` -file: +:::{note} +These examples configure the Jupyter ServerApp, which is used by JupyterLab, the default in JupyterHub 2.0. + +If you are using the classic Jupyter Notebook server, +the same things should work, +with the following substitutions: + +- Search for `jupyter_server_config`, and replace with `jupyter_notebook_config` +- Search for `ServerApp`, and replace with `NotebookApp` + +::: + +To enable Jupyter notebook's internal idle-shutdown behavior (requires notebook ≥ 5.4), set the following in the `/etc/jupyter/jupyter_server_config.py` file: ```python # shutdown the server after no activity for an hour -c.NotebookApp.shutdown_no_activity_timeout = 60 * 60 +c.ServerApp.shutdown_no_activity_timeout = 60 * 60 # shutdown kernels after no activity for 20 minutes c.MappingKernelManager.cull_idle_timeout = 20 * 60 # check for idle kernels every two minutes c.MappingKernelManager.cull_interval = 2 * 60 ``` - ## Installing kernelspecs -You may have multiple Jupyter kernels installed and want to make sure that -they are available to all of your users. This means installing kernelspecs -either system-wide (e.g. in /usr/local/) or in the `sys.prefix` of JupyterHub +You may have multiple Jupyter kernels installed and want to make sure that they are available to all of your users. 
This means installing kernelspecs either system-wide (e.g. in /usr/local/) or in the `sys.prefix` of JupyterHub itself. -Jupyter kernelspec installation is system wide by default, but some kernels +Jupyter kernelspec installation is system-wide by default, but some kernels may default to installing kernelspecs in your home directory. These will need to be moved system-wide to ensure that they are accessible. -You can see where your kernelspecs are with: +To see where your kernelspecs are, you can use the following command: ```bash jupyter kernelspec list @@ -115,15 +107,13 @@ jupyter kernelspec list ### Example: Installing kernels system-wide -Assuming I have a Python 2 and Python 3 environment that I want to make -sure are available, I can install their specs system-wide (in /usr/local) with: +Let's assume that I have a Python 2 and Python 3 environment that I want to make sure are available, I can install their specs **system-wide** (in /usr/local) using the following command: ```bash -/path/to/python3 -m IPython kernel install --prefix=/usr/local -/path/to/python2 -m IPython kernel install --prefix=/usr/local +/path/to/python3 -m ipykernel install --prefix=/usr/local +/path/to/python2 -m ipykernel install --prefix=/usr/local ``` - ## Multi-user hosts vs. Containers There are two broad categories of user environments that depend on what @@ -136,31 +126,25 @@ How you configure user environments for each category can differ a bit depending on what Spawner you are using. The first category is a **shared system (multi-user host)** where -each user has a JupyterHub account and a home directory as well as being +each user has a JupyterHub account, a home directory as well as being a real system user. In this example, shared configuration and installation -must be in a 'system-wide' location, such as `/etc/` or `/usr/local` +must be in a 'system-wide' location, such as `/etc/`, or `/usr/local` or a custom prefix such as `/opt/conda`. 
When JupyterHub uses **container-based** Spawners (e.g. KubeSpawner or -DockerSpawner), the 'system-wide' environment is really the container image -which you are using for users. +DockerSpawner), the 'system-wide' environment is really the container image used for users. -In both cases, you want to *avoid putting configuration in user home -directories* because users can change those configuration settings. Also, -home directories typically persist once they are created, so they are -difficult for admins to update later. +In both cases, you want to _avoid putting configuration in user home +directories_ because users can change those configuration settings. Also, home directories typically persist once they are created, thereby making it difficult for admins to update later. ## Named servers -By default, in a JupyterHub deployment each user has exactly one server. +By default, in a JupyterHub deployment, each user has one server only. JupyterHub can, however, have multiple servers per user. -This is most useful in deployments where users can configure the environment -in which their server will start (e.g. resource requests on an HPC cluster), -so that a given user can have multiple configurations running at the same time, -without having to stop and restart their one server. +This is mostly useful in deployments where users can configure the environment in which their server will start (e.g. resource requests on an HPC cluster), so that a given user can have multiple configurations running at the same time, without having to stop and restart their own server. -To allow named servers: +To allow named servers, include this code snippet in your config file: ```python c.JupyterHub.allow_named_servers = True @@ -176,10 +160,66 @@ as well as the admin page: ![named servers on the admin page](../images/named-servers-admin.png) Named servers can be accessed, created, started, stopped, and deleted -from these pages. Activity tracking is now per-server as well. 
+from these pages. Activity tracking is now per server as well. -The number of named servers per user can be limited by setting +To limit the number of **named servers** per user by setting a constant value, include this code snippet in your config file: ```python c.JupyterHub.named_server_limit_per_user = 5 ``` + +Alternatively, to use a callable/awaitable based on the handler object, include this code snippet in your config file: + +```python +def named_server_limit_per_user_fn(handler): + user = handler.current_user + if user and user.admin: + return 0 + return 5 + +c.JupyterHub.named_server_limit_per_user = named_server_limit_per_user_fn +``` + +This can be useful for quota service implementations. The example above limits the number of named servers for non-admin users only. + +If `named_server_limit_per_user` is set to `0`, no limit is enforced. + +(classic-notebook-ui)= + +## Switching back to the classic notebook + +By default, the single-user server launches JupyterLab, +which is based on [Jupyter Server][]. + +This is the default server when running JupyterHub ≥ 2.0. +To switch to using the legacy Jupyter Notebook server, you can set the `JUPYTERHUB_SINGLEUSER_APP` environment variable +(in the single-user environment) to: + +```bash +export JUPYTERHUB_SINGLEUSER_APP='notebook.notebookapp.NotebookApp' +``` + +[jupyter server]: https://jupyter-server.readthedocs.io +[jupyter notebook]: https://jupyter-notebook.readthedocs.io + +:::{versionchanged} 2.0 + +JupyterLab is now the default single-user UI, if available, +which is based on the [Jupyter Server][], +no longer the legacy [Jupyter Notebook][] server. 
+JupyterHub prior to 2.0 launched the legacy notebook server (`jupyter notebook`), +and the Jupyter server could be selected by specifying the following: + +```python +# jupyterhub_config.py +c.Spawner.cmd = ["jupyter-labhub"] +``` + +Alternatively, for an otherwise customized Jupyter Server app, +set the environment variable using the following command: + +```bash +export JUPYTERHUB_SINGLEUSER_APP='jupyter_server.serverapp.ServerApp' +``` + +::: diff --git a/docs/source/reference/database.md b/docs/source/reference/database.md index 5db0b0d9..b5a89629 100644 --- a/docs/source/reference/database.md +++ b/docs/source/reference/database.md @@ -46,8 +46,8 @@ additional configuration required for MySQL that is not needed for PostgreSQL. - You should use the `pymysql` sqlalchemy provider (the other one, MySQLdb, isn't available for py3). -- You also need to set `pool_recycle` to some value (typically 60 - 300) - which depends on your MySQL setup. This is necessary since MySQL kills +- You also need to set `pool_recycle` to some value (typically 60 - 300) + which depends on your MySQL setup. This is necessary since MySQL kills connections serverside if they've been idle for a while, and the connection from the hub will be idle for longer than most connections. This behavior will lead to frustrating 'the connection has gone away' errors from diff --git a/docs/source/reference/index.rst b/docs/source/reference/index.rst index 088d0ae8..98e2be50 100644 --- a/docs/source/reference/index.rst +++ b/docs/source/reference/index.rst @@ -1,6 +1,9 @@ Technical Reference =================== +This section covers more of the details of the JupyterHub architecture, as well as +what happens under-the-hood when you deploy and configure your JupyterHub. + .. 
toctree:: :maxdepth: 2 @@ -13,10 +16,17 @@ Technical Reference proxy separate-proxy rest + rest-api + server-api + monitoring database templates + api-only + ../events/index config-user-env config-examples config-ghoauth config-proxy config-sudo + config-reference + oauth diff --git a/docs/source/reference/monitoring.rst b/docs/source/reference/monitoring.rst new file mode 100644 index 00000000..774656ec --- /dev/null +++ b/docs/source/reference/monitoring.rst @@ -0,0 +1,20 @@ +Monitoring +========== + +This section covers details on monitoring the state of your JupyterHub installation. + +JupyterHub exposes the ``/metrics`` endpoint that returns text describing its current +operational state formatted in a way `Prometheus <https://prometheus.io/>`_ understands. + +Prometheus is a separate open source tool that can be configured to repeatedly poll +JupyterHub's ``/metrics`` endpoint to parse and save its current state. + +By doing so, Prometheus can describe JupyterHub's evolving state over time. +This evolving state can then be accessed through Prometheus, which exposes its underlying +storage to those allowed to access it, and be presented with dashboards by a +tool like `Grafana <https://grafana.com/>`_. + +.. toctree:: + :maxdepth: 2 + + metrics diff --git a/docs/source/reference/oauth.md b/docs/source/reference/oauth.md new file mode 100644 index 00000000..ab0aeb86 --- /dev/null +++ b/docs/source/reference/oauth.md @@ -0,0 +1,373 @@ +# JupyterHub and OAuth + +JupyterHub uses [OAuth 2](https://oauth.net/2/) as an internal mechanism for authenticating users. +As such, JupyterHub itself always functions as an OAuth **provider**. +You can find out more about what that means [below](oauth-terms). + +Additionally, JupyterHub is _often_ deployed with [OAuthenticator](https://oauthenticator.readthedocs.io), +where an external identity provider, such as GitHub or KeyCloak, is used to authenticate users. 
+When this is the case, there are _two_ nested OAuth flows: +an _internal_ OAuth flow where JupyterHub is the **provider**, +and an _external_ OAuth flow, where JupyterHub is the **client**. + +This means that when you are using JupyterHub, there is always _at least one_ and often two layers of OAuth involved in a user logging in and accessing their server. + +The following points are noteworthy: + +- Single-user servers _never_ need to communicate with or be aware of the upstream provider configured in your Authenticator. + As far as the servers are concerned, only JupyterHub is an OAuth provider, + and how users authenticate with the Hub itself is irrelevant. +- When interacting with a single-user server, + there are ~always two tokens: + first, a token issued to the server itself to communicate with the Hub API, + and second, a per-user token in the browser to represent the completed login process and authorized permissions. + More on this [later](two-tokens). + +(oauth-terms)= + +## Key OAuth terms + +Here are some key definitions to keep in mind when we are talking about OAuth. +You can also read more in detail [here](https://www.oauth.com/oauth2-servers/definitions/). + +- **provider**: The entity responsible for managing identity and authorization; + always a web server. + JupyterHub is _always_ an OAuth provider for JupyterHub's components. + When OAuthenticator is used, an external service, such as GitHub or KeyCloak, is also an OAuth provider. +- **client**: An entity that requests OAuth **tokens** on a user's behalf; + generally a web server of some kind. + OAuth **clients** are services that _delegate_ authentication and/or authorization + to an OAuth **provider**. + JupyterHub _services_ or single-user _servers_ are OAuth **clients** of the JupyterHub **provider**. + When OAuthenticator is used, JupyterHub is itself _also_ an OAuth **client** for the external OAuth **provider**, e.g. GitHub. 
+- **browser**: A user's web browser, which makes requests and stores things like cookies. +- **token**: The secret value used to represent a user's authorization. This is the final product of the OAuth process. +- **code**: A short-lived temporary secret that the **client** exchanges + for a **token** at the conclusion of OAuth, + in what's generally called the "OAuth callback handler." + +## One oauth flow + +OAuth **flow** is what we call the sequence of HTTP requests involved in authenticating a user and issuing a token, ultimately used for authorizing access to a service or single-user server. + +A single OAuth flow typically goes like this: + +### OAuth request and redirect + +1. A **browser** makes an HTTP request to an OAuth **client**. +2. There are no credentials, so the client _redirects_ the browser to an "authorize" page on the OAuth **provider** with some extra information: + - the OAuth **client ID** of the client itself. + - the **redirect URI** to be redirected back to after completion. + - the **scopes** requested, which the user should be presented with to confirm. + This is the "X would like to be able to Y on your behalf. Allow this?" page you see on all the "Login with ..." pages around the Internet. +3. During this authorize step, + the browser must be _authenticated_ with the provider. + This is often already stored in a cookie, + but if not the provider webapp must begin its _own_ authentication process before serving the authorization page. + This _may_ even begin another OAuth flow! +4. After the user tells the provider that they want to proceed with the authorization, + the provider records this authorization in a short-lived record called an **OAuth code**. +5. Finally, the oauth provider redirects the browser _back_ to the oauth client's "redirect URI" + (or "OAuth callback URI"), + with the OAuth code in a URL parameter. + +That marks the end of the requests made between the **browser** and the **provider**. 
+ +### State after redirect + +At this point: + +- The browser is authenticated with the _provider_. +- The user's authorized permissions are recorded in an _OAuth code_. +- The _provider_ knows that the permissions requested by the OAuth client have been granted, but the client doesn't know this yet. +- All the requests so far have been made directly by the browser. + No requests have originated from the client or provider. + +### OAuth Client Handles Callback Request + +At this stage, we get to finish the OAuth process. +Let's dig into what the OAuth client does when it handles +the OAuth callback request. + +- The OAuth client receives the _code_ and makes an API request to the _provider_ to exchange the code for a real _token_. + This is the first direct request between the OAuth _client_ and the _provider_. +- Once the token is retrieved, the client _usually_ + makes a second API request to the _provider_ + to retrieve information about the owner of the token (the user). + This is the step where behavior diverges for different OAuth providers. + Up to this point, all OAuth providers are the same, following the OAuth specification. + However, OAuth does not define a standard for issuing tokens in exchange for information about their owner or permissions ([OpenID Connect](https://openid.net/connect/) does that), + so this step may be different for each OAuth provider. +- Finally, the OAuth client stores its own record that the user is authorized in a cookie. + This could be the token itself, or any other appropriate representation of successful authentication. +- Now that credentials have been established, + the browser can be redirected to the _original_ URL where it started, + to try the request again. + If the client wasn't able to keep track of the original URL all this time + (not always easy!), + you might end up back at a default landing page instead of where you started the login process. This is frustrating! + +😮‍💨 _phew_. 
+ +So that's _one_ OAuth process. + +## Full sequence of OAuth in JupyterHub + +Let's go through the above OAuth process in JupyterHub, +with specific examples of each HTTP request and what information it contains. +For bonus points, we are using the double-OAuth example of JupyterHub configured with GitHubOAuthenticator. + +To disambiguate, we will call the OAuth process where JupyterHub is the **provider** "internal OAuth," +and the one with JupyterHub as a **client** "external OAuth." + +Our starting point: + +- a user's single-user server is running. Let's call them `danez` +- Jupyterhub is running with GitHub as an OAuth provider (this means two full instances of OAuth), +- Danez has a fresh browser session with no cookies yet. + +First request: + +- browser->single-user server running JupyterLab or Jupyter Classic +- `GET /user/danez/notebooks/mynotebook.ipynb` +- no credentials, so single-user server (as an OAuth **client**) starts internal OAuth process with JupyterHub (the **provider**) +- response: 302 redirect -> `/hub/api/oauth2/authorize` + with: + - client-id=`jupyterhub-user-danez` + - redirect-uri=`/user/danez/oauth_callback` (we'll come back later!) + +Second request, following redirect: + +- browser->JupyterHub +- `GET /hub/api/oauth2/authorize` +- no credentials, so JupyterHub starts external OAuth process _with GitHub_ +- response: 302 redirect -> `https://github.com/login/oauth/authorize` + with: + - client-id=`jupyterhub-client-uuid` + - redirect-uri=`/hub/oauth_callback` (we'll come back later!) + +_pause_ This is where JupyterHub configuration comes into play. +Recall, in this case JupyterHub is using: + +```python +c.JupyterHub.authenticator_class = 'github' +``` + +That means authenticating a request to the Hub itself starts +a _second_, external OAuth process with GitHub as a provider. +This external OAuth process is optional, though. 
+If you were using the default username+password PAMAuthenticator, +this redirect would have been to `/hub/login` instead, to present the user +with a login form. + +Third request, following redirect: + +- browser->GitHub +- `GET https://github.com/login/oauth/authorize` + +Here, GitHub prompts for login and asks for confirmation of authorization +(more redirects if you aren't logged in to GitHub yet, but ultimately back to this `/authorize` URL). + +After successful authorization +(either by looking up a pre-existing authorization, +or recording it via form submission) +GitHub issues an **OAuth code** and redirects to `/hub/oauth_callback?code=github-code` + +Next request: + +- browser->JupyterHub +- `GET /hub/oauth_callback?code=github-code` + +Inside the callback handler, JupyterHub makes two API requests: + +The first: + +- JupyterHub->GitHub +- `POST https://github.com/login/oauth/access_token` +- request made with OAuth **code** from URL parameter +- response includes an access **token** + +The second: + +- JupyterHub->GitHub +- `GET https://api.github.com/user` +- request made with access **token** in the `Authorization` header +- response is the user model, including username, email, etc. + +Now the external OAuth callback request completes with: + +- set cookie on `/hub/` path, recording jupyterhub authentication so we don't need to do external OAuth with GitHub again for a while +- redirect -> `/hub/api/oauth2/authorize` + +🎉 At this point, we have completed our first OAuth flow! 🎉 + +Now, we get our first repeated request: + +- browser->jupyterhub +- `GET /hub/api/oauth2/authorize` +- this time with credentials, + so jupyterhub either + 1. serves the internal authorization confirmation page, or + 2. 
automatically accepts authorization (shortcut taken when a user is visiting their own server) +- redirect -> `/user/danez/oauth_callback?code=jupyterhub-code` + +Here, we start the same OAuth callback process as before, but at Danez's single-user server for the _internal_ OAuth. + +- browser->single-user server +- `GET /user/danez/oauth_callback` + +(in handler) + +Inside the internal OAuth callback handler, +Danez's server makes two API requests to JupyterHub: + +The first: + +- single-user server->JupyterHub +- `POST /hub/api/oauth2/token` +- request made with oauth code from url parameter +- response includes an API token + +The second: + +- single-user server->JupyterHub +- `GET /hub/api/user` +- request made with token in the `Authorization` header +- response is the user model, including username, groups, etc. + +Finally completing `GET /user/danez/oauth_callback`: + +- response sets cookie, storing encrypted access token +- _finally_ redirects back to the original `/user/danez/notebooks/mynotebook.ipynb` + +Final request: + +- browser -> single-user server +- `GET /user/danez/notebooks/mynotebook.ipynb` +- encrypted jupyterhub token in cookie + +To authenticate this request, the single token stored in the encrypted cookie is passed to the Hub for verification: + +- single-user server -> Hub +- `GET /hub/api/user` +- browser's token in Authorization header +- response: user model with name, groups, etc. + +If the user model matches who should be allowed (e.g. Danez), +then the request is allowed. +See {doc}`../rbac/scopes` for how JupyterHub uses scopes to determine authorized access to servers and services. + +_the end_ + +## Token caches and expiry + +Because tokens represent information from an external source, +they can become 'stale,' +or the information they represent may no longer be accurate. 
+For example: a user's GitHub account may no longer be authorized to use JupyterHub, +which should ultimately propagate to revoking access and force logging in again. + +To handle this, OAuth tokens and the various places they are stored can _expire_, +which should have the same effect as no credentials, +and trigger the authorization process again. + +In JupyterHub's internal OAuth, we have these layers of information that can go stale: + +- The OAuth client has a **cache** of Hub responses for tokens, + so it doesn't need to make API requests to the Hub for every request it receives. + This cache has an expiry of five minutes by default, + and is governed by the configuration `HubAuth.cache_max_age` in the single-user server. +- The internal OAuth token is stored in a cookie, which has its own expiry (default: 14 days), + governed by `JupyterHub.cookie_max_age_days`. +- The internal OAuth token itself can also expire, + which is by default the same as the cookie expiry, + since it makes sense for the token itself and the place it is stored to expire at the same time. + This is governed by `JupyterHub.cookie_max_age_days` first, + or can be overridden by `JupyterHub.oauth_token_expires_in`. + +That's all for _internal_ auth storage, +but the information from the _external_ authentication provider +(could be PAM or GitHub OAuth, etc.) can also expire. +Authenticator configuration governs when JupyterHub needs to ask again, +triggering the external login process anew before letting a user proceed. + +- `jupyterhub-hub-login` cookie stores that a browser is authenticated with the Hub. + This expires according to `JupyterHub.cookie_max_age_days` configuration, + with a default of 14 days. + The `jupyterhub-hub-login` cookie is encrypted with `JupyterHub.cookie_secret` + configuration. +- {meth}`.Authenticator.refresh_user` is a method to refresh a user's auth info. 
+ By default, it does nothing, but it can return an updated user model if a user's information has changed, + or force a full login process again if needed. +- {attr}`.Authenticator.auth_refresh_age` configuration governs how often + `refresh_user()` will be called to check if a user must login again (default: 300 seconds). +- {attr}`.Authenticator.refresh_pre_spawn` configuration governs whether + `refresh_user()` should be called prior to spawning a server, + to force fresh auth info when a server is launched (default: False). + This can be useful when Authenticators pass access tokens to spawner environments, to ensure they aren't getting a stale token that's about to expire. + +**So what happens when these things expire or get stale?** + +- If the HubAuth **token response cache** expires, + when a request is made with a token, + the Hub is asked for the latest information about the token. + This usually has no visible effect, since it is just refreshing a cache. + If it turns out that the token itself has expired or been revoked, + the request will be denied. +- If the token has expired, but is still in the cookie: + when the token response cache expires, + the next time the server asks the hub about the token, + no user will be identified and the internal OAuth process begins again. +- If the token _cookie_ expires, the next browser request will be made with no credentials, + and the internal OAuth process will begin again. + This will usually have the form of a transparent redirect browsers won't notice. + However, if this occurs on an API request in a long-lived page visit + such as a JupyterLab session, the API request may fail and require + a page refresh to get renewed credentials. +- If the _JupyterHub_ cookie expires, the next time the browser makes a request to the Hub, + the Hub's authorization process must begin again (e.g. login with GitHub). + Hub cookie expiry on its own **does not** mean that a user can no longer access their single-user server! 
+- If credentials from the upstream provider (e.g. GitHub) become stale or outdated, + these will not be refreshed until/unless `refresh_user` is called + _and_ `refresh_user()` on the given Authenticator is implemented to perform such a check. + At this point, few Authenticators implement `refresh_user` to support this feature. + If your Authenticator does not or cannot implement `refresh_user`, + the only way to force a check is to reset the `JupyterHub.cookie_secret` encryption key, + which invalidates the `jupyterhub-hub-login` cookie for all users. + +### Logging out + +Logging out of JupyterHub means clearing and revoking many of these credentials: + +- The `jupyterhub-hub-login` cookie is revoked, meaning the next request to the Hub itself will require a new login. +- The token stored in the `jupyterhub-user-username` cookie for the single-user server + will be revoked, based on its association with `jupyterhub-session-id`, but the _cookie itself cannot be cleared at this point_ +- The shared `jupyterhub-session-id` is cleared, which ensures that the HubAuth **token response cache** will not be used, + and the next request with the expired token will ask the Hub, which will inform the single-user server that the token has expired + +## Extra bits + +(two-tokens)= + +### A tale of two tokens + +**TODO**: discuss API token issued to server at startup ($JUPYTERHUB_API_TOKEN) +and OAuth-issued token in the cookie, +and some details of how JupyterLab currently deals with that. +They are different, and JupyterLab should be making requests using the token from the cookie, +not the token from the server, +but that is not currently the case. 
+ +### Redirect loops + +In general, an authenticated web endpoint has this behavior, +based on the authentication/authorization state of the browser: + +- If authorized, allow the request to happen +- If authenticated (I know who you are) but not authorized (you are not allowed), fail with a 403 permission denied error +- If not authenticated, start a redirect process to establish authorization, + which should end in a redirect back to the original URL to try again. + **This is why problems in authentication result in redirect loops!** + If the second request fails to detect the authentication that should have been established during the redirect, + it will start the authentication redirect process over again, + and keep redirecting in a loop until the browser balks. diff --git a/docs/source/reference/proxy.md b/docs/source/reference/proxy.md index 620073b6..f0cfa974 100644 --- a/docs/source/reference/proxy.md +++ b/docs/source/reference/proxy.md @@ -7,9 +7,12 @@ Hub manages by default as a subprocess (it can be run externally, as well, and typically is in production deployments). The upside to CHP, and why we use it by default, is that it's easy to install -and run (if you have nodejs, you are set!). The downsides are that it's a -single process and does not support any persistence of the routing table. So -if the proxy process dies, your whole JupyterHub instance is inaccessible +and run (if you have nodejs, you are set!). The downsides are that + +- it's a single process and +- does not support any persistence of the routing table. + +So if the proxy process dies, your whole JupyterHub instance is inaccessible until the Hub notices, restarts the proxy, and restores the routing table. For deployments that want to avoid such a single point of failure, or leverage existing proxy infrastructure in their chosen deployment (such as Kubernetes @@ -54,7 +57,7 @@ class MyProxy(Proxy): """Stop the proxy""" ``` -These methods **may** be coroutines. 
+These methods **may** be coroutines. `c.Proxy.should_start` is a configurable flag that determines whether the Hub should call these methods when the Hub itself starts and stops. @@ -103,7 +106,7 @@ route to be proxied, such as `/user/name/`. A routespec will: When adding a route, JupyterHub may pass a JSON-serializable dict as a `data` argument that should be attached to the proxy route. When that route is -retrieved, the `data` argument should be returned as well. If your proxy +retrieved, the `data` argument should be returned as well. If your proxy implementation doesn't support storing data attached to routes, then your Python wrapper may have to handle storing the `data` piece itself, e.g in a simple file or database. @@ -136,7 +139,7 @@ async def delete_route(self, routespec): ### Retrieving routes -For retrieval, you only *need* to implement a single method that retrieves all +For retrieval, you only _need_ to implement a single method that retrieves all routes. The return value for this function should be a dictionary, keyed by `routespec`, of dicts whose keys are the same three arguments passed to `add_route` (`routespec`, `target`, `data`) @@ -220,3 +223,11 @@ as previously required. Additionally, configurable attributes for your proxy will appear in jupyterhub help output and auto-generated configuration files via `jupyterhub --generate-config`. + +### Index of proxies + +A list of the proxies that are currently available for JupyterHub (that we know about). + +1. [`jupyterhub/configurable-http-proxy`](https://github.com/jupyterhub/configurable-http-proxy) The default proxy which uses node-http-proxy +2. [`jupyterhub/traefik-proxy`](https://github.com/jupyterhub/traefik-proxy) The proxy which configures traefik proxy server for jupyterhub +3. 
[`AbdealiJK/configurable-http-proxy`](https://github.com/AbdealiJK/configurable-http-proxy) A pure python implementation of the configurable-http-proxy diff --git a/docs/source/reference/rest-api.md b/docs/source/reference/rest-api.md new file mode 100644 index 00000000..117d7e10 --- /dev/null +++ b/docs/source/reference/rest-api.md @@ -0,0 +1,27 @@ +# JupyterHub REST API + +Below is an interactive view of JupyterHub's OpenAPI specification. + + + + + + + + +
+ + diff --git a/docs/source/reference/rest-api.rst b/docs/source/reference/rest-api.rst deleted file mode 100644 index c16d678d..00000000 --- a/docs/source/reference/rest-api.rst +++ /dev/null @@ -1,14 +0,0 @@ -:orphan: - -=================== -JupyterHub REST API -=================== - -.. this doc exists as a resolvable link target -.. which _static files are not - -.. meta:: - :http-equiv=refresh: 0;url=../_static/rest-api/index.html - -The rest API docs are `here <../_static/rest-api/index.html>`_ -if you are not redirected automatically. diff --git a/docs/source/reference/rest.md b/docs/source/reference/rest.md index 95e9ea8b..3ac0f423 100644 --- a/docs/source/reference/rest.md +++ b/docs/source/reference/rest.md @@ -1,34 +1,39 @@ +(rest-api)= + # Using JupyterHub's REST API This section will give you information on: -- what you can do with the API -- create an API token -- add API tokens to the config files -- make an API request programmatically using the requests library -- learn more about JupyterHub's API +- What you can do with the API +- How to create an API token +- Assigning permissions to a token +- Updating to admin services +- Making an API request programmatically using the requests library +- Paginating API requests +- Enabling users to spawn multiple named-servers via the API +- Learn more about JupyterHub's API + +Before we discuss about JupyterHub's REST API, you can learn about [REST APIs here](https://en.wikipedia.org/wiki/Representational_state_transfer). A REST +API provides a standard way for users to get and send information to the +Hub. 
## What you can do with the API Using the [JupyterHub REST API][], you can perform actions on the Hub, such as: -- checking which users are active -- adding or removing users -- stopping or starting single user notebook servers -- authenticating services - -A [REST](https://en.wikipedia.org/wiki/Representational_state_transfer) -API provides a standard way for users to get and send information to the -Hub. +- Checking which users are active +- Adding or removing users +- Stopping or starting single user notebook servers +- Authenticating services +- Communicating with an individual Jupyter server's REST API ## Create an API token -To send requests using JupyterHub API, you must pass an API token with +To send requests using the JupyterHub API, you must pass an API token with the request. -As of [version 0.6.0](../changelog.md), the preferred way of -generating an API token is: +The preferred way of generating an API token is by running: ```bash openssl rand -hex 32 @@ -38,8 +43,12 @@ This `openssl` command generates a potential token that can then be added to JupyterHub using `.api_tokens` configuration setting in `jupyterhub_config.py`. -Alternatively, use the `jupyterhub token` command to generate a token -for a specific hub user by passing the 'username': +```{note} +The api_tokens configuration has been softly deprecated since the introduction of services. +``` + +Alternatively, you can use the `jupyterhub token` command to generate a token +for a specific hub user by passing the **username**: ```bash jupyterhub token @@ -48,25 +57,94 @@ jupyterhub token This command generates a random string to use as a token and registers it for the given user with the Hub's database. 
-In [version 0.8.0](../changelog.md), a TOKEN request page for +In [version 0.8.0](../changelog.md), a token request page for generating an API token is available from the JupyterHub user interface: -![Request API TOKEN page](../images/token-request.png) +:::{figure-md} -![API TOKEN success page](../images/token-request-success.png) +![token request page](../images/token-request.png) -## Add API tokens to the config file +JupyterHub's API token page +::: -You may also add a dictionary of API tokens and usernames to the hub's -configuration file, `jupyterhub_config.py` (note that -the **key** is the 'secret-token' while the **value** is the 'username'): +:::{figure-md} +![token-request-success](../images/token-request-success.png) + +JupyterHub's token page after successfully requesting a token. + +::: + +## Assigning permissions to a token + +Prior to JupyterHub 2.0, there were two levels of permissions: + +1. user, and +2. admin + +where a token would always have full permissions to do whatever its owner could do. + +In JupyterHub 2.0, +specific permissions are now defined as '**scopes**', +and can be assigned both at the user/service level, +and at the individual token level. + +This allows e.g. a user with full admin permissions to request a token with limited permissions. + +## Updating to admin services + +```{note} +The `api_tokens` configuration has been softly deprecated since the introduction of services. +We have no plans to remove it, +but deployments are encouraged to use service configuration instead. 
+``` + +If you have been using `api_tokens` to create an admin user +and the token for that user to perform some automations, then +the services' mechanism may be a better fit if you have the following configuration: ```python +c.JupyterHub.admin_users = {"service-admin"} c.JupyterHub.api_tokens = { - 'secret-token': 'username', + "secret-token": "service-admin", } ``` +This can be updated to create a service, with the following configuration: + +```python +c.JupyterHub.services = [ + { + # give the token a name + "name": "service-admin", + "api_token": "secret-token", + # "admin": True, # if using JupyterHub 1.x + }, +] + +# roles were introduced in JupyterHub 2.0 +# prior to 2.0, only "admin": True or False was available + +c.JupyterHub.load_roles = [ + { + "name": "service-role", + "scopes": [ + # specify the permissions the token should have + "admin:users", + ], + "services": [ + # assign the service the above permissions + "service-admin", + ], + } +] +``` + +The token will have the permissions listed in the role +(see [scopes][] for a list of available permissions), +but there will no longer be a user account created to house it. +The main noticeable difference between a user and a service is that there will be no notebook server associated with the account +and the service will not show up in the various user list pages and APIs. + ## Make an API request To authenticate your requests, pass the API token in the request's @@ -74,10 +152,9 @@ Authorization header. ### Use requests -Using the popular Python [requests](http://docs.python-requests.org/en/master/) -library, here's example code to make an API request for the users of a JupyterHub -deployment. An API GET request is made, and the request sends an API token for -authorization. The response contains information about the users: +Using the popular Python [requests](https://docs.python-requests.org) +library, an API GET request is made, and the request sends an API token for +authorization. 
The response contains information about the users, here's example code to make an API request for the users of a JupyterHub deployment ```python import requests @@ -86,9 +163,9 @@ api_url = 'http://127.0.0.1:8081/hub/api' r = requests.get(api_url + '/users', headers={ - 'Authorization': 'token %s' % token, - } - ) + 'Authorization': f'token {token}', + } +) r.raise_for_status() users = r.json() @@ -106,23 +183,100 @@ data = {'name': 'mygroup', 'users': ['user1', 'user2']} r = requests.post(api_url + '/groups/formgrade-data301/users', headers={ - 'Authorization': 'token %s' % token, - }, - json=data + 'Authorization': f'token {token}', + }, + json=data, ) r.raise_for_status() r.json() ``` The same API token can also authorize access to the [Jupyter Notebook REST API][] -provided by notebook servers managed by JupyterHub if one of the following is true: -1. The token is for the same user as the owner of the notebook -2. The token is tied to an admin user or service **and** `c.JupyterHub.admin_access` is set to `True` +provided by notebook servers managed by JupyterHub if it has the necessary `access:servers` scope. + +(api-pagination)= + +## Paginating API requests + +```{versionadded} 2.0 + +``` + +Pagination is available through the `offset` and `limit` query parameters on +list endpoints, which can be used to return ideally sized windows of results. +Here's example code demonstrating pagination on the `GET /users` +endpoint to fetch the first 20 records. + +```python +import os +import requests + +api_url = 'http://127.0.0.1:8081/hub/api' + +r = requests.get( + api_url + '/users?offset=0&limit=20', + headers={ + "Accept": "application/jupyterhub-pagination+json", + "Authorization": f"token {token}", + }, +) +r.raise_for_status() +r.json() +``` + +For backward-compatibility, the default structure of list responses is unchanged. +However, this lacks pagination information (e.g. 
is there a next page), +so if you have enough users that they won't fit in the first response, +it is a good idea to opt-in to the new paginated list format. +There is a new schema for list responses which include pagination information. +You can request this by including the header: + +``` +Accept: application/jupyterhub-pagination+json +``` + +with your request, in which case a response will look like: + +```python +{ + "items": [ + { + "name": "username", + "kind": "user", + ... + }, + ], + "_pagination": { + "offset": 0, + "limit": 20, + "total": 50, + "next": { + "offset": 20, + "limit": 20, + "url": "http://127.0.0.1:8081/hub/api/users?limit=20&offset=20" + } + } +} +``` + +where the list results (same as pre-2.0) will be in `items`, +and pagination info will be in `_pagination`. +The `next` field will include the `offset`, `limit`, and `url` for requesting the next page. +`next` will be `null` if there is no next page. + +Pagination is governed by two configuration options: + +- `JupyterHub.api_page_default_limit` - the page size, if `limit` is unspecified in the request + and the new pagination API is requested + (default: 50) +- `JupyterHub.api_page_max_limit` - the maximum page size a request can ask for (default: 200) + +Pagination is enabled on the `GET /users`, `GET /groups`, and `GET /proxy` REST endpoints. ## Enabling users to spawn multiple named-servers via the API -With JupyterHub version 0.8, support for multiple servers per user has landed. +Support for multiple servers per user was introduced in JupyterHub [version 0.8.](../changelog.md) Prior to that, each user could only launch a single default server via the API like this: @@ -131,14 +285,14 @@ curl -X POST -H "Authorization: token " "http://127.0.0.1:8081/hub/api/us ``` With the named-server functionality, it's now possible to launch more than one -specifically named servers against a given user. This could be used, for instance, +specifically named servers against a given user. 
This could be used, for instance, to launch each server based on a different image. First you must enable named-servers by including the following setting in the `jupyterhub_config.py` file. `c.JupyterHub.allow_named_servers = True` -If using the [zero-to-jupyterhub-k8s](https://github.com/jupyterhub/zero-to-jupyterhub-k8s) set-up to run JupyterHub, +If you are using the [zero-to-jupyterhub-k8s](https://github.com/jupyterhub/zero-to-jupyterhub-k8s) set-up to run JupyterHub, then instead of editing the `jupyterhub_config.py` file directly, you could pass the following as part of the `config.yaml` file, as per the [tutorial](https://zero-to-jupyterhub.readthedocs.io/en/latest/): @@ -149,6 +303,7 @@ hub: ``` With that setting in place, a new named-server is activated like this: + ```bash curl -X POST -H "Authorization: token " "http://127.0.0.1:8081/hub/api/users//servers/" curl -X POST -H "Authorization: token " "http://127.0.0.1:8081/hub/api/users//servers/" @@ -163,15 +318,11 @@ will need to be able to handle the case of multiple servers per user and ensure uniqueness of names, particularly if servers are spawned via docker containers or kubernetes pods. - ## Learn more about the API -You can see the full [JupyterHub REST API][] for details. This REST API Spec can -be viewed in a more [interactive style on swagger's petstore][]. -Both resources contain the same information and differ only in its display. -Note: The Swagger specification is being renamed the [OpenAPI Initiative][]. +You can see the full [JupyterHub REST API][] for more details. 
-[interactive style on swagger's petstore]: http://petstore.swagger.io/?url=https://raw.githubusercontent.com/jupyterhub/jupyterhub/master/docs/rest-api.yml#!/default -[OpenAPI Initiative]: https://www.openapis.org/ -[JupyterHub REST API]: ./rest-api -[Jupyter Notebook REST API]: http://petstore.swagger.io/?url=https://raw.githubusercontent.com/jupyter/notebook/master/notebook/services/api/api.yaml +[openapi initiative]: https://www.openapis.org/ +[jupyterhub rest api]: ./rest-api +[scopes]: ../rbac/scopes.md +[jupyter notebook rest api]: https://petstore3.swagger.io/?url=https://raw.githubusercontent.com/jupyter/notebook/HEAD/notebook/services/api/api.yaml diff --git a/docs/source/reference/separate-proxy.md b/docs/source/reference/separate-proxy.md index 56f3dd2b..851d4da6 100644 --- a/docs/source/reference/separate-proxy.md +++ b/docs/source/reference/separate-proxy.md @@ -1,27 +1,24 @@ # Running proxy separately from the hub - ## Background -The thing which users directly connect to is the proxy, by default -`configurable-http-proxy`. The proxy either redirects users to the +The thing which users directly connect to is the proxy, which by default is +`configurable-http-proxy`. The proxy either redirects users to the hub (for login and managing servers), or to their own single-user -servers. Thus, as long as the proxy stays running, access to existing +servers. Thus, as long as the proxy stays running, access to existing servers continues, even if the hub itself restarts or goes down. When you first configure the hub, you may not even realize this -because the proxy is automatically managed by the hub. This is great -for getting started and even most use, but everytime you restart the -hub, all user connections also get restarted. But it's also simple to +because the proxy is automatically managed by the hub. This is great +for getting started and even most use-cases, although, everytime you restart the +hub, all user connections are also restarted. 
However, it is also simple to run the proxy as a service separate from the hub, so that you are free to reconfigure the hub while only interrupting users who are waiting for their notebook server to start. starting their notebook server. The default JupyterHub proxy is -[configurable-http-proxy](https://github.com/jupyterhub/configurable-http-proxy), -and that page has some docs. If you are using a different proxy, such -as Traefik, these instructions are probably not relevant to you. - +[configurable-http-proxy](https://github.com/jupyterhub/configurable-http-proxy). If you are using a different proxy, such +as [Traefik](https://github.com/traefik/traefik), these instructions are probably not relevant to you. ## Configuration options @@ -37,24 +34,25 @@ it yourself). token for authenticating communication with the proxy. `c.ConfigurableHTTPProxy.api_url = 'http://localhost:8001'` should be -set to the URL which the hub uses to connect *to the proxy's API*. - +set to the URL which the hub uses to connect _to the proxy's API_. ## Proxy configuration -You need to configure a service to start the proxy. An example -command line for this is `configurable-http-proxy --ip=127.0.0.1 ---port=8000 --api-ip=127.0.0.1 --api-port=8001 ---default-target=http://localhost:8081 ---error-target=http://localhost:8081/hub/error`. (Details for how to -do this is out of scope for this tutorial - for example it might be a -systemd service on within another docker cotainer). The proxy has no +You need to configure a service to start the proxy. An example +command line argument for this is: + +```bash +$ configurable-http-proxy --ip=127.0.0.1 --port=8000 --api-ip=127.0.0.1 --api-port=8001 --default-target=http://localhost:8081 --error-target=http://localhost:8081/hub/error +``` + +(Details on how to do this is out of the scope of this tutorial. For example, it might be a +systemd service configured within another docker container). 
The proxy has no configuration files, all configuration is via the command line and environment variables. `--api-ip` and `--api-port` (which tells the proxy where to listen) should match the hub's `ConfigurableHTTPProxy.api_url`. -`--ip`, `-port`, and other options configure the *user* connections to the proxy. +`--ip`, `-port`, and other options configure the _user_ connections to the proxy. `--default-target` and `--error-target` should point to the hub, and used when users navigate to the proxy originally. @@ -63,10 +61,9 @@ match the token given to `c.ConfigurableHTTPProxy.auth_token`. You should check the [configurable-http-proxy options](https://github.com/jupyterhub/configurable-http-proxy) to see -what other options are needed, for example SSL options. Note that -these are configured in the hub if the hub is starting the proxy - you -need to move the options to here. - +what other options are needed, for example, SSL options. Note that +these options are configured in the hub if the hub is starting the proxy, so you +need to configure the options there. ## Docker image @@ -74,7 +71,6 @@ You can use [jupyterhub configurable-http-proxy docker image](https://hub.docker.com/r/jupyterhub/configurable-http-proxy/) to run the proxy. - ## See also -* [jupyterhub configurable-http-proxy](https://github.com/jupyterhub/configurable-http-proxy) +- [jupyterhub configurable-http-proxy](https://github.com/jupyterhub/configurable-http-proxy) diff --git a/docs/source/reference/server-api.md b/docs/source/reference/server-api.md new file mode 100644 index 00000000..03369dab --- /dev/null +++ b/docs/source/reference/server-api.md @@ -0,0 +1,332 @@ +# Starting servers with the JupyterHub API + +Sometimes, when working with applications such as [BinderHub](https://binderhub.readthedocs.io), it may be necessary to launch Jupyter-based services on behalf of your users. 
+Doing so can be achieved through JupyterHub's [REST API](../reference/rest.md), which allows one to launch and manage servers on behalf of users through API calls instead of the JupyterHub UI. +This way, you can take advantage of other user/launch/lifecycle patterns that are not natively supported by the JupyterHub UI, all without the need to develop the server management features of JupyterHub Spawners and/or Authenticators. + +This tutorial goes through working with the JupyterHub API to manage servers for users. +In particular, it covers how to: + +1. [Check the status of servers](checking) +2. [Start servers](starting) +3. [Wait for servers to be ready](waiting) +4. [Communicate with servers](communicating) +5. [Stop servers](stopping) + +At the end, we also provide sample Python code that can be used to implement these steps. + +(checking)= + +## Checking server status + +First, request information about a particular user using a GET request: + +``` +GET /hub/api/users/:username +``` + +The response you get will include a `servers` field, which is a dictionary, as shown in this JSON-formatted response: + +**Required scope: `read:servers`** + +```json +{ + "admin": false, + "groups": [], + "pending": null, + "server": null, + "name": "test-1", + "kind": "user", + "last_activity": "2021-08-03T18:12:46.026411Z", + "created": "2021-08-03T18:09:59.767600Z", + "roles": ["user"], + "servers": {} +} +``` + +Many JupyterHub deployments only use a 'default' server, represented as an empty string `''` for a name. An investigation of the `servers` field can yield one of two results. First, it can be empty as in the sample JSON response above. In such a case, the user has no running servers. 
+ +However, should the user have running servers, then the returned dict should contain various information, as shown in this response: + +```json + "servers": { + "": { + "name": "", + "last_activity": "2021-08-03T18:48:35.934000Z", + "started": "2021-08-03T18:48:29.093885Z", + "pending": null, + "ready": true, + "url": "/user/test-1/", + "user_options": {}, + "progress_url": "/hub/api/users/test-1/server/progress" + } + } +``` + +Key properties of a server: + +name +: the server's name. Always the same as the key in `servers`. + +ready +: boolean. If true, the server can be expected to respond to requests at `url`. + +pending +: `null` or a string indicating a transitional state (such as `start` or `stop`). +Will always be `null` if `ready` is true or a string if false. + +url +: The server's url path (e.g. `/users/:name/:servername/`) where the server can be accessed if `ready` is true. + +progress_url +: The API URL path (starting with `/hub/api`) where the progress API can be used to wait for the server to be ready. + +last_activity +: ISO8601 timestamp indicating when activity was last observed on the server. + +started +: ISO8601 timestamp indicating when the server was last started. + +The two responses above are from a user with no servers and another with one `ready` server. The sample below is a response likely to be received when one requests a server launch while the server is not yet ready: + +```json + "servers": { + "": { + "name": "", + "last_activity": "2021-08-03T18:48:29.093885Z", + "started": "2021-08-03T18:48:29.093885Z", + "pending": "spawn", + "ready": false, + "url": "/user/test-1/", + "user_options": {}, + "progress_url": "/hub/api/users/test-1/server/progress" + } + } +``` + +Note that `ready` is `false` and `pending` has the value `spawn`, meaning that the server is not ready and attempting to access it may not work as it is still in the process of spawning. We'll get more into this below in [waiting for a server][]. 
+ +[waiting for a server]: waiting + +(starting)= + +## Starting servers + +To start a server, make this API request: + +``` +POST /hub/api/users/:username/servers/[:servername] +``` + +**Required scope: `servers`** + +Assuming the request was valid, there are two possible responses: + +201 Created +: This status code means the launch completed and the server is ready and is available at the server's URL immediately. + +202 Accepted +: This is the more likely response, and means that the server has begun launching, +but is not immediately ready. As a result, the server shows `pending: 'spawn'` at this point and you should wait for it to start. + +(waiting)= + +## Waiting for a server to start + +After receiving a `202 Accepted` response, you have to wait for the server to start. +Two approaches can be applied to establish when the server is ready: + +1. {ref}`Polling the server model <polling>` +2. {ref}`Using the progress API <progress>` + +(polling)= + +### Polling the server model + +The simplest way to check if a server is ready is to programmatically query the server model until two conditions are true: + +1. The server name is contained in the `servers` response, and +2. `servers['servername']['ready']` is true. + +The Python code snippet below can be used to check if a server is ready: + +```python +def server_ready(hub_url, user, token, server_name=""): + r = requests.get( + f"{hub_url}/hub/api/users/{user}/servers/{server_name}", + headers={"Authorization": f"token {token}"}, + ) + r.raise_for_status() + user_model = r.json() + servers = user_model.get("servers", {}) + if server_name not in servers: + return False + + server = servers[server_name] + if server['ready']: + print(f"Server {user}/{server_name} ready at {server['url']}") + return True + else: + print(f"Server {user}/{server_name} not ready, pending {server['pending']}") + return False +``` + +You can keep making this check until `ready` is true. 
+ +(progress)= + +### Using the progress API + +The most _efficient_ way to wait for a server to start is by using the progress API. +The progress URL is available in the server model under `progress_url` and has the form `/hub/api/users/:user/servers/:servername/progress`. + +The default server progress can be accessed at `:user/servers//progress` or `:user/server/progress` as demonstrated in the following GET request: + +``` +GET /hub/api/users/:user/servers/:servername/progress +``` + +**Required scope: `read:servers`** + +The progress API is an example of an [EventStream][] API. +Messages are _streamed_ and delivered in the form: + +``` +data: {"progress": 10, "message": "...", ...} +``` + +where the line after `data:` contains a JSON-serialized dictionary. +Lines that do not start with `data:` should be ignored. + +[eventstream]: https://developer.mozilla.org/en-US/docs/Web/API/Server-sent_events/Using_server-sent_events#examples + +Progress events have the form: + +```python +{ + "progress": 0-100, + "message": "", + "ready": True, # or False + +} +``` + +progress +: integer, 0-100 + +message +: string message describing progress stages + +ready +: present and true only for the last event when the server is ready + +url +: only present if `ready` is true; will be the server's URL + +The progress API can be used even with fully ready servers. +If the server is ready, there will only be one event, which will look like: + +```json +{ + "progress": 100, + "ready": true, + "message": "Server ready at /user/test-1/", + "html_message": "Server ready at /user/test-1/", + "url": "/user/test-1/" +} +``` + +where `ready` and `url` are the same as in the server model, and `ready` will always be true. + +A significant advantage of the progress API is that it shows the status of the server through a stream of messages. 
+Below is an example of a typical complete stream from the API: + +``` + +data: {"progress": 0, "message": "Server requested"} + +data: {"progress": 50, "message": "Spawning server..."} + +data: {"progress": 100, "ready": true, "message": "Server ready at /user/test-user/", "html_message": "Server ready at /user/test-user/", "url": "/user/test-user/"} +``` + +Here is a Python example for consuming an event stream: + +```{literalinclude} ../../../examples/server-api/start-stop-server.py +:language: python +:pyobject: event_stream +``` + +(stopping)= + +## Stopping servers + +Servers can be stopped with a DELETE request: + +``` +DELETE /hub/api/users/:user/servers/[:servername] +``` + +**Required scope: `servers`** + +Similar to when starting a server, issuing the DELETE request above might not stop the server immediately. +Instead, the DELETE request has two possible response codes: + +204 Deleted +: This status code means the delete completed and the server is fully stopped. +It will now be absent from the user `servers` model. + +202 Accepted +: This code means your request was accepted but is not yet completely processed. +The server has `pending: 'stop'` at this point. + +There is no progress API for checking when a server actually stops. +The only way to wait for a server to stop is to poll it and wait for the server to disappear from the user `servers` model. + +This Python code snippet can be used to stop a server and the wait for the process to complete: + +```{literalinclude} ../../../examples/server-api/start-stop-server.py +:language: python +:pyobject: stop_server +``` + +(communicating)= + +## Communicating with servers + +JupyterHub tokens with the `access:servers` scope can be used to communicate with servers themselves. +The tokens can be the same as those you used to launch your service. + +```{note} +Access scopes are new in JupyterHub 2.0. 
+To access servers in JupyterHub 1.x, +a token must be owned by the same user as the server, +*or* be an admin token if admin_access is enabled. +``` + +The URL returned from a server model is the URL path suffix, +e.g. `/user/:name/` to append to the jupyterhub base URL. +The returned URL is of the form `{hub_url}{server_url}`, +where `hub_url` would be `http://127.0.0.1:8000` by default and `server_url` is `/user/myname`. +When combined, the two give a full URL of `http://127.0.0.1:8000/user/myname`. + +## Python example + +The JupyterHub repo includes a complete example in {file}`examples/server-api` +that ties all these steps together. + +In summary, the processes involved in managing servers on behalf of users are: + +1. Get user information from `/user/:name`. +2. The server model includes a `ready` state to tell you if it's ready. +3. If it's not ready, you can follow up with `progress_url` to wait for it. +4. If it is ready, you can use the `url` field to link directly to the running server. + +The example below demonstrates starting and stopping servers via the JupyterHub API, +including waiting for them to start via the progress API and waiting for them to stop by polling the user model. + +```{literalinclude} ../../../examples/server-api/start-stop-server.py +:language: python +:start-at: def event_stream +:end-before: def main +``` diff --git a/docs/source/reference/services.md b/docs/source/reference/services.md index 3020fa69..32229f2c 100644 --- a/docs/source/reference/services.md +++ b/docs/source/reference/services.md @@ -1,17 +1,5 @@ # Services -With version 0.7, JupyterHub adds support for **Services**. 
- -This section provides the following information about Services: - -- [Definition of a Service](#definition-of-a-service) -- [Properties of a Service](#properties-of-a-service) -- [Hub-Managed Services](#hub-managed-services) -- [Launching a Hub-Managed Service](#launching-a-hub-managed-service) -- [Externally-Managed Services](#externally-managed-services) -- [Writing your own Services](#writing-your-own-services) -- [Hub Authentication and Services](#hub-authentication-and-services) - ## Definition of a Service When working with JupyterHub, a **Service** is defined as a process that interacts @@ -45,17 +33,25 @@ A Service may have the following properties: - `url: str (default - None)` - The URL where the service is/should be. If a url is specified for where the Service runs its own web server, the service will be added to the proxy at `/services/:name` -- `api_token: str (default - None)` - For Externally-Managed Services you need to specify +- `api_token: str (default - None)` - For Externally-Managed Services you need to specify an API token to perform API requests to the Hub +- `display: bool (default - True)` - When set to true, display a link to the + service's URL under the 'Services' dropdown in user's hub home page. + +- `oauth_no_confirm: bool (default - False)` - When set to true, + skip the OAuth confirmation page when users access this service. + + By default, when users authenticate with a service using JupyterHub, + they are prompted to confirm that they want to grant that service + access to their credentials. + Skipping the confirmation page is useful for admin-managed services that are considered part of the Hub + and shouldn't need extra prompts for login. If a service is also to be managed by the Hub, it has a few extra options: -- `command: (str/Popen list`) - Command for JupyterHub to spawn the service. - - Only use this if the service should be a subprocess. 
- - If command is not specified, the Service is assumed to be managed - externally. - - If a command is specified for launching the Service, the Service will - be started and managed by the Hub. +- `command: (str/Popen list)` - Command for JupyterHub to spawn the service. - Only use this if the service should be a subprocess. - If command is not specified, the Service is assumed to be managed + externally. - If a command is specified for launching the Service, the Service will + be started and managed by the Hub. - `environment: dict` - additional environment variables for the Service. - `user: str` - the name of a system user to manage the Service. If unspecified, run as the same user as the Hub. @@ -89,11 +85,21 @@ Hub-Managed Service would include: This example would be configured as follows in `jupyterhub_config.py`: ```python +c.JupyterHub.load_roles = [ + { + "name": "idle-culler", + "scopes": [ + "read:users:activity", # read user last_activity + "servers", # start and stop servers + # 'admin:users' # needed if culling idle users as well + ] + } +] + c.JupyterHub.services = [ { - 'name': 'cull-idle', - 'admin': True, - 'command': [sys.executable, '/path/to/cull-idle.py', '--timeout'] + 'name': 'idle-culler', + 'command': [sys.executable, '-m', 'jupyterhub_idle_culler', '--timeout=3600'] } ] ``` @@ -103,12 +109,14 @@ parameters, which describe the environment needed to start the Service process: - `environment: dict` - additional environment variables for the Service. - `user: str` - name of the user to run the server if different from the Hub. - Requires Hub to be root. + Requires Hub to be root. - `cwd: path` directory in which to run the Service, if different from the - Hub directory. + Hub directory. 
The Hub will pass the following environment variables to launch the Service: +(service-env)= + ```bash JUPYTERHUB_SERVICE_NAME: The name of the service JUPYTERHUB_API_TOKEN: API token assigned to the service @@ -117,21 +125,24 @@ JUPYTERHUB_BASE_URL: Base URL of the Hub (https://mydomain[:port]/) JUPYTERHUB_SERVICE_PREFIX: URL path prefix of this service (/services/:service-name/) JUPYTERHUB_SERVICE_URL: Local URL where the service is expected to be listening. Only for proxied web services. +JUPYTERHUB_OAUTH_SCOPES: JSON-serialized list of scopes to use for allowing access to the service + (deprecated in 3.0, use JUPYTERHUB_OAUTH_ACCESS_SCOPES). +JUPYTERHUB_OAUTH_ACCESS_SCOPES: JSON-serialized list of scopes to use for allowing access to the service (new in 3.0). +JUPYTERHUB_OAUTH_CLIENT_ALLOWED_SCOPES: JSON-serialized list of scopes that can be requested by the oauth client on behalf of users (new in 3.0). ``` For the previous 'cull idle' Service example, these environment variables would be passed to the Service when the Hub starts the 'cull idle' Service: ```bash -JUPYTERHUB_SERVICE_NAME: 'cull-idle' +JUPYTERHUB_SERVICE_NAME: 'idle-culler' JUPYTERHUB_API_TOKEN: API token assigned to the service JUPYTERHUB_API_URL: http://127.0.0.1:8080/hub/api JUPYTERHUB_BASE_URL: https://mydomain[:port] -JUPYTERHUB_SERVICE_PREFIX: /services/cull-idle/ +JUPYTERHUB_SERVICE_PREFIX: /services/idle-culler/ ``` -See the JupyterHub GitHub repo for additional information about the -[`cull-idle` example](https://github.com/jupyterhub/jupyterhub/tree/master/examples/cull-idle). +See the GitHub repo for additional information about the [jupyterhub_idle_culler][]. 
## Externally-Managed Services @@ -151,6 +162,8 @@ c.JupyterHub.services = [ { 'name': 'my-web-service', 'url': 'https://10.0.1.1:1984', + # any secret >8 characters, you'll use api_token to + # authenticate api requests to the hub from your service 'api_token': 'super-secret', } ] @@ -173,7 +186,7 @@ information to the Service via the environment variables described above. A flexible Service, whether managed by the Hub or not, can make use of these same environment variables. -When you run a service that has a url, it will be accessible under a +When you run a service that has a URL, it will be accessible under a `/services/` prefix, such as `https://myhub.horse/services/my-service/`. For your service to route proxied requests properly, it must take `JUPYTERHUB_SERVICE_PREFIX` into account when routing requests. For example, a @@ -188,18 +201,38 @@ extra slash you might get unexpected behavior. For example if your service has a ## Hub Authentication and Services -JupyterHub 0.7 introduces some utilities for using the Hub's authentication -mechanism to govern access to your service. When a user logs into JupyterHub, -the Hub sets a **cookie (`jupyterhub-services`)**. The service can use this -cookie to authenticate requests. +JupyterHub provides some utilities for using the Hub's authentication +mechanism to govern access to your service. -JupyterHub ships with a reference implementation of Hub authentication that +Requests to all JupyterHub services are made with OAuth tokens. +These can either be requests with a token in the `Authorization` header, +or url parameter `?token=...`, +or browser requests which must complete the OAuth authorization code flow, +which results in a token that should be persisted for future requests +(persistence is up to the service, +but an encrypted cookie confined to the service path is appropriate, +and provided by default). + +:::{versionchanged} 2.0 +The shared `jupyterhub-services` cookie is removed. 
+OAuth must be used to authenticate browser requests with services. +::: + +JupyterHub includes a reference implementation of Hub authentication that can be used by services. You may go beyond this reference implementation and create custom hub-authenticating clients and services. We describe the process below. -The reference, or base, implementation is the [`HubAuth`][HubAuth] class, -which implements the requests to the Hub. +The reference, or base, implementation is the {class}`.HubAuth` class, +which implements the API requests to the Hub that resolve a token to a User model. + +There are two levels of authentication with the Hub: + +- {class}`.HubAuth` - the most basic authentication, + for services that should only accept API requests authorized with a token. + +- {class}`.HubOAuth` - For services that should use oauth to authenticate with the Hub. + This should be used for any service that serves pages that should be visited with a browser. To use HubAuth, you must set the `.api_token` instance variable. This can be done either programmatically when constructing the class, or via the @@ -214,11 +247,9 @@ and [service-whoiami](https://github.com/jupyterhub/jupyterhub/tree/master/examp (TODO: Where is this API TOKen set?) -Most of the logic for authentication implementation is found in the -[`HubAuth.user_for_cookie`][HubAuth.user_for_cookie] -and in the -[`HubAuth.user_for_token`][HubAuth.user_for_token] -methods, which makes a request of the Hub, and returns: +Most of the logic for authentication implementation is found in the +{meth}`.HubAuth.user_for_token` methods, +which makes a request of the Hub, and returns: - None, if no user could be identified, or - a dict of the following form: @@ -227,7 +258,9 @@ methods, which makes a request of the Hub, and returns: { "name": "username", "groups": ["list", "of", "groups"], - "admin": False, # or True + "scopes": [ + "access:servers!server=username/", + ], } ``` @@ -237,79 +270,45 @@ action. 
HubAuth also caches the Hub's response for a number of seconds, configurable by the `cookie_cache_max_age` setting (default: five minutes). +If your service would like to make further requests _on behalf of users_, +it should use the token issued by this OAuth process. +If you are using tornado, +you can access the token authenticating the current request with {meth}`.HubAuth.get_token`. + +:::{versionchanged} 2.2 + +{meth}`.HubAuth.get_token` adds support for retrieving +tokens stored in tornado cookies after the completion of OAuth. +Previously, it only retrieved tokens from URL parameters or the Authorization header. +Passing `get_token(handler, in_cookie=False)` preserves this behavior. +::: + ### Flask Example For example, you have a Flask service that returns information about a user. JupyterHub's HubAuth class can be used to authenticate requests to the Flask service. See the `service-whoami-flask` example in the -[JupyterHub GitHub repo](https://github.com/jupyterhub/jupyterhub/tree/master/examples/service-whoami-flask) +[JupyterHub GitHub repo](https://github.com/jupyterhub/jupyterhub/tree/HEAD/examples/service-whoami-flask) for more details. 
-```python -from functools import wraps -import json -import os -from urllib.parse import quote - -from flask import Flask, redirect, request, Response - -from jupyterhub.services.auth import HubAuth - -prefix = os.environ.get('JUPYTERHUB_SERVICE_PREFIX', '/') - -auth = HubAuth( - api_token=os.environ['JUPYTERHUB_API_TOKEN'], - cookie_cache_max_age=60, -) - -app = Flask(__name__) - - -def authenticated(f): - """Decorator for authenticating with the Hub""" - @wraps(f) - def decorated(*args, **kwargs): - cookie = request.cookies.get(auth.cookie_name) - token = request.headers.get(auth.auth_header_name) - if cookie: - user = auth.user_for_cookie(cookie) - elif token: - user = auth.user_for_token(token) - else: - user = None - if user: - return f(user, *args, **kwargs) - else: - # redirect to login url on failed auth - return redirect(auth.login_url + '?next=%s' % quote(request.path)) - return decorated - - -@app.route(prefix) -@authenticated -def whoami(user): - return Response( - json.dumps(user, indent=1, sort_keys=True), - mimetype='application/json', - ) +```{literalinclude} ../../../examples/service-whoami-flask/whoami-flask.py +:language: python ``` - ### Authenticating tornado services with JupyterHub Since most Jupyter services are written with tornado, -we include a mixin class, [`HubAuthenticated`][HubAuthenticated], +we include a mixin class, [`HubOAuthenticated`][huboauthenticated], for quickly authenticating your own tornado services with JupyterHub. -Tornado's `@web.authenticated` method calls a Handler's `.get_current_user` -method to identify the user. Mixing in `HubAuthenticated` defines -`get_current_user` to use HubAuth. If you want to configure the HubAuth -instance beyond the default, you'll want to define an `initialize` method, +Tornado's {py:func}`~.tornado.web.authenticated` decorator calls a Handler's {py:meth}`~.tornado.web.RequestHandler.get_current_user` +method to identify the user. 
Mixing in {class}`.HubAuthenticated` defines +{meth}`~.HubAuthenticated.get_current_user` to use HubAuth. If you want to configure the HubAuth +instance beyond the default, you'll want to define an {py:meth}`~.tornado.web.RequestHandler.initialize` method, such as: ```python -class MyHandler(HubAuthenticated, web.RequestHandler): - hub_users = {'inara', 'mal'} +class MyHandler(HubOAuthenticated, web.RequestHandler): def initialize(self, hub_auth): self.hub_auth = hub_auth @@ -319,66 +318,97 @@ class MyHandler(HubAuthenticated, web.RequestHandler): ... ``` +The HubAuth class will automatically load the desired configuration from the Service +[environment variables](service-env). -The HubAuth will automatically load the desired configuration from the Service -environment variables. +:::{versionchanged} 2.0 -If you want to limit user access, you can whitelist users through either the -`.hub_users` attribute or `.hub_groups`. These are sets that check against the -username and user group list, respectively. If a user matches neither the user -list nor the group list, they will not be allowed access. If both are left -undefined, then any user will be allowed. +Access scopes are used to govern access to services. +Prior to 2.0, +sets of users and groups could be used to grant access +by defining `.hub_groups` or `.hub_users` on the authenticated handler. +These are ignored if the 2.0 `.hub_scopes` is defined. +::: +:::{seealso} +{meth}`.HubAuth.check_scopes` +::: ### Implementing your own Authentication with JupyterHub If you don't want to use the reference implementation (e.g. you find the implementation a poor fit for your Flask app), you can implement authentication via the Hub yourself. -We recommend looking at the [`HubAuth`][HubAuth] class implementation for reference, +JupyterHub is a standard OAuth2 provider, +so you can use any OAuth 2 client implementation appropriate for your toolkit. 
+See the [FastAPI example][] for an example of using JupyterHub as an OAuth provider with [FastAPI][], +without using any code imported from JupyterHub. + +On completion of OAuth, you will have an access token for JupyterHub, +which can be used to identify the user and the permissions (scopes) +the user has authorized for your service. + +You will only get to this stage if the user has the required `access:services!service=$service-name` scope. + +To retrieve the user model for the token, make a request to `GET /hub/api/user` with the token in the Authorization header. +For example, using flask: + +```{literalinclude} ../../../examples/service-whoami-flask/whoami-flask.py +:language: python +``` + +We recommend looking at the [`HubOAuth`][huboauth] class implementation for reference, and taking note of the following process: -1. retrieve the cookie `jupyterhub-services` from the request. -2. Make an API request `GET /hub/api/authorizations/cookie/jupyterhub-services/cookie-value`, - where cookie-value is the url-encoded value of the `jupyterhub-services` cookie. - This request must be authenticated with a Hub API token in the `Authorization` header. - For example, with [requests][]: +1. retrieve the token from the request. +2. Make an API request `GET /hub/api/user`, + with the token in the `Authorization` header. - ```python - r = requests.get( - '/'.join((["http://127.0.0.1:8081/hub/api", - "authorizations/cookie/jupyterhub-services", - quote(encrypted_cookie, safe=''), - ]), - headers = { - 'Authorization' : 'token %s' % api_token, - }, - ) - r.raise_for_status() - user = r.json() - ``` + For example, with [requests][]: + + ```python + r = requests.get( + "http://127.0.0.1:8081/hub/api/user", + headers = { + 'Authorization' : f'token {api_token}', + }, + ) + r.raise_for_status() + user = r.json() + ``` 3. 
On success, the reply will be a JSON model describing the user: - ```json + ```python { "name": "inara", - "groups": ["serenity", "guild"] - + # groups may be omitted, depending on permissions + "groups": ["serenity", "guild"], + # scopes is new in JupyterHub 2.0 + "scopes": [ + "access:services", + "read:users:name", + "read:users!user=inara", + "..." + ] } ``` +The `scopes` field can be used to manage access. +Note: a user will have access to a service to complete oauth access to the service for the first time. +Individual permissions may be revoked at any later point without revoking the token, +in which case the `scopes` field in this model should be checked on each access. +The default required scopes for access are available from `hub_auth.oauth_scopes` or `$JUPYTERHUB_OAUTH_ACCESS_SCOPES`. + An example of using an Externally-Managed Service and authentication is in the [nbviewer README][nbviewer example] section on securing the notebook viewer, -and an example of its configuration is found [here](https://github.com/jupyter/nbviewer/blob/master/nbviewer/providers/base.py#L94). +and an example of its configuration is found [here](https://github.com/jupyter/nbviewer/blob/ed942b10a52b6259099e2dd687930871dc8aac22/nbviewer/providers/base.py#L95). nbviewer can also be run as a Hub-Managed Service as described [nbviewer README][nbviewer example] section on securing the notebook viewer. 
- [requests]: http://docs.python-requests.org/en/master/ [services_auth]: ../api/services.auth.html -[HubAuth]: ../api/services.auth.html#jupyterhub.services.auth.HubAuth -[HubAuth.user_for_cookie]: ../api/services.auth.html#jupyterhub.services.auth.HubAuth.user_for_cookie -[HubAuth.user_for_token]: ../api/services.auth.html#jupyterhub.services.auth.HubAuth.user_for_token -[HubAuthenticated]: ../api/services.auth.html#jupyterhub.services.auth.HubAuthenticated [nbviewer example]: https://github.com/jupyter/nbviewer#securing-the-notebook-viewer +[fastapi example]: https://github.com/jupyterhub/jupyterhub/tree/HEAD/examples/service-fastapi +[fastapi]: https://fastapi.tiangolo.com +[jupyterhub_idle_culler]: https://github.com/jupyterhub/jupyterhub-idle-culler diff --git a/docs/source/reference/spawners.md b/docs/source/reference/spawners.md index cc3b7a65..a8b7c522 100644 --- a/docs/source/reference/spawners.md +++ b/docs/source/reference/spawners.md @@ -4,10 +4,9 @@ A [Spawner][] starts each single-user notebook server. 
The Spawner represents an abstract interface to a process, and a custom Spawner needs to be able to take three actions: -- start the process -- poll whether the process is still running -- stop the process - +- start a process +- poll whether a process is still running +- stop a process ## Examples @@ -15,11 +14,11 @@ Custom Spawners for JupyterHub can be found on the [JupyterHub wiki](https://git Some examples include: - [DockerSpawner](https://github.com/jupyterhub/dockerspawner) for spawning user servers in Docker containers - * `dockerspawner.DockerSpawner` for spawning identical Docker containers for + - `dockerspawner.DockerSpawner` for spawning identical Docker containers for each user - * `dockerspawner.SystemUserSpawner` for spawning Docker containers with an + - `dockerspawner.SystemUserSpawner` for spawning Docker containers with an environment and home directory for each user - * both `DockerSpawner` and `SystemUserSpawner` also work with Docker Swarm for + - both `DockerSpawner` and `SystemUserSpawner` also work with Docker Swarm for launching containers on remote machines - [SudoSpawner](https://github.com/jupyterhub/sudospawner) enables JupyterHub to run without being root, by spawning an intermediate process via `sudo` @@ -27,26 +26,25 @@ Some examples include: servers using batch systems - [YarnSpawner](https://github.com/jupyterhub/yarnspawner) for spawning notebook servers in YARN containers on a Hadoop cluster -- [RemoteSpawner](https://github.com/zonca/remotespawner) to spawn notebooks - and a remote server and tunnel the port via SSH - +- [SSHSpawner](https://github.com/NERSC/sshspawner) to spawn notebooks + on a remote server using SSH +- [KubeSpawner](https://github.com/jupyterhub/kubespawner) to spawn notebook servers on kubernetes cluster. ## Spawner control methods ### Spawner.start -`Spawner.start` should start the single-user server for a single user. +`Spawner.start` should start a single-user server for a single user. 
Information about the user can be retrieved from `self.user`, an object encapsulating the user's name, authentication, and server info. -The return value of `Spawner.start` should be the (ip, port) of the running server. - -**NOTE:** When writing coroutines, *never* `yield` in between a database change and a commit. +The return value of `Spawner.start` should be the `(ip, port)` of the running server, +or a full URL as a string. Most `Spawner.start` functions will look similar to this example: ```python -def start(self): +async def start(self): self.ip = '127.0.0.1' self.port = random_port() # get environment variables, @@ -58,8 +56,10 @@ def start(self): cmd.extend(self.cmd) cmd.extend(self.get_args()) - yield self._actually_start_server_somehow(cmd, env) - return (self.ip, self.port) + await self._actually_start_server_somehow(cmd, env) + # url may not match self.ip:self.port, but it could! + url = self._get_connectable_url() + return url ``` When `Spawner.start` returns, the single-user server process should actually be running, @@ -67,20 +67,71 @@ not just requested. JupyterHub can handle `Spawner.start` being very slow (such as PBS-style batch queues, or instantiating whole AWS instances) via relaxing the `Spawner.start_timeout` config value. +#### Note on IPs and ports + +`Spawner.ip` and `Spawner.port` attributes set the _bind_ URL, +which the single-user server should listen on +(passed to the single-user process via the `JUPYTERHUB_SERVICE_URL` environment variable). +The _return_ value is the IP and port (or full URL) the Hub should _connect to_. +These are not necessarily the same, and usually won't be in any Spawner that works with remote resources or containers. + +The default for `Spawner.ip`, and `Spawner.port` is `127.0.0.1:{random}`, +which is appropriate for Spawners that launch local processes, +where everything is on localhost and each server needs its own port. 
+For remote or container Spawners, it will often make sense to use a different value, +such as `ip = '0.0.0.0'` and a fixed port, e.g. `8888`. +The defaults can be changed in the class, +preserving configuration with traitlets: + +```python +from traitlets import default +from jupyterhub.spawner import Spawner + +class MySpawner(Spawner): + @default("ip") + def _default_ip(self): + return '0.0.0.0' + + @default("port") + def _default_port(self): + return 8888 + + async def start(self): + env = self.get_env() + cmd = [] + # get jupyterhub command to run, + # typically ['jupyterhub-singleuser'] + cmd.extend(self.cmd) + cmd.extend(self.get_args()) + + remote_server_info = await self._actually_start_server_somehow(cmd, env) + url = self.get_public_url_from(remote_server_info) + return url +``` + +#### Exception handling + +When `Spawner.start` raises an Exception, a message can be passed on to the user via the exception using a `.jupyterhub_html_message` or `.jupyterhub_message` attribute. + +When the Exception has a `.jupyterhub_html_message` attribute, it will be rendered as HTML to the user. + +Alternatively `.jupyterhub_message` is rendered as unformatted text. + +If both attributes are not present, the Exception will be shown to the user as unformatted text. + ### Spawner.poll -`Spawner.poll` should check if the spawner is still running. +`Spawner.poll` checks if the spawner is still running. It should return `None` if it is still running, and an integer exit status, otherwise. -For the local process case, `Spawner.poll` uses `os.kill(PID, 0)` -to check if the local process is still running. +In the case of local processes, `Spawner.poll` uses `os.kill(PID, 0)` +to check if the local process is still running. On Windows, it uses `psutil.pid_exists`. ### Spawner.stop `Spawner.stop` should stop the process. It must be a tornado coroutine, which should return when the process has finished exiting. 
- ## Spawner state JupyterHub should be able to stop and restart without tearing down @@ -90,7 +141,7 @@ A JSON-able dictionary of state can be used to store persisted information. Unlike start, stop, and poll methods, the state methods must not be coroutines. -For the single-process case, the Spawner state is only the process ID of the server: +In the case of single processes, the Spawner state is only the process ID of the server: ```python def get_state(self): @@ -112,7 +163,6 @@ def clear_state(self): self.pid = 0 ``` - ## Spawner options form (new in 0.4) @@ -129,7 +179,7 @@ If the `Spawner.options_form` is defined, when a user tries to start their serve If `Spawner.options_form` is undefined, the user's server is spawned directly, and no spawn page is rendered. -See [this example](https://github.com/jupyterhub/jupyterhub/blob/master/examples/spawn-form/jupyterhub_config.py) for a form that allows custom CLI args for the local spawner. +See [this example](https://github.com/jupyterhub/jupyterhub/blob/HEAD/examples/spawn-form/jupyterhub_config.py) for a form that allows custom CLI args for the local spawner. ### `Spawner.options_from_form` @@ -170,8 +220,7 @@ which would return: When `Spawner.start` is called, this dictionary is accessible as `self.user_options`. - -[Spawner]: https://github.com/jupyterhub/jupyterhub/blob/master/jupyterhub/spawner.py +[spawner]: https://github.com/jupyterhub/jupyterhub/blob/HEAD/jupyterhub/spawner.py ## Writing a custom spawner @@ -212,6 +261,75 @@ Additionally, configurable attributes for your spawner will appear in jupyterhub help output and auto-generated configuration files via `jupyterhub --generate-config`. +## Environment variables and command-line arguments + +Spawners mainly do one thing: launch a command in an environment. 
+
+The command-line is constructed from user configuration:
+
+- Spawner.cmd (default: `['jupyterhub-singleuser']`)
+- Spawner.args (CLI args to pass to the cmd, default: empty)
+
+where the configuration:
+
+```python
+c.Spawner.cmd = ["my-singleuser-wrapper"]
+c.Spawner.args = ["--debug", "--flag"]
+```
+
+would result in spawning the command:
+
+```bash
+my-singleuser-wrapper --debug --flag
+```
+
+The `Spawner.get_args()` method is how `Spawner.args` is accessed,
+and can be used by Spawners to customize/extend user-provided arguments.
+
+Prior to 2.0, JupyterHub unconditionally added certain options _if specified_ to the command-line,
+such as `--ip={Spawner.ip}` and `--port={Spawner.port}`.
+These have now all been moved to environment variables,
+and from JupyterHub 2.0,
+the command-line launched by JupyterHub is fully specified by overridable configuration `Spawner.cmd + Spawner.args`.
+
+Most process configuration is passed via environment variables.
+Additional variables can be specified via the `Spawner.environment` configuration.
+
+The process environment is returned by `Spawner.get_env`, which specifies the following environment variables:
+
+- JUPYTERHUB_SERVICE_URL - the _bind_ URL where the server should launch its HTTP server (`http://127.0.0.1:12345`).
+  This includes `Spawner.ip` and `Spawner.port`; _new in 2.0, prior to 2.0 IP, port were on the command-line and only if specified_
+- JUPYTERHUB_SERVICE_PREFIX - the URL prefix the service will run on (e.g. `/user/name/`)
+- JUPYTERHUB_USER - the JupyterHub user's username
+- JUPYTERHUB_SERVER_NAME - the server's name, if using named servers (default server has an empty name)
+- JUPYTERHUB_API_URL - the full URL for the JupyterHub API (http://127.0.0.1:8001/hub/api)
+- JUPYTERHUB_BASE_URL - the base URL of the whole jupyterhub deployment, i.e. 
the bit before `hub/` or `user/`,
+  as set by `c.JupyterHub.base_url` (default: `/`)
+- JUPYTERHUB_API_TOKEN - the API token the server can use to make requests to the Hub.
+  This is also the OAuth client secret.
+- JUPYTERHUB_CLIENT_ID - the OAuth client ID for authenticating visitors.
+- JUPYTERHUB_OAUTH_CALLBACK_URL - the callback URL to use in OAuth, typically `/user/:name/oauth_callback`
+- JUPYTERHUB_OAUTH_ACCESS_SCOPES - the scopes required to access the server (called JUPYTERHUB_OAUTH_SCOPES prior to 3.0)
+- JUPYTERHUB_OAUTH_CLIENT_ALLOWED_SCOPES - the scopes the service is allowed to request.
+  If no scopes are requested explicitly, these scopes will be requested.
+
+Optional environment variables, depending on configuration:
+
+- JUPYTERHUB_SSL_[KEYFILE|CERTFILE|CLIENT_CA] - SSL configuration, when `internal_ssl` is enabled
+- JUPYTERHUB_ROOT_DIR - the root directory of the server (notebook directory), when `Spawner.notebook_dir` is defined (new in 2.0)
+- JUPYTERHUB_DEFAULT_URL - the default URL for the server (for redirects from `/user/:name/`),
+  if `Spawner.default_url` is defined
+  (new in 2.0, previously passed via CLI)
+- JUPYTERHUB_DEBUG=1 - generic debug flag, sets maximum log level when `Spawner.debug` is True
+  (new in 2.0, previously passed via CLI)
+- JUPYTERHUB_DISABLE_USER_CONFIG=1 - disable loading user config,
+  when `Spawner.disable_user_config` is True (new in 2.0,
+  previously passed via CLI)
+
+- JUPYTERHUB_[MEM|CPU]_[LIMIT|GUARANTEE] - the values of CPU and memory limits and guarantees.
+  These are not expected to be enforced by the process,
+  but are made available as a hint,
+  e.g. for resource monitoring extensions.
 ## Spawners, resource limits, and guarantees (Optional)
For the limits and guarantees to be useful, **the spawner must implement -support for them**. For example, LocalProcessSpawner, the default +support for them**. For example, `LocalProcessSpawner`, the default spawner, does not support limits and guarantees. One of the spawners that supports limits and guarantees is the [`systemdspawner`](https://github.com/jupyterhub/systemdspawner). ### Memory Limits & Guarantees -`c.Spawner.mem_limit`: A **limit** specifies the *maximum amount of memory* +`c.Spawner.mem_limit`: A **limit** specifies the _maximum amount of memory_ that may be allocated, though there is no promise that the maximum amount will be available. In supported spawners, you can set `c.Spawner.mem_limit` to limit the total amount of memory that a single-user notebook server can @@ -235,8 +353,8 @@ allocate. Attempting to use more memory than this limit will cause errors. The single-user notebook server can discover its own memory limit by looking at the environment variable `MEM_LIMIT`, which is specified in absolute bytes. -`c.Spawner.mem_guarantee`: Sometimes, a **guarantee** of a *minimum amount of -memory* is desirable. In this case, you can set `c.Spawner.mem_guarantee` to +`c.Spawner.mem_guarantee`: Sometimes, a **guarantee** of a _minimum amount of +memory_ is desirable. In this case, you can set `c.Spawner.mem_guarantee` to to provide a guarantee that at minimum this much memory will always be available for the single-user notebook server to use. The environment variable `MEM_GUARANTEE` will also be set in the single-user notebook server. @@ -250,7 +368,7 @@ limits or guarantees are provided, and no environment values are set. `c.Spawner.cpu_limit`: In supported spawners, you can set `c.Spawner.cpu_limit` to limit the total number of cpu-cores that a single-user notebook server can use. These can be fractional - `0.5` means 50% -of one CPU core, `4.0` is 4 cpu-cores, etc. This value is also set in the +of one CPU core, `4.0` is 4 CPU-cores, etc. 
This value is also set in the single-user notebook server's environment variable `CPU_LIMIT`. The limit does not claim that you will be able to use all the CPU up to your limit as other higher priority applications might be taking up CPU. @@ -271,7 +389,7 @@ utilize these certs, there are two methods of interest on the base `Spawner` class: `.create_certs` and `.move_certs`. The first method, `.create_certs` will sign a key-cert pair using an internally -trusted authority for notebooks. During this process, `.create_certs` can +trusted authority for notebooks. During this process, `.create_certs` can apply `ip` and `dns` name information to the cert via an `alt_names` `kwarg`. This is used for certificate authentication (verification). Without proper verification, the `Notebook` will be unable to communicate with the `Hub` and diff --git a/docs/source/reference/technical-overview.md b/docs/source/reference/technical-overview.md index a1577701..b0ac6a51 100644 --- a/docs/source/reference/technical-overview.md +++ b/docs/source/reference/technical-overview.md @@ -2,7 +2,7 @@ The **Technical Overview** section gives you a high-level view of: -- JupyterHub's Subsystems: Hub, Proxy, Single-User Notebook Server +- JupyterHub's major Subsystems: Hub, Proxy, Single-User Notebook Server - how the subsystems interact - the process from JupyterHub access to user login - JupyterHub's default behavior @@ -11,16 +11,16 @@ The **Technical Overview** section gives you a high-level view of: The goal of this section is to share a deeper technical understanding of JupyterHub and how it works. -## The Subsystems: Hub, Proxy, Single-User Notebook Server +## The Major Subsystems: Hub, Proxy, Single-User Notebook Server -JupyterHub is a set of processes that together provide a single user Jupyter -Notebook server for each person in a group. 
Three major subsystems are started +JupyterHub is a set of processes that together, provide a single-user Jupyter +Notebook server for each person in a group. Three subsystems are started by the `jupyterhub` command line program: - **Hub** (Python/Tornado): manages user accounts, authentication, and - coordinates Single User Notebook Servers using a Spawner. + coordinates Single User Notebook Servers using a [Spawner](./spawners.md). -- **Proxy**: the public facing part of JupyterHub that uses a dynamic proxy +- **Proxy**: the public-facing part of JupyterHub that uses a dynamic proxy to route HTTP requests to the Hub and Single User Notebook Servers. [configurable http proxy](https://github.com/jupyterhub/configurable-http-proxy) (node-http-proxy) is the default proxy. @@ -28,7 +28,7 @@ by the `jupyterhub` command line program: - **Single-User Notebook Server** (Python/Tornado): a dedicated, single-user, Jupyter Notebook server is started for each user on the system when the user logs in. The object that starts the single-user notebook - servers is called a **Spawner**. + servers is called a **[Spawner](./spawners.md)**. ![JupyterHub subsystems](../images/jhub-parts.png) @@ -41,8 +41,8 @@ The basic principles of operation are: - The Hub spawns the proxy (in the default JupyterHub configuration) - The proxy forwards all requests to the Hub by default -- The Hub handles login, and spawns single-user notebook servers on demand -- The Hub configures the proxy to forward url prefixes to single-user notebook +- The Hub handles login and spawns single-user notebook servers on demand +- The Hub configures the proxy to forward URL prefixes to single-user notebook servers The proxy is the only process that listens on a public interface. The Hub sits @@ -50,17 +50,16 @@ behind the proxy at `/hub`. Single-user servers sit behind the proxy at `/user/[username]`. Different **[authenticators](./authenticators.md)** control access -to JupyterHub. 
The default one (PAM) uses the user accounts on the server where +to JupyterHub. The default one [(PAM)](https://en.wikipedia.org/wiki/Pluggable_authentication_module) uses the user accounts on the server where JupyterHub is running. If you use this, you will need to create a user account -on the system for each user on your team. Using other authenticators, you can +on the system for each user on your team. However, using other authenticators you can allow users to sign in with e.g. a GitHub account, or with any single-sign-on system your organization has. Next, **[spawners](./spawners.md)** control how JupyterHub starts the individual notebook server for each user. The default spawner will start a notebook server on the same machine running under their system username. -The other main option is to start each server in a separate container, often -using Docker. +The other main option is to start each server in a separate container, often using [Docker](https://jupyterhub-dockerspawner.readthedocs.io/en/latest/). ## The Process from JupyterHub Access to User Login @@ -72,20 +71,20 @@ When a user accesses JupyterHub, the following events take place: - A single-user notebook server instance is [spawned](./spawners.md) for the logged-in user - When the single-user notebook server starts, the proxy is notified to forward - requests to `/user/[username]/*` to the single-user notebook server. -- A cookie is set on `/hub/`, containing an encrypted token. (Prior to version + requests made to `/user/[username]/*`, to the single-user notebook server. +- A [cookie](https://en.wikipedia.org/wiki/HTTP_cookie) is set on `/hub/`, containing an encrypted token. (Prior to version 0.8, a cookie for `/user/[username]` was used too.) - The browser is redirected to `/user/[username]`, and the request is handled by the single-user notebook server. 
-The single-user server identifies the user with the Hub via OAuth: +How does the single-user server identify the user with the Hub via OAuth? -- on request, the single-user server checks a cookie -- if no cookie is set, redirect to the Hub for verification via OAuth -- after verification at the Hub, the browser is redirected back to the +- On request, the single-user server checks a cookie +- If no cookie is set, the single-user server redirects to the Hub for verification via OAuth +- After verification at the Hub, the browser is redirected back to the single-user server -- the token is verified and stored in a cookie -- if no user is identified, the browser is redirected back to `/hub/login` +- The token is verified and stored in a cookie +- If no user is identified, the browser is redirected back to `/hub/login` ## Default Behavior @@ -111,7 +110,7 @@ working directory: This file needs to persist so that a **Hub** server restart will avoid invalidating cookies. Conversely, deleting this file and restarting the server effectively invalidates all login cookies. The cookie secret file is discussed - in the [Cookie Secret section of the Security Settings document](../getting-started/security-basics.md). + in the [Cookie Secret section of the Security Settings document](../getting-started/security-basics.rst). The location of these files can be specified via configuration settings. It is recommended that these files be stored in standard UNIX filesystem locations, diff --git a/docs/source/reference/templates.md b/docs/source/reference/templates.md index d820c099..f29708b9 100644 --- a/docs/source/reference/templates.md +++ b/docs/source/reference/templates.md @@ -1,8 +1,8 @@ # Working with templates and UI The pages of the JupyterHub application are generated from -[Jinja](http://jinja.pocoo.org/) templates. These allow the header, for -example, to be defined once and incorporated into all pages. By providing +[Jinja](http://jinja.pocoo.org/) templates. 
These allow the header, for +example, to be defined once and incorporated into all pages. By providing your own templates, you can have complete control over JupyterHub's appearance. @@ -10,7 +10,7 @@ appearance. JupyterHub will look for custom templates in all of the paths in the `JupyterHub.template_paths` configuration option, falling back on the -[default templates](https://github.com/jupyterhub/jupyterhub/tree/master/share/jupyterhub/templates) +[default templates](https://github.com/jupyterhub/jupyterhub/tree/HEAD/share/jupyterhub/templates) if no custom template with that name is found. This fallback behavior is new in version 0.9; previous versions searched only those paths explicitly included in `template_paths`. You may override as many @@ -20,8 +20,8 @@ or as few templates as you desire. Jinja provides a mechanism to [extend templates](http://jinja.pocoo.org/docs/2.10/templates/#template-inheritance). A base template can define a `block`, and child templates can replace or -supplement the material in the block. The -[JupyterHub templates](https://github.com/jupyterhub/jupyterhub/tree/master/share/jupyterhub/templates) +supplement the material in the block. The +[JupyterHub templates](https://github.com/jupyterhub/jupyterhub/tree/HEAD/share/jupyterhub/templates) make extensive use of blocks, which allows you to customize parts of the interface easily. @@ -32,8 +32,8 @@ In general, a child template can extend a base template, `page.html`, by beginni ``` This works, unless you are trying to extend the default template for the same -file name. Starting in version 0.9, you may refer to the base file with a -`templates/` prefix. Thus, if you are writing a custom `page.html`, start the +file name. Starting in version 0.9, you may refer to the base file with a +`templates/` prefix. 
Thus, if you are writing a custom `page.html`, start the file with this block: ```html @@ -41,7 +41,7 @@ file with this block: ``` By defining `block`s with same name as in the base template, child templates -can replace those sections with custom content. The content from the base +can replace those sections with custom content. The content from the base template can be included with the `{{ super() }}` directive. ### Example @@ -52,10 +52,7 @@ text about the server starting up, place this content in a file named `JupyterHub.template_paths` configuration option. ```html -{% extends "templates/spawn_pending.html" %} - -{% block message %} -{{ super() }} +{% extends "templates/spawn_pending.html" %} {% block message %} {{ super() }}

Patience is a virtue.

{% endblock %} ``` @@ -69,9 +66,8 @@ To add announcements to be displayed on a page, you have two options: ### Announcement Configuration Variables -If you set the configuration variable `JupyterHub.template_vars = -{'announcement': 'some_text'}`, the given `some_text` will be placed on -the top of all pages. The more specific variables +If you set the configuration variable `JupyterHub.template_vars = {'announcement': 'some_text'}`, the given `some_text` will be placed on +the top of all pages. The more specific variables `announcement_login`, `announcement_spawn`, `announcement_home`, and `announcement_logout` are more specific and only show on their respective pages (overriding the global `announcement` variable). @@ -79,15 +75,14 @@ Note that changing these variables require a restart, unlike direct template extension. You can get the same effect by extending templates, which allows you -to update the messages without restarting. Set +to update the messages without restarting. Set `c.JupyterHub.template_paths` as mentioned above, and then create a template (for example, `login.html`) with: ```html -{% extends "templates/login.html" %} -{% set announcement = 'some message' %} +{% extends "templates/login.html" %} {% set announcement = 'some message' %} ``` Extending `page.html` puts the message on all pages, but note that -extending `page.html` take precedence over an extension of a specific +extending `page.html` takes precedence over an extension of a specific page (unlike the variable-based approach above). diff --git a/docs/source/reference/urls.md b/docs/source/reference/urls.md index 53aefe4b..1ccc6e84 100644 --- a/docs/source/reference/urls.md +++ b/docs/source/reference/urls.md @@ -2,17 +2,15 @@ This document describes how JupyterHub routes requests. -This does not include the [REST API](./rest.md) urls. +This does not include the [REST API](./rest.md) URLs. 
In general, all URLs can be prefixed with `c.JupyterHub.base_url` to run the whole JupyterHub application on a prefix. -All authenticated handlers redirect to `/hub/login` to login users -prior to being redirected back to the originating page. +All authenticated handlers redirect to `/hub/login` to log-in users +before being redirected back to the originating page. The returned request should preserve all query parameters. - - ## `/` The top-level request is always a simple redirect to `/hub/`, @@ -27,12 +25,12 @@ This is an authenticated URL. This handler redirects users to the default URL of the application, which defaults to the user's default server. -That is, it redirects to `/hub/spawn` if the user's server is not running, -or the server itself (`/user/:name`) if the server is running. +That is, the handler redirects to `/hub/spawn` if the user's server is not running, +or to the server itself (`/user/:name`) if the server is running. -This default url behavior can be customized in two ways: +This default URL behavior can be customized in two ways: -To redirect users to the JupyterHub home page (`/hub/home`) +First, to redirect users to the JupyterHub home page (`/hub/home`) instead of spawning their server, set `redirect_to_server` to False: @@ -42,7 +40,7 @@ c.JupyterHub.redirect_to_server = False This might be useful if you have a Hub where you expect users to be managing multiple server configurations -and automatic spawning is not desirable. +but automatic spawning is not desirable. Second, you can customise the landing page to any page you like, such as a custom service you have deployed e.g. with course information: @@ -59,42 +57,42 @@ By default, the Hub home page has just one or two buttons for starting and stopping the user's server. If named servers are enabled, there will be some additional -tools for management of named servers. +tools for management of the named servers. -*Version added: 1.0* named server UI is new in 1.0. 
+_Version added: 1.0_ named server UI is new in 1.0. ## `/hub/login` This is the JupyterHub login page. If you have a form-based username+password login, -such as the default PAMAuthenticator, +such as the default [PAMAuthenticator](https://en.wikipedia.org/wiki/Pluggable_authentication_module), this page will render the login form. ![A login form](../images/login-form.png) If login is handled by an external service, e.g. with OAuth, this page will have a button, -declaring "Login with ..." which users can click -to login with the chosen service. +declaring "Log in with ..." which users can click +to log in with the chosen service. ![A login redirect button](../images/login-button.png) -If you want to skip the user-interaction to initiate logging in -via the button, you can set +If you want to skip the user interaction and initiate login +via the button, you can set: ```python c.Authenticator.auto_login = True ``` -This can be useful when the user is "already logged in" via some mechanism, -but a handshake via redirects is necessary to complete the authentication with JupyterHub. +This can be useful when the user is "already logged in" via some mechanism. +However, a handshake via `redirects` is necessary to complete the authentication with JupyterHub. ## `/hub/logout` -Visiting `/hub/logout` clears cookies from the current browser. +Visiting `/hub/logout` clears [cookies](https://en.wikipedia.org/wiki/HTTP_cookie) from the current browser. Note that **logging out does not stop a user's server(s)** by default. -If you would like to shutdown user servers on logout, +If you would like to shut down user servers on logout, you can enable this behavior with: ```python @@ -107,11 +105,11 @@ does not mean the user is no longer actively using their server from another mac ## `/user/:username[/:servername]` If a user's server is running, this URL is handled by the user's given server, -not the Hub. 
-The username is the first part and, if using named servers, +not by the Hub. +The username is the first part, and if using named servers, the server name is the second part. -If the user's server is *not* running, this will be redirected to `/hub/user/:username/...` +If the user's server is _not_ running, this will be redirected to `/hub/user/:username/...` ## `/hub/user/:username[/:servername]` @@ -120,8 +118,14 @@ This URL indicates a request for a user server that is not running if the specified server were running). Handling this URL depends on two conditions: whether a requested user is found -as a match and the state of the requested user's notebook server. +as a match and the state of the requested user's notebook server, +for example: +1. the server is not active + a. user matches + b. user doesn't match +2. the server is ready +3. the server is pending, but not ready If the server is pending spawn, the browser will be redirected to `/hub/spawn-pending/:username/:servername` @@ -137,39 +141,37 @@ Some checks are performed and a delay is added before redirecting back to `/user If something is really wrong, this can result in a redirect loop. Visiting this page will never result in triggering the spawn of servers -without additional user action (i.e. clicking the link on the page) +without additional user action (i.e. clicking the link on the page). ![Visiting a URL for a server that's not running](../images/not-running.png) -*Version changed: 1.0* +_Version changed: 1.0_ -Prior to 1.0, this URL itself was responsible for spawning servers, -and served the progress page if it was pending, -redirected to running servers, and -This was useful because it made sure that requested servers were restarted after they stopped, -but could also be harmful because unused servers would continuously be restarted if e.g. -an idle JupyterLab frontend were open pointed at it, -which constantly makes polling requests. 
+Prior to 1.0, this URL itself was responsible for spawning servers. +It served the progress page while a spawn was pending, and redirected to servers that were already running. +This was useful because it made sure that the requested servers were restarted after they stopped. +However, it could also be harmful because unused servers would continuously be restarted if e.g. +an idle JupyterLab frontend that constantly makes polling requests was left open and pointed at it. ### Special handling of API requests Requests to `/user/:username[/:servername]/api/...` are assumed to be from applications connected to stopped servers. -These are failed with 503 and an informative JSON error message -indicating how to spawn the server. -This is meant to help applications such as JupyterLab +These requests fail with a `503` status code and an informative JSON error message +that indicates how to spawn the server. +This is meant to help applications such as JupyterLab, that are connected to a server that has stopped. -*Version changed: 1.0* +_Version changed: 1.0_ -JupyterHub 0.9 failed these API requests with status 404, -but 1.0 uses 503. +JupyterHub version 0.9 failed these API requests with status `404`, +but version 1.0 uses `503`. ## `/user-redirect/...` -This URL is for sharing a URL that will redirect a user +The `/user-redirect/...` URL is for sharing a URL that will redirect a user to a path on their own default server. -This is useful when users have the same file at the same URL on their servers, +This is useful when different users have the same file at the same URL on their servers, and you want a single link to give to any user that will open that file on their server. e.g. a link to `/user-redirect/notebooks/Index.ipynb` @@ -191,7 +193,7 @@ that is intended to make it possible. ### `/hub/spawn[/:username[/:servername]]` Requesting `/hub/spawn` will spawn the default server for the current user.
-If `username` and optionally `servername` are specified, +If the `username` and optionally `servername` are specified, then the specified server for the specified user will be spawned. Once spawn has been requested, the browser is redirected to `/hub/spawn-pending/...`. @@ -202,12 +204,12 @@ and a POST request will trigger the actual spawn and redirect. ![The spawn form](../images/spawn-form.png) -*Version added: 1.0* +_Version added: 1.0_ -1.0 adds the ability to specify username and servername. +1.0 adds the ability to specify `username` and `servername`. Prior to 1.0, only `/hub/spawn` was recognized for the default server. -*Version changed: 1.0* +_Version changed: 1.0_ Prior to 1.0, this page redirected back to `/hub/user/:username`, which was responsible for triggering spawn and rendering progress, etc. @@ -216,7 +218,7 @@ which was responsible for triggering spawn and rendering progress, etc. ![The spawn pending page](../images/spawn-pending.png) -*Version added: 1.0* this URL is new in JupyterHub 1.0. +_Version added: 1.0_ this URL is new in JupyterHub 1.0. This page renders the progress view for the given spawn request. Once the server is ready, @@ -244,7 +246,7 @@ against the [JupyterHub REST API](./rest.md). Administrators can take various administrative actions from this page: -1. add/remove users -2. grant admin privileges -3. start/stop user servers -4. shutdown JupyterHub itself +- add/remove users +- grant admin privileges +- start/stop user servers +- shutdown JupyterHub itself diff --git a/docs/source/reference/websecurity.md b/docs/source/reference/websecurity.md index b9b1df68..67d9e8f2 100644 --- a/docs/source/reference/websecurity.md +++ b/docs/source/reference/websecurity.md @@ -5,24 +5,24 @@ The **Security Overview** section helps you learn about: - the design of JupyterHub with respect to web security - the semi-trusted user - the available mitigations to protect untrusted users from each other -- the value of periodic security audits. 
+- the value of periodic security audits This overview also helps you obtain a deeper understanding of how JupyterHub works. ## Semi-trusted and untrusted users -JupyterHub is designed to be a *simple multi-user server for modestly sized -groups* of **semi-trusted** users. While the design reflects serving semi-trusted +JupyterHub is designed to be a _simple multi-user server for modestly sized +groups_ of **semi-trusted** users. While the design reflects serving semi-trusted users, JupyterHub is not necessarily unsuitable for serving **untrusted** users. -Using JupyterHub with **untrusted** users does mean more work by the +Using JupyterHub with **untrusted** users does mean more work for the administrator. Much care is required to secure a Hub, with extra caution on -protecting users from each other as the Hub is serving untrusted users. +protecting users from each other, since the Hub serves untrusted users. -One aspect of JupyterHub's *design simplicity* for **semi-trusted** users is that -the Hub and single-user servers are placed in a *single domain*, behind a -[*proxy*][configurable-http-proxy]. If the Hub is serving untrusted +One aspect of JupyterHub's _design simplicity_ for **semi-trusted** users is that +the Hub and single-user servers are placed in a _single domain_, behind a +[_proxy_][configurable-http-proxy]. If the Hub is serving untrusted users, many of the web's cross-site protections are not applied between single-user servers and the Hub, or between single-user servers and each other, since browsers see the whole thing (proxy, Hub, and single user @@ -32,7 +32,7 @@ servers) as a single website (i.e. single domain). To protect users from each other, a user must **never** be able to write arbitrary HTML and serve it to another user on the Hub's domain. 
JupyterHub's -authentication setup prevents a user writing arbitrary HTML and serving it to +authentication setup prevents a user from writing arbitrary HTML and serving it to another user because only the owner of a given single-user notebook server is allowed to view user-authored pages served by the given single-user notebook server. @@ -40,25 +40,25 @@ server. To protect all users from each other, JupyterHub administrators must ensure that: -* A user **does not have permission** to modify their single-user notebook server, +- A user **does not have permission** to modify their single-user notebook server, including: - A user **may not** install new packages in the Python environment that runs their single-user server. - If the `PATH` is used to resolve the single-user executable (instead of using an absolute path), a user **may not** create new files in any `PATH` directory that precedes the directory containing `jupyterhub-singleuser`. - - A user may not modify environment variables (e.g. PATH, PYTHONPATH) for + - A user may not modify environment variables (e.g. `PATH`, `PYTHONPATH`) for their single-user server. -* A user **may not** modify the configuration of the notebook server +- A user **may not** modify the configuration of the notebook server (the `~/.jupyter` or `JUPYTER_CONFIG_DIR` directory). If any additional services are run on the same domain as the Hub, the services -**must never** display user-authored HTML that is neither *sanitized* nor *sandboxed* +**must never** display user-authored HTML that is neither _sanitized_ nor _sandboxed_ (e.g. IFramed) to any user that lacks authentication as the author of a file. ## Mitigate security issues -Several approaches to mitigating these issues with configuration +The several approaches to mitigating security issues with configuration options provided by JupyterHub include: ### Enable subdomains @@ -76,16 +76,16 @@ resolves the cross-site issues. 
### Disable user config -If subdomains are not available or not desirable, JupyterHub provides a +If subdomains are unavailable or undesirable, JupyterHub provides a configuration option `Spawner.disable_user_config`, which can be set to prevent the user-owned configuration files from being loaded. After implementing this -option, PATHs and package installation and PATHs are the other things that the +option, `PATH`s and package installation are the other things that the admin must enforce. ### Prevent spawners from evaluating shell configuration files For most Spawners, `PATH` is not something users can influence, but care should -be taken to ensure that the Spawner does *not* evaluate shell configuration +be taken to ensure that the Spawner does _not_ evaluate shell configuration files prior to launching the server. ### Isolate packages using virtualenv @@ -101,8 +101,8 @@ pose additional risk to the web application's security. ### Encrypt internal connections with SSL/TLS -By default, all communication on the server, between the proxy, hub, and single --user notebooks is performed unencrypted. Setting the `internal_ssl` flag in +By default, all communications on the server, between the proxy, hub, and single +-user notebooks are performed unencrypted. Setting the `internal_ssl` flag in `jupyterhub_config.py` secures the aforementioned routes. Turning this feature on does require that the enabled `Spawner` can use the certificates generated by the `Hub` (the default `LocalProcessSpawner` can, for instance). @@ -119,19 +119,18 @@ extend to securing the `tcp` sockets as well. ## Security audits We recommend that you do periodic reviews of your deployment's security. It's -good practice to keep JupyterHub, configurable-http-proxy, and nodejs -versions up to date. +good practice to keep [JupyterHub](https://readthedocs.org/projects/jupyterhub/), [configurable-http-proxy][], and [nodejs +versions](https://github.com/nodejs/Release) up to date. 
A handy website for testing your deployment is [Qualsys' SSL analyzer tool](https://www.ssllabs.com/ssltest/analyze.html). - [configurable-http-proxy]: https://github.com/jupyterhub/configurable-http-proxy ## Vulnerability reporting -If you believe you’ve found a security vulnerability in JupyterHub, or any +If you believe you have found a security vulnerability in JupyterHub, or any Jupyter project, please report it to -[security@ipython.org](mailto:security@iypthon.org). If you prefer to encrypt +[security@ipython.org](mailto:security@ipython.org). If you prefer to encrypt your security reports, you can use [this PGP public key](https://jupyter-notebook.readthedocs.io/en/stable/_downloads/ipython_security.asc). diff --git a/docs/source/troubleshooting.md b/docs/source/troubleshooting.md index 270a26de..ea4c7706 100644 --- a/docs/source/troubleshooting.md +++ b/docs/source/troubleshooting.md @@ -1,32 +1,9 @@ # Troubleshooting When troubleshooting, you may see unexpected behaviors or receive an error -message. This section provide links for identifying the cause of the +message. This section provides links for identifying the cause of the problem and how to resolve it. -[*Behavior*](#behavior) -- JupyterHub proxy fails to start -- sudospawner fails to run -- What is the default behavior when none of the lists (admin, whitelist, - group whitelist) are set? -- JupyterHub Docker container not accessible at localhost - -[*Errors*](#errors) -- 500 error after spawning my single-user server - -[*How do I...?*](#how-do-i) -- Use a chained SSL certificate -- Install JupyterHub without a network connection -- I want access to the whole filesystem, but still default users to their home directory -- How do I increase the number of pySpark executors on YARN? -- How do I use JupyterLab's prerelease version with JupyterHub? -- How do I set up JupyterHub for a workshop (when users are not known ahead of time)? -- How do I set up rotating daily logs? 
-- Toree integration with HDFS rack awareness script -- Where do I find Docker images and Dockerfiles related to JupyterHub? - -[*Troubleshooting commands*](#troubleshooting-commands) - ## Behavior ### JupyterHub proxy fails to start @@ -34,12 +11,12 @@ problem and how to resolve it. If you have tried to start the JupyterHub proxy and it fails to start: - check if the JupyterHub IP configuration setting is - ``c.JupyterHub.ip = '*'``; if it is, try ``c.JupyterHub.ip = ''`` -- Try starting with ``jupyterhub --ip=0.0.0.0`` + `c.JupyterHub.ip = '*'`; if it is, try `c.JupyterHub.ip = ''` +- Try starting with `jupyterhub --ip=0.0.0.0` -**Note**: If this occurs on Ubuntu/Debian, check that the you are using a -recent version of node. Some versions of Ubuntu/Debian come with a version -of node that is very old, and it is necessary to update node. +**Note**: If this occurs on Ubuntu/Debian, check that you are using a +recent version of [Node](https://nodejs.org). Some versions of Ubuntu/Debian come with a very old version +of Node and it is necessary to update Node. ### sudospawner fails to run @@ -55,32 +32,62 @@ or add: to the config file, `jupyterhub_config.py`. -### What is the default behavior when none of the lists (admin, whitelist, group whitelist) are set? +### What is the default behavior when none of the lists (admin, allowed, allowed groups) are set? When nothing is given for these lists, there will be no admins, and all users -who can authenticate on the system (i.e. all the unix users on the server with -a password) will be allowed to start a server. The whitelist lets you limit -this to a particular set of users, and the admin_users lets you specify who +who can authenticate on the system (i.e. all the Unix users on the server with +a password) will be allowed to start a server. 
The allowed username set lets you limit +this to a particular set of users, and admin_users lets you specify who among them may use the admin interface (not necessary, unless you need to do -things like inspect other users' servers, or modify the userlist at runtime). +things like inspect other users' servers or modify the user list at runtime). -### JupyterHub Docker container not accessible at localhost +### JupyterHub Docker container is not accessible at localhost -Even though the command to start your Docker container exposes port 8000 -(`docker run -p 8000:8000 -d --name jupyterhub jupyterhub/jupyterhub jupyterhub`), -it is possible that the IP address itself is not accessible/visible. As a result -when you try http://localhost:8000 in your browser, you are unable to connect -even though the container is running properly. One workaround is to explicitly -tell Jupyterhub to start at `0.0.0.0` which is visible to everyone. Try this -command: +Even though the command to start your Docker container exposes port 8000 +(`docker run -p 8000:8000 -d --name jupyterhub jupyterhub/jupyterhub jupyterhub`), +it is possible that the IP address itself is not accessible/visible. As a result, +when you try http://localhost:8000 in your browser, you are unable to connect +even though the container is running properly. One workaround is to explicitly +tell Jupyterhub to start at `0.0.0.0` which is visible to everyone. Try this +command: `docker run -p 8000:8000 -d --name jupyterhub jupyterhub/jupyterhub jupyterhub --ip 0.0.0.0 --port 8000` +### How can I kill ports from JupyterHub-managed services that have been orphaned? + +I started JupyterHub + nbgrader on the same host without containers. When I try to restart JupyterHub + nbgrader with this configuration, errors appear that the service accounts cannot start because the ports are being used. + +How can I kill the processes that are using these ports? 
+ +Run the following command: + + sudo kill -9 $(sudo lsof -t -i:) + +Where `` is the port used by the nbgrader course service. This configuration is specified in `jupyterhub_config.py`. + +### Why am I getting a Spawn failed error message? + +After successfully logging in to JupyterHub with a compatible authenticator, I get a 'Spawn failed' error message in the browser. The JupyterHub logs have `jupyterhub KeyError: "getpwnam(): name not found: `. + +This issue occurs when the authenticator requires a local system user to exist. In these cases, you need to use a spawner +that does not require an existing system user account, such as `DockerSpawner` or `KubeSpawner`. + +### How can I run JupyterHub with sudo but use my current environment variables and virtualenv location? + +When launching JupyterHub with `sudo jupyterhub` I get import errors and my environment variables don't work. + +When launching services with `sudo ...` the shell won't have the same environment variables or `PATH`s in place. The most direct way to solve this issue is to use the full path to your python environment and add environment variables. For example: + +```bash +sudo MY_ENV=abc123 \ + /home/foo/venv/bin/python3 \ + /srv/jupyterhub/jupyterhub +``` ## Errors -### 500 error after spawning my single-user server +### Error 500 after spawning my single-user server -You receive a 500 error when accessing the URL `/user//...`. +You receive a 500 error while accessing the URL `/user//...`. This is often seen when your single-user server cannot verify your user cookie with the Hub. @@ -88,11 +95,11 @@ There are two likely reasons for this: 1. The single-user server cannot connect to the Hub's API (networking configuration problems) -2. The single-user server cannot *authenticate* its requests (invalid token) +2. 
The single-user server cannot _authenticate_ its requests (invalid token) #### Symptoms -The main symptom is a failure to load *any* page served by the single-user +The main symptom is a failure to load _any_ page served by the single-user server, met with a 500 error. This is typically the first page at `/user/` after logging in or clicking "Start my server". When a single-user notebook server receives a request, the notebook server makes an API request to the Hub to @@ -108,7 +115,7 @@ You should see a similar 200 message, as above, in the Hub log when you first visit your single-user notebook server. If you don't see this message in the log, it may mean that your single-user notebook server isn't connecting to your Hub. -If you see 403 (forbidden) like this, it's a token problem: +If you see 403 (forbidden) like this, it's likely a token problem: ``` 403 GET /hub/api/authorizations/cookie/jupyterhub-token-name/[secret] (@10.0.1.4) 4.14ms @@ -138,10 +145,10 @@ If you receive a 403 error, the API token for the single-user server is likely invalid. Commonly, the 403 error is caused by resetting the JupyterHub database (either removing jupyterhub.sqlite or some other action) while leaving single-user servers running. This happens most frequently when using -DockerSpawner, because Docker's default behavior is to stop/start containers -which resets the JupyterHub database, rather than destroying and recreating +DockerSpawner because Docker's default behavior is to stop/start containers +that reset the JupyterHub database, rather than destroying and recreating the container every time. This means that the same API token is used by the -server for its whole life, until the container is rebuilt. +server for its whole life until the container is rebuilt. 
The fix for this Docker case is to remove any Docker containers seeing this issue (typically all containers created before a certain point in time): @@ -152,6 +159,42 @@ After this, when you start your server via JupyterHub, it will build a new container. If this was the underlying cause of the issue, you should see your server again. +##### Proxy settings (403 GET) + +When your whole JupyterHub sits behind an organization proxy (_not_ a reverse proxy like NGINX as part of your setup and _not_ the configurable-http-proxy) the environment variables `HTTP_PROXY`, `HTTPS_PROXY`, `http_proxy`, and `https_proxy` might be set. This confuses the JupyterHub single-user servers: When connecting to the Hub for authorization they connect via the proxy instead of directly connecting to the Hub on localhost. The proxy might deny the request (403 GET). This results in the single-user server thinking it has the wrong auth token. To circumvent this you should add `,,localhost,127.0.0.1` to the environment variables `NO_PROXY` and `no_proxy`. + +### Launching Jupyter Notebooks to run as an externally managed JupyterHub service with the `jupyterhub-singleuser` command returns a `JUPYTERHUB_API_TOKEN` error + +[JupyterHub services](https://jupyterhub.readthedocs.io/en/stable/reference/services.html) allow processes to interact with JupyterHub's REST API. Example use-cases include: + +- **Secure Testing**: provide a canonical Jupyter Notebook for testing production data to reduce the number of entry points into production systems. +- **Grading Assignments**: provide access to shared Jupyter Notebooks that may be used for management tasks such as grading assignments. +- **Private Dashboards**: share dashboards with certain group members. + +If possible, try to run the Jupyter Notebook as an externally managed service with one of the provided [jupyter/docker-stacks](https://github.com/jupyter/docker-stacks). 
+ +Standard JupyterHub installations include a [jupyterhub-singleuser](https://github.com/jupyterhub/jupyterhub/blob/9fdab027daa32c9017845572ad9d5ba1722dbc53/setup.py#L116) command which is built from the `jupyterhub.singleuser:main` method. The `jupyterhub-singleuser` command is the default command when JupyterHub launches single-user Jupyter Notebooks. One of the goals of this command is to make sure the version of JupyterHub installed within the Jupyter Notebook coincides with the version of the JupyterHub server itself. + +If you launch a Jupyter Notebook with the `jupyterhub-singleuser` command directly from the command line the Jupyter Notebook won't have access to the `JUPYTERHUB_API_TOKEN` and will return: + +``` + JUPYTERHUB_API_TOKEN env is required to run jupyterhub-singleuser. + Did you launch it manually? +``` + +If you plan on testing `jupyterhub-singleuser` independently from JupyterHub, then you can set the API token environment variable. For example, if you were to run the single-user Jupyter Notebook on the host, then: + + export JUPYTERHUB_API_TOKEN=my_secret_token + jupyterhub-singleuser + +With a docker container, pass in the environment variable with the run command: + + docker run -d \ + -p 8888:8888 \ + -e JUPYTERHUB_API_TOKEN=my_secret_token \ + jupyter/datascience-notebook:latest + +[This example](https://github.com/jupyterhub/jupyterhub/tree/HEAD/examples/service-notebook/external) demonstrates how to combine the use of the `jupyterhub-singleuser` environment variables when launching a Notebook as an externally managed service. ## How do I...? @@ -170,11 +213,10 @@ You would then set in your `jupyterhub_config.py` file the `ssl_key` and c.JupyterHub.ssl_cert = your_host-chained.crt c.JupyterHub.ssl_key = your_host.key - #### Example Your certificate provider gives you the following files: `example_host.crt`, -`Entrust_L1Kroot.txt` and `Entrust_Root.txt`. +`Entrust_L1Kroot.txt`, and `Entrust_Root.txt`. 
Concatenate the files appending the chain cert and root cert to your host cert: @@ -193,7 +235,7 @@ where `ssl_cert` is example-chained.crt and ssl_key to your private key. Then restart JupyterHub. -See also [JupyterHub SSL encryption](getting-started.md#ssl-encryption). +See also {ref}`ssl-encryption`. ### Install JupyterHub without a network connection @@ -207,7 +249,7 @@ with npmbox: python3 -m pip wheel jupyterhub npmbox configurable-http-proxy -### I want access to the whole filesystem, but still default users to their home directory +### I want access to the whole filesystem and still default users to their home directory Setting the following in `jupyterhub_config.py` will configure access to the entire filesystem and set the default to the user's home directory. @@ -226,7 +268,7 @@ similar to this one: provides additional information. The [pySpark configuration documentation](https://spark.apache.org/docs/0.9.0/configuration.html) is also helpful for programmatic configuration examples. -### How do I use JupyterLab's prerelease version with JupyterHub? +### How do I use JupyterLab's pre-release version with JupyterHub? While JupyterLab is still under active development, we have had users ask about how to try out JupyterLab with JupyterHub. @@ -239,7 +281,7 @@ For instance: python3 -m pip install jupyterlab jupyter serverextension enable --py jupyterlab --sys-prefix -The important thing is that jupyterlab is installed and enabled in the +The important thing is that JupyterLab is installed and enabled in the single-user notebook server environment. For system users, this means system-wide, as indicated above. For Docker containers, it means inside the single-user docker image, etc. @@ -252,15 +294,14 @@ notebook servers to default to JupyterLab: ### How do I set up JupyterHub for a workshop (when users are not known ahead of time)? 1. Set up JupyterHub using OAuthenticator for GitHub authentication -2. 
Configure whitelist to be an empty list in` jupyterhub_config.py` -3. Configure admin list to have workshop leaders be listed with administrator privileges. +2. Configure the admin list to have workshop leaders listed with administrator privileges. -Users will need a GitHub account to login and be authenticated by the Hub. +Users will need a GitHub account to log in and be authenticated by the Hub. ### How do I set up rotating daily logs? You can do this with [logrotate](https://linux.die.net/man/8/logrotate), -or pipe to `logger` to use syslog instead of directly to a file. +or pipe to `logger` to use Syslog instead of directly to a file. For example, with this logrotate config file: @@ -281,35 +322,9 @@ Or use syslog: jupyterhub | logger -t jupyterhub - -## Troubleshooting commands - -The following commands provide additional detail about installed packages, -versions, and system information that may be helpful when troubleshooting -a JupyterHub deployment. The commands are: - -- System and deployment information - -```bash -jupyter troubleshooting -``` - -- Kernel information - -```bash -jupyter kernelspec list -``` - -- Debug logs when running JupyterHub - -```bash -jupyterhub --debug -``` - ### Toree integration with HDFS rack awareness script -The Apache Toree kernel will an issue, when running with JupyterHub, if the standard HDFS -rack awareness script is used. This will materialize in the logs as a repeated WARN: +The Apache Toree kernel will have an issue when running with JupyterHub if the standard HDFS rack awareness script is used. This will materialize in the logs as a repeated WARN: ```bash 16/11/29 16:24:20 WARN ScriptBasedMapping: Exception running /etc/hadoop/conf/topology_script.py some.ip.address @@ -324,16 +339,54 @@ SyntaxError: Missing parentheses in call to 'print' In order to resolve this issue, there are two potential options. 1. Update HDFS core-site.xml, so the parameter "net.topology.script.file.name" points to a custom -script (e.g. 
/etc/hadoop/conf/custom_topology_script.py). Copy the original script and change the first line point -to a python two installation (e.g. /usr/bin/python). + script (e.g. /etc/hadoop/conf/custom_topology_script.py). Copy the original script and change the first line point + to a python two installation (e.g. /usr/bin/python). 2. In spark-env.sh add a Python 2 installation to your path (e.g. export PATH=/opt/anaconda2/bin:$PATH). ### Where do I find Docker images and Dockerfiles related to JupyterHub? Docker images can be found at the [JupyterHub organization on DockerHub](https://hub.docker.com/u/jupyterhub/). The Docker image [jupyterhub/singleuser](https://hub.docker.com/r/jupyterhub/singleuser/) -provides an example single user notebook server for use with DockerSpawner. +provides an example single-user notebook server for use with DockerSpawner. -Additional single user notebook server images can be found at the [Jupyter +Additional single-user notebook server images can be found at the [Jupyter organization on DockerHub](https://hub.docker.com/r/jupyter/) and information about each image at the [jupyter/docker-stacks repo](https://github.com/jupyter/docker-stacks). + +### How can I view the logs for JupyterHub or the user's Notebook servers when using the DockerSpawner? + +Use `docker logs ` where `` is the container name defined within `docker-compose.yml`. For example, to view the logs of the JupyterHub container use: + + docker logs hub + +By default, the user's notebook server is named `jupyter-` where `username` is the user's username within JupyterHub's db. So if you wanted to see the logs for user `foo` you would use: + + docker logs jupyter-foo + +You can also tail logs to view them in real-time using the `-f` option: + + docker logs -f hub + +## Troubleshooting commands + +The following commands provide additional detail about installed packages, +versions, and system information that may be helpful when troubleshooting +a JupyterHub deployment. 
The commands are: + +- System and deployment information + +```bash +jupyter troubleshoot +``` + +- Kernel information + +```bash +jupyter kernelspec list +``` + +- Debug logs when running JupyterHub + +```bash +jupyterhub --debug +``` diff --git a/docs/sphinxext/autodoc_traits.py b/docs/sphinxext/autodoc_traits.py deleted file mode 100644 index 3d54f8bb..00000000 --- a/docs/sphinxext/autodoc_traits.py +++ /dev/null @@ -1,57 +0,0 @@ -"""autodoc extension for configurable traits""" -from sphinx.domains.python import PyClassmember -from sphinx.ext.autodoc import AttributeDocumenter -from sphinx.ext.autodoc import ClassDocumenter -from traitlets import TraitType -from traitlets import Undefined - - -class ConfigurableDocumenter(ClassDocumenter): - """Specialized Documenter subclass for traits with config=True""" - - objtype = 'configurable' - directivetype = 'class' - - def get_object_members(self, want_all): - """Add traits with .tag(config=True) to members list""" - check, members = super().get_object_members(want_all) - get_traits = ( - self.object.class_own_traits - if self.options.inherited_members - else self.object.class_traits - ) - trait_members = [] - for name, trait in sorted(get_traits(config=True).items()): - # put help in __doc__ where autodoc will look for it - trait.__doc__ = trait.help - trait_members.append((name, trait)) - return check, trait_members + members - - -class TraitDocumenter(AttributeDocumenter): - objtype = 'trait' - directivetype = 'attribute' - member_order = 1 - priority = 100 - - @classmethod - def can_document_member(cls, member, membername, isattr, parent): - return isinstance(member, TraitType) - - def add_directive_header(self, sig): - default = self.object.get_default_value() - if default is Undefined: - default_s = '' - else: - default_s = repr(default) - self.options.annotation = 'c.{name} = {trait}({default})'.format( - name=self.format_name(), - trait=self.object.__class__.__name__, - default=default_s, - ) - 
super().add_directive_header(sig) - - -def setup(app): - app.add_autodocumenter(ConfigurableDocumenter) - app.add_autodocumenter(TraitDocumenter) diff --git a/docs/test_docs.py b/docs/test_docs.py new file mode 100644 index 00000000..634bb656 --- /dev/null +++ b/docs/test_docs.py @@ -0,0 +1,46 @@ +import sys +from pathlib import Path +from subprocess import run + +from ruamel.yaml import YAML + +yaml = YAML(typ="safe") + +here = Path(__file__).absolute().parent +root = here.parent + + +def test_rest_api_version_is_updated(): + """Checks that the version in JupyterHub's REST API definition file + (rest-api.yml) is matching the JupyterHub version.""" + version_py = root.joinpath("jupyterhub", "_version.py") + rest_api_yaml = root.joinpath("docs", "source", "_static", "rest-api.yml") + ns = {} + with version_py.open() as f: + exec(f.read(), {}, ns) + jupyterhub_version = ns["__version__"] + + with rest_api_yaml.open() as f: + rest_api = yaml.load(f) + rest_api_version = rest_api["info"]["version"] + + assert jupyterhub_version == rest_api_version + + +def test_rest_api_rbac_scope_descriptions_are_updated(): + """Checks that the RBAC scope descriptions in JupyterHub's REST API + definition file (rest-api.yml) as can be updated by generate-scope-table.py + matches what is committed.""" + run([sys.executable, "source/rbac/generate-scope-table.py"], cwd=here, check=True) + run( + [ + "git", + "--no-pager", + "diff", + "--color=always", + "--exit-code", + str(here.joinpath("source", "_static", "rest-api.yml")), + ], + cwd=here, + check=True, + ) diff --git a/examples/azuread-with-group-management/jupyterhub_config.py b/examples/azuread-with-group-management/jupyterhub_config.py new file mode 100644 index 00000000..32355013 --- /dev/null +++ b/examples/azuread-with-group-management/jupyterhub_config.py @@ -0,0 +1,31 @@ +"""sample jupyterhub config file for testing + +configures jupyterhub with dummyauthenticator and simplespawner +to enable testing without administrative 
privileges. +""" + +c = get_config() # noqa +c.Application.log_level = 'DEBUG' + +import os + +from oauthenticator.azuread import AzureAdOAuthenticator + +c.JupyterHub.authenticator_class = AzureAdOAuthenticator + +c.AzureAdOAuthenticator.client_id = os.getenv("AAD_CLIENT_ID") +c.AzureAdOAuthenticator.client_secret = os.getenv("AAD_CLIENT_SECRET") +c.AzureAdOAuthenticator.oauth_callback_url = os.getenv("AAD_CALLBACK_URL") +c.AzureAdOAuthenticator.tenant_id = os.getenv("AAD_TENANT_ID") +c.AzureAdOAuthenticator.username_claim = "email" +c.AzureAdOAuthenticator.authorize_url = os.getenv("AAD_AUTHORIZE_URL") +c.AzureAdOAuthenticator.token_url = os.getenv("AAD_TOKEN_URL") +c.Authenticator.manage_groups = True +c.Authenticator.refresh_pre_spawn = True + +# Optionally set a global password that all users must use +# c.DummyAuthenticator.password = "your_password" + +from jupyterhub.spawner import SimpleLocalProcessSpawner + +c.JupyterHub.spawner_class = SimpleLocalProcessSpawner diff --git a/examples/azuread-with-group-management/requirements.txt b/examples/azuread-with-group-management/requirements.txt new file mode 100644 index 00000000..6fda532a --- /dev/null +++ b/examples/azuread-with-group-management/requirements.txt @@ -0,0 +1,2 @@ +oauthenticator +pyjwt diff --git a/examples/bootstrap-script/README.md b/examples/bootstrap-script/README.md index d2f27053..50e15993 100644 --- a/examples/bootstrap-script/README.md +++ b/examples/bootstrap-script/README.md @@ -1,34 +1,34 @@ # Bootstrapping your users -Before spawning a notebook to the user, it could be useful to +Before spawning a notebook to the user, it could be useful to do some preparation work in a bootstrapping process. Common use cases are: -*Providing writeable storage for LDAP users* +_Providing writeable storage for LDAP users_ Your Jupyterhub is configured to use the LDAPAuthenticator and DockerSpawer. -* The user has no file directory on the host since your are using LDAP. 
-* When a user has no directory and DockerSpawner wants to mount a volume, -the spawner will use docker to create a directory. -Since the docker daemon is running as root, the generated directory for the volume -mount will not be writeable by the `jovyan` user inside of the container. -For the directory to be useful to the user, the permissions on the directory -need to be modified for the user to have write access. +- The user has no file directory on the host since you are using LDAP. +- When a user has no directory and DockerSpawner wants to mount a volume, + the spawner will use docker to create a directory. + Since the docker daemon is running as root, the generated directory for the volume + mount will not be writeable by the `jovyan` user inside of the container. + For the directory to be useful to the user, the permissions on the directory + need to be modified for the user to have write access. -*Prepopulating Content* +_Prepopulating Content_ Another use would be to copy initial content, such as tutorial files or reference - material, into the user's space when a notebook server is newly spawned. +material, into the user's space when a notebook server is newly spawned. You can define your own bootstrap process by implementing a `pre_spawn_hook` on any spawner. -The Spawner itself is passed as parameter to your hook and you can easily get the contextual information out of the spawning process. +The Spawner itself is passed as a parameter to your hook and you can easily get the contextual information out of the spawning process. Similarly, there may be cases where you would like to clean up after a spawner stops. You may implement a `post_stop_hook` that is always executed after the spawner stops. -If you implement a hook, make sure that it is *idempotent*. It will be executed every time +If you implement a hook, make sure that it is _idempotent_. It will be executed every time a notebook server is spawned to the user. 
That means you should somehow ensure that things which should run only once are not running again and again. For example, before you create a directory, check if it exists. @@ -41,13 +41,13 @@ Create a directory for the user, if none exists ```python -# in jupyterhub_config.py +# in jupyterhub_config.py import os def create_dir_hook(spawner): username = spawner.user.name # get the username volume_path = os.path.join('/volumes/jupyterhub', username) if not os.path.exists(volume_path): - # create a directory with umask 0755 + # create a directory with umask 0755 # hub and container user must have the same UID to be writeable # still readable by other users on the system os.mkdir(volume_path, 0o755) @@ -83,17 +83,17 @@ in a new file in `/etc/sudoers.d`, or simply in `/etc/sudoers`. All new home directories will be created from `/etc/skel`, so make sure to place any custom homedir-contents in there. -### Example #3 - Run a shell script +### Example #3 - Run a shell script -You can specify a plain ole' shell script (or any other executable) to be run +You can specify a plain ole' shell script (or any other executable) to be run by the bootstrap process. -For example, you can execute a shell script and as first parameter pass the name +For example, you can execute a shell script and as first parameter pass the name of the user: ```python -# in jupyterhub_config.py +# in jupyterhub_config.py from subprocess import check_call import os def my_script_hook(spawner): @@ -106,7 +106,7 @@ c.Spawner.pre_spawn_hook = my_script_hook ``` -Here's an example on what you could do in your shell script. See also +Here's an example on what you could do in your shell script. 
See also `/examples/bootstrap-script/` ```bash @@ -126,7 +126,7 @@ fi # This example script will do the following: # - create one directory for the user $USER in a BASE_DIRECTORY (see below) -# - create a "tutorials" directory within and download and unzip +# - create a "tutorials" directory within and download and unzip # the PythonDataScienceHandbook from GitHub # Start the Bootstrap Process @@ -148,9 +148,9 @@ else echo "...initial content loading for user ..." mkdir $USER_DIRECTORY/tutorials cd $USER_DIRECTORY/tutorials - wget https://github.com/jakevdp/PythonDataScienceHandbook/archive/master.zip - unzip -o master.zip - rm master.zip + wget https://github.com/jakevdp/PythonDataScienceHandbook/archive/HEAD.zip + unzip -o HEAD.zip + rm HEAD.zip fi exit 0 diff --git a/examples/bootstrap-script/bootstrap.sh b/examples/bootstrap-script/bootstrap.sh index 417d8463..f095d463 100755 --- a/examples/bootstrap-script/bootstrap.sh +++ b/examples/bootstrap-script/bootstrap.sh @@ -40,9 +40,9 @@ else echo "...initial content loading for user ..." 
mkdir $USER_DIRECTORY/tutorials cd $USER_DIRECTORY/tutorials - wget https://github.com/jakevdp/PythonDataScienceHandbook/archive/master.zip - unzip -o master.zip - rm master.zip + wget https://github.com/jakevdp/PythonDataScienceHandbook/archive/HEAD.zip + unzip -o HEAD.zip + rm HEAD.zip fi exit 0 diff --git a/examples/bootstrap-script/jupyterhub_config.py b/examples/bootstrap-script/jupyterhub_config.py index 4fbdc179..61eae47a 100644 --- a/examples/bootstrap-script/jupyterhub_config.py +++ b/examples/bootstrap-script/jupyterhub_config.py @@ -10,7 +10,7 @@ from jupyter_client.localinterfaces import public_ips def create_dir_hook(spawner): - """ Create directory """ + """Create directory""" username = spawner.user.name # get the username volume_path = os.path.join('/volumes/jupyterhub', username) if not os.path.exists(volume_path): @@ -20,7 +20,7 @@ def create_dir_hook(spawner): def clean_dir_hook(spawner): - """ Delete directory """ + """Delete directory""" username = spawner.user.name # get the username temp_path = os.path.join('/volumes/jupyterhub', username, 'temp') if os.path.exists(temp_path) and os.path.isdir(temp_path): diff --git a/examples/cull-idle/README.md b/examples/cull-idle/README.md index 9f043e05..005f15a8 100644 --- a/examples/cull-idle/README.md +++ b/examples/cull-idle/README.md @@ -1,41 +1,4 @@ -# `cull-idle` Example +# idle-culler example -The `cull_idle_servers.py` file provides a script to cull and shut down idle -single-user notebook servers. This script is used when `cull-idle` is run as -a Service or when it is run manually as a standalone script. 
- - -## Configure `cull-idle` to run as a Hub-Managed Service - -In `jupyterhub_config.py`, add the following dictionary for the `cull-idle` -Service to the `c.JupyterHub.services` list: - -```python -c.JupyterHub.services = [ - { - 'name': 'cull-idle', - 'admin': True, - 'command': [sys.executable, 'cull_idle_servers.py', '--timeout=3600'], - } -] -``` - -where: - -- `'admin': True` indicates that the Service has 'admin' permissions, and -- `'command'` indicates that the Service will be managed by the Hub. - -## Run `cull-idle` manually as a standalone script - -This will run `cull-idle` manually. `cull-idle` can be run as a standalone -script anywhere with access to the Hub, and will periodically check for idle -servers and shut them down via the Hub's REST API. In order to shutdown the -servers, the token given to cull-idle must have admin privileges. - -Generate an API token and store it in the `JUPYTERHUB_API_TOKEN` environment -variable. Run `cull_idle_servers.py` manually. - -```bash - export JUPYTERHUB_API_TOKEN=$(jupyterhub token) - python3 cull_idle_servers.py [--timeout=900] [--url=http://127.0.0.1:8081/hub/api] -``` +The idle culler has been moved to its own repository at +[jupyterhub/jupyterhub-idle-culler](https://github.com/jupyterhub/jupyterhub-idle-culler). 
diff --git a/examples/cull-idle/cull_idle_servers.py b/examples/cull-idle/cull_idle_servers.py deleted file mode 100755 index 8b2b3c2a..00000000 --- a/examples/cull-idle/cull_idle_servers.py +++ /dev/null @@ -1,401 +0,0 @@ -#!/usr/bin/env python3 -"""script to monitor and cull idle single-user servers - -Caveats: - -last_activity is not updated with high frequency, -so cull timeout should be greater than the sum of: - -- single-user websocket ping interval (default: 30s) -- JupyterHub.last_activity_interval (default: 5 minutes) - -You can run this as a service managed by JupyterHub with this in your config:: - - - c.JupyterHub.services = [ - { - 'name': 'cull-idle', - 'admin': True, - 'command': [sys.executable, 'cull_idle_servers.py', '--timeout=3600'], - } - ] - -Or run it manually by generating an API token and storing it in `JUPYTERHUB_API_TOKEN`: - - export JUPYTERHUB_API_TOKEN=$(jupyterhub token) - python3 cull_idle_servers.py [--timeout=900] [--url=http://127.0.0.1:8081/hub/api] - -This script uses the same ``--timeout`` and ``--max-age`` values for -culling users and users' servers. If you want a different value for -users and servers, you should add this script to the services list -twice, just with different ``name``s, different values, and one with -the ``--cull-users`` option. 
-""" -import json -import os -from datetime import datetime -from datetime import timezone -from functools import partial - -try: - from urllib.parse import quote -except ImportError: - from urllib import quote - -import dateutil.parser - -from tornado.gen import coroutine, multi -from tornado.locks import Semaphore -from tornado.log import app_log -from tornado.httpclient import AsyncHTTPClient, HTTPRequest -from tornado.ioloop import IOLoop, PeriodicCallback -from tornado.options import define, options, parse_command_line - - -def parse_date(date_string): - """Parse a timestamp - - If it doesn't have a timezone, assume utc - - Returned datetime object will always be timezone-aware - """ - dt = dateutil.parser.parse(date_string) - if not dt.tzinfo: - # assume naïve timestamps are UTC - dt = dt.replace(tzinfo=timezone.utc) - return dt - - -def format_td(td): - """ - Nicely format a timedelta object - - as HH:MM:SS - """ - if td is None: - return "unknown" - if isinstance(td, str): - return td - seconds = int(td.total_seconds()) - h = seconds // 3600 - seconds = seconds % 3600 - m = seconds // 60 - seconds = seconds % 60 - return "{h:02}:{m:02}:{seconds:02}".format(h=h, m=m, seconds=seconds) - - -@coroutine -def cull_idle( - url, api_token, inactive_limit, cull_users=False, max_age=0, concurrency=10 -): - """Shutdown idle single-user servers - - If cull_users, inactive *users* will be deleted as well. 
- """ - auth_header = {'Authorization': 'token %s' % api_token} - req = HTTPRequest(url=url + '/users', headers=auth_header) - now = datetime.now(timezone.utc) - client = AsyncHTTPClient() - - if concurrency: - semaphore = Semaphore(concurrency) - - @coroutine - def fetch(req): - """client.fetch wrapped in a semaphore to limit concurrency""" - yield semaphore.acquire() - try: - return (yield client.fetch(req)) - finally: - yield semaphore.release() - - else: - fetch = client.fetch - - resp = yield fetch(req) - users = json.loads(resp.body.decode('utf8', 'replace')) - futures = [] - - @coroutine - def handle_server(user, server_name, server): - """Handle (maybe) culling a single server - - "server" is the entire server model from the API. - - Returns True if server is now stopped (user removable), - False otherwise. - """ - log_name = user['name'] - if server_name: - log_name = '%s/%s' % (user['name'], server_name) - if server.get('pending'): - app_log.warning( - "Not culling server %s with pending %s", log_name, server['pending'] - ) - return False - - # jupyterhub < 0.9 defined 'server.url' once the server was ready - # as an *implicit* signal that the server was ready. - # 0.9 adds a dedicated, explicit 'ready' field. - # By current (0.9) definitions, servers that have no pending - # events and are not ready shouldn't be in the model, - # but let's check just to be safe. 
- - if not server.get('ready', bool(server['url'])): - app_log.warning( - "Not culling not-ready not-pending server %s: %s", log_name, server - ) - return False - - if server.get('started'): - age = now - parse_date(server['started']) - else: - # started may be undefined on jupyterhub < 0.9 - age = None - - # check last activity - # last_activity can be None in 0.9 - if server['last_activity']: - inactive = now - parse_date(server['last_activity']) - else: - # no activity yet, use start date - # last_activity may be None with jupyterhub 0.9, - # which introduces the 'started' field which is never None - # for running servers - inactive = age - - # CUSTOM CULLING TEST CODE HERE - # Add in additional server tests here. Return False to mean "don't - # cull", True means "cull immediately", or, for example, update some - # other variables like inactive_limit. - # - # Here, server['state'] is the result of the get_state method - # on the spawner. This does *not* contain the below by - # default, you may have to modify your spawner to make this - # work. The `user` variable is the user model from the API. 
- # - # if server['state']['profile_name'] == 'unlimited' - # return False - # inactive_limit = server['state']['culltime'] - - should_cull = ( - inactive is not None and inactive.total_seconds() >= inactive_limit - ) - if should_cull: - app_log.info( - "Culling server %s (inactive for %s)", log_name, format_td(inactive) - ) - - if max_age and not should_cull: - # only check started if max_age is specified - # so that we can still be compatible with jupyterhub 0.8 - # which doesn't define the 'started' field - if age is not None and age.total_seconds() >= max_age: - app_log.info( - "Culling server %s (age: %s, inactive for %s)", - log_name, - format_td(age), - format_td(inactive), - ) - should_cull = True - - if not should_cull: - app_log.debug( - "Not culling server %s (age: %s, inactive for %s)", - log_name, - format_td(age), - format_td(inactive), - ) - return False - - if server_name: - # culling a named server - delete_url = url + "/users/%s/servers/%s" % ( - quote(user['name']), - quote(server['name']), - ) - else: - delete_url = url + '/users/%s/server' % quote(user['name']) - - req = HTTPRequest(url=delete_url, method='DELETE', headers=auth_header) - resp = yield fetch(req) - if resp.code == 202: - app_log.warning("Server %s is slow to stop", log_name) - # return False to prevent culling user with pending shutdowns - return False - return True - - @coroutine - def handle_user(user): - """Handle one user. - - Create a list of their servers, and async exec them. Wait for - that to be done, and if all servers are stopped, possibly cull - the user. - """ - # shutdown servers first. - # Hub doesn't allow deleting users with running servers. - # jupyterhub 0.9 always provides a 'servers' model. - # 0.8 only does this when named servers are enabled. - if 'servers' in user: - servers = user['servers'] - else: - # jupyterhub < 0.9 without named servers enabled. - # create servers dict with one entry for the default server - # from the user model. 
- # only if the server is running. - servers = {} - if user['server']: - servers[''] = { - 'last_activity': user['last_activity'], - 'pending': user['pending'], - 'url': user['server'], - } - server_futures = [ - handle_server(user, server_name, server) - for server_name, server in servers.items() - ] - results = yield multi(server_futures) - if not cull_users: - return - # some servers are still running, cannot cull users - still_alive = len(results) - sum(results) - if still_alive: - app_log.debug( - "Not culling user %s with %i servers still alive", - user['name'], - still_alive, - ) - return False - - should_cull = False - if user.get('created'): - age = now - parse_date(user['created']) - else: - # created may be undefined on jupyterhub < 0.9 - age = None - - # check last activity - # last_activity can be None in 0.9 - if user['last_activity']: - inactive = now - parse_date(user['last_activity']) - else: - # no activity yet, use start date - # last_activity may be None with jupyterhub 0.9, - # which introduces the 'created' field which is never None - inactive = age - - should_cull = ( - inactive is not None and inactive.total_seconds() >= inactive_limit - ) - if should_cull: - app_log.info("Culling user %s (inactive for %s)", user['name'], inactive) - - if max_age and not should_cull: - # only check created if max_age is specified - # so that we can still be compatible with jupyterhub 0.8 - # which doesn't define the 'started' field - if age is not None and age.total_seconds() >= max_age: - app_log.info( - "Culling user %s (age: %s, inactive for %s)", - user['name'], - format_td(age), - format_td(inactive), - ) - should_cull = True - - if not should_cull: - app_log.debug( - "Not culling user %s (created: %s, last active: %s)", - user['name'], - format_td(age), - format_td(inactive), - ) - return False - - req = HTTPRequest( - url=url + '/users/%s' % user['name'], method='DELETE', headers=auth_header - ) - yield fetch(req) - return True - - for user in users: 
- futures.append((user['name'], handle_user(user))) - - for (name, f) in futures: - try: - result = yield f - except Exception: - app_log.exception("Error processing %s", name) - else: - if result: - app_log.debug("Finished culling %s", name) - - -if __name__ == '__main__': - define( - 'url', - default=os.environ.get('JUPYTERHUB_API_URL'), - help="The JupyterHub API URL", - ) - define('timeout', default=600, help="The idle timeout (in seconds)") - define( - 'cull_every', - default=0, - help="The interval (in seconds) for checking for idle servers to cull", - ) - define( - 'max_age', - default=0, - help="The maximum age (in seconds) of servers that should be culled even if they are active", - ) - define( - 'cull_users', - default=False, - help="""Cull users in addition to servers. - This is for use in temporary-user cases such as tmpnb.""", - ) - define( - 'concurrency', - default=10, - help="""Limit the number of concurrent requests made to the Hub. - - Deleting a lot of users at the same time can slow down the Hub, - so limit the number of API requests we have outstanding at any given time. 
- """, - ) - - parse_command_line() - if not options.cull_every: - options.cull_every = options.timeout // 2 - api_token = os.environ['JUPYTERHUB_API_TOKEN'] - - try: - AsyncHTTPClient.configure("tornado.curl_httpclient.CurlAsyncHTTPClient") - except ImportError as e: - app_log.warning( - "Could not load pycurl: %s\n" - "pycurl is recommended if you have a large number of users.", - e, - ) - - loop = IOLoop.current() - cull = partial( - cull_idle, - url=options.url, - api_token=api_token, - inactive_limit=options.timeout, - cull_users=options.cull_users, - max_age=options.max_age, - concurrency=options.concurrency, - ) - # schedule first cull immediately - # because PeriodicCallback doesn't start until the end of the first interval - loop.add_callback(cull) - # schedule periodic cull - pc = PeriodicCallback(cull, 1e3 * options.cull_every) - pc.start() - try: - loop.start() - except KeyboardInterrupt: - pass diff --git a/examples/cull-idle/jupyterhub_config.py b/examples/cull-idle/jupyterhub_config.py deleted file mode 100644 index 29fdc2d6..00000000 --- a/examples/cull-idle/jupyterhub_config.py +++ /dev/null @@ -1,11 +0,0 @@ -import sys - -# run cull-idle as a service - -c.JupyterHub.services = [ - { - 'name': 'cull-idle', - 'admin': True, - 'command': [sys.executable, 'cull_idle_servers.py', '--timeout=3600'], - } -] diff --git a/examples/custom-scopes/grades.py b/examples/custom-scopes/grades.py new file mode 100644 index 00000000..f1f8be54 --- /dev/null +++ b/examples/custom-scopes/grades.py @@ -0,0 +1,132 @@ +import os +from functools import wraps +from html import escape +from urllib.parse import urlparse + +from tornado.httpserver import HTTPServer +from tornado.ioloop import IOLoop +from tornado.web import Application, RequestHandler, authenticated + +from jupyterhub.services.auth import HubOAuthCallbackHandler, HubOAuthenticated +from jupyterhub.utils import url_path_join + +SCOPE_PREFIX = "custom:grades" +READ_SCOPE = f"{SCOPE_PREFIX}:read" +WRITE_SCOPE = 
f"{SCOPE_PREFIX}:write" + + +def require_scope(scopes): + """Decorator to require scopes + + For use if multiple methods on one Handler + may want different scopes, + so class-level .hub_scopes is insufficient + (e.g. read for GET, write for POST). + """ + if isinstance(scopes, str): + scopes = [scopes] + + def wrap(method): + """The actual decorator""" + + @wraps(method) + @authenticated + def wrapped(self, *args, **kwargs): + self.hub_scopes = scopes + return method(self, *args, **kwargs) + + return wrapped + + return wrap + + +class MyGradesHandler(HubOAuthenticated, RequestHandler): + # no hub_scopes, anyone with access to this service + # will be able to visit this URL + + @authenticated + def get(self): + self.write("

My grade

") + name = self.current_user["name"] + grades = self.settings["grades"] + self.write(f"

My name is: {escape(name)}

") + if name in grades: + self.write(f"

My grade is: {escape(str(grades[name]))}

") + else: + self.write("

No grade entered

") + if READ_SCOPE in self.current_user["scopes"]: + self.write('enter grades') + + +class GradesHandler(HubOAuthenticated, RequestHandler): + # default scope for this Handler: read-only + hub_scopes = [READ_SCOPE] + + def _render(self): + grades = self.settings["grades"] + self.write("

All grades

") + self.write("") + self.write("") + for student, grade in grades.items(): + qstudent = escape(student) + qgrade = escape(str(grade)) + self.write( + f""" + + + + + """ + ) + if WRITE_SCOPE in self.current_user["scopes"]: + self.write("Enter grade:") + self.write( + """ + + + + + """ + ) + + @require_scope([READ_SCOPE]) + async def get(self): + self._render() + + # POST requires WRITE_SCOPE instead of READ_SCOPE + @require_scope([WRITE_SCOPE]) + async def post(self): + name = self.get_argument("student") + grade = self.get_argument("grade") + self.settings["grades"][name] = grade + self._render() + + +def main(): + base_url = os.environ['JUPYTERHUB_SERVICE_PREFIX'] + + app = Application( + [ + (base_url, MyGradesHandler), + (url_path_join(base_url, 'grades/'), GradesHandler), + ( + url_path_join(base_url, 'oauth_callback'), + HubOAuthCallbackHandler, + ), + ], + cookie_secret=os.urandom(32), + grades={"student": 53}, + ) + + http_server = HTTPServer(app) + url = urlparse(os.environ['JUPYTERHUB_SERVICE_URL']) + + http_server.listen(url.port, url.hostname) + try: + IOLoop.current().start() + except KeyboardInterrupt: + pass + + +if __name__ == '__main__': + main() diff --git a/examples/custom-scopes/jupyterhub_config.py b/examples/custom-scopes/jupyterhub_config.py new file mode 100644 index 00000000..efe0bf98 --- /dev/null +++ b/examples/custom-scopes/jupyterhub_config.py @@ -0,0 +1,52 @@ +import sys + +c = get_config() # noqa + +c.JupyterHub.services = [ + { + 'name': 'grades', + 'url': 'http://127.0.0.1:10101', + 'command': [sys.executable, './grades.py'], + 'oauth_client_allowed_scopes': [ + 'custom:grades:write', + 'custom:grades:read', + ], + }, +] + +c.JupyterHub.custom_scopes = { + "custom:grades:read": { + "description": "read-access to all grades", + }, + "custom:grades:write": { + "description": "Enter new grades", + "subscopes": ["custom:grades:read"], + }, +} + +c.JupyterHub.load_roles = [ + { + "name": "user", + # grant all users access to services + 
"scopes": ["access:services", "self"], + }, + { + "name": "grader", + # grant graders access to write grades + "scopes": ["custom:grades:write"], + "users": ["grader"], + }, + { + "name": "instructor", + # grant instructors access to read, but not write grades + "scopes": ["custom:grades:read"], + "users": ["instructor"], + }, +] + +c.JupyterHub.allowed_users = {"instructor", "grader", "student"} +# dummy spawner and authenticator for testing, don't actually use these! +c.JupyterHub.authenticator_class = 'dummy' +c.JupyterHub.spawner_class = 'simple' +c.JupyterHub.ip = '127.0.0.1' # let's just run on localhost while dummy auth is enabled +c.JupyterHub.log_level = 10 diff --git a/examples/external-oauth/README.md b/examples/external-oauth/README.md index a944cb68..fc94bb66 100644 --- a/examples/external-oauth/README.md +++ b/examples/external-oauth/README.md @@ -16,63 +16,62 @@ implementations in other web servers or languages. ## Run the example -1. generate an API token: +1. generate an API token: export JUPYTERHUB_API_TOKEN=$(openssl rand -hex 32) -2. launch a version of the the whoami service. - For `whoami-oauth`: +2. launch a version of the the whoami service. + For `whoami-oauth`: - bash launch-service.sh & + bash launch-service.sh & or for `whoami-oauth-basic`: - bash launch-service-basic.sh & + bash launch-service-basic.sh & -3. Launch JupyterHub: +3. Launch JupyterHub: jupyterhub -4. Visit http://127.0.0.1:5555/ +4. Visit http://127.0.0.1:5555/ After logging in with your local-system credentials, you should see a JSON dump of your user info: ```json { - "admin": false, - "last_activity": "2016-05-27T14:05:18.016372", - "name": "queequeg", - "pending": null, - "server": "/user/queequeg" + "admin": false, + "last_activity": "2016-05-27T14:05:18.016372", + "name": "queequeg", + "pending": null, + "server": "/user/queequeg" } ``` - The essential pieces for using JupyterHub as an OAuth provider are: 1. 
registering your service with jupyterhub: - ```python - c.JupyterHub.services = [ - { - # the name of your service - # should be simple and unique. - # mostly used to identify your service in logging - "name": "my-service", - # the oauth client id of your service - # must be unique but isn't private - # can be randomly generated or hand-written - "oauth_client_id": "abc123", - # the API token and client secret of the service - # should be generated securely, - # e.g. via `openssl rand -hex 32` - "api_token": "abc123...", - # the redirect target for jupyterhub to send users - # after successful authentication - "oauth_redirect_uri": "https://service-host/oauth_callback" - } - ] - ``` + ```python + c.JupyterHub.services = [ + { + # the name of your service + # should be simple and unique. + # mostly used to identify your service in logging + "name": "my-service", + # the oauth client id of your service + # must be unique but isn't private + # can be randomly generated or hand-written + "oauth_client_id": "abc123", + # the API token and client secret of the service + # should be generated securely, + # e.g. via `openssl rand -hex 32` + "api_token": "abc123...", + # the redirect target for jupyterhub to send users + # after successful authentication + "oauth_redirect_uri": "https://service-host/oauth_callback" + } + ] + ``` 2. Telling your service how to authenticate with JupyterHub. 
diff --git a/examples/external-oauth/jupyterhub_config.py b/examples/external-oauth/jupyterhub_config.py index d44fe7d1..75f141ea 100644 --- a/examples/external-oauth/jupyterhub_config.py +++ b/examples/external-oauth/jupyterhub_config.py @@ -13,7 +13,7 @@ if not api_token: c.JupyterHub.services = [ { 'name': 'external-oauth', - 'oauth_client_id': "whoami-oauth-client-test", + 'oauth_client_id': "service-oauth-client-test", 'api_token': api_token, 'oauth_redirect_uri': 'http://127.0.0.1:5555/oauth_callback', } diff --git a/examples/external-oauth/launch-service-basic.sh b/examples/external-oauth/launch-service-basic.sh index dc2ab9eb..2392f48a 100644 --- a/examples/external-oauth/launch-service-basic.sh +++ b/examples/external-oauth/launch-service-basic.sh @@ -9,7 +9,7 @@ if [[ -z "${JUPYTERHUB_API_TOKEN}" ]]; then fi # 2. oauth client ID -export JUPYTERHUB_CLIENT_ID='whoami-oauth-client-test' +export JUPYTERHUB_CLIENT_ID='service-oauth-client-test' # 3. where the Hub is export JUPYTERHUB_URL='http://127.0.0.1:8000' diff --git a/examples/external-oauth/launch-service.sh b/examples/external-oauth/launch-service.sh index 8218e036..d9b07b75 100644 --- a/examples/external-oauth/launch-service.sh +++ b/examples/external-oauth/launch-service.sh @@ -9,7 +9,7 @@ if [[ -z "${JUPYTERHUB_API_TOKEN}" ]]; then fi # 2. oauth client ID -export JUPYTERHUB_CLIENT_ID="whoami-oauth-client-test" +export JUPYTERHUB_CLIENT_ID="service-oauth-client-test" # 3. 
what URL to run on export JUPYTERHUB_SERVICE_PREFIX='/' export JUPYTERHUB_SERVICE_URL='http://127.0.0.1:5555' diff --git a/examples/external-oauth/whoami-oauth-basic.py b/examples/external-oauth/whoami-oauth-basic.py index ad98115c..a89f8db7 100644 --- a/examples/external-oauth/whoami-oauth-basic.py +++ b/examples/external-oauth/whoami-oauth-basic.py @@ -5,15 +5,10 @@ so all URLs and requests necessary for OAuth with JupyterHub should be in one pl """ import json import os -import sys -from urllib.parse import urlencode -from urllib.parse import urlparse +from urllib.parse import urlencode, urlparse -from tornado import log -from tornado import web -from tornado.auth import OAuth2Mixin -from tornado.httpclient import AsyncHTTPClient -from tornado.httpclient import HTTPRequest +from tornado import log, web +from tornado.httpclient import AsyncHTTPClient, HTTPRequest from tornado.httputil import url_concat from tornado.ioloop import IOLoop diff --git a/examples/postgres/README.md b/examples/postgres/README.md index 13cb9f5f..8c4aae33 100644 --- a/examples/postgres/README.md +++ b/examples/postgres/README.md @@ -4,14 +4,14 @@ This example shows how you can connect Jupyterhub to a Postgres database instead of the default SQLite backend. ### Running Postgres with Jupyterhub on the host. + 0. Uncomment and replace `ENV JPY_PSQL_PASSWORD arglebargle` with your own password in the Dockerfile for `examples/postgres/db`. (Alternatively, pass -e `JPY_PSQL_PASSWORD=` when you start the db container.) 1. `cd` to the root of your jupyterhub repo. -2. Build the postgres image with `docker build -t jupyterhub-postgres-db - examples/postgres/db`. This may take a minute or two the first time it's +2. Build the postgres image with `docker build -t jupyterhub-postgres-db examples/postgres/db`. This may take a minute or two the first time it's run. 3. Run the db image with `docker run -d -p 5433:5432 jupyterhub-postgres-db`. @@ -24,24 +24,22 @@ instead of the default SQLite backend. 5. 
Log in as the user running jupyterhub on your host machine. ### Running Postgres with Containerized Jupyterhub. + 0. Do steps 0-2 in from the above section, ensuring that the values set/passed for `JPY_PSQL_PASSWORD` match for the hub and db containers. -1. Build the hub image with `docker build -t jupyterhub-postgres-hub - examples/postgres/hub`. This may take a minute or two the first time it's +1. Build the hub image with `docker build -t jupyterhub-postgres-hub examples/postgres/hub`. This may take a minute or two the first time it's run. -2. Run the db image with `docker run -d --name=jpy-db - jupyterhub-postgres`. Note that, unlike when connecting to a host machine +2. Run the db image with `docker run -d --name=jpy-db jupyterhub-postgres`. Note that, unlike when connecting to a host machine jupyterhub, we don't specify a port-forwarding scheme here, but we do need to specify a name for the container. -3. Run the containerized hub with `docker run -it --link jpy-db:postgres - jupyterhub-postgres-hub`. This instructs docker to run the hub container +3. Run the containerized hub with `docker run -it --link jpy-db:postgres jupyterhub-postgres-hub`. This instructs docker to run the hub container with a link to the already-running db container, which will forward environment and connection information from the DB to the hub. 4. Log in as one of the users defined in the `examples/postgres/hub/` - Dockerfile. By default `rhea` is the server's admin user, `io` and + Dockerfile. By default `rhea` is the server's admin user, `io` and `ganymede` are non-admin users, and all users' passwords are their usernames. diff --git a/examples/postgres/hub/jupyterhub_config.py b/examples/postgres/hub/jupyterhub_config.py index 9482657b..bd4a03fa 100644 --- a/examples/postgres/hub/jupyterhub_config.py +++ b/examples/postgres/hub/jupyterhub_config.py @@ -1,10 +1,23 @@ # Configuration file for jupyterhub (postgres example). 
-c = get_config() +c = get_config() # noqa -# Add some users. -c.JupyterHub.admin_users = {'rhea'} -c.Authenticator.whitelist = {'ganymede', 'io', 'rhea'} +# Add some users +c.Authenticator.allowed_users = {'ganymede', 'io', 'rhea'} + +c.JupyterHub.load_roles = [ + { + "name": "user-admin", + "scopes": [ + "admin:groups", + "admin:users", + "admin:servers", + ], + "users": [ + "rhea", + ], + } +] # These environment variables are automatically supplied by the linked postgres # container. diff --git a/examples/server-api/jupyterhub_config.py b/examples/server-api/jupyterhub_config.py new file mode 100644 index 00000000..8a0af9dd --- /dev/null +++ b/examples/server-api/jupyterhub_config.py @@ -0,0 +1,57 @@ +# create a role with permissions to: +# 1. start/stop servers, and +# 2. access the server API + +c = get_config() # noqa + +c.JupyterHub.load_roles = [ + { + "name": "launcher", + "scopes": [ + "servers", # manage servers + "access:servers", # access servers themselves + ], + # assign role to our 'launcher' service + "services": ["launcher"], + } +] + + +# persist token to a file, to share it with the launch-server.py script +import pathlib +import secrets + +here = pathlib.Path(__file__).parent +token_file = here.joinpath("service-token") +if token_file.exists(): + with token_file.open("r") as f: + token = f.read() +else: + token = secrets.token_hex(16) + with token_file.open("w") as f: + f.write(token) + +# define our service +c.JupyterHub.services = [ + { + "name": "launcher", + "api_token": token, + } +] + +# ensure spawn requests return immediately, +# rather than waiting up to 10 seconds for spawn to complete +# this ensures that we use the progress API + +c.JupyterHub.tornado_settings = {"slow_spawn_timeout": 0} + +# create our test-user +c.Authenticator.allowed_users = { + 'test-user', +} + + +# testing boilerplate: fake auth/spawner, localhost. Don't use this for real! 
+c.JupyterHub.authenticator_class = 'dummy' +c.JupyterHub.spawner_class = 'simple' +c.JupyterHub.ip = '127.0.0.1' diff --git a/examples/server-api/start-stop-server.py b/examples/server-api/start-stop-server.py new file mode 100644 index 00000000..d20e1530 --- /dev/null +++ b/examples/server-api/start-stop-server.py @@ -0,0 +1,172 @@ +#!/usr/bin/env python3 +""" +Example of starting/stopping a server via the JupyterHub API + +1. get user status +2. start server +3. wait for server to be ready via progress api +4. make a request to the server itself +5. stop server via API +6. wait for server to finish stopping +""" +import json +import logging +import pathlib +import time + +import requests + +log = logging.getLogger(__name__) + + +def get_token(): + """boilerplate: get token from share file. + + Make sure to start jupyterhub in this directory first + """ + here = pathlib.Path(__file__).parent + token_file = here.joinpath("service-token") + log.info(f"Loading token from {token_file}") + with token_file.open("r") as f: + token = f.read().strip() + return token + + +def make_session(token): + """Create a requests.Session with our service token in the Authorization header""" + session = requests.Session() + session.headers = {"Authorization": f"token {token}"} + return session + + +def event_stream(session, url): + """Generator yielding events from a JSON event stream + + For use with the server progress API + """ + r = session.get(url, stream=True) + r.raise_for_status() + for line in r.iter_lines(): + line = line.decode('utf8', 'replace') + # event lines all start with `data:` + # all other lines should be ignored (they will be empty) + if line.startswith('data:'): + yield json.loads(line.split(':', 1)[1]) + + +def start_server(session, hub_url, user, server_name=""): + """Start a server for a jupyterhub user + + Returns the full URL for accessing the server + """ + user_url = f"{hub_url}/hub/api/users/{user}" + log_name = f"{user}/{server_name}".rstrip("/") + + # 
step 1: get user status + r = session.get(user_url) + r.raise_for_status() + user_model = r.json() + + # if server is not 'active', request launch + if server_name not in user_model.get('servers', {}): + log.info(f"Starting server {log_name}") + r = session.post(f"{user_url}/servers/{server_name}") + r.raise_for_status() + if r.status_code == 201: + log.info(f"Server {log_name} is launched and ready") + elif r.status_code == 202: + log.info(f"Server {log_name} is launching...") + else: + log.warning(f"Unexpected status: {r.status_code}") + r = session.get(user_url) + r.raise_for_status() + user_model = r.json() + + # report server status + server = user_model['servers'][server_name] + if server['pending']: + status = f"pending {server['pending']}" + elif server['ready']: + status = "ready" + else: + # shouldn't be possible! + raise ValueError(f"Unexpected server state: {server}") + + log.info(f"Server {log_name} is {status}") + + # wait for server to be ready using progress API + progress_url = user_model['servers'][server_name]['progress_url'] + for event in event_stream(session, f"{hub_url}{progress_url}"): + log.info(f"Progress {event['progress']}%: {event['message']}") + if event.get("ready"): + server_url = event['url'] + break + else: + # server never ready + raise ValueError(f"{log_name} never started!") + + # at this point, we know the server is ready and waiting to receive requests + # return the full URL where the server can be accessed + return f"{hub_url}{server_url}" + + +def stop_server(session, hub_url, user, server_name=""): + """Stop a server via the JupyterHub API + + Returns when the server has finished stopping + """ + # step 1: get user status + user_url = f"{hub_url}/hub/api/users/{user}" + server_url = f"{user_url}/servers/{server_name}" + log_name = f"{user}/{server_name}".rstrip("/") + + log.info(f"Stopping server {log_name}") + r = session.delete(server_url) + if r.status_code == 404: + log.info(f"Server {log_name} already stopped") + + 
r.raise_for_status() + if r.status_code == 204: + log.info(f"Server {log_name} stopped") + return + + # else: 202, stop requested, but not complete + # wait for stop to finish + log.info(f"Server {log_name} stopping...") + + # wait for server to be done stopping + while True: + r = session.get(user_url) + r.raise_for_status() + user_model = r.json() + if server_name not in user_model.get("servers", {}): + log.info(f"Server {log_name} stopped") + return + server = user_model["servers"][server_name] + if not server['pending']: + raise ValueError(f"Waiting for {log_name}, but no longer pending.") + log.info(f"Server {log_name} pending: {server['pending']}") + # wait to poll again + time.sleep(1) + + +def main(): + """Start and stop one server + + Uses test-user and hub from jupyterhub_config.py in this directory + """ + user = "test-user" + hub_url = "http://127.0.0.1:8000" + + session = make_session(get_token()) + server_url = start_server(session, hub_url, user) + r = session.get(f"{server_url}/api/status") + r.raise_for_status() + log.info(f"Server status: {r.text}") + + stop_server(session, hub_url, user) + + +if __name__ == "__main__": + logging.basicConfig(level=logging.INFO) + main() diff --git a/examples/service-announcement/README.md b/examples/service-announcement/README.md index 4cdf7bbb..637f25f5 100644 --- a/examples/service-announcement/README.md +++ b/examples/service-announcement/README.md @@ -1,4 +1,3 @@ - # Simple Announcement Service Example This is a simple service that allows administrators to manage announcements @@ -7,32 +6,63 @@ that appear when JupyterHub renders pages. 
To run the service as a hub-managed service simply include in your JupyterHub configuration file something like: - c.JupyterHub.services = [ - { - 'name': 'announcement', - 'url': 'http://127.0.0.1:8888', - 'command': [sys.executable, "-m", "announcement"], - } - ] +:notebook:**Info**: You can run the announcement service example from the `examples` +directory, using one of the several services provided by JupyterHub. -This starts the announcements service up at `/services/announcement` when -JupyterHub launches. By default the announcement text is empty. +```python + +import sys + +from pathlib import Path +# absolute path to announcement.py +announcement_py = str(Path(__file__).parent.joinpath("announcement.py").resolve()) + +#ensure get_config() is added in + c = get_config() + +... +.. + +c.JupyterHub.services = [ + { + 'name': 'announcement', + 'url': 'http://127.0.0.1:8888', + 'command': [sys.executable, announcement_py, "--port", "8888"], + } +] +``` + +This starts the announcements service up at `/services/announcement/` when +JupyterHub launches. By default the announcement text is empty. The `announcement` module has a configurable port (default 8888) and an API -prefix setting. By default the API prefix is `JUPYTERHUB_SERVICE_PREFIX` if +prefix setting. By default the API prefix is `JUPYTERHUB_SERVICE_PREFIX` if that environment variable is set or `/` if it is not. 
## Managing the Announcement -Admin users can set the announcement text with an API token: +Users with permission can set the announcement text with an API token: $ curl -X POST -H "Authorization: token " \ - -d "{'announcement':'JupyterHub will be upgraded on August 14!'}" \ - https://.../services/announcement + -d '{"announcement":"JupyterHub will be upgraded on August 14!"}' \ + https://.../services/announcement/ + +To grant permission, add a role (JupyterHub 2.0) with access to the announcement service: + +```python +# grant the 'announcer' permission to access the announcement service +c.JupyterHub.load_roles = [ + { + "name": "announcers", + "users": ["announcer"], # or groups + "scopes": ["access:services!service=announcement"], + } +] +``` Anyone can read the announcement: - $ curl https://.../services/announcement | python -m json.tool + $ curl https://.../services/announcement/ | python -m json.tool { announcement: "JupyterHub will be upgraded on August 14!", timestamp: "...", @@ -42,19 +72,20 @@ Anyone can read the announcement: The time the announcement was posted is recorded in the `timestamp` field and the user who posted the announcement is recorded in the `user` field. -To clear the announcement text, just DELETE. Only admin users can do this. +To clear the announcement text, send a DELETE request. +This has the same permission requirement. - $ curl -X POST -H "Authorization: token " \ - https://.../services/announcement + $ curl -X DELETE -H "Authorization: token " \ + https://.../services/announcement/ ## Seeing the Announcement in JupyterHub To be able to render the announcement, include the provide `page.html` template -that extends the base `page.html` template. Set `c.JupyterHub.template_paths` +that extends the base `page.html` template. Set `c.JupyterHub.template_paths` in JupyterHub's configuration to include the path to the extending template. 
The template changes the `announcement` element and does a JQuery `$.get()` call to retrieve the announcement text. JupyterHub's configurable announcement template variables can be set for various -pages like login, logout, spawn, and home. Including the template provided in +pages like login, logout, spawn, and home. Including the template provided in this example overrides all of those. diff --git a/examples/service-announcement/announcement.py b/examples/service-announcement/announcement.py index 2b140fdb..5354c70c 100644 --- a/examples/service-announcement/announcement.py +++ b/examples/service-announcement/announcement.py @@ -3,10 +3,7 @@ import datetime import json import os -from tornado import escape -from tornado import gen -from tornado import ioloop -from tornado import web +from tornado import escape, ioloop, web from jupyterhub.services.auth import HubAuthenticated @@ -14,9 +11,6 @@ from jupyterhub.services.auth import HubAuthenticated class AnnouncementRequestHandler(HubAuthenticated, web.RequestHandler): """Dynamically manage page announcements""" - hub_users = [] - allow_admin = True - def initialize(self, storage): """Create storage for announcement text""" self.storage = storage diff --git a/examples/service-announcement/jupyterhub_config.py b/examples/service-announcement/jupyterhub_config.py index 069a76d7..07a9fe76 100644 --- a/examples/service-announcement/jupyterhub_config.py +++ b/examples/service-announcement/jupyterhub_config.py @@ -1,12 +1,21 @@ import sys +c = get_config() + # To run the announcement service managed by the hub, add this. +port = 9999 c.JupyterHub.services = [ { 'name': 'announcement', - 'url': 'http://127.0.0.1:8888', - 'command': [sys.executable, "-m", "announcement"], + 'url': f'http://127.0.0.1:{port}', + 'command': [ + sys.executable, + "-m", + "announcement", + '--port', + str(port), + ], } ] @@ -14,3 +23,19 @@ c.JupyterHub.services = [ # for an example of how to do this. 
c.JupyterHub.template_paths = ["templates"] + +c.Authenticator.allowed_users = {"announcer", "otheruser"} + +# grant the 'announcer' permission to access the announcement service +c.JupyterHub.load_roles = [ + { + "name": "announcers", + "users": ["announcer"], + "scopes": ["access:services!service=announcement"], + } +] + +# dummy spawner and authenticator for testing, don't actually use these! +c.JupyterHub.authenticator_class = 'dummy' +c.JupyterHub.spawner_class = 'simple' +c.JupyterHub.ip = '127.0.0.1' # let's just run on localhost while dummy auth is enabled diff --git a/examples/service-announcement/templates/page.html b/examples/service-announcement/templates/page.html index 5ba023dd..42c976ab 100644 --- a/examples/service-announcement/templates/page.html +++ b/examples/service-announcement/templates/page.html @@ -1,14 +1,9 @@ -{% extends "templates/page.html" %} -{% block announcement %} -
-
-{% endblock %} - -{% block script %} -{{ super() }} +{% extends "templates/page.html" %} {% block announcement %} +
+{% endblock %} {% block script %} {{ super() }} {% endblock %} diff --git a/examples/service-fastapi/Dockerfile b/examples/service-fastapi/Dockerfile new file mode 100644 index 00000000..d2e8d5b5 --- /dev/null +++ b/examples/service-fastapi/Dockerfile @@ -0,0 +1,13 @@ +FROM jupyterhub/jupyterhub + +# Create test user (PAM auth) and install single-user Jupyter +RUN useradd testuser --create-home --shell /bin/bash +RUN echo 'testuser:passwd' | chpasswd +RUN pip install jupyter + +COPY app ./app +COPY jupyterhub_config.py . +COPY requirements.txt /tmp/requirements.txt +RUN pip install -r /tmp/requirements.txt + +CMD ["jupyterhub", "--ip", "0.0.0.0"] diff --git a/examples/service-fastapi/README.md b/examples/service-fastapi/README.md new file mode 100644 index 00000000..b26a586b --- /dev/null +++ b/examples/service-fastapi/README.md @@ -0,0 +1,113 @@ +# Fastapi + +[FastAPI](https://fastapi.tiangolo.com/) is a popular new web framework attractive for its type hinting, async support, automatic doc generation (Swagger), and more. Their [Feature highlights](https://fastapi.tiangolo.com/features/) sum it up nicely. + +# Swagger UI with OAuth demo + +![Fastapi Service Example](./fastapi_example.gif) + +# Try it out locally + +1. Install `fastapi` and other dependencies, then launch Jupyterhub + +``` +pip install -r requirements.txt +jupyterhub --ip=127.0.0.1 +``` + +2. Visit http://127.0.0.1:8000/services/fastapi or http://127.0.0.1:8000/services/fastapi/docs + Login with username 'test-user' and any password. + +3. Try interacting programmatically. If you create a new token in your control panel or pull out the `JUPYTERHUB_API_TOKEN` in the single user environment, you can skip the third step here. 
+ +``` +$ curl -X GET http://127.0.0.1:8000/services/fastapi/ +{"Hello":"World"} + +$ curl -X GET http://127.0.0.1:8000/services/fastapi/me +{"detail":"Must login with token parameter, or Authorization bearer header"} + +$ curl -X POST http://127.0.0.1:8000/hub/api/users/test-user/tokens \ + -d '{"auth": {"username": "test-user", "password": "mypasswd!"}}' \ + | jq '.token' +"3fee13ce6d2845da9bd5f2c2170d3428" + +$ curl -X GET http://127.0.0.1:8000/services/fastapi/me \ + -H "Authorization: Bearer 3fee13ce6d2845da9bd5f2c2170d3428" \ + | jq . +{ + "name": "test-user", + "admin": false, + "groups": [], + "server": null, + "pending": null, + "last_activity": "2021-05-21T09:13:00.514309+00:00", + "servers": null, + "scopes": [ + "access:services", + "access:servers!user=test-user", + "...", + ] +} +``` + +# Try it out in Docker + +1. Build and run the Docker image locally + +```bash +sudo docker build . -t service-fastapi +sudo docker run -it -p 8000:8000 service-fastapi +``` + +2. Visit http://127.0.0.1:8000/services/fastapi/docs. When going through the OAuth flow or getting a token from the control panel, you can log in with `testuser` / `passwd`. + +# PUBLIC_HOST + +If you are running your service behind a proxy, or on a Docker / Kubernetes infrastructure, you might run into an error during OAuth that says `Mismatching redirect URI`. In the Jupterhub logs, there will be a warning along the lines of: `[W 2021-04-06 23:40:06.707 JupyterHub provider:498] Redirect uri https://jupyterhub.my.cloud/services/fastapi/oauth_callback != /services/fastapi/oauth_callback`. This happens because Swagger UI adds the request host, as seen in the browser, to the Authorization URL. + +To solve that problem, the `oauth_redirect_uri` value in the service initialization needs to match what Swagger will auto-generate and what the service will use when POST'ing to `/oauth2/token`. In this example, setting the `PUBLIC_HOST` environment variable to your public-facing Hub domain (e.g. 
`https://jupyterhub.my.cloud`) should make it work. + +# Notes on security.py + +FastAPI has a concept of a [dependency injection](https://fastapi.tiangolo.com/tutorial/dependencies) using a `Depends` object (and a subclass `Security`) that is automatically instantiated/executed when it is a parameter for your endpoint routes. You can utilize a `Depends` object for re-useable common parameters or authentication mechanisms like the [`get_user`](https://fastapi.tiangolo.com/tutorial/security/get-current-user) pattern. + +JupyterHub OAuth has three ways to authenticate: a `token` url parameter; a `Authorization: Bearer ` header; and a (deprecated) `jupyterhub-services` cookie. FastAPI has helper functions that let us create `Security` (dependency injection) objects for each of those. When you need to allow multiple / optional authentication dependencies (`Security` objects), then you can use the argument `auto_error=False` and it will return `None` instead of raising an `HTTPException`. + +Endpoints that need authentication (`/me` and `/debug` in this example) can leverage the `get_user` pattern and effectively pull the user model from the Hub API when a request has authenticated with cookie / token / header all using the simple syntax, + +```python +from .security import get_current_user +from .models import User + +@router.get("/new_endpoint") +async def new_endpoint(user: User = Depends(get_current_user)): + "Function that needs to work with an authenticated user" + return {"Hello": user.name} +``` + +# Notes on client.py + +FastAPI is designed to be an asynchronous web server, so the interactions with the Hub API should be made asynchronously as well. Instead of using `requests` to get user information from a token/cookie, this example uses [`httpx`](https://www.python-httpx.org/). 
`client.py` defines a small function that creates a `Client` (equivalent of `requests.Session`) with the Hub API url as it's `base_url` and adding the `JUPYTERHUB_API_TOKEN` to every header. + +Consider this a very minimal alternative to using `jupyterhub.services.auth.HubOAuth` + +```python +# client.py +import os + +def get_client(): + base_url = os.environ["JUPYTERHUB_API_URL"] + token = os.environ["JUPYTERHUB_API_TOKEN"] + headers = {"Authorization": "Bearer %s" % token} + return httpx.AsyncClient(base_url=base_url, headers=headers) +``` + +```python +# other modules +from .client import get_client + +async with get_client() as client: + resp = await client.get('/endpoint') + ... +``` diff --git a/examples/service-fastapi/app/__init__.py b/examples/service-fastapi/app/__init__.py new file mode 100644 index 00000000..34f275ed --- /dev/null +++ b/examples/service-fastapi/app/__init__.py @@ -0,0 +1,3 @@ +from .app import app + +__all__ = ["app"] diff --git a/examples/service-fastapi/app/app.py b/examples/service-fastapi/app/app.py new file mode 100644 index 00000000..c586b63b --- /dev/null +++ b/examples/service-fastapi/app/app.py @@ -0,0 +1,25 @@ +import os + +from fastapi import FastAPI + +from .service import router + +### When managed by Jupyterhub, the actual endpoints +### will be served out prefixed by /services/:name. +### One way to handle this with FastAPI is to use an APIRouter. 
+### All routes are defined in service.py + +app = FastAPI( + title="Example FastAPI Service", + version="0.1", + ### Serve out Swagger from the service prefix (/services/:name/docs) + openapi_url=router.prefix + "/openapi.json", + docs_url=router.prefix + "/docs", + redoc_url=router.prefix + "/redoc", + ### Add our service client id to the /docs Authorize form automatically + swagger_ui_init_oauth={"clientId": os.environ["JUPYTERHUB_CLIENT_ID"]}, + ### Default /docs/oauth2 redirect will cause Hub + ### to raise oauth2 redirect uri mismatch errors + swagger_ui_oauth2_redirect_url=os.environ["JUPYTERHUB_OAUTH_CALLBACK_URL"], +) +app.include_router(router) diff --git a/examples/service-fastapi/app/client.py b/examples/service-fastapi/app/client.py new file mode 100644 index 00000000..e31d5ebc --- /dev/null +++ b/examples/service-fastapi/app/client.py @@ -0,0 +1,11 @@ +import os + +import httpx + + +# a minimal alternative to using HubOAuth class +def get_client(): + base_url = os.environ["JUPYTERHUB_API_URL"] + token = os.environ["JUPYTERHUB_API_TOKEN"] + headers = {"Authorization": "Bearer %s" % token} + return httpx.AsyncClient(base_url=base_url, headers=headers) diff --git a/examples/service-fastapi/app/models.py b/examples/service-fastapi/app/models.py new file mode 100644 index 00000000..fad8e1e8 --- /dev/null +++ b/examples/service-fastapi/app/models.py @@ -0,0 +1,45 @@ +from datetime import datetime +from typing import Any, Dict, List, Optional + +from pydantic import BaseModel + + +# https://jupyterhub.readthedocs.io/en/stable/_static/rest-api/index.html +class Server(BaseModel): + name: str + ready: bool + pending: Optional[str] + url: str + progress_url: str + started: datetime + last_activity: datetime + state: Optional[Any] + user_options: Optional[Any] + + +class User(BaseModel): + name: str + admin: bool + groups: Optional[List[str]] + server: Optional[str] + pending: Optional[str] + last_activity: datetime + servers: Optional[Dict[str, Server]] + 
scopes: List[str] + + +# https://stackoverflow.com/questions/64501193/fastapi-how-to-use-httpexception-in-responses +class AuthorizationError(BaseModel): + detail: str + + +class HubResponse(BaseModel): + msg: str + request_url: str + token: str + response_code: int + hub_response: dict + + +class HubApiError(BaseModel): + detail: HubResponse diff --git a/examples/service-fastapi/app/security.py b/examples/service-fastapi/app/security.py new file mode 100644 index 00000000..63fd2a5e --- /dev/null +++ b/examples/service-fastapi/app/security.py @@ -0,0 +1,77 @@ +import json +import os + +from fastapi import HTTPException, Security, status +from fastapi.security import OAuth2AuthorizationCodeBearer +from fastapi.security.api_key import APIKeyQuery + +from .client import get_client +from .models import User + +### Endpoints can require authentication using Depends(get_current_user) +### get_current_user will look for a token in url params or +### Authorization: bearer token (header). +### Hub technically supports cookie auth too, but it is deprecated so +### not being included here. +auth_by_param = APIKeyQuery(name="token", auto_error=False) + +auth_url = os.environ["PUBLIC_HOST"] + "/hub/api/oauth2/authorize" +auth_by_header = OAuth2AuthorizationCodeBearer( + authorizationUrl=auth_url, tokenUrl="get_token", auto_error=False +) +### ^^ The flow for OAuth2 in Swagger is that the "authorize" button +### will redirect user (browser) to "auth_url", which is the Hub login page. +### After logging in, the browser will POST to our internal /get_token endpoint +### with the auth code. That endpoint POST's to Hub /oauth2/token with +### our client_secret (JUPYTERHUB_API_TOKEN) and that code to get an +### access_token, which it returns to browser, which places in Authorization header. 
+ +if os.environ.get("JUPYTERHUB_OAUTH_SCOPES"): + # typically ["access:services", "access:services!service=$service_name"] + access_scopes = json.loads(os.environ["JUPYTERHUB_OAUTH_SCOPES"]) +else: + access_scopes = ["access:services"] + +### For consideration: optimize performance with a cache instead of +### always hitting the Hub api? +async def get_current_user( + auth_by_param: str = Security(auth_by_param), + auth_by_header: str = Security(auth_by_header), +): + token = auth_by_param or auth_by_header + if token is None: + raise HTTPException( + status.HTTP_401_UNAUTHORIZED, + detail="Must login with token parameter or Authorization bearer header", + ) + + async with get_client() as client: + endpoint = "/user" + # normally we auth to Hub API with service api token, + # but this time auth as the user token to get user model + headers = {"Authorization": f"Bearer {token}"} + resp = await client.get(endpoint, headers=headers) + if resp.is_error: + raise HTTPException( + status.HTTP_400_BAD_REQUEST, + detail={ + "msg": "Error getting user info from token", + "request_url": str(resp.request.url), + "token": token, + "response_code": resp.status_code, + "hub_response": resp.json(), + }, + ) + user = User(**resp.json()) + if any(scope in user.scopes for scope in access_scopes): + return user + else: + raise HTTPException( + status.HTTP_403_FORBIDDEN, + detail={ + "msg": f"User not authorized: {user.name}", + "request_url": str(resp.request.url), + "token": token, + "user": resp.json(), + }, + ) diff --git a/examples/service-fastapi/app/service.py b/examples/service-fastapi/app/service.py new file mode 100644 index 00000000..8aaeef94 --- /dev/null +++ b/examples/service-fastapi/app/service.py @@ -0,0 +1,65 @@ +import os + +from fastapi import APIRouter, Depends, Form, Request + +from .client import get_client +from .models import AuthorizationError, HubApiError, User +from .security import get_current_user + +# APIRouter prefix cannot end in / +service_prefix = 
os.getenv("JUPYTERHUB_SERVICE_PREFIX", "").rstrip("/") +router = APIRouter(prefix=service_prefix) + + +@router.post("/get_token", include_in_schema=False) +async def get_token(code: str = Form(...)): + "Callback function for OAuth2AuthorizationCodeBearer scheme" + # The only thing we need in this form post is the code + # Everything else we can hardcode / pull from env + async with get_client() as client: + redirect_uri = ( + os.environ["PUBLIC_HOST"] + os.environ["JUPYTERHUB_OAUTH_CALLBACK_URL"], + ) + data = { + "client_id": os.environ["JUPYTERHUB_CLIENT_ID"], + "client_secret": os.environ["JUPYTERHUB_API_TOKEN"], + "grant_type": "authorization_code", + "code": code, + "redirect_uri": redirect_uri, + } + resp = await client.post("/oauth2/token", data=data) + ### resp.json() is {'access_token': , 'token_type': 'Bearer'} + return resp.json() + + +@router.get("/") +async def index(): + "Non-authenticated function that returns {'Hello': 'World'}" + return {"Hello": "World"} + + +# response_model and responses dict translate to OpenAPI (Swagger) hints +# compare and contrast what the /me endpoint looks like in Swagger vs /debug +@router.get( + "/me", + response_model=User, + responses={401: {'model': AuthorizationError}, 400: {'model': HubApiError}}, +) +async def me(user: User = Depends(get_current_user)): + "Authenticated function that returns the User model" + return user + + +@router.get("/debug") +async def debug(request: Request, user: User = Depends(get_current_user)): + """ + Authenticated function that returns a few pieces of debug + * Environ of the service process + * Request headers + * User model + """ + return { + "env": dict(os.environ), + "headers": dict(request.headers), + "user": user, + } diff --git a/examples/service-fastapi/fastapi_example.gif b/examples/service-fastapi/fastapi_example.gif new file mode 100644 index 00000000..6c2c2301 Binary files /dev/null and b/examples/service-fastapi/fastapi_example.gif differ diff --git 
a/examples/service-fastapi/jupyterhub_config.py b/examples/service-fastapi/jupyterhub_config.py new file mode 100644 index 00000000..f8f9e864 --- /dev/null +++ b/examples/service-fastapi/jupyterhub_config.py @@ -0,0 +1,44 @@ +import os +import warnings + +# When Swagger performs OAuth2 in the browser, it will set +# the request host + relative path as the redirect uri, causing a +# uri mismatch if the oauth_redirect_uri is just the relative path +# is set in the c.JupyterHub.services entry (as per default). +# Therefore need to know the request host ahead of time. +if "PUBLIC_HOST" not in os.environ: + msg = ( + "env PUBLIC_HOST is not set, defaulting to http://127.0.0.1:8000. " + "This can cause problems with OAuth. " + "Set PUBLIC_HOST to your public (browser accessible) host." + ) + warnings.warn(msg) + public_host = "http://127.0.0.1:8000" +else: + public_host = os.environ["PUBLIC_HOST"].rstrip('/') +service_name = "fastapi" +oauth_redirect_uri = f"{public_host}/services/{service_name}/oauth_callback" + +c.JupyterHub.services = [ + { + "name": service_name, + "url": "http://127.0.0.1:10202", + "command": ["uvicorn", "app:app", "--port", "10202"], + "oauth_redirect_uri": oauth_redirect_uri, + "environment": {"PUBLIC_HOST": public_host}, + } +] + +c.JupyterHub.load_roles = [ + { + "name": "user", + # grant all users access to services + "scopes": ["self", "access:services"], + }, +] + + +# dummy for testing, create test-user +c.Authenticator.allowed_users = {"test-user"} +c.JupyterHub.authenticator_class = "dummy" +c.JupyterHub.spawner_class = "simple" diff --git a/examples/service-fastapi/requirements.txt b/examples/service-fastapi/requirements.txt new file mode 100644 index 00000000..64716c79 --- /dev/null +++ b/examples/service-fastapi/requirements.txt @@ -0,0 +1,4 @@ +fastapi +httpx +python-multipart +uvicorn diff --git a/examples/service-notebook/README.md b/examples/service-notebook/README.md index f6be76b9..6ac3e84d 100644 --- 
a/examples/service-notebook/README.md +++ b/examples/service-notebook/README.md @@ -17,8 +17,8 @@ and the name of the shared-notebook service. In the external example, some extra steps are required to set up supervisor: -1. select a system user to run the service. This is a user on the system, and does not need to be a Hub user. Add this to the user field in `shared-notebook.conf`, replacing `someuser`. +1. select a system user to run the service. This is a user on the system, and does not need to be a Hub user. Add this to the user field in `shared-notebook.conf`, replacing `someuser`. 2. generate a secret token for authentication, and replace the `super-secret` fields in `shared-notebook-service` and `jupyterhub_config.py` 3. install `shared-notebook-service` somewhere on your system, and update `/path/to/shared-notebook-service` to the absolute path of this destination -3. copy `shared-notebook.conf` to `/etc/supervisor/conf.d/` -4. `supervisorctl reload` +4. copy `shared-notebook.conf` to `/etc/supervisor/conf.d/` +5. 
`supervisorctl reload` diff --git a/examples/service-notebook/external/jupyterhub_config.py b/examples/service-notebook/external/jupyterhub_config.py index 3b2ef52e..e6371389 100644 --- a/examples/service-notebook/external/jupyterhub_config.py +++ b/examples/service-notebook/external/jupyterhub_config.py @@ -1,15 +1,35 @@ # our user list -c.Authenticator.whitelist = ['minrk', 'ellisonbg', 'willingc'] +c.Authenticator.allowed_users = ['minrk', 'ellisonbg', 'willingc'] -# ellisonbg and willingc have access to a shared server: +service_name = 'shared-notebook' +service_port = 9999 +group_name = 'shared' -c.JupyterHub.load_groups = {'shared': ['ellisonbg', 'willingc']} +# ellisonbg and willingc are in a group that will access the shared server: + +c.JupyterHub.load_groups = {group_name: ['ellisonbg', 'willingc']} # start the notebook server as a service c.JupyterHub.services = [ { - 'name': 'shared-notebook', - 'url': 'http://127.0.0.1:9999', - 'api_token': 'super-secret', + 'name': service_name, + 'url': f'http://127.0.0.1:{service_port}', + 'api_token': 'c3a29e5d386fd7c9aa1e8fe9d41c282ec8b', } ] + +# This "role assignment" is what grants members of the group +# access to the service +c.JupyterHub.load_roles = [ + { + "name": "shared-notebook", + "groups": [group_name], + "scopes": [f"access:services!service={service_name}"], + }, +] + + +# dummy spawner and authenticator for testing, don't actually use these! 
+c.JupyterHub.authenticator_class = 'dummy' +c.JupyterHub.spawner_class = 'simple' +c.JupyterHub.ip = '127.0.0.1' # let's just run on localhost while dummy auth is enabled diff --git a/examples/service-notebook/external/shared-notebook-service b/examples/service-notebook/external/shared-notebook-service index 20206e92..3510c0a6 100755 --- a/examples/service-notebook/external/shared-notebook-service +++ b/examples/service-notebook/external/shared-notebook-service @@ -1,9 +1,11 @@ #!/bin/bash -l set -e -export JUPYTERHUB_API_TOKEN=super-secret +# these must match the values in jupyterhub_config.py +export JUPYTERHUB_API_TOKEN=c3a29e5d386fd7c9aa1e8fe9d41c282ec8b export JUPYTERHUB_SERVICE_URL=http://127.0.0.1:9999 export JUPYTERHUB_SERVICE_NAME=shared-notebook +export JUPYTERHUB_SERVICE_PREFIX="/services/${JUPYTERHUB_SERVICE_NAME}/" +export JUPYTERHUB_CLIENT_ID="service-${JUPYTERHUB_SERVICE_NAME}" -jupyterhub-singleuser \ - --group='shared' +jupyterhub-singleuser diff --git a/examples/service-notebook/managed/jupyterhub_config.py b/examples/service-notebook/managed/jupyterhub_config.py index f9ef721f..1102dda9 100644 --- a/examples/service-notebook/managed/jupyterhub_config.py +++ b/examples/service-notebook/managed/jupyterhub_config.py @@ -1,19 +1,35 @@ # our user list -c.Authenticator.whitelist = ['minrk', 'ellisonbg', 'willingc'] - -# ellisonbg and willingc have access to a shared server: - -c.JupyterHub.load_groups = {'shared': ['ellisonbg', 'willingc']} +c.Authenticator.allowed_users = ['minrk', 'ellisonbg', 'willingc'] service_name = 'shared-notebook' service_port = 9999 group_name = 'shared' +# ellisonbg and willingc have access to a shared server: + +c.JupyterHub.load_groups = {group_name: ['ellisonbg', 'willingc']} + # start the notebook server as a service c.JupyterHub.services = [ { 'name': service_name, - 'url': 'http://127.0.0.1:{}'.format(service_port), - 'command': ['jupyterhub-singleuser', '--group=shared', '--debug'], + 'url': 
f'http://127.0.0.1:{service_port}', + 'command': ['jupyterhub-singleuser', '--debug'], } ] + +# This "role assignment" is what grants members of the group +# access to the service +c.JupyterHub.load_roles = [ + { + "name": "shared-notebook", + "groups": [group_name], + "scopes": [f"access:services!service={service_name}"], + }, +] + + +# dummy spawner and authenticator for testing, don't actually use these! +c.JupyterHub.authenticator_class = 'dummy' +c.JupyterHub.spawner_class = 'simple' +c.JupyterHub.ip = '127.0.0.1' # let's just run on localhost while dummy auth is enabled diff --git a/examples/service-whoami-flask/README.md b/examples/service-whoami-flask/README.md index 9addc52a..b90f4f44 100644 --- a/examples/service-whoami-flask/README.md +++ b/examples/service-whoami-flask/README.md @@ -1,24 +1,24 @@ # Authenticating a flask service with JupyterHub -Uses `jupyterhub.services.HubAuth` to authenticate requests with the Hub in a [flask][] application. +Uses `jupyterhub.services.HubOAuth` to authenticate requests with the Hub in a [flask][] application. ## Run -1. Launch JupyterHub and the `whoami service` with +1. Launch JupyterHub and the `whoami service` with jupyterhub --ip=127.0.0.1 -2. Visit http://127.0.0.1:8000/services/whoami/ or http://127.0.0.1:8000/services/whoami-oauth/ +2. 
Visit http://127.0.0.1:8000/services/whoami/ After logging in with your local-system credentials, you should see a JSON dump of your user info: ```json { - "admin": false, - "last_activity": "2016-05-27T14:05:18.016372", - "name": "queequeg", - "pending": null, - "server": "/user/queequeg" + "admin": false, + "last_activity": "2016-05-27T14:05:18.016372", + "name": "queequeg", + "pending": null, + "server": "/user/queequeg" } ``` @@ -29,5 +29,4 @@ A similar service could be run externally, by setting the JupyterHub service env JUPYTERHUB_API_TOKEN JUPYTERHUB_SERVICE_PREFIX - [flask]: http://flask.pocoo.org diff --git a/examples/service-whoami-flask/jupyterhub_config.py b/examples/service-whoami-flask/jupyterhub_config.py index 54d3c736..71e890d9 100644 --- a/examples/service-whoami-flask/jupyterhub_config.py +++ b/examples/service-whoami-flask/jupyterhub_config.py @@ -1,6 +1,3 @@ -import os -import sys - c.JupyterHub.services = [ { 'name': 'whoami', @@ -8,10 +5,12 @@ c.JupyterHub.services = [ 'command': ['flask', 'run', '--port=10101'], 'environment': {'FLASK_APP': 'whoami-flask.py'}, }, - { - 'name': 'whoami-oauth', - 'url': 'http://127.0.0.1:10201', - 'command': ['flask', 'run', '--port=10201'], - 'environment': {'FLASK_APP': 'whoami-oauth.py'}, - }, ] + +# dummy auth and simple spawner for testing +# any username and password will work +c.JupyterHub.spawner_class = 'simple' +c.JupyterHub.authenticator_class = 'dummy' + +# listen only on localhost while testing with wide-open auth +c.JupyterHub.ip = '127.0.0.1' diff --git a/examples/service-whoami-flask/whoami-flask.py b/examples/service-whoami-flask/whoami-flask.py index b3353ae5..2e08391b 100644 --- a/examples/service-whoami-flask/whoami-flask.py +++ b/examples/service-whoami-flask/whoami-flask.py @@ -4,42 +4,42 @@ whoami service authentication with the Hub """ import json import os +import secrets from functools import wraps -from urllib.parse import quote -from flask import Flask -from flask import redirect 
-from flask import request -from flask import Response - -from jupyterhub.services.auth import HubAuth +from flask import Flask, Response, make_response, redirect, request, session +from jupyterhub.services.auth import HubOAuth prefix = os.environ.get('JUPYTERHUB_SERVICE_PREFIX', '/') -auth = HubAuth(api_token=os.environ['JUPYTERHUB_API_TOKEN'], cache_max_age=60) +auth = HubOAuth(api_token=os.environ['JUPYTERHUB_API_TOKEN'], cache_max_age=60) app = Flask(__name__) +# encryption key for session cookies +app.secret_key = secrets.token_bytes(32) def authenticated(f): - """Decorator for authenticating with the Hub""" + """Decorator for authenticating with the Hub via OAuth""" @wraps(f) def decorated(*args, **kwargs): - cookie = request.cookies.get(auth.cookie_name) - token = request.headers.get(auth.auth_header_name) - if cookie: - user = auth.user_for_cookie(cookie) - elif token: + token = session.get("token") + + if token: user = auth.user_for_token(token) else: user = None + if user: return f(user, *args, **kwargs) else: # redirect to login url on failed auth - return redirect(auth.login_url + '?next=%s' % quote(request.path)) + state = auth.generate_state(next_url=request.path) + response = make_response(redirect(auth.login_url + '&state=%s' % state)) + response.set_cookie(auth.state_cookie_name, state) + return response return decorated @@ -50,3 +50,24 @@ def whoami(user): return Response( json.dumps(user, indent=1, sort_keys=True), mimetype='application/json' ) + + +@app.route(prefix + 'oauth_callback') +def oauth_callback(): + code = request.args.get('code', None) + if code is None: + return 403 + + # validate state field + arg_state = request.args.get('state', None) + cookie_state = request.cookies.get(auth.state_cookie_name) + if arg_state is None or arg_state != cookie_state: + # state doesn't match + return 403 + + token = auth.token_for_code(code) + # store token in session cookie + session["token"] = token + next_url = auth.get_next_url(cookie_state) or 
prefix + response = make_response(redirect(next_url)) + return response diff --git a/examples/service-whoami-flask/whoami-oauth.py b/examples/service-whoami-flask/whoami-oauth.py deleted file mode 100644 index 26837dcd..00000000 --- a/examples/service-whoami-flask/whoami-oauth.py +++ /dev/null @@ -1,72 +0,0 @@ -#!/usr/bin/env python3 -""" -whoami service authentication with the Hub -""" -import json -import os -from functools import wraps - -from flask import Flask -from flask import make_response -from flask import redirect -from flask import request -from flask import Response - -from jupyterhub.services.auth import HubOAuth - - -prefix = os.environ.get('JUPYTERHUB_SERVICE_PREFIX', '/') - -auth = HubOAuth(api_token=os.environ['JUPYTERHUB_API_TOKEN'], cache_max_age=60) - -app = Flask(__name__) - - -def authenticated(f): - """Decorator for authenticating with the Hub via OAuth""" - - @wraps(f) - def decorated(*args, **kwargs): - token = request.cookies.get(auth.cookie_name) - if token: - user = auth.user_for_token(token) - else: - user = None - if user: - return f(user, *args, **kwargs) - else: - # redirect to login url on failed auth - state = auth.generate_state(next_url=request.path) - response = make_response(redirect(auth.login_url + '&state=%s' % state)) - response.set_cookie(auth.state_cookie_name, state) - return response - - return decorated - - -@app.route(prefix) -@authenticated -def whoami(user): - return Response( - json.dumps(user, indent=1, sort_keys=True), mimetype='application/json' - ) - - -@app.route(prefix + 'oauth_callback') -def oauth_callback(): - code = request.args.get('code', None) - if code is None: - return 403 - - # validate state field - arg_state = request.args.get('state', None) - cookie_state = request.cookies.get(auth.state_cookie_name) - if arg_state is None or arg_state != cookie_state: - # state doesn't match - return 403 - - token = auth.token_for_code(code) - next_url = auth.get_next_url(cookie_state) or prefix - response = 
make_response(redirect(next_url)) - response.set_cookie(auth.cookie_name, token) - return response diff --git a/examples/service-whoami/README.md b/examples/service-whoami/README.md index b8ce3442..15eea647 100644 --- a/examples/service-whoami/README.md +++ b/examples/service-whoami/README.md @@ -2,37 +2,100 @@ Uses `jupyterhub.services.HubAuthenticated` to authenticate requests with the Hub. -There is an implementation each of cookie-based `HubAuthenticated` and OAuth-based `HubOAuthenticated`. +There is an implementation each of api-token-based `HubAuthenticated` and OAuth-based `HubOAuthenticated`. ## Run -1. Launch JupyterHub and the `whoami service` with +1. Launch JupyterHub and the `whoami` services with - jupyterhub --ip=127.0.0.1 + jupyterhub -2. Visit http://127.0.0.1:8000/services/whoami or http://127.0.0.1:8000/services/whoami-oauth +2. Visit http://127.0.0.1:8000/services/whoami-oauth -After logging in with your local-system credentials, you should see a JSON dump of your user info: +After logging in with any username and password, you should see a JSON dump of your user info: ```json { - "admin": false, - "last_activity": "2016-05-27T14:05:18.016372", - "name": "queequeg", - "pending": null, - "server": "/user/queequeg" + "admin": false, + "groups": [], + "kind": "user", + "name": "queequeg", + "scopes": ["access:services!service=whoami-oauth"], + "session_id": "5a2164273a7346728873bcc2e3c26415" } ``` +What is contained in the model will depend on the permissions +requested in the `oauth_client_allowed_scopes` configuration of the service `whoami-oauth` service. +The default is the minimum required for identification and access to the service, +which will provide the username and current scopes. + +The `whoami-api` service powered by the base `HubAuthenticated` class only supports token-authenticated API requests, +not browser visits, because it does not implement OAuth. 
Visit it by requesting an api token from the tokens page (`/hub/token`), +and making a direct request: + +```bash +token="d584cbc5bba2430fb153aadb305029b4" +curl -H "Authorization: token $token" http://127.0.0.1:8000/services/whoami-api/ | jq . +``` + +```json +{ + "admin": false, + "created": "2021-12-20T09:49:37.258427Z", + "groups": [], + "kind": "user", + "last_activity": "2021-12-20T10:07:31.298056Z", + "name": "queequeg", + "pending": null, + "roles": ["user"], + "scopes": [ + "access:servers!user=queequeg", + "access:services", + "delete:servers!user=queequeg", + "read:servers!user=queequeg", + "read:tokens!user=queequeg", + "read:users!user=queequeg", + "read:users:activity!user=queequeg", + "read:users:groups!user=queequeg", + "read:users:name!user=queequeg", + "servers!user=queequeg", + "tokens!user=queequeg", + "users:activity!user=queequeg" + ], + "server": null, + "servers": {}, + "session_id": null +} +``` + +The above is a more complete user model than the `whoami-oauth` example, because +the token was issued with the default `token` role, +which has the `inherit` metascope, +meaning the token has access to everything the tokens owner has access to. + This relies on the Hub starting the whoami services, via config (see [jupyterhub_config.py](./jupyterhub_config.py)). -You may set the `hub_users` configuration in the service script -to restrict access to the service to a whitelist of allowed users. -By default, any authenticated user is allowed. +To govern access to the services, create **roles** with the scope `access:services!service=$service-name`, +and assign users to the scope. 
+ +The jupyterhub_config.py grants access for all users to all services via the default 'user' role, with: + +```python +c.JupyterHub.load_roles = [ + { + "name": "user", + # grant all users access to all services + "scopes": ["access:services", "self"], + } +] +``` A similar service could be run externally, by setting the JupyterHub service environment variables: JUPYTERHUB_API_TOKEN JUPYTERHUB_SERVICE_PREFIX + JUPYTERHUB_OAUTH_SCOPES + JUPYTERHUB_CLIENT_ID # for whoami-oauth only or instantiating and configuring a HubAuth object yourself, and attaching it as `self.hub_auth` in your HubAuthenticated handlers. diff --git a/examples/service-whoami/jupyterhub_config.py b/examples/service-whoami/jupyterhub_config.py index d9ccd889..ec435a1d 100644 --- a/examples/service-whoami/jupyterhub_config.py +++ b/examples/service-whoami/jupyterhub_config.py @@ -1,9 +1,8 @@ -import os import sys c.JupyterHub.services = [ { - 'name': 'whoami', + 'name': 'whoami-api', 'url': 'http://127.0.0.1:10101', 'command': [sys.executable, './whoami.py'], }, @@ -11,5 +10,27 @@ c.JupyterHub.services = [ 'name': 'whoami-oauth', 'url': 'http://127.0.0.1:10102', 'command': [sys.executable, './whoami-oauth.py'], + # the default oauth roles is minimal, + # only requesting access to the service, + # and identification by name, + # nothing more. + # Specifying 'oauth_client_allowed_scopes' as a list of scopes + # allows requesting more information about users, + # or the ability to take actions on users' behalf, as required. + # the 'inherit' scope means the full permissions of the owner + # 'oauth_client_allowed_scopes': ['inherit'], }, ] + +c.JupyterHub.load_roles = [ + { + "name": "user", + # grant all users access to all services + "scopes": ["access:services", "self"], + } +] + +# dummy spawner and authenticator for testing, don't actually use these! 
+c.JupyterHub.authenticator_class = 'dummy' +c.JupyterHub.spawner_class = 'simple' +c.JupyterHub.ip = '127.0.0.1' # let's just run on localhost while dummy auth is enabled diff --git a/examples/service-whoami/whoami-oauth.py b/examples/service-whoami/whoami-oauth.py index c1a576c9..41466231 100644 --- a/examples/service-whoami/whoami-oauth.py +++ b/examples/service-whoami/whoami-oauth.py @@ -1,32 +1,22 @@ """An example service authenticating with the Hub. -This example service serves `/services/whoami/`, +This example service serves `/services/whoami-oauth/`, authenticated with the Hub, showing the user their own info. """ import json import os -from getpass import getuser from urllib.parse import urlparse from tornado.httpserver import HTTPServer from tornado.ioloop import IOLoop -from tornado.web import Application -from tornado.web import authenticated -from tornado.web import RequestHandler +from tornado.web import Application, RequestHandler, authenticated -from jupyterhub.services.auth import HubOAuthCallbackHandler -from jupyterhub.services.auth import HubOAuthenticated +from jupyterhub.services.auth import HubOAuthCallbackHandler, HubOAuthenticated from jupyterhub.utils import url_path_join class WhoAmIHandler(HubOAuthenticated, RequestHandler): - # hub_users can be a set of users who are allowed to access the service - # `getuser()` here would mean only the user who started the service - # can access the service: - - # hub_users = {getuser()} - @authenticated def get(self): user_model = self.get_current_user() diff --git a/examples/service-whoami/whoami.py b/examples/service-whoami/whoami.py index 6dc56c9e..da79d9ad 100644 --- a/examples/service-whoami/whoami.py +++ b/examples/service-whoami/whoami.py @@ -1,28 +1,21 @@ """An example service authenticating with the Hub. -This serves `/services/whoami/`, authenticated with the Hub, showing the user their own info. 
+This serves `/services/whoami-api/`, authenticated with the Hub, showing the user their own info. + +HubAuthenticated only supports token-based access. """ import json import os -from getpass import getuser from urllib.parse import urlparse from tornado.httpserver import HTTPServer from tornado.ioloop import IOLoop -from tornado.web import Application -from tornado.web import authenticated -from tornado.web import RequestHandler +from tornado.web import Application, RequestHandler, authenticated from jupyterhub.services.auth import HubAuthenticated class WhoAmIHandler(HubAuthenticated, RequestHandler): - # hub_users can be a set of users who are allowed to access the service - # `getuser()` here would mean only the user who started the service - # can access the service: - - # hub_users = {getuser()} - @authenticated def get(self): user_model = self.get_current_user() diff --git a/examples/spawn-form/jupyterhub_config.py b/examples/spawn-form/jupyterhub_config.py index ff7c2526..58fe59dc 100644 --- a/examples/spawn-form/jupyterhub_config.py +++ b/examples/spawn-form/jupyterhub_config.py @@ -10,10 +10,15 @@ class DemoFormSpawner(LocalProcessSpawner): def _options_form_default(self): default_env = "YOURNAME=%s\n" % self.user.name return """ - - - - +
+ + +
+
+ + +
""".format( env=default_env ) diff --git a/hooks/README.md b/hooks/README.md deleted file mode 100644 index 6cdbadad..00000000 --- a/hooks/README.md +++ /dev/null @@ -1,3 +0,0 @@ -# Docker Cloud build hooks - -These are the hooks diff --git a/hooks/post_build b/hooks/post_build deleted file mode 100755 index 874fd6d5..00000000 --- a/hooks/post_build +++ /dev/null @@ -1,4 +0,0 @@ -#!/bin/bash -set -exuo pipefail - -docker build --build-arg BASE_IMAGE=$DOCKER_REPO:$DOCKER_TAG -t ${DOCKER_REPO}-onbuild:$DOCKER_TAG onbuild diff --git a/hooks/post_push b/hooks/post_push deleted file mode 100755 index 1b19f796..00000000 --- a/hooks/post_push +++ /dev/null @@ -1,44 +0,0 @@ -#!/bin/bash -set -exuo pipefail - -export ONBUILD=${DOCKER_REPO}-onbuild -# push ONBUILD image -docker push $ONBUILD:$DOCKER_TAG - -function get_hub_version() { - rm -f hub_version - docker run --rm -v $PWD:/version -u $(id -u) -i $DOCKER_REPO:$DOCKER_TAG sh -c 'jupyterhub --version > /version/hub_version' - hub_xyz=$(cat hub_version) - split=( ${hub_xyz//./ } ) - hub_xy="${split[0]}.${split[1]}" - # add .dev on hub_xy so it's 1.0.dev - if [[ ! 
-z "${split[3]:-}" ]]; then - hub_xy="${hub_xy}.${split[3]}" - latest=0 - else - latest=1 - fi -} - - -get_hub_version - -# when building master, push 0.9.0.dev as well -docker tag $DOCKER_REPO:$DOCKER_TAG $DOCKER_REPO:$hub_xyz -docker push $DOCKER_REPO:$hub_xyz -docker tag $ONBUILD:$DOCKER_TAG $ONBUILD:$hub_xyz -docker push $ONBUILD:$hub_xyz - -# when building 0.9.x, push 0.9 as well -docker tag $DOCKER_REPO:$DOCKER_TAG $DOCKER_REPO:$hub_xy -docker push $DOCKER_REPO:$hub_xy -docker tag $ONBUILD:$DOCKER_TAG $ONBUILD:$hub_xy -docker push $ONBUILD:$hub_xyz - -# if building a stable release, tag latest as well -if [[ "$latest" == "1" ]]; then - docker tag $DOCKER_REPO:$DOCKER_TAG $DOCKER_REPO:latest - docker push $DOCKER_REPO:latest - docker tag $ONBUILD:$DOCKER_TAG $ONBUILD:latest - docker push $ONBUILD:latest -fi diff --git a/jsx/.eslintrc.json b/jsx/.eslintrc.json new file mode 100644 index 00000000..597e92cb --- /dev/null +++ b/jsx/.eslintrc.json @@ -0,0 +1,44 @@ +{ + "extends": ["plugin:react/recommended"], + "parserOptions": { + "ecmaVersion": 2018, + "sourceType": "module", + "ecmaFeatures": { + "jsx": true + } + }, + "settings": { + "react": { + "version": "detect" + } + }, + "plugins": ["eslint-plugin-react", "prettier", "unused-imports"], + "env": { + "es6": true, + "browser": true + }, + "rules": { + "semi": "off", + "quotes": "off", + "prettier/prettier": "warn", + "no-unused-vars": "off", + "unused-imports/no-unused-imports": "error", + "unused-imports/no-unused-vars": [ + "warn", + { + "vars": "all", + "varsIgnorePattern": "^regeneratorRuntime|^_", + "args": "after-used", + "argsIgnorePattern": "^_" + } + ] + }, + "overrides": [ + { + "files": ["**/*.test.js", "**/*.test.jsx"], + "env": { + "jest": true + } + } + ] +} diff --git a/jsx/.gitignore b/jsx/.gitignore new file mode 100644 index 00000000..6ae0fdb6 --- /dev/null +++ b/jsx/.gitignore @@ -0,0 +1,2 @@ +node_modules +build/admin-react.js diff --git a/jsx/README.md b/jsx/README.md new file mode 
100644 index 00000000..99d635af --- /dev/null +++ b/jsx/README.md @@ -0,0 +1,64 @@ +# Jupyterhub Admin Dashboard - React Variant + +This repository contains current updates to the Jupyterhub Admin Dashboard, +reducing the complexity from a mass of templated HTML to a simple React web application. +This will integrate with Jupyterhub, speeding up client interactions while simplifying the +admin dashboard codebase. + +### Build Commands + +- `yarn build`: Installs all dependencies and bundles the application +- `yarn hot`: Bundles the application and runs a mock (serverless) version on port 8000 +- `yarn lint`: Lints JSX with ESLint +- `yarn lint --fix`: Lints and fixes errors JSX with ESLint / formats with Prettier +- `yarn place`: Copies the transpiled React bundle to /share/jupyterhub/static/js/admin-react.js for use. + +### Good To Know + +Just some basics on how the React Admin app is built. + +#### General build structure: + +This app is written in JSX, and then transpiled into an ES5 bundle with Babel and Webpack. All JSX components are unit tested with a mixture of Jest and Enzyme and can be run both manually and per-commit. Most logic is separated into components under the `/src/components` directory, each directory containing a `.jsx`, `.test.jsx`, and sometimes a `.css` file. These components are all pulled together, given client-side routes, and connected to the Redux store in `/src/App.jsx` which serves as an entrypoint to the application. + +#### Centralized state and data management with Redux: + +The app uses Redux throughout the components via the `useSelector` and `useDispatch` hooks to store and update user and group data from the API. With Redux, this data is available to any connected component. This means that if one component receives new data, they all do. + +#### API functions + +All API functions used by the front end are packaged as a library of props within `/src/util/withAPI.js`. 
This keeps our web service logic separate from our presentational logic, allowing us to connect API functionality to our components at a high level and keep the code more modular. This connection specifically happens in `/src/App.jsx`, within the route assignments. + +#### Pagination + +Indicies of paginated user and group data are stored in a `page` variable in the query string, as well as the `user_page` / `group_page` state variables in Redux. This allows the app to maintain two sources of truth, as well as protect the admin user's place in the collection on page reload. The limit is constant at this point and is held in the Redux state. + +On updates to the paginated data, the app can respond in one of two ways. If a user/group record is either added or deleted, the pagination will reset and data will be pulled back with no offset. Alternatively, if a record is modified, the offset will remain and the change will be shown. + +Code examples: + +```js +// Pagination limit is pulled in from Redux. +var limit = useSelector((state) => state.limit); + +// Page query string is parsed and checked +var page = parseInt(new URLQuerySearch(props.location).get("page")); +page = isNaN(page) ? 0 : page; + +// A slice is created representing the records to be returned +var slice = [page * limit, limit]; + +// A user's notebook server status was changed from stopped to running, user data is being refreshed from the slice. +startServer().then(() => { + updateUsers(...slice) + // After data is fetched, the Redux store is updated with the data and a copy of the page number. + .then((data) => dispatchPageChange(data, page)); +}); + +// Alternatively, a new user was added, and user data is being refreshed from offset 0. +addUser().then(() => { + updateUsers(0, limit) + // After data is fetched, the Redux store is updated with the data and asserts page 0. 
+ .then((data) => dispatchPageChange(data, 0)); +}); +``` diff --git a/jsx/package.json b/jsx/package.json new file mode 100644 index 00000000..8ac6f020 --- /dev/null +++ b/jsx/package.json @@ -0,0 +1,75 @@ +{ + "name": "jupyterhub-admin-react", + "version": "1.0.0", + "description": "React application for the Jupyter Hub admin dashboard service", + "main": "index.js", + "author": "nabarber", + "license": "BSD-3-Clause", + "scripts": { + "build": "yarn && webpack", + "hot": "webpack && webpack-dev-server", + "place": "cp build/admin-react.js* ../share/jupyterhub/static/js/", + "test": "jest --verbose", + "snap": "jest --updateSnapshot", + "lint": "eslint --ext .jsx --ext .js src/", + "lint:fix": "eslint --ext .jsx --ext .js src/ --fix" + }, + "babel": { + "presets": [ + "@babel/preset-env", + "@babel/preset-react" + ], + "plugins": [] + }, + "jest": { + "moduleNameMapper": { + "\\.(jpg|jpeg|png|gif|eot|otf|webp|svg|ttf|woff|woff2|mp4|webm|wav|mp3|m4a|aac|oga)$": "/__mocks__/fileMock.js", + "\\.(css|less)$": "identity-obj-proxy" + } + }, + "dependencies": { + "bootstrap": "^4.5.3", + "history": "^5.0.0", + "lodash.debounce": "^4.0.8", + "prop-types": "^15.7.2", + "react": "^17.0.1", + "react-bootstrap": "^2.1.1", + "react-dom": "^17.0.1", + "react-icons": "^4.1.0", + "react-multi-select-component": "^3.0.7", + "react-object-table-viewer": "^1.0.7", + "react-redux": "^7.2.2", + "react-router": "^5.2.0", + "react-router-dom": "^5.2.0", + "recompose": "npm:react-recompose@^0.31.2", + "redux": "^4.0.5", + "regenerator-runtime": "^0.13.9" + }, + "devDependencies": { + "@babel/core": "^7.12.3", + "@babel/preset-env": "^7.12.11", + "@babel/preset-react": "^7.12.10", + "@testing-library/jest-dom": "^5.15.1", + "@testing-library/react": "^12.1.2", + "@testing-library/user-event": "^13.5.0", + "@webpack-cli/serve": "^1.7.0", + "@wojtekmaj/enzyme-adapter-react-17": "^0.6.5", + "babel-jest": "^26.6.3", + "babel-loader": "^8.2.1", + "css-loader": "^5.0.1", + "enzyme": "^3.11.0", 
+ "eslint": "^7.18.0", + "eslint-plugin-prettier": "^3.3.1", + "eslint-plugin-react": "^7.22.0", + "eslint-plugin-unused-imports": "^1.1.1", + "file-loader": "^6.2.0", + "identity-obj-proxy": "^3.0.0", + "jest": "^26.6.3", + "prettier": "^2.2.1", + "sinon": "^13.0.1", + "style-loader": "^2.0.0", + "webpack": "^5.6.0", + "webpack-cli": "^4.10.0", + "webpack-dev-server": "^4.9.3" + } +} diff --git a/jsx/src/App.jsx b/jsx/src/App.jsx new file mode 100644 index 00000000..db3dd300 --- /dev/null +++ b/jsx/src/App.jsx @@ -0,0 +1,60 @@ +import React from "react"; +import ReactDOM from "react-dom"; +import { Provider } from "react-redux"; +import { createStore } from "redux"; +import { compose } from "recompose"; +import { initialState, reducers } from "./Store"; +import withAPI from "./util/withAPI"; +import { HashRouter, Switch, Route } from "react-router-dom"; + +import ServerDashboard from "./components/ServerDashboard/ServerDashboard"; +import Groups from "./components/Groups/Groups"; +import GroupEdit from "./components/GroupEdit/GroupEdit"; +import CreateGroup from "./components/CreateGroup/CreateGroup"; +import AddUser from "./components/AddUser/AddUser"; +import EditUser from "./components/EditUser/EditUser"; + +import "./style/root.css"; + +const store = createStore(reducers, initialState); + +const App = () => { + return ( +
+ + + + + + + + + + + + +
+ ); +}; + +ReactDOM.render(, document.getElementById("react-admin-hook")); diff --git a/jsx/src/Store.js b/jsx/src/Store.js new file mode 100644 index 00000000..a54abb81 --- /dev/null +++ b/jsx/src/Store.js @@ -0,0 +1,55 @@ +export const initialState = { + user_data: undefined, + user_page: { offset: 0, limit: window.api_page_limit || 100 }, + name_filter: "", + groups_data: undefined, + groups_page: { offset: 0, limit: window.api_page_limit || 100 }, + limit: window.api_page_limit || 100, +}; + +export const reducers = (state = initialState, action) => { + switch (action.type) { + // Updates the client user model data and stores the page + case "USER_OFFSET": + return Object.assign({}, state, { + user_page: Object.assign({}, state.user_page, { + offset: action.value.offset, + }), + }); + + case "USER_NAME_FILTER": + // set offset to 0 if name filter changed, + // otherwise leave it alone + const newOffset = + action.value.name_filter !== state.name_filter ? 0 : state.name_filter; + return Object.assign({}, state, { + user_page: Object.assign({}, state.user_page, { + offset: newOffset, + }), + name_filter: action.value.name_filter, + }); + + case "USER_PAGE": + return Object.assign({}, state, { + user_page: action.value.page, + user_data: action.value.data, + }); + + // Updates the client group user model data and stores the page + case "GROUPS_OFFSET": + return Object.assign({}, state, { + groups_page: Object.assign({}, state.groups_page, { + offset: action.value.offset, + }), + }); + + case "GROUPS_PAGE": + return Object.assign({}, state, { + groups_page: action.value.page, + groups_data: action.value.data, + }); + + default: + return state; + } +}; diff --git a/jsx/src/components/AddUser/AddUser.jsx b/jsx/src/components/AddUser/AddUser.jsx new file mode 100644 index 00000000..b56279bd --- /dev/null +++ b/jsx/src/components/AddUser/AddUser.jsx @@ -0,0 +1,132 @@ +import React, { useState } from "react"; +import { useDispatch, useSelector } from "react-redux"; 
+import { Link } from "react-router-dom"; +import PropTypes from "prop-types"; + +const AddUser = (props) => { + var [users, setUsers] = useState([]), + [admin, setAdmin] = useState(false), + [errorAlert, setErrorAlert] = useState(null), + limit = useSelector((state) => state.limit); + + var dispatch = useDispatch(); + + var dispatchPageChange = (data, page) => { + dispatch({ + type: "USER_PAGE", + value: { + data: data, + page: page, + }, + }); + }; + + var { addUsers, failRegexEvent, updateUsers, history } = props; + + return ( + <> +
+ {errorAlert != null ? ( +
+
+
+ {errorAlert} + +
+
+
+ ) : ( + <> + )} +
+
+
+
+

Add Users

+
+
+ +
+ +

+ setAdmin(!admin)} + /> + + +
+ +
+
+ + + +
+
+
+
+
+ + ); +}; + +AddUser.propTypes = { + addUsers: PropTypes.func, + failRegexEvent: PropTypes.func, + updateUsers: PropTypes.func, + history: PropTypes.shape({ + push: PropTypes.func, + }), +}; + +export default AddUser; diff --git a/jsx/src/components/AddUser/AddUser.test.js b/jsx/src/components/AddUser/AddUser.test.js new file mode 100644 index 00000000..ee91fdbd --- /dev/null +++ b/jsx/src/components/AddUser/AddUser.test.js @@ -0,0 +1,139 @@ +import React from "react"; +import "@testing-library/jest-dom"; +import { act } from "react-dom/test-utils"; +import { render, screen, fireEvent } from "@testing-library/react"; +import userEvent from "@testing-library/user-event"; +import { Provider, useDispatch, useSelector } from "react-redux"; +import { createStore } from "redux"; +import { HashRouter } from "react-router-dom"; +// eslint-disable-next-line +import regeneratorRuntime from "regenerator-runtime"; + +import AddUser from "./AddUser"; + +jest.mock("react-redux", () => ({ + ...jest.requireActual("react-redux"), + useDispatch: jest.fn(), + useSelector: jest.fn(), +})); + +var mockAsync = (result) => + jest.fn().mockImplementation(() => Promise.resolve(result)); + +var mockAsyncRejection = () => + jest.fn().mockImplementation(() => Promise.reject()); + +var addUserJsx = (spy, spy2, spy3) => ( + {}, {})}> + + {} }} + /> + + +); + +var mockAppState = () => ({ + limit: 3, +}); + +beforeEach(() => { + useDispatch.mockImplementation(() => { + return () => {}; + }); + useSelector.mockImplementation((callback) => { + return callback(mockAppState()); + }); +}); + +afterEach(() => { + useDispatch.mockClear(); +}); + +test("Renders", async () => { + await act(async () => { + render(addUserJsx()); + }); + expect(screen.getByTestId("container")).toBeVisible(); +}); + +test("Removes users when they fail Regex", async () => { + let callbackSpy = mockAsync(); + + await act(async () => { + render(addUserJsx(callbackSpy)); + }); + + let textarea = 
screen.getByTestId("user-textarea"); + let submit = screen.getByTestId("submit"); + + fireEvent.blur(textarea, { target: { value: "foo \n bar\na@b.co\n \n\n" } }); + await act(async () => { + fireEvent.click(submit); + }); + + expect(callbackSpy).toHaveBeenCalledWith(["foo", "bar", "a@b.co"], false); +}); + +test("Correctly submits admin", async () => { + let callbackSpy = mockAsync(); + + await act(async () => { + render(addUserJsx(callbackSpy)); + }); + + let textarea = screen.getByTestId("user-textarea"); + let submit = screen.getByTestId("submit"); + let check = screen.getByTestId("check"); + + userEvent.click(check); + fireEvent.blur(textarea, { target: { value: "foo" } }); + await act(async () => { + fireEvent.click(submit); + }); + + expect(callbackSpy).toHaveBeenCalledWith(["foo"], true); +}); + +test("Shows a UI error dialogue when user creation fails", async () => { + let callbackSpy = mockAsyncRejection(); + + await act(async () => { + render(addUserJsx(callbackSpy)); + }); + + let submit = screen.getByTestId("submit"); + + await act(async () => { + fireEvent.click(submit); + }); + + let errorDialog = screen.getByText("Failed to create user."); + + expect(errorDialog).toBeVisible(); + expect(callbackSpy).toHaveBeenCalled(); +}); + +test("Shows a more specific UI error dialogue when user creation returns an improper status code", async () => { + let callbackSpy = mockAsync({ status: 409 }); + + await act(async () => { + render(addUserJsx(callbackSpy)); + }); + + let submit = screen.getByTestId("submit"); + + await act(async () => { + fireEvent.click(submit); + }); + + let errorDialog = screen.getByText( + "Failed to create user. 
User already exists.", + ); + + expect(errorDialog).toBeVisible(); + expect(callbackSpy).toHaveBeenCalled(); +}); diff --git a/jsx/src/components/CreateGroup/CreateGroup.jsx b/jsx/src/components/CreateGroup/CreateGroup.jsx new file mode 100644 index 00000000..37deb4ed --- /dev/null +++ b/jsx/src/components/CreateGroup/CreateGroup.jsx @@ -0,0 +1,117 @@ +import React, { useState } from "react"; +import { useDispatch, useSelector } from "react-redux"; +import { Link } from "react-router-dom"; +import PropTypes from "prop-types"; + +const CreateGroup = (props) => { + var [groupName, setGroupName] = useState(""), + [errorAlert, setErrorAlert] = useState(null), + limit = useSelector((state) => state.limit); + + var dispatch = useDispatch(); + + var dispatchPageUpdate = (data, page) => { + dispatch({ + type: "GROUPS_PAGE", + value: { + data: data, + page: page, + }, + }); + }; + + var { createGroup, updateGroups, history } = props; + + return ( + <> +
+ {errorAlert != null ? ( +
+
+
+ {errorAlert} + +
+
+
+ ) : ( + <> + )} +
+
+
+
+

Create Group

+
+
+
+ { + setGroupName(e.target.value.trim()); + }} + > +
+
+
+ + + +
+
+
+
+
+ + ); +}; + +CreateGroup.propTypes = { + createGroup: PropTypes.func, + updateGroups: PropTypes.func, + failRegexEvent: PropTypes.func, + history: PropTypes.shape({ + push: PropTypes.func, + }), +}; + +export default CreateGroup; diff --git a/jsx/src/components/CreateGroup/CreateGroup.test.js b/jsx/src/components/CreateGroup/CreateGroup.test.js new file mode 100644 index 00000000..cfa20128 --- /dev/null +++ b/jsx/src/components/CreateGroup/CreateGroup.test.js @@ -0,0 +1,115 @@ +import React from "react"; +import "@testing-library/jest-dom"; +import { act } from "react-dom/test-utils"; +import { render, screen, fireEvent } from "@testing-library/react"; +import userEvent from "@testing-library/user-event"; +import { Provider, useDispatch, useSelector } from "react-redux"; +import { createStore } from "redux"; +import { HashRouter } from "react-router-dom"; +// eslint-disable-next-line +import regeneratorRuntime from "regenerator-runtime"; +import CreateGroup from "./CreateGroup"; + +jest.mock("react-redux", () => ({ + ...jest.requireActual("react-redux"), + useDispatch: jest.fn(), + useSelector: jest.fn(), +})); + +var mockAsync = (result) => + jest.fn().mockImplementation(() => Promise.resolve(result)); + +var mockAsyncRejection = () => + jest.fn().mockImplementation(() => Promise.reject()); + +var createGroupJsx = (callbackSpy) => ( + {}, {})}> + + {} }} + /> + + +); + +var mockAppState = () => ({ + limit: 3, +}); + +beforeEach(() => { + useDispatch.mockImplementation(() => { + return () => () => {}; + }); + useSelector.mockImplementation((callback) => { + return callback(mockAppState()); + }); +}); + +afterEach(() => { + useDispatch.mockClear(); +}); + +test("Renders", async () => { + await act(async () => { + render(createGroupJsx()); + }); + expect(screen.getByTestId("container")).toBeVisible(); +}); + +test("Calls createGroup on submit", async () => { + let callbackSpy = mockAsync({ status: 200 }); + + await act(async () => { + 
render(createGroupJsx(callbackSpy)); + }); + + let input = screen.getByTestId("group-input"); + let submit = screen.getByTestId("submit"); + + userEvent.type(input, "groupname"); + await act(async () => fireEvent.click(submit)); + + expect(callbackSpy).toHaveBeenNthCalledWith(1, "groupname"); +}); + +test("Shows a UI error dialogue when group creation fails", async () => { + let callbackSpy = mockAsyncRejection(); + + await act(async () => { + render(createGroupJsx(callbackSpy)); + }); + + let submit = screen.getByTestId("submit"); + + await act(async () => { + fireEvent.click(submit); + }); + + let errorDialog = screen.getByText("Failed to create group."); + + expect(errorDialog).toBeVisible(); + expect(callbackSpy).toHaveBeenCalled(); +}); + +test("Shows a more specific UI error dialogue when user creation returns an improper status code", async () => { + let callbackSpy = mockAsync({ status: 409 }); + + await act(async () => { + render(createGroupJsx(callbackSpy)); + }); + + let submit = screen.getByTestId("submit"); + + await act(async () => { + fireEvent.click(submit); + }); + + let errorDialog = screen.getByText( + "Failed to create group. 
Group already exists.", + ); + + expect(errorDialog).toBeVisible(); + expect(callbackSpy).toHaveBeenCalled(); +}); diff --git a/jsx/src/components/EditUser/EditUser.jsx b/jsx/src/components/EditUser/EditUser.jsx new file mode 100644 index 00000000..2575b808 --- /dev/null +++ b/jsx/src/components/EditUser/EditUser.jsx @@ -0,0 +1,178 @@ +import React, { useState } from "react"; +import { useDispatch, useSelector } from "react-redux"; +import PropTypes from "prop-types"; +import { Link } from "react-router-dom"; + +const EditUser = (props) => { + var limit = useSelector((state) => state.limit), + [errorAlert, setErrorAlert] = useState(null); + + var dispatch = useDispatch(); + + var dispatchPageChange = (data, page) => { + dispatch({ + type: "USER_PAGE", + value: { + data: data, + page: page, + }, + }); + }; + + var { editUser, deleteUser, noChangeEvent, updateUsers, history } = props; + + if (props.location.state == undefined) { + props.history.push("/"); + return <>; + } + + var { username, has_admin } = props.location.state; + + var [updatedUsername, setUpdatedUsername] = useState(""), + [admin, setAdmin] = useState(has_admin); + + return ( + <> +
+ {errorAlert != null ? ( +
+
+
+ {errorAlert} + +
+
+
+ ) : ( + <> + )} +
+
+
+
+

Editing user {username}

+
+
+
+
+ +

+ setAdmin(!admin)} + /> + + +

+ +
+ +
+
+ + + +
+
+
+
+
+ + ); +}; + +EditUser.propTypes = { + location: PropTypes.shape({ + state: PropTypes.shape({ + username: PropTypes.string, + has_admin: PropTypes.bool, + }), + }), + history: PropTypes.shape({ + push: PropTypes.func, + }), + editUser: PropTypes.func, + deleteUser: PropTypes.func, + failRegexEvent: PropTypes.func, + noChangeEvent: PropTypes.func, + updateUsers: PropTypes.func, +}; + +export default EditUser; diff --git a/jsx/src/components/EditUser/EditUser.test.js b/jsx/src/components/EditUser/EditUser.test.js new file mode 100644 index 00000000..cff36387 --- /dev/null +++ b/jsx/src/components/EditUser/EditUser.test.js @@ -0,0 +1,139 @@ +import React from "react"; +import "@testing-library/jest-dom"; +import { act } from "react-dom/test-utils"; +import { render, screen, fireEvent } from "@testing-library/react"; +import { Provider, useDispatch, useSelector } from "react-redux"; +import { createStore } from "redux"; +import { HashRouter } from "react-router-dom"; +// eslint-disable-next-line +import regeneratorRuntime from "regenerator-runtime"; + +import EditUser from "./EditUser"; + +jest.mock("react-redux", () => ({ + ...jest.requireActual("react-redux"), + useDispatch: jest.fn(), + useSelector: jest.fn(), +})); + +var mockAsync = (data) => + jest.fn().mockImplementation(() => Promise.resolve(data)); + +var mockAsyncRejection = () => + jest.fn().mockImplementation(() => Promise.reject()); + +var editUserJsx = (callbackSpy, empty) => ( + {}, {})}> + + {} }} + failRegexEvent={callbackSpy} + noChangeEvent={callbackSpy} + /> + + +); + +var mockAppState = () => ({ + limit: 3, +}); + +beforeEach(() => { + useDispatch.mockImplementation(() => { + return () => {}; + }); + useSelector.mockImplementation((callback) => { + return callback(mockAppState()); + }); +}); + +afterEach(() => { + useDispatch.mockClear(); +}); + +test("Renders", async () => { + let callbackSpy = mockAsync({ key: "value", status: 200 }); + + await act(async () => { + 
render(editUserJsx(callbackSpy)); + }); + + expect(screen.getByTestId("container")).toBeVisible(); +}); + +test("Calls the delete user function when the button is pressed", async () => { + let callbackSpy = mockAsync({ key: "value", status: 200 }); + + await act(async () => { + render(editUserJsx(callbackSpy)); + }); + + let deleteUser = screen.getByTestId("delete-user"); + + await act(async () => { + fireEvent.click(deleteUser); + }); + + expect(callbackSpy).toHaveBeenCalled(); +}); + +test("Submits the edits when the button is pressed", async () => { + let callbackSpy = mockAsync({ key: "value", status: 200 }); + + await act(async () => { + render(editUserJsx(callbackSpy)); + }); + + let submit = screen.getByTestId("submit"); + await act(async () => { + fireEvent.click(submit); + }); + + expect(callbackSpy).toHaveBeenCalled(); +}); + +test("Shows a UI error dialogue when user edit fails", async () => { + let callbackSpy = mockAsyncRejection(); + + await act(async () => { + render(editUserJsx(callbackSpy)); + }); + + let submit = screen.getByTestId("submit"); + let usernameInput = screen.getByTestId("edit-username-input"); + + fireEvent.blur(usernameInput, { target: { value: "whatever" } }); + await act(async () => { + fireEvent.click(submit); + }); + + let errorDialog = screen.getByText("Failed to edit user."); + + expect(errorDialog).toBeVisible(); + expect(callbackSpy).toHaveBeenCalled(); +}); + +test("Shows a UI error dialogue when user edit returns an improper status code", async () => { + let callbackSpy = mockAsync({ status: 409 }); + + await act(async () => { + render(editUserJsx(callbackSpy)); + }); + + let submit = screen.getByTestId("submit"); + let usernameInput = screen.getByTestId("edit-username-input"); + + fireEvent.blur(usernameInput, { target: { value: "whatever" } }); + await act(async () => { + fireEvent.click(submit); + }); + + let errorDialog = screen.getByText("Failed to edit user."); + + expect(errorDialog).toBeVisible(); + 
expect(callbackSpy).toHaveBeenCalled(); +}); diff --git a/jsx/src/components/GroupEdit/GroupEdit.jsx b/jsx/src/components/GroupEdit/GroupEdit.jsx new file mode 100644 index 00000000..21ac69b3 --- /dev/null +++ b/jsx/src/components/GroupEdit/GroupEdit.jsx @@ -0,0 +1,177 @@ +import React, { useState } from "react"; +import { useSelector, useDispatch } from "react-redux"; +import { Link } from "react-router-dom"; +import PropTypes from "prop-types"; +import GroupSelect from "../GroupSelect/GroupSelect"; + +const GroupEdit = (props) => { + var [selected, setSelected] = useState([]), + [changed, setChanged] = useState(false), + [errorAlert, setErrorAlert] = useState(null), + limit = useSelector((state) => state.limit); + + var dispatch = useDispatch(); + + const dispatchPageUpdate = (data, page) => { + dispatch({ + type: "GROUPS_PAGE", + value: { + data: data, + page: page, + }, + }); + }; + + var { + addToGroup, + removeFromGroup, + deleteGroup, + updateGroups, + validateUser, + history, + location, + } = props; + + if (!location.state) { + history.push("/groups"); + return <>; + } + + var { group_data } = location.state; + + if (!group_data) return
; + + return ( +
+ {errorAlert != null ? ( +
+
+
+ {errorAlert} + +
+
+
+ ) : ( + <> + )} +
+
+

Editing Group {group_data.name}

+

+
Manage group members
+
+
+ { + setSelected(selection); + setChanged(true); + }} + /> +
+
+ + + + +

+

+
+
+
+ ); +}; + +GroupEdit.propTypes = { + location: PropTypes.shape({ + state: PropTypes.shape({ + group_data: PropTypes.object, + callback: PropTypes.func, + }), + }), + history: PropTypes.shape({ + push: PropTypes.func, + }), + addToGroup: PropTypes.func, + removeFromGroup: PropTypes.func, + deleteGroup: PropTypes.func, + updateGroups: PropTypes.func, + validateUser: PropTypes.func, +}; + +export default GroupEdit; diff --git a/jsx/src/components/GroupEdit/GroupEdit.test.jsx b/jsx/src/components/GroupEdit/GroupEdit.test.jsx new file mode 100644 index 00000000..62505b31 --- /dev/null +++ b/jsx/src/components/GroupEdit/GroupEdit.test.jsx @@ -0,0 +1,228 @@ +import React from "react"; +import "@testing-library/jest-dom"; +import { act } from "react-dom/test-utils"; +import { render, screen, fireEvent } from "@testing-library/react"; +import userEvent from "@testing-library/user-event"; +import { Provider, useSelector } from "react-redux"; +import { createStore } from "redux"; +import { HashRouter } from "react-router-dom"; +// eslint-disable-next-line +import regeneratorRuntime from "regenerator-runtime"; + +import GroupEdit from "./GroupEdit"; + +jest.mock("react-redux", () => ({ + ...jest.requireActual("react-redux"), + useSelector: jest.fn(), +})); + +var mockAsync = (data) => + jest.fn().mockImplementation(() => Promise.resolve(data)); + +var mockAsyncRejection = () => + jest.fn().mockImplementation(() => Promise.reject()); + +var okPacket = new Promise((resolve) => resolve(true)); + +var groupEditJsx = (callbackSpy) => ( + {}, {})}> + + {}, + }, + }} + addToGroup={callbackSpy} + removeFromGroup={callbackSpy} + deleteGroup={callbackSpy} + history={{ push: () => callbackSpy }} + updateGroups={callbackSpy} + validateUser={jest.fn().mockImplementation(() => okPacket)} + /> + + +); + +var mockAppState = () => ({ + limit: 3, +}); + +beforeEach(() => { + useSelector.mockImplementation((callback) => { + return callback(mockAppState()); + }); +}); + +afterEach(() => { + 
useSelector.mockClear(); +}); + +test("Renders", async () => { + let callbackSpy = mockAsync(); + + await act(async () => { + render(groupEditJsx(callbackSpy)); + }); + + expect(screen.getByTestId("container")).toBeVisible(); +}); + +test("Adds user from input to user selectables on button click", async () => { + let callbackSpy = mockAsync(); + + await act(async () => { + render(groupEditJsx(callbackSpy)); + }); + + let input = screen.getByTestId("username-input"); + let validateUser = screen.getByTestId("validate-user"); + let submit = screen.getByTestId("submit"); + + userEvent.type(input, "bar"); + fireEvent.click(validateUser); + await act(async () => okPacket); + + await act(async () => { + fireEvent.click(submit); + }); + + expect(callbackSpy).toHaveBeenNthCalledWith(1, ["bar"], "group"); +}); + +test("Removes a user recently added from input from the selectables list", async () => { + let callbackSpy = mockAsync(); + + await act(async () => { + render(groupEditJsx(callbackSpy)); + }); + + let selectedUser = screen.getByText("foo"); + fireEvent.click(selectedUser); + + let unselectedUser = screen.getByText("foo"); + + expect(unselectedUser.className).toBe("item unselected"); +}); + +test("Grays out a user, already in the group, when unselected and calls deleteUser on submit", async () => { + let callbackSpy = mockAsync(); + + await act(async () => { + render(groupEditJsx(callbackSpy)); + }); + + let submit = screen.getByTestId("submit"); + + let groupUser = screen.getByText("foo"); + fireEvent.click(groupUser); + + let unselectedUser = screen.getByText("foo"); + expect(unselectedUser.className).toBe("item unselected"); + + // test deleteUser call + await act(async () => { + fireEvent.click(submit); + }); + + expect(callbackSpy).toHaveBeenNthCalledWith(1, ["foo"], "group"); +}); + +test("Calls deleteGroup on button click", async () => { + let callbackSpy = mockAsync(); + + await act(async () => { + render(groupEditJsx(callbackSpy)); + }); + + let deleteGroup 
= screen.getByTestId("delete-group"); + + await act(async () => { + fireEvent.click(deleteGroup); + }); + + expect(callbackSpy).toHaveBeenNthCalledWith(1, "group"); +}); + +test("Shows a UI error dialogue when group edit fails", async () => { + let callbackSpy = mockAsyncRejection(); + + await act(async () => { + render(groupEditJsx(callbackSpy)); + }); + + let groupUser = screen.getByText("foo"); + fireEvent.click(groupUser); + + let submit = screen.getByTestId("submit"); + + await act(async () => { + fireEvent.click(submit); + }); + + let errorDialog = screen.getByText("Failed to edit group."); + + expect(errorDialog).toBeVisible(); + expect(callbackSpy).toHaveBeenCalled(); +}); + +test("Shows a UI error dialogue when group edit returns an improper status code", async () => { + let callbackSpy = mockAsync({ status: 403 }); + + await act(async () => { + render(groupEditJsx(callbackSpy)); + }); + + let groupUser = screen.getByText("foo"); + fireEvent.click(groupUser); + + let submit = screen.getByTestId("submit"); + + await act(async () => { + fireEvent.click(submit); + }); + + let errorDialog = screen.getByText("Failed to edit group."); + + expect(errorDialog).toBeVisible(); + expect(callbackSpy).toHaveBeenCalled(); +}); + +test("Shows a UI error dialogue when group delete fails", async () => { + let callbackSpy = mockAsyncRejection(); + + await act(async () => { + render(groupEditJsx(callbackSpy)); + }); + + let deleteGroup = screen.getByTestId("delete-group"); + + await act(async () => { + fireEvent.click(deleteGroup); + }); + + let errorDialog = screen.getByText("Failed to delete group."); + + expect(errorDialog).toBeVisible(); + expect(callbackSpy).toHaveBeenCalled(); +}); + +test("Shows a UI error dialogue when group delete returns an improper status code", async () => { + let callbackSpy = mockAsync({ status: 403 }); + + await act(async () => { + render(groupEditJsx(callbackSpy)); + }); + + let deleteGroup = screen.getByTestId("delete-group"); + + await 
act(async () => { + fireEvent.click(deleteGroup); + }); + + let errorDialog = screen.getByText("Failed to delete group."); + + expect(errorDialog).toBeVisible(); + expect(callbackSpy).toHaveBeenCalled(); +}); diff --git a/jsx/src/components/GroupSelect/GroupSelect.jsx b/jsx/src/components/GroupSelect/GroupSelect.jsx new file mode 100644 index 00000000..2f48bf3e --- /dev/null +++ b/jsx/src/components/GroupSelect/GroupSelect.jsx @@ -0,0 +1,110 @@ +import React, { useState } from "react"; +import PropTypes from "prop-types"; +import "./group-select.css"; + +const GroupSelect = (props) => { + var { onChange, validateUser, users } = props; + + var [selected, setSelected] = useState(users); + var [username, setUsername] = useState(""); + var [error, setError] = useState(null); + + if (!users) return null; + + return ( +
+ {error != null ? ( +
+
{error}
+
+ ) : ( + <> + )} +
+
+ { + setUsername(e.target.value); + }} + /> + + + +
+
+
+
+
+
+ {selected.map((e, i) => ( +
{ + let updated_selection = selected + .slice(0, i) + .concat(selected.slice(i + 1)); + onChange(updated_selection, users); + setSelected(updated_selection); + }} + > + {e} +
+ ))} + {users.map((e, i) => + selected.includes(e) ? undefined : ( +
{ + let updated_selection = selected.concat([e]); + onChange(updated_selection, users); + setSelected(updated_selection); + }} + > + {e} +
+ ), + )} +
+
+

+

+
+
+ ); +}; + +GroupSelect.propTypes = { + onChange: PropTypes.func, + validateUser: PropTypes.func, + users: PropTypes.array, +}; + +export default GroupSelect; diff --git a/jsx/src/components/GroupSelect/group-select.css b/jsx/src/components/GroupSelect/group-select.css new file mode 100644 index 00000000..1a46d380 --- /dev/null +++ b/jsx/src/components/GroupSelect/group-select.css @@ -0,0 +1,40 @@ +@import url(../../style/root.css); + +.users-container { + width: 100%; + position: relative; + padding: 5px; + overflow-x: scroll; +} + +.users-container div { + display: inline-block; +} + +.users-container .item { + padding: 3px; + padding-left: 6px; + padding-right: 6px; + border-radius: 3px; + font-size: 14px; + margin-left: 4px; + margin-right: 4px; + transition: 30ms ease-in all; + cursor: pointer; + user-select: none; + border: solid 1px #dfdfdf; +} + +.users-container .item.unselected { + background-color: #f7f7f7; + color: #777; +} + +.users-container .item.selected { + background-color: orange; + color: white; +} + +.users-container .item:hover { + opacity: 0.7; +} diff --git a/jsx/src/components/Groups/Groups.jsx b/jsx/src/components/Groups/Groups.jsx new file mode 100644 index 00000000..ef3b4dae --- /dev/null +++ b/jsx/src/components/Groups/Groups.jsx @@ -0,0 +1,122 @@ +import React, { useEffect } from "react"; +import { useSelector, useDispatch } from "react-redux"; +import PropTypes from "prop-types"; + +import { Link } from "react-router-dom"; +import PaginationFooter from "../PaginationFooter/PaginationFooter"; + +const Groups = (props) => { + var groups_data = useSelector((state) => state.groups_data), + groups_page = useSelector((state) => state.groups_page), + dispatch = useDispatch(); + + var offset = groups_page ? groups_page.offset : 0; + + const setOffset = (offset) => { + dispatch({ + type: "GROUPS_OFFSET", + value: { + offset: offset, + }, + }); + }; + var limit = groups_page ? 
groups_page.limit : window.api_page_limit; + var total = groups_page ? groups_page.total : undefined; + + var { updateGroups, history } = props; + + const dispatchPageUpdate = (data, page) => { + dispatch({ + type: "GROUPS_PAGE", + value: { + data: data, + page: page, + }, + }); + }; + + useEffect(() => { + updateGroups(offset, limit).then((data) => + dispatchPageUpdate(data.items, data._pagination), + ); + }, [offset, limit]); + + if (!groups_data || !groups_page) { + return
; + } + + return ( +
+
+
+
+
+

Groups

+
+
+
    + {groups_data.length > 0 ? ( + groups_data.map((e, i) => ( +
  • + + {e.users.length + " users"} + + + {e.name} + +
  • + )) + ) : ( +
    +

    no groups created...

    +
    + )} +
+ setOffset(offset + limit)} + prev={() => setOffset(offset >= limit ? offset - limit : 0)} + /> +
+
+ + +
+
+
+
+
+ ); +}; + +Groups.propTypes = { + updateUsers: PropTypes.func, + updateGroups: PropTypes.func, + history: PropTypes.shape({ + push: PropTypes.func, + }), + location: PropTypes.shape({ + search: PropTypes.string, + }), +}; + +export default Groups; diff --git a/jsx/src/components/Groups/Groups.test.js b/jsx/src/components/Groups/Groups.test.js new file mode 100644 index 00000000..ed24c8bc --- /dev/null +++ b/jsx/src/components/Groups/Groups.test.js @@ -0,0 +1,136 @@ +import React from "react"; +import "@testing-library/jest-dom"; +import { act } from "react-dom/test-utils"; +import { render, screen, fireEvent } from "@testing-library/react"; +import { Provider, useDispatch, useSelector } from "react-redux"; +import { createStore } from "redux"; +import { HashRouter } from "react-router-dom"; +// eslint-disable-next-line +import regeneratorRuntime from "regenerator-runtime"; + +import { initialState, reducers } from "../../Store"; +import Groups from "./Groups"; + +jest.mock("react-redux", () => ({ + ...jest.requireActual("react-redux"), + useSelector: jest.fn(), +})); + +var mockAsync = () => + jest.fn().mockImplementation(() => Promise.resolve({ key: "value" })); + +var groupsJsx = (callbackSpy) => ( + + + + + +); + +var mockReducers = jest.fn((state, action) => { + if (action.type === "GROUPS_PAGE" && !action.value.data) { + // no-op from mock, don't update state + return state; + } + state = reducers(state, action); + // mocked useSelector seems to cause a problem + // this should get the right state back? 
+ // not sure + // useSelector.mockImplementation((callback) => callback(state); + return state; +}); + +var mockAppState = () => + Object.assign({}, initialState, { + groups_data: [ + { kind: "group", name: "testgroup", users: [] }, + { kind: "group", name: "testgroup2", users: ["foo", "bar"] }, + ], + groups_page: { + offset: 0, + limit: 2, + total: 4, + next: { + offset: 2, + limit: 2, + url: "http://localhost:8000/hub/api/groups?offset=2&limit=2", + }, + }, + }); + +beforeEach(() => { + useSelector.mockImplementation((callback) => { + return callback(mockAppState()); + }); +}); + +afterEach(() => { + useSelector.mockClear(); + mockReducers.mockClear(); +}); + +test("Renders", async () => { + let callbackSpy = mockAsync(); + + await act(async () => { + render(groupsJsx(callbackSpy)); + }); + + expect(screen.getByTestId("container")).toBeVisible(); +}); + +test("Renders groups_data prop into links", async () => { + let callbackSpy = mockAsync(); + + await act(async () => { + render(groupsJsx(callbackSpy)); + }); + + let testgroup = screen.getByText("testgroup"); + let testgroup2 = screen.getByText("testgroup2"); + + expect(testgroup).toBeVisible(); + expect(testgroup2).toBeVisible(); +}); + +test("Renders nothing if required data is not available", async () => { + useSelector.mockImplementation((callback) => { + return callback({}); + }); + + let callbackSpy = mockAsync(); + + await act(async () => { + render(groupsJsx(callbackSpy)); + }); + + let noShow = screen.getByTestId("no-show"); + expect(noShow).toBeVisible(); +}); + +test("Interacting with PaginationFooter causes state update and refresh via useEffect call", async () => { + let callbackSpy = mockAsync(); + + await act(async () => { + render(groupsJsx(callbackSpy)); + }); + + expect(callbackSpy).toBeCalledWith(0, 2); + + var lastState = + mockReducers.mock.results[mockReducers.mock.results.length - 1].value; + expect(lastState.groups_page.offset).toEqual(0); + 
expect(lastState.groups_page.limit).toEqual(2); + + let next = screen.getByTestId("paginate-next"); + fireEvent.click(next); + + lastState = + mockReducers.mock.results[mockReducers.mock.results.length - 1].value; + expect(lastState.groups_page.offset).toEqual(2); + expect(lastState.groups_page.limit).toEqual(2); + + // FIXME: mocked useSelector, state seem to prevent updateGroups from being called + // making the test environment not representative + // expect(callbackSpy).toHaveBeenCalledWith(2, 2); +}); diff --git a/jsx/src/components/PaginationFooter/PaginationFooter.jsx b/jsx/src/components/PaginationFooter/PaginationFooter.jsx new file mode 100644 index 00000000..942f63a6 --- /dev/null +++ b/jsx/src/components/PaginationFooter/PaginationFooter.jsx @@ -0,0 +1,57 @@ +import React from "react"; +import PropTypes from "prop-types"; + +import "./pagination-footer.css"; + +const PaginationFooter = (props) => { + let { offset, limit, visible, total, next, prev } = props; + return ( +
+

+ Displaying {offset}-{offset + visible} +

+

+ {offset >= 1 ? ( + + ) : ( + + )} + {offset + visible < total ? ( + + ) : ( + + )} +

+
+ ); +}; + +PaginationFooter.propTypes = { + endpoint: PropTypes.string, + page: PropTypes.number, + limit: PropTypes.number, + numOffset: PropTypes.number, + numElements: PropTypes.number, +}; + +export default PaginationFooter; diff --git a/jsx/src/components/PaginationFooter/pagination-footer.css b/jsx/src/components/PaginationFooter/pagination-footer.css new file mode 100644 index 00000000..a764dfc9 --- /dev/null +++ b/jsx/src/components/PaginationFooter/pagination-footer.css @@ -0,0 +1,14 @@ +@import url(../../style/root.css); + +.pagination-footer * button { + margin-right: 10px; +} + +.pagination-footer * .inactive-pagination { + color: gray; + cursor: not-allowed; +} + +.pagination-footer * button.spaced { + color: var(--blue); +} diff --git a/jsx/src/components/ServerDashboard/ServerDashboard.jsx b/jsx/src/components/ServerDashboard/ServerDashboard.jsx new file mode 100644 index 00000000..0ec29fd7 --- /dev/null +++ b/jsx/src/components/ServerDashboard/ServerDashboard.jsx @@ -0,0 +1,623 @@ +import React, { useEffect, useState } from "react"; +import { useSelector, useDispatch } from "react-redux"; +import { debounce } from "lodash"; +import PropTypes from "prop-types"; + +import { + Button, + Col, + Row, + FormControl, + Card, + CardGroup, + Collapse, +} from "react-bootstrap"; +import ReactObjectTableViewer from "react-object-table-viewer"; + +import { Link } from "react-router-dom"; +import { FaSort, FaSortUp, FaSortDown } from "react-icons/fa"; + +import "./server-dashboard.css"; +import { timeSince } from "../../util/timeSince"; +import PaginationFooter from "../PaginationFooter/PaginationFooter"; + +const AccessServerButton = ({ url }) => ( + + + +); + +const ServerDashboard = (props) => { + let base_url = window.base_url || "/"; + // sort methods + var usernameDesc = (e) => e.sort((a, b) => (a.name > b.name ? 1 : -1)), + usernameAsc = (e) => e.sort((a, b) => (a.name < b.name ? 1 : -1)), + adminDesc = (e) => e.sort((a) => (a.admin ? 
-1 : 1)), + adminAsc = (e) => e.sort((a) => (a.admin ? 1 : -1)), + dateDesc = (e) => + e.sort((a, b) => + new Date(a.last_activity) - new Date(b.last_activity) > 0 ? -1 : 1, + ), + dateAsc = (e) => + e.sort((a, b) => + new Date(a.last_activity) - new Date(b.last_activity) > 0 ? 1 : -1, + ), + runningAsc = (e) => e.sort((a) => (a.server == null ? -1 : 1)), + runningDesc = (e) => e.sort((a) => (a.server == null ? 1 : -1)); + + var [errorAlert, setErrorAlert] = useState(null); + var [sortMethod, setSortMethod] = useState(null); + var [disabledButtons, setDisabledButtons] = useState({}); + var [collapseStates, setCollapseStates] = useState({}); + + var user_data = useSelector((state) => state.user_data), + user_page = useSelector((state) => state.user_page), + name_filter = useSelector((state) => state.name_filter); + + var offset = user_page ? user_page.offset : 0; + var limit = user_page ? user_page.limit : window.api_page_limit; + var total = user_page ? user_page.total : undefined; + + const dispatch = useDispatch(); + + var { + updateUsers, + shutdownHub, + startServer, + stopServer, + startAll, + stopAll, + history, + } = props; + + const dispatchPageUpdate = (data, page) => { + dispatch({ + type: "USER_PAGE", + value: { + data: data, + page: page, + }, + }); + }; + + const setOffset = (newOffset) => { + dispatch({ + type: "USER_OFFSET", + value: { + offset: newOffset, + }, + }); + }; + + const setNameFilter = (name_filter) => { + dispatch({ + type: "USER_NAME_FILTER", + value: { + name_filter: name_filter, + }, + }); + }; + + useEffect(() => { + updateUsers(offset, limit, name_filter) + .then((data) => dispatchPageUpdate(data.items, data._pagination)) + .catch((err) => setErrorAlert("Failed to update user list.")); + }, [offset, limit, name_filter]); + + if (!user_data || !user_page) { + return
; + } + + var slice = [offset, limit, name_filter]; + + const handleSearch = debounce(async (event) => { + setNameFilter(event.target.value); + }, 300); + + if (sortMethod != null) { + user_data = sortMethod(user_data); + } + + const StopServerButton = ({ serverName, userName }) => { + var [isDisabled, setIsDisabled] = useState(false); + return ( + + ); + }; + + const StartServerButton = ({ serverName, userName }) => { + var [isDisabled, setIsDisabled] = useState(false); + return ( + + ); + }; + + const EditUserCell = ({ user }) => { + return ( +
+ ); + }; + + const ServerRowTable = ({ data }) => { + const sortedData = Object.keys(data) + .sort() + .reduce(function (result, key) { + let value = data[key]; + switch (key) { + case "last_activity": + case "created": + case "started": + // format timestamps + value = value ? timeSince(value) : value; + break; + } + if (Array.isArray(value)) { + // cast arrays (e.g. roles, groups) to string + value = value.sort().join(", "); + } + result[key] = value; + return result; + }, {}); + return ( + + ); + }; + + const serverRow = (user, server) => { + const { servers, ...userNoServers } = user; + const serverNameDash = server.name ? `-${server.name}` : ""; + const userServerName = user.name + serverNameDash; + const open = collapseStates[userServerName] || false; + return [ + + + + + + + + + , + + + , + ]; + }; + + let servers = user_data.flatMap((user) => { + let userServers = Object.values({ + "": user.server || {}, + ...(user.servers || {}), + }); + return userServers.map((server) => [user, server]); + }); + + return ( +
+ {errorAlert != null ? ( +
+
+
+ {errorAlert} + +
+
+
+ ) : ( + <> + )} +
+ +
+ + + + + {"> Manage Groups"} + + +
StudentGrade
{qstudent}{qgrade}
+ +
+ + {" "} + + + {user.name} + + {user.admin ? "admin" : ""} +

{server.name}

+
+ {server.last_activity ? timeSince(server.last_activity) : "Never"} + + {server.started ? ( + // Stop Single-user server + <> + + + + ) : ( + // Start Single-user server + <> + + + + + + )} +
+ + + + User + + + + Server + + + + +
+ + + + + + + + + + + + + + + + + + + {servers.flatMap(([user, server]) => serverRow(user, server))} + +
+ User{" "} + setSortMethod(() => method)} + testid="user-sort" + /> + + Admin{" "} + setSortMethod(() => method)} + testid="admin-sort" + /> + + Server{" "} + setSortMethod(() => method)} + testid="server-sort" + /> + + Last Activity{" "} + setSortMethod(() => method)} + testid="last-activity-sort" + /> + + Running{" "} + setSortMethod(() => method)} + testid="running-status-sort" + /> + Actions
+ + + {/* Start all servers */} + + + {/* Stop all servers */} + + + {/* Shutdown Jupyterhub */} + +
+ setOffset(offset + limit)} + prev={() => setOffset(offset - limit)} + /> +

+ + + ); +}; + +ServerDashboard.propTypes = { + user_data: PropTypes.array, + updateUsers: PropTypes.func, + shutdownHub: PropTypes.func, + startServer: PropTypes.func, + stopServer: PropTypes.func, + startAll: PropTypes.func, + stopAll: PropTypes.func, + dispatch: PropTypes.func, + history: PropTypes.shape({ + push: PropTypes.func, + }), + location: PropTypes.shape({ + search: PropTypes.string, + }), +}; + +const SortHandler = (props) => { + var { sorts, callback, testid } = props; + + var [direction, setDirection] = useState(undefined); + + return ( +
{ + if (!direction) { + callback(sorts.desc); + setDirection("desc"); + } else if (direction == "asc") { + callback(sorts.desc); + setDirection("desc"); + } else { + callback(sorts.asc); + setDirection("asc"); + } + }} + > + {!direction ? ( + + ) : direction == "asc" ? ( + + ) : ( + + )} +
+ ); +}; + +SortHandler.propTypes = { + sorts: PropTypes.object, + callback: PropTypes.func, + testid: PropTypes.string, +}; + +export default ServerDashboard; diff --git a/jsx/src/components/ServerDashboard/ServerDashboard.test.js b/jsx/src/components/ServerDashboard/ServerDashboard.test.js new file mode 100644 index 00000000..901badae --- /dev/null +++ b/jsx/src/components/ServerDashboard/ServerDashboard.test.js @@ -0,0 +1,646 @@ +import React from "react"; +import "@testing-library/jest-dom"; +import { act } from "react-dom/test-utils"; +import userEvent from "@testing-library/user-event"; +import { render, screen, fireEvent } from "@testing-library/react"; +import { HashRouter, Switch } from "react-router-dom"; +import { Provider, useSelector } from "react-redux"; +import { createStore } from "redux"; +// eslint-disable-next-line +import regeneratorRuntime from "regenerator-runtime"; + +import ServerDashboard from "./ServerDashboard"; +import { initialState, reducers } from "../../Store"; +import * as sinon from "sinon"; + +let clock; + +jest.mock("react-redux", () => ({ + ...jest.requireActual("react-redux"), + useSelector: jest.fn(), +})); + +var serverDashboardJsx = (spy) => ( + + + + + + + +); + +var mockAsync = (data) => + jest.fn().mockImplementation(() => Promise.resolve(data ? 
data : { k: "v" })); + +var mockAsyncRejection = () => + jest.fn().mockImplementation(() => Promise.reject()); + +var mockAppState = () => + Object.assign({}, initialState, { + user_data: [ + { + kind: "user", + name: "foo", + admin: true, + groups: [], + server: "/user/foo/", + pending: null, + created: "2020-12-07T18:46:27.112695Z", + last_activity: "2020-12-07T21:00:33.336354Z", + servers: { + "": { + name: "", + last_activity: "2020-12-07T20:58:02.437408Z", + started: "2020-12-07T20:58:01.508266Z", + pending: null, + ready: true, + state: { pid: 28085 }, + url: "/user/foo/", + user_options: {}, + progress_url: "/hub/api/users/foo/server/progress", + }, + }, + }, + { + kind: "user", + name: "bar", + admin: false, + groups: [], + server: null, + pending: null, + created: "2020-12-07T18:46:27.115528Z", + last_activity: "2020-12-07T20:43:51.013613Z", + servers: {}, + }, + ], + user_page: { + offset: 0, + limit: 2, + total: 4, + next: { + offset: 2, + limit: 2, + url: "http://localhost:8000/hub/api/groups?offset=2&limit=2", + }, + }, + }); + +var mockReducers = jest.fn((state, action) => { + if (action.type === "USER_PAGE" && !action.value.data) { + // no-op from mock, don't update state + return state; + } + state = reducers(state, action); + // mocked useSelector seems to cause a problem + // this should get the right state back? 
+ // not sure + // useSelector.mockImplementation((callback) => callback(state); + return state; +}); + +beforeEach(() => { + clock = sinon.useFakeTimers(); + useSelector.mockImplementation((callback) => { + return callback(mockAppState()); + }); +}); + +afterEach(() => { + useSelector.mockClear(); + mockReducers.mockClear(); + clock.restore(); +}); + +test("Renders", async () => { + let callbackSpy = mockAsync(); + + await act(async () => { + render(serverDashboardJsx(callbackSpy)); + }); + + expect(screen.getByTestId("container")).toBeVisible(); +}); + +test("Renders users from props.user_data into table", async () => { + let callbackSpy = mockAsync(); + + await act(async () => { + render(serverDashboardJsx(callbackSpy)); + }); + + let foo = screen.getByTestId("user-name-div-foo"); + let bar = screen.getByTestId("user-name-div-bar"); + + expect(foo).toBeVisible(); + expect(bar).toBeVisible(); +}); + +test("Renders correctly the status of a single-user server", async () => { + let callbackSpy = mockAsync(); + + await act(async () => { + render(serverDashboardJsx(callbackSpy)); + }); + + let start = screen.getByText("Start Server"); + let stop = screen.getByText("Stop Server"); + + expect(start).toBeVisible(); + expect(stop).toBeVisible(); +}); + +test("Renders spawn page link", async () => { + let callbackSpy = mockAsync(); + + await act(async () => { + render(serverDashboardJsx(callbackSpy)); + }); + + let link = screen.getByText("Spawn Page").closest("a"); + let url = new URL(link.href); + expect(url.pathname).toEqual("/spawn/bar"); +}); + +test("Invokes the startServer event on button click", async () => { + let callbackSpy = mockAsync(); + + await act(async () => { + render(serverDashboardJsx(callbackSpy)); + }); + + let start = screen.getByText("Start Server"); + + await act(async () => { + fireEvent.click(start); + }); + + expect(callbackSpy).toHaveBeenCalled(); +}); + +test("Invokes the stopServer event on button click", async () => { + let callbackSpy = 
mockAsync(); + + await act(async () => { + render(serverDashboardJsx(callbackSpy)); + }); + + let stop = screen.getByText("Stop Server"); + + await act(async () => { + fireEvent.click(stop); + }); + + expect(callbackSpy).toHaveBeenCalled(); +}); + +test("Invokes the shutdownHub event on button click", async () => { + let callbackSpy = mockAsync(); + + await act(async () => { + render(serverDashboardJsx(callbackSpy)); + }); + + let shutdown = screen.getByText("Shutdown Hub"); + + await act(async () => { + fireEvent.click(shutdown); + }); + + expect(callbackSpy).toHaveBeenCalled(); +}); + +test("Sorts according to username", async () => { + let callbackSpy = mockAsync(); + + await act(async () => { + render(serverDashboardJsx(callbackSpy)); + }); + + let handler = screen.getByTestId("user-sort"); + fireEvent.click(handler); + + let first = screen.getAllByTestId("user-row-name")[0]; + expect(first.textContent).toContain("bar"); + + fireEvent.click(handler); + + first = screen.getAllByTestId("user-row-name")[0]; + expect(first.textContent).toContain("foo"); +}); + +test("Sorts according to admin", async () => { + let callbackSpy = mockAsync(); + + await act(async () => { + render(serverDashboardJsx(callbackSpy)); + }); + + let handler = screen.getByTestId("admin-sort"); + fireEvent.click(handler); + + let first = screen.getAllByTestId("user-row-admin")[0]; + expect(first.textContent).toBe("admin"); + + fireEvent.click(handler); + + first = screen.getAllByTestId("user-row-admin")[0]; + expect(first.textContent).toBe(""); +}); + +test("Sorts according to last activity", async () => { + let callbackSpy = mockAsync(); + + await act(async () => { + render(serverDashboardJsx(callbackSpy)); + }); + + let handler = screen.getByTestId("last-activity-sort"); + fireEvent.click(handler); + + let first = screen.getAllByTestId("user-row-name")[0]; + expect(first.textContent).toContain("foo"); + + fireEvent.click(handler); + + first = screen.getAllByTestId("user-row-name")[0]; + 
expect(first.textContent).toContain("bar"); +}); + +test("Sorts according to server status (running/not running)", async () => { + let callbackSpy = mockAsync(); + + await act(async () => { + render(serverDashboardJsx(callbackSpy)); + }); + + let handler = screen.getByTestId("running-status-sort"); + fireEvent.click(handler); + + let first = screen.getAllByTestId("user-row-name")[0]; + expect(first.textContent).toContain("foo"); + + fireEvent.click(handler); + + first = screen.getAllByTestId("user-row-name")[0]; + expect(first.textContent).toContain("bar"); +}); + +test("Shows server details with button click", async () => { + let callbackSpy = mockAsync(); + + await act(async () => { + render(serverDashboardJsx(callbackSpy)); + }); + let button = screen.getByTestId("foo-collapse-button"); + let collapse = screen.getByTestId("foo-collapse"); + let collapseBar = screen.getByTestId("bar-collapse"); + + // expect().toBeVisible does not work here with collapse. + expect(collapse).toHaveClass("collapse"); + expect(collapse).not.toHaveClass("show"); + expect(collapseBar).not.toHaveClass("show"); + + await act(async () => { + fireEvent.click(button); + }); + clock.tick(400); + + expect(collapse).toHaveClass("collapse show"); + expect(collapseBar).not.toHaveClass("show"); + + await act(async () => { + fireEvent.click(button); + }); + clock.tick(400); + + expect(collapse).toHaveClass("collapse"); + expect(collapse).not.toHaveClass("show"); + expect(collapseBar).not.toHaveClass("show"); + + await act(async () => { + fireEvent.click(button); + }); + clock.tick(400); + + expect(collapse).toHaveClass("collapse show"); + expect(collapseBar).not.toHaveClass("show"); +}); + +test("Renders nothing if required data is not available", async () => { + useSelector.mockImplementation((callback) => { + return callback({}); + }); + + let callbackSpy = mockAsync(); + + await act(async () => { + render(serverDashboardJsx(callbackSpy)); + }); + + let noShow = screen.getByTestId("no-show"); + 
+ expect(noShow).toBeVisible(); +}); + +test("Shows a UI error dialogue when start all servers fails", async () => { + let spy = mockAsync(); + let rejectSpy = mockAsyncRejection; + + await act(async () => { + render( + {}, {})}> + + + + + + , + ); + }); + + let startAll = screen.getByTestId("start-all"); + + await act(async () => { + fireEvent.click(startAll); + }); + + let errorDialog = screen.getByText("Failed to start servers."); + + expect(errorDialog).toBeVisible(); +}); + +test("Shows a UI error dialogue when stop all servers fails", async () => { + let spy = mockAsync(); + let rejectSpy = mockAsyncRejection; + + await act(async () => { + render( + {}, {})}> + + + + + + , + ); + }); + + let stopAll = screen.getByTestId("stop-all"); + + await act(async () => { + fireEvent.click(stopAll); + }); + + let errorDialog = screen.getByText("Failed to stop servers."); + + expect(errorDialog).toBeVisible(); +}); + +test("Shows a UI error dialogue when start user server fails", async () => { + let spy = mockAsync(); + let rejectSpy = mockAsyncRejection(); + + await act(async () => { + render( + {}, {})}> + + + + + + , + ); + }); + + let start = screen.getByText("Start Server"); + + await act(async () => { + fireEvent.click(start); + }); + + let errorDialog = screen.getByText("Failed to start server."); + + expect(errorDialog).toBeVisible(); +}); + +test("Shows a UI error dialogue when start user server returns an improper status code", async () => { + let spy = mockAsync(); + let rejectSpy = mockAsync({ status: 403 }); + + await act(async () => { + render( + {}, {})}> + + + + + + , + ); + }); + + let start = screen.getByText("Start Server"); + + await act(async () => { + fireEvent.click(start); + }); + + let errorDialog = screen.getByText("Failed to start server."); + + expect(errorDialog).toBeVisible(); +}); + +test("Shows a UI error dialogue when stop user servers fails", async () => { + let spy = mockAsync(); + let rejectSpy = mockAsyncRejection(); + + await 
act(async () => { + render( + {}, {})}> + + + + + + , + ); + }); + + let stop = screen.getByText("Stop Server"); + + await act(async () => { + fireEvent.click(stop); + }); + + let errorDialog = screen.getByText("Failed to stop server."); + + expect(errorDialog).toBeVisible(); +}); + +test("Shows a UI error dialogue when stop user server returns an improper status code", async () => { + let spy = mockAsync(); + let rejectSpy = mockAsync({ status: 403 }); + + await act(async () => { + render( + {}, {})}> + + + + + + , + ); + }); + + let stop = screen.getByText("Stop Server"); + + await act(async () => { + fireEvent.click(stop); + }); + + let errorDialog = screen.getByText("Failed to stop server."); + + expect(errorDialog).toBeVisible(); +}); + +test("Search for user calls updateUsers with name filter", async () => { + let spy = mockAsync(); + let mockUpdateUsers = jest.fn((offset, limit, name_filter) => { + return Promise.resolve({ + items: [], + _pagination: { + offset: offset, + limit: limit, + total: offset + limit * 2, + next: { + offset: offset + limit, + limit: limit, + }, + }, + }); + }); + await act(async () => { + render( + + + + + + + , + ); + }); + + let search = screen.getByLabelText("user-search"); + + expect(mockUpdateUsers.mock.calls).toHaveLength(1); + + userEvent.type(search, "a"); + expect(search.value).toEqual("a"); + clock.tick(400); + expect(mockReducers.mock.calls).toHaveLength(3); + var lastState = + mockReducers.mock.results[mockReducers.mock.results.length - 1].value; + expect(lastState.name_filter).toEqual("a"); + // TODO: this should + expect(mockUpdateUsers.mock.calls).toHaveLength(1); + userEvent.type(search, "b"); + expect(search.value).toEqual("ab"); + clock.tick(400); + expect(mockReducers.mock.calls).toHaveLength(4); + lastState = + mockReducers.mock.results[mockReducers.mock.results.length - 1].value; + expect(lastState.name_filter).toEqual("ab"); + expect(lastState.user_page.offset).toEqual(0); +}); + +test("Interacting with 
PaginationFooter causes state update and refresh via useEffect call", async () => { + let callbackSpy = mockAsync(); + + await act(async () => { + render(serverDashboardJsx(callbackSpy)); + }); + + expect(callbackSpy).toBeCalledWith(0, 2, ""); + + expect(mockReducers.mock.results).toHaveLength(2); + lastState = + mockReducers.mock.results[mockReducers.mock.results.length - 1].value; + console.log(lastState); + expect(lastState.user_page.offset).toEqual(0); + expect(lastState.user_page.limit).toEqual(2); + + let next = screen.getByTestId("paginate-next"); + fireEvent.click(next); + clock.tick(400); + + expect(mockReducers.mock.results).toHaveLength(3); + var lastState = + mockReducers.mock.results[mockReducers.mock.results.length - 1].value; + expect(lastState.user_page.offset).toEqual(2); + expect(lastState.user_page.limit).toEqual(2); + + // FIXME: should call updateUsers, does in reality. + // tests don't reflect reality due to mocked state/useSelector + // unclear how to fix this. + // expect(callbackSpy.mock.calls).toHaveLength(2); + // expect(callbackSpy).toHaveBeenCalledWith(2, 2, ""); +}); diff --git a/jsx/src/components/ServerDashboard/server-dashboard.css b/jsx/src/components/ServerDashboard/server-dashboard.css new file mode 100644 index 00000000..6d8329ba --- /dev/null +++ b/jsx/src/components/ServerDashboard/server-dashboard.css @@ -0,0 +1,32 @@ +@import url(../../style/root.css); + +.server-dashboard-container { + padding-right: 15px; + padding-left: 15px; + margin-right: auto; + margin-left: auto; +} + +.server-dashboard-container .add-users-button { + border: 1px solid #ddd; +} + +.server-dashboard-container tbody { + color: #626262; +} + +.admin-table-head { + user-select: none; +} + +.sort-icon { + display: inline-block; + top: 0.125em; + position: relative; + user-select: none; + cursor: pointer; +} + +tr.noborder > td { + border: none !important; +} diff --git a/jsx/src/style/root.css b/jsx/src/style/root.css new file mode 100644 index 
00000000..8adb8e40 --- /dev/null +++ b/jsx/src/style/root.css @@ -0,0 +1,35 @@ +:root { + --red: #d7191e; + --orange: #f1ad4e; + --blue: #2e7ab6; + --white: #ffffff; + --gray: #f7f7f7; +} + +/* Color Classes */ +.red { + background-color: var(--red); +} +.orange { + background-color: var(--orange); +} +.blue { + background-color: var(--blue); +} +.white { + background-color: var(--white); +} + +/* Resets */ + +.resets .modal { + display: block; + visibility: visible; + z-index: 2000; +} + +/* Global Util Classes */ +.adjacent-span-spacing { + margin-right: 5px; + margin-left: 5px; +} diff --git a/jsx/src/util/jhapiUtil.js b/jsx/src/util/jhapiUtil.js new file mode 100644 index 00000000..2578519f --- /dev/null +++ b/jsx/src/util/jhapiUtil.js @@ -0,0 +1,13 @@ +export const jhapiRequest = (endpoint, method, data) => { + let base_url = window.base_url || "/", + api_url = `${base_url}hub/api`; + return fetch(api_url + endpoint, { + method: method, + json: true, + headers: { + "Content-Type": "application/json", + Accept: "application/jupyterhub-pagination+json", + }, + body: data ? 
JSON.stringify(data) : null, + }); +}; diff --git a/jsx/src/util/timeSince.js b/jsx/src/util/timeSince.js new file mode 100644 index 00000000..2c6f1b26 --- /dev/null +++ b/jsx/src/util/timeSince.js @@ -0,0 +1,23 @@ +export const timeSince = (time) => { + var msPerMinute = 60 * 1000; + var msPerHour = msPerMinute * 60; + var msPerDay = msPerHour * 24; + var msPerMonth = msPerDay * 30; + var msPerYear = msPerDay * 365; + + var elapsed = Date.now() - Date.parse(time); + + if (elapsed < msPerMinute) { + return Math.round(elapsed / 1000) + " seconds ago"; + } else if (elapsed < msPerHour) { + return Math.round(elapsed / msPerMinute) + " minutes ago"; + } else if (elapsed < msPerDay) { + return Math.round(elapsed / msPerHour) + " hours ago"; + } else if (elapsed < msPerMonth) { + return Math.round(elapsed / msPerDay) + " days ago"; + } else if (elapsed < msPerYear) { + return Math.round(elapsed / msPerMonth) + " months ago"; + } else { + return Math.round(elapsed / msPerYear) + " years ago"; + } +}; diff --git a/jsx/src/util/withAPI.js b/jsx/src/util/withAPI.js new file mode 100644 index 00000000..f17f7f25 --- /dev/null +++ b/jsx/src/util/withAPI.js @@ -0,0 +1,58 @@ +import { withProps } from "recompose"; +import { jhapiRequest } from "./jhapiUtil"; + +const withAPI = withProps(() => ({ + updateUsers: (offset, limit, name_filter) => + jhapiRequest( + `/users?include_stopped_servers&offset=${offset}&limit=${limit}&name_filter=${ + name_filter || "" + }`, + "GET", + ).then((data) => data.json()), + updateGroups: (offset, limit) => + jhapiRequest(`/groups?offset=${offset}&limit=${limit}`, "GET").then( + (data) => data.json(), + ), + shutdownHub: () => jhapiRequest("/shutdown", "POST"), + startServer: (name, serverName = "") => + jhapiRequest("/users/" + name + "/servers/" + (serverName || ""), "POST"), + stopServer: (name, serverName = "") => + jhapiRequest("/users/" + name + "/servers/" + (serverName || ""), "DELETE"), + startAll: (names) => + names.map((e) => 
jhapiRequest("/users/" + e + "/server", "POST")), + stopAll: (names) => + names.map((e) => jhapiRequest("/users/" + e + "/server", "DELETE")), + addToGroup: (users, groupname) => + jhapiRequest("/groups/" + groupname + "/users", "POST", { users }), + removeFromGroup: (users, groupname) => + jhapiRequest("/groups/" + groupname + "/users", "DELETE", { users }), + createGroup: (groupName) => jhapiRequest("/groups/" + groupName, "POST"), + deleteGroup: (name) => jhapiRequest("/groups/" + name, "DELETE"), + addUsers: (usernames, admin) => + jhapiRequest("/users", "POST", { usernames, admin }), + editUser: (username, updated_username, admin) => + jhapiRequest("/users/" + username, "PATCH", { + name: updated_username, + admin, + }), + deleteUser: (username) => jhapiRequest("/users/" + username, "DELETE"), + findUser: (username) => jhapiRequest("/users/" + username, "GET"), + validateUser: (username) => + jhapiRequest("/users/" + username, "GET") + .then((data) => data.status) + .then((data) => (data > 200 ? 
false : true)), + // Temporarily Unused + failRegexEvent: () => { + return null; + }, + noChangeEvent: () => { + return null; + }, + // + refreshGroupsData: () => + jhapiRequest("/groups", "GET").then((data) => data.json()), + refreshUserData: () => + jhapiRequest("/users", "GET").then((data) => data.json()), +})); + +export default withAPI; diff --git a/jsx/webpack.config.js b/jsx/webpack.config.js new file mode 100644 index 00000000..426066fc --- /dev/null +++ b/jsx/webpack.config.js @@ -0,0 +1,99 @@ +const webpack = require("webpack"); +const path = require("path"); + +module.exports = { + entry: path.resolve(__dirname, "src", "App.jsx"), + mode: "production", + module: { + rules: [ + { + test: /\.(js|jsx)/, + exclude: /node_modules/, + use: "babel-loader", + }, + { + test: /\.(css)/, + exclude: /node_modules/, + use: ["style-loader", "css-loader"], + }, + { + test: /\.(png|jpe?g|gif|svg|woff2?|ttf)$/i, + exclude: /node_modules/, + use: "file-loader", + }, + ], + }, + output: { + publicPath: "/", + filename: "admin-react.js", + path: path.resolve(__dirname, "build"), + }, + resolve: { + extensions: [".css", ".js", ".jsx"], + }, + plugins: [new webpack.HotModuleReplacementPlugin()], + devServer: { + static: { + directory: path.resolve(__dirname, "build"), + }, + port: 9000, + onBeforeSetupMiddleware: (devServer) => { + const app = devServer.app; + + var user_data = JSON.parse( + 
'[{"kind":"user","name":"foo","admin":true,"groups":[],"server":"/user/foo/","pending":null,"created":"2020-12-07T18:46:27.112695Z","last_activity":"2020-12-07T21:00:33.336354Z","servers":{"":{"name":"","last_activity":"2020-12-07T20:58:02.437408Z","started":"2020-12-07T20:58:01.508266Z","pending":null,"ready":true,"state":{"pid":28085},"url":"/user/foo/","user_options":{},"progress_url":"/hub/api/users/foo/server/progress"}}},{"kind":"user","name":"bar","admin":false,"groups":[],"server":null,"pending":null,"created":"2020-12-07T18:46:27.115528Z","last_activity":"2020-12-07T20:43:51.013613Z","servers":{}}]', + ); + var group_data = JSON.parse( + '[{"kind":"group","name":"testgroup","users":[]}, {"kind":"group","name":"testgroup2","users":["foo", "bar"]}]', + ); + + // get user_data + app.get("/hub/api/users", (req, res) => { + res + .set("Content-Type", "application/json") + .send(JSON.stringify(user_data)); + }); + // get group_data + app.get("/hub/api/groups", (req, res) => { + res + .set("Content-Type", "application/json") + .send(JSON.stringify(group_data)); + }); + // add users to group + app.post("/hub/api/groups/*/users", (req, res) => { + console.log(req.url, req.body); + res.status(200).end(); + }); + // remove users from group + app.delete("/hub/api/groups/*", (req, res) => { + console.log(req.url, req.body); + res.status(200).end(); + }); + // add users + app.post("/hub/api/users", (req, res) => { + console.log(req.url, req.body); + res.status(200).end(); + }); + // delete user + app.delete("/hub/api/users", (req, res) => { + console.log(req.url, req.body); + res.status(200).end(); + }); + // start user server + app.post("/hub/api/users/*/server", (req, res) => { + console.log(req.url, req.body); + res.status(200).end(); + }); + // stop user server + app.delete("/hub/api/users/*/server", (req, res) => { + console.log(req.url, req.body); + res.status(200).end(); + }); + // shutdown hub + app.post("/hub/api/shutdown", (req, res) => { + 
console.log(req.url, req.body); + res.status(200).end(); + }); + }, + }, +}; diff --git a/jsx/yarn.lock b/jsx/yarn.lock new file mode 100644 index 00000000..115940e7 --- /dev/null +++ b/jsx/yarn.lock @@ -0,0 +1,7608 @@ +# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. +# yarn lockfile v1 + + +"@ampproject/remapping@^2.1.0": + version "2.2.0" + resolved "https://registry.yarnpkg.com/@ampproject/remapping/-/remapping-2.2.0.tgz#56c133824780de3174aed5ab6834f3026790154d" + integrity sha512-qRmjj8nj9qmLTQXXmaR1cck3UXSRMPrbsLJAasZpF+t3riI71BXed5ebIOYwQntykeZuhjsdweEc9BxH5Jc26w== + dependencies: + "@jridgewell/gen-mapping" "^0.1.0" + "@jridgewell/trace-mapping" "^0.3.9" + +"@babel/code-frame@7.12.11": + version "7.12.11" + resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.12.11.tgz#f4ad435aa263db935b8f10f2c552d23fb716a63f" + integrity sha512-Zt1yodBx1UcyiePMSkWnU4hPqhwq7hGi2nFL1LeA3EUl+q2LQx16MISgJ0+z7dnmgvP9QtIleuETGOiOH1RcIw== + dependencies: + "@babel/highlight" "^7.10.4" + +"@babel/code-frame@^7.0.0", "@babel/code-frame@^7.10.4", "@babel/code-frame@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.18.6.tgz#3b25d38c89600baa2dcc219edfa88a74eb2c427a" + integrity sha512-TDCmlK5eOvH+eH7cdAFlNXeVJqWIQ7gW9tY1GJIpUtFb6CmjVyq2VM3u71bOyR8CRihcCgMUYoDNyLXao3+70Q== + dependencies: + "@babel/highlight" "^7.18.6" + +"@babel/compat-data@^7.17.7", "@babel/compat-data@^7.18.8": + version "7.18.8" + resolved "https://registry.yarnpkg.com/@babel/compat-data/-/compat-data-7.18.8.tgz#2483f565faca607b8535590e84e7de323f27764d" + integrity sha512-HSmX4WZPPK3FUxYp7g2T6EyO8j96HlZJlxmKPSh6KAcqwyDrfx7hKjXpAW/0FhFfTJsR0Yt4lAjLI2coMptIHQ== + +"@babel/core@^7.1.0", "@babel/core@^7.12.3", "@babel/core@^7.7.5": + version "7.18.10" + resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.18.10.tgz#39ad504991d77f1f3da91be0b8b949a5bc466fb8" + integrity 
sha512-JQM6k6ENcBFKVtWvLavlvi/mPcpYZ3+R+2EySDEMSMbp7Mn4FexlbbJVrx2R7Ijhr01T8gyqrOaABWIOgxeUyw== + dependencies: + "@ampproject/remapping" "^2.1.0" + "@babel/code-frame" "^7.18.6" + "@babel/generator" "^7.18.10" + "@babel/helper-compilation-targets" "^7.18.9" + "@babel/helper-module-transforms" "^7.18.9" + "@babel/helpers" "^7.18.9" + "@babel/parser" "^7.18.10" + "@babel/template" "^7.18.10" + "@babel/traverse" "^7.18.10" + "@babel/types" "^7.18.10" + convert-source-map "^1.7.0" + debug "^4.1.0" + gensync "^1.0.0-beta.2" + json5 "^2.2.1" + semver "^6.3.0" + +"@babel/generator@^7.18.10": + version "7.18.10" + resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.18.10.tgz#794f328bfabdcbaf0ebf9bf91b5b57b61fa77a2a" + integrity sha512-0+sW7e3HjQbiHbj1NeU/vN8ornohYlacAfZIaXhdoGweQqgcNy69COVciYYqEXJ/v+9OBA7Frxm4CVAuNqKeNA== + dependencies: + "@babel/types" "^7.18.10" + "@jridgewell/gen-mapping" "^0.3.2" + jsesc "^2.5.1" + +"@babel/helper-annotate-as-pure@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.18.6.tgz#eaa49f6f80d5a33f9a5dd2276e6d6e451be0a6bb" + integrity sha512-duORpUiYrEpzKIop6iNbjnwKLAKnJ47csTyRACyEmWj0QdUrm5aqNJGHSSEQSUAvNW0ojX0dOmK9dZduvkfeXA== + dependencies: + "@babel/types" "^7.18.6" + +"@babel/helper-builder-binary-assignment-operator-visitor@^7.18.6": + version "7.18.9" + resolved "https://registry.yarnpkg.com/@babel/helper-builder-binary-assignment-operator-visitor/-/helper-builder-binary-assignment-operator-visitor-7.18.9.tgz#acd4edfd7a566d1d51ea975dff38fd52906981bb" + integrity sha512-yFQ0YCHoIqarl8BCRwBL8ulYUaZpz3bNsA7oFepAzee+8/+ImtADXNOmO5vJvsPff3qi+hvpkY/NYBTrBQgdNw== + dependencies: + "@babel/helper-explode-assignable-expression" "^7.18.6" + "@babel/types" "^7.18.9" + +"@babel/helper-compilation-targets@^7.17.7", "@babel/helper-compilation-targets@^7.18.9": + version "7.18.9" + resolved 
"https://registry.yarnpkg.com/@babel/helper-compilation-targets/-/helper-compilation-targets-7.18.9.tgz#69e64f57b524cde3e5ff6cc5a9f4a387ee5563bf" + integrity sha512-tzLCyVmqUiFlcFoAPLA/gL9TeYrF61VLNtb+hvkuVaB5SUjW7jcfrglBIX1vUIoT7CLP3bBlIMeyEsIl2eFQNg== + dependencies: + "@babel/compat-data" "^7.18.8" + "@babel/helper-validator-option" "^7.18.6" + browserslist "^4.20.2" + semver "^6.3.0" + +"@babel/helper-create-class-features-plugin@^7.18.6": + version "7.18.9" + resolved "https://registry.yarnpkg.com/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.18.9.tgz#d802ee16a64a9e824fcbf0a2ffc92f19d58550ce" + integrity sha512-WvypNAYaVh23QcjpMR24CwZY2Nz6hqdOcFdPbNpV56hL5H6KiFheO7Xm1aPdlLQ7d5emYZX7VZwPp9x3z+2opw== + dependencies: + "@babel/helper-annotate-as-pure" "^7.18.6" + "@babel/helper-environment-visitor" "^7.18.9" + "@babel/helper-function-name" "^7.18.9" + "@babel/helper-member-expression-to-functions" "^7.18.9" + "@babel/helper-optimise-call-expression" "^7.18.6" + "@babel/helper-replace-supers" "^7.18.9" + "@babel/helper-split-export-declaration" "^7.18.6" + +"@babel/helper-create-regexp-features-plugin@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/helper-create-regexp-features-plugin/-/helper-create-regexp-features-plugin-7.18.6.tgz#3e35f4e04acbbf25f1b3534a657610a000543d3c" + integrity sha512-7LcpH1wnQLGrI+4v+nPp+zUvIkF9x0ddv1Hkdue10tg3gmRnLy97DXh4STiOf1qeIInyD69Qv5kKSZzKD8B/7A== + dependencies: + "@babel/helper-annotate-as-pure" "^7.18.6" + regexpu-core "^5.1.0" + +"@babel/helper-define-polyfill-provider@^0.3.2": + version "0.3.2" + resolved "https://registry.yarnpkg.com/@babel/helper-define-polyfill-provider/-/helper-define-polyfill-provider-0.3.2.tgz#bd10d0aca18e8ce012755395b05a79f45eca5073" + integrity sha512-r9QJJ+uDWrd+94BSPcP6/de67ygLtvVy6cK4luE6MOuDsZIdoaPBnfSpbO/+LTifjPckbKXRuI9BB/Z2/y3iTg== + dependencies: + "@babel/helper-compilation-targets" "^7.17.7" + "@babel/helper-plugin-utils" 
"^7.16.7" + debug "^4.1.1" + lodash.debounce "^4.0.8" + resolve "^1.14.2" + semver "^6.1.2" + +"@babel/helper-environment-visitor@^7.18.9": + version "7.18.9" + resolved "https://registry.yarnpkg.com/@babel/helper-environment-visitor/-/helper-environment-visitor-7.18.9.tgz#0c0cee9b35d2ca190478756865bb3528422f51be" + integrity sha512-3r/aACDJ3fhQ/EVgFy0hpj8oHyHpQc+LPtJoY9SzTThAsStm4Ptegq92vqKoE3vD706ZVFWITnMnxucw+S9Ipg== + +"@babel/helper-explode-assignable-expression@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/helper-explode-assignable-expression/-/helper-explode-assignable-expression-7.18.6.tgz#41f8228ef0a6f1a036b8dfdfec7ce94f9a6bc096" + integrity sha512-eyAYAsQmB80jNfg4baAtLeWAQHfHFiR483rzFK+BhETlGZaQC9bsfrugfXDCbRHLQbIA7U5NxhhOxN7p/dWIcg== + dependencies: + "@babel/types" "^7.18.6" + +"@babel/helper-function-name@^7.18.9": + version "7.18.9" + resolved "https://registry.yarnpkg.com/@babel/helper-function-name/-/helper-function-name-7.18.9.tgz#940e6084a55dee867d33b4e487da2676365e86b0" + integrity sha512-fJgWlZt7nxGksJS9a0XdSaI4XvpExnNIgRP+rVefWh5U7BL8pPuir6SJUmFKRfjWQ51OtWSzwOxhaH/EBWWc0A== + dependencies: + "@babel/template" "^7.18.6" + "@babel/types" "^7.18.9" + +"@babel/helper-hoist-variables@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/helper-hoist-variables/-/helper-hoist-variables-7.18.6.tgz#d4d2c8fb4baeaa5c68b99cc8245c56554f926678" + integrity sha512-UlJQPkFqFULIcyW5sbzgbkxn2FKRgwWiRexcuaR8RNJRy8+LLveqPjwZV/bwrLZCN0eUHD/x8D0heK1ozuoo6Q== + dependencies: + "@babel/types" "^7.18.6" + +"@babel/helper-member-expression-to-functions@^7.18.9": + version "7.18.9" + resolved "https://registry.yarnpkg.com/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.18.9.tgz#1531661e8375af843ad37ac692c132841e2fd815" + integrity sha512-RxifAh2ZoVU67PyKIO4AMi1wTenGfMR/O/ae0CCRqwgBAt5v7xjdtRw7UoSbsreKrQn5t7r89eruK/9JjYHuDg== + dependencies: + "@babel/types" "^7.18.9" + 
+"@babel/helper-module-imports@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/helper-module-imports/-/helper-module-imports-7.18.6.tgz#1e3ebdbbd08aad1437b428c50204db13c5a3ca6e" + integrity sha512-0NFvs3VkuSYbFi1x2Vd6tKrywq+z/cLeYC/RJNFrIX/30Bf5aiGYbtvGXolEktzJH8o5E5KJ3tT+nkxuuZFVlA== + dependencies: + "@babel/types" "^7.18.6" + +"@babel/helper-module-transforms@^7.18.6", "@babel/helper-module-transforms@^7.18.9": + version "7.18.9" + resolved "https://registry.yarnpkg.com/@babel/helper-module-transforms/-/helper-module-transforms-7.18.9.tgz#5a1079c005135ed627442df31a42887e80fcb712" + integrity sha512-KYNqY0ICwfv19b31XzvmI/mfcylOzbLtowkw+mfvGPAQ3kfCnMLYbED3YecL5tPd8nAYFQFAd6JHp2LxZk/J1g== + dependencies: + "@babel/helper-environment-visitor" "^7.18.9" + "@babel/helper-module-imports" "^7.18.6" + "@babel/helper-simple-access" "^7.18.6" + "@babel/helper-split-export-declaration" "^7.18.6" + "@babel/helper-validator-identifier" "^7.18.6" + "@babel/template" "^7.18.6" + "@babel/traverse" "^7.18.9" + "@babel/types" "^7.18.9" + +"@babel/helper-optimise-call-expression@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.18.6.tgz#9369aa943ee7da47edab2cb4e838acf09d290ffe" + integrity sha512-HP59oD9/fEHQkdcbgFCnbmgH5vIQTJbxh2yf+CdM89/glUNnuzr87Q8GIjGEnOktTROemO0Pe0iPAYbqZuOUiA== + dependencies: + "@babel/types" "^7.18.6" + +"@babel/helper-plugin-utils@^7.0.0", "@babel/helper-plugin-utils@^7.10.4", "@babel/helper-plugin-utils@^7.12.13", "@babel/helper-plugin-utils@^7.14.5", "@babel/helper-plugin-utils@^7.16.7", "@babel/helper-plugin-utils@^7.18.6", "@babel/helper-plugin-utils@^7.18.9", "@babel/helper-plugin-utils@^7.8.0", "@babel/helper-plugin-utils@^7.8.3": + version "7.18.9" + resolved "https://registry.yarnpkg.com/@babel/helper-plugin-utils/-/helper-plugin-utils-7.18.9.tgz#4b8aea3b069d8cb8a72cdfe28ddf5ceca695ef2f" + integrity 
sha512-aBXPT3bmtLryXaoJLyYPXPlSD4p1ld9aYeR+sJNOZjJJGiOpb+fKfh3NkcCu7J54nUJwCERPBExCCpyCOHnu/w== + +"@babel/helper-remap-async-to-generator@^7.18.6", "@babel/helper-remap-async-to-generator@^7.18.9": + version "7.18.9" + resolved "https://registry.yarnpkg.com/@babel/helper-remap-async-to-generator/-/helper-remap-async-to-generator-7.18.9.tgz#997458a0e3357080e54e1d79ec347f8a8cd28519" + integrity sha512-dI7q50YKd8BAv3VEfgg7PS7yD3Rtbi2J1XMXaalXO0W0164hYLnh8zpjRS0mte9MfVp/tltvr/cfdXPvJr1opA== + dependencies: + "@babel/helper-annotate-as-pure" "^7.18.6" + "@babel/helper-environment-visitor" "^7.18.9" + "@babel/helper-wrap-function" "^7.18.9" + "@babel/types" "^7.18.9" + +"@babel/helper-replace-supers@^7.18.6", "@babel/helper-replace-supers@^7.18.9": + version "7.18.9" + resolved "https://registry.yarnpkg.com/@babel/helper-replace-supers/-/helper-replace-supers-7.18.9.tgz#1092e002feca980fbbb0bd4d51b74a65c6a500e6" + integrity sha512-dNsWibVI4lNT6HiuOIBr1oyxo40HvIVmbwPUm3XZ7wMh4k2WxrxTqZwSqw/eEmXDS9np0ey5M2bz9tBmO9c+YQ== + dependencies: + "@babel/helper-environment-visitor" "^7.18.9" + "@babel/helper-member-expression-to-functions" "^7.18.9" + "@babel/helper-optimise-call-expression" "^7.18.6" + "@babel/traverse" "^7.18.9" + "@babel/types" "^7.18.9" + +"@babel/helper-simple-access@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/helper-simple-access/-/helper-simple-access-7.18.6.tgz#d6d8f51f4ac2978068df934b569f08f29788c7ea" + integrity sha512-iNpIgTgyAvDQpDj76POqg+YEt8fPxx3yaNBg3S30dxNKm2SWfYhD0TGrK/Eu9wHpUW63VQU894TsTg+GLbUa1g== + dependencies: + "@babel/types" "^7.18.6" + +"@babel/helper-skip-transparent-expression-wrappers@^7.18.9": + version "7.18.9" + resolved "https://registry.yarnpkg.com/@babel/helper-skip-transparent-expression-wrappers/-/helper-skip-transparent-expression-wrappers-7.18.9.tgz#778d87b3a758d90b471e7b9918f34a9a02eb5818" + integrity 
sha512-imytd2gHi3cJPsybLRbmFrF7u5BIEuI2cNheyKi3/iOBC63kNn3q8Crn2xVuESli0aM4KYsyEqKyS7lFL8YVtw== + dependencies: + "@babel/types" "^7.18.9" + +"@babel/helper-split-export-declaration@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.18.6.tgz#7367949bc75b20c6d5a5d4a97bba2824ae8ef075" + integrity sha512-bde1etTx6ZyTmobl9LLMMQsaizFVZrquTEHOqKeQESMKo4PlObf+8+JA25ZsIpZhT/WEd39+vOdLXAFG/nELpA== + dependencies: + "@babel/types" "^7.18.6" + +"@babel/helper-string-parser@^7.18.10": + version "7.18.10" + resolved "https://registry.yarnpkg.com/@babel/helper-string-parser/-/helper-string-parser-7.18.10.tgz#181f22d28ebe1b3857fa575f5c290b1aaf659b56" + integrity sha512-XtIfWmeNY3i4t7t4D2t02q50HvqHybPqW2ki1kosnvWCwuCMeo81Jf0gwr85jy/neUdg5XDdeFE/80DXiO+njw== + +"@babel/helper-validator-identifier@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.18.6.tgz#9c97e30d31b2b8c72a1d08984f2ca9b574d7a076" + integrity sha512-MmetCkz9ej86nJQV+sFCxoGGrUbU3q02kgLciwkrt9QqEB7cP39oKEY0PakknEO0Gu20SskMRi+AYZ3b1TpN9g== + +"@babel/helper-validator-option@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/helper-validator-option/-/helper-validator-option-7.18.6.tgz#bf0d2b5a509b1f336099e4ff36e1a63aa5db4db8" + integrity sha512-XO7gESt5ouv/LRJdrVjkShckw6STTaB7l9BrpBaAHDeF5YZT+01PCwmR0SJHnkW6i8OwW/EVWRShfi4j2x+KQw== + +"@babel/helper-wrap-function@^7.18.9": + version "7.18.10" + resolved "https://registry.yarnpkg.com/@babel/helper-wrap-function/-/helper-wrap-function-7.18.10.tgz#a7fcd3ab9b1be4c9b52cf7d7fdc1e88c2ce93396" + integrity sha512-95NLBP59VWdfK2lyLKe6eTMq9xg+yWKzxzxbJ1wcYNi1Auz200+83fMDADjRxBvc2QQor5zja2yTQzXGhk2GtQ== + dependencies: + "@babel/helper-function-name" "^7.18.9" + "@babel/template" "^7.18.10" + "@babel/traverse" "^7.18.10" + "@babel/types" "^7.18.10" + +"@babel/helpers@^7.18.9": + 
version "7.18.9" + resolved "https://registry.yarnpkg.com/@babel/helpers/-/helpers-7.18.9.tgz#4bef3b893f253a1eced04516824ede94dcfe7ff9" + integrity sha512-Jf5a+rbrLoR4eNdUmnFu8cN5eNJT6qdTdOg5IHIzq87WwyRw9PwguLFOWYgktN/60IP4fgDUawJvs7PjQIzELQ== + dependencies: + "@babel/template" "^7.18.6" + "@babel/traverse" "^7.18.9" + "@babel/types" "^7.18.9" + +"@babel/highlight@^7.10.4", "@babel/highlight@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.18.6.tgz#81158601e93e2563795adcbfbdf5d64be3f2ecdf" + integrity sha512-u7stbOuYjaPezCuLj29hNW1v64M2Md2qupEKP1fHc7WdOA3DgLh37suiSrZYY7haUB7iBeQZ9P1uiRF359do3g== + dependencies: + "@babel/helper-validator-identifier" "^7.18.6" + chalk "^2.0.0" + js-tokens "^4.0.0" + +"@babel/parser@^7.1.0", "@babel/parser@^7.14.7", "@babel/parser@^7.18.10": + version "7.18.10" + resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.18.10.tgz#94b5f8522356e69e8277276adf67ed280c90ecc1" + integrity sha512-TYk3OA0HKL6qNryUayb5UUEhM/rkOQozIBEA5ITXh5DWrSp0TlUQXMyZmnWxG/DizSWBeeQ0Zbc5z8UGaaqoeg== + +"@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression/-/plugin-bugfix-safari-id-destructuring-collision-in-function-expression-7.18.6.tgz#da5b8f9a580acdfbe53494dba45ea389fb09a4d2" + integrity sha512-Dgxsyg54Fx1d4Nge8UnvTrED63vrwOdPmyvPzlNN/boaliRP54pm3pGzZD1SJUwrBA+Cs/xdG8kXX6Mn/RfISQ== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining@^7.18.9": + version "7.18.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining/-/plugin-bugfix-v8-spread-parameters-in-optional-chaining-7.18.9.tgz#a11af19aa373d68d561f08e0a57242350ed0ec50" + integrity 
sha512-AHrP9jadvH7qlOj6PINbgSuphjQUAK7AOT7DPjBo9EHoLhQTnnK5u45e1Hd4DbSQEO9nqPWtQ89r+XEOWFScKg== + dependencies: + "@babel/helper-plugin-utils" "^7.18.9" + "@babel/helper-skip-transparent-expression-wrappers" "^7.18.9" + "@babel/plugin-proposal-optional-chaining" "^7.18.9" + +"@babel/plugin-proposal-async-generator-functions@^7.18.10": + version "7.18.10" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-async-generator-functions/-/plugin-proposal-async-generator-functions-7.18.10.tgz#85ea478c98b0095c3e4102bff3b67d306ed24952" + integrity sha512-1mFuY2TOsR1hxbjCo4QL+qlIjV07p4H4EUYw2J/WCqsvFV6V9X9z9YhXbWndc/4fw+hYGlDT7egYxliMp5O6Ew== + dependencies: + "@babel/helper-environment-visitor" "^7.18.9" + "@babel/helper-plugin-utils" "^7.18.9" + "@babel/helper-remap-async-to-generator" "^7.18.9" + "@babel/plugin-syntax-async-generators" "^7.8.4" + +"@babel/plugin-proposal-class-properties@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-class-properties/-/plugin-proposal-class-properties-7.18.6.tgz#b110f59741895f7ec21a6fff696ec46265c446a3" + integrity sha512-cumfXOF0+nzZrrN8Rf0t7M+tF6sZc7vhQwYQck9q1/5w2OExlD+b4v4RpMJFaV1Z7WcDRgO6FqvxqxGlwo+RHQ== + dependencies: + "@babel/helper-create-class-features-plugin" "^7.18.6" + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-proposal-class-static-block@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-class-static-block/-/plugin-proposal-class-static-block-7.18.6.tgz#8aa81d403ab72d3962fc06c26e222dacfc9b9020" + integrity sha512-+I3oIiNxrCpup3Gi8n5IGMwj0gOCAjcJUSQEcotNnCCPMEnixawOQ+KeJPlgfjzx+FKQ1QSyZOWe7wmoJp7vhw== + dependencies: + "@babel/helper-create-class-features-plugin" "^7.18.6" + "@babel/helper-plugin-utils" "^7.18.6" + "@babel/plugin-syntax-class-static-block" "^7.14.5" + +"@babel/plugin-proposal-dynamic-import@^7.18.6": + version "7.18.6" + resolved 
"https://registry.yarnpkg.com/@babel/plugin-proposal-dynamic-import/-/plugin-proposal-dynamic-import-7.18.6.tgz#72bcf8d408799f547d759298c3c27c7e7faa4d94" + integrity sha512-1auuwmK+Rz13SJj36R+jqFPMJWyKEDd7lLSdOj4oJK0UTgGueSAtkrCvz9ewmgyU/P941Rv2fQwZJN8s6QruXw== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + "@babel/plugin-syntax-dynamic-import" "^7.8.3" + +"@babel/plugin-proposal-export-namespace-from@^7.18.9": + version "7.18.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-export-namespace-from/-/plugin-proposal-export-namespace-from-7.18.9.tgz#5f7313ab348cdb19d590145f9247540e94761203" + integrity sha512-k1NtHyOMvlDDFeb9G5PhUXuGj8m/wiwojgQVEhJ/fsVsMCpLyOP4h0uGEjYJKrRI+EVPlb5Jk+Gt9P97lOGwtA== + dependencies: + "@babel/helper-plugin-utils" "^7.18.9" + "@babel/plugin-syntax-export-namespace-from" "^7.8.3" + +"@babel/plugin-proposal-json-strings@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-json-strings/-/plugin-proposal-json-strings-7.18.6.tgz#7e8788c1811c393aff762817e7dbf1ebd0c05f0b" + integrity sha512-lr1peyn9kOdbYc0xr0OdHTZ5FMqS6Di+H0Fz2I/JwMzGmzJETNeOFq2pBySw6X/KFL5EWDjlJuMsUGRFb8fQgQ== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + "@babel/plugin-syntax-json-strings" "^7.8.3" + +"@babel/plugin-proposal-logical-assignment-operators@^7.18.9": + version "7.18.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-logical-assignment-operators/-/plugin-proposal-logical-assignment-operators-7.18.9.tgz#8148cbb350483bf6220af06fa6db3690e14b2e23" + integrity sha512-128YbMpjCrP35IOExw2Fq+x55LMP42DzhOhX2aNNIdI9avSWl2PI0yuBWarr3RYpZBSPtabfadkH2yeRiMD61Q== + dependencies: + "@babel/helper-plugin-utils" "^7.18.9" + "@babel/plugin-syntax-logical-assignment-operators" "^7.10.4" + +"@babel/plugin-proposal-nullish-coalescing-operator@^7.18.6": + version "7.18.6" + resolved 
"https://registry.yarnpkg.com/@babel/plugin-proposal-nullish-coalescing-operator/-/plugin-proposal-nullish-coalescing-operator-7.18.6.tgz#fdd940a99a740e577d6c753ab6fbb43fdb9467e1" + integrity sha512-wQxQzxYeJqHcfppzBDnm1yAY0jSRkUXR2z8RePZYrKwMKgMlE8+Z6LUno+bd6LvbGh8Gltvy74+9pIYkr+XkKA== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + "@babel/plugin-syntax-nullish-coalescing-operator" "^7.8.3" + +"@babel/plugin-proposal-numeric-separator@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-numeric-separator/-/plugin-proposal-numeric-separator-7.18.6.tgz#899b14fbafe87f053d2c5ff05b36029c62e13c75" + integrity sha512-ozlZFogPqoLm8WBr5Z8UckIoE4YQ5KESVcNudyXOR8uqIkliTEgJ3RoketfG6pmzLdeZF0H/wjE9/cCEitBl7Q== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + "@babel/plugin-syntax-numeric-separator" "^7.10.4" + +"@babel/plugin-proposal-object-rest-spread@^7.18.9": + version "7.18.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-object-rest-spread/-/plugin-proposal-object-rest-spread-7.18.9.tgz#f9434f6beb2c8cae9dfcf97d2a5941bbbf9ad4e7" + integrity sha512-kDDHQ5rflIeY5xl69CEqGEZ0KY369ehsCIEbTGb4siHG5BE9sga/T0r0OUwyZNLMmZE79E1kbsqAjwFCW4ds6Q== + dependencies: + "@babel/compat-data" "^7.18.8" + "@babel/helper-compilation-targets" "^7.18.9" + "@babel/helper-plugin-utils" "^7.18.9" + "@babel/plugin-syntax-object-rest-spread" "^7.8.3" + "@babel/plugin-transform-parameters" "^7.18.8" + +"@babel/plugin-proposal-optional-catch-binding@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-optional-catch-binding/-/plugin-proposal-optional-catch-binding-7.18.6.tgz#f9400d0e6a3ea93ba9ef70b09e72dd6da638a2cb" + integrity sha512-Q40HEhs9DJQyaZfUjjn6vE8Cv4GmMHCYuMGIWUnlxH6400VGxOuwWsPt4FxXxJkC/5eOzgn0z21M9gMT4MOhbw== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + "@babel/plugin-syntax-optional-catch-binding" "^7.8.3" + 
+"@babel/plugin-proposal-optional-chaining@^7.18.9": + version "7.18.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-optional-chaining/-/plugin-proposal-optional-chaining-7.18.9.tgz#e8e8fe0723f2563960e4bf5e9690933691915993" + integrity sha512-v5nwt4IqBXihxGsW2QmCWMDS3B3bzGIk/EQVZz2ei7f3NJl8NzAJVvUmpDW5q1CRNY+Beb/k58UAH1Km1N411w== + dependencies: + "@babel/helper-plugin-utils" "^7.18.9" + "@babel/helper-skip-transparent-expression-wrappers" "^7.18.9" + "@babel/plugin-syntax-optional-chaining" "^7.8.3" + +"@babel/plugin-proposal-private-methods@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-private-methods/-/plugin-proposal-private-methods-7.18.6.tgz#5209de7d213457548a98436fa2882f52f4be6bea" + integrity sha512-nutsvktDItsNn4rpGItSNV2sz1XwS+nfU0Rg8aCx3W3NOKVzdMjJRu0O5OkgDp3ZGICSTbgRpxZoWsxoKRvbeA== + dependencies: + "@babel/helper-create-class-features-plugin" "^7.18.6" + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-proposal-private-property-in-object@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-private-property-in-object/-/plugin-proposal-private-property-in-object-7.18.6.tgz#a64137b232f0aca3733a67eb1a144c192389c503" + integrity sha512-9Rysx7FOctvT5ouj5JODjAFAkgGoudQuLPamZb0v1TGLpapdNaftzifU8NTWQm0IRjqoYypdrSmyWgkocDQ8Dw== + dependencies: + "@babel/helper-annotate-as-pure" "^7.18.6" + "@babel/helper-create-class-features-plugin" "^7.18.6" + "@babel/helper-plugin-utils" "^7.18.6" + "@babel/plugin-syntax-private-property-in-object" "^7.14.5" + +"@babel/plugin-proposal-unicode-property-regex@^7.18.6", "@babel/plugin-proposal-unicode-property-regex@^7.4.4": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-unicode-property-regex/-/plugin-proposal-unicode-property-regex-7.18.6.tgz#af613d2cd5e643643b65cded64207b15c85cb78e" + integrity 
sha512-2BShG/d5yoZyXZfVePH91urL5wTG6ASZU9M4o03lKK8u8UW1y08OMttBSOADTcJrnPMpvDXRG3G8fyLh4ovs8w== + dependencies: + "@babel/helper-create-regexp-features-plugin" "^7.18.6" + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-syntax-async-generators@^7.8.4": + version "7.8.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-async-generators/-/plugin-syntax-async-generators-7.8.4.tgz#a983fb1aeb2ec3f6ed042a210f640e90e786fe0d" + integrity sha512-tycmZxkGfZaxhMRbXlPXuVFpdWlXpir2W4AMhSJgRKzk/eDlIXOhb2LHWoLpDF7TEHylV5zNhykX6KAgHJmTNw== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-bigint@^7.8.3": + version "7.8.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-bigint/-/plugin-syntax-bigint-7.8.3.tgz#4c9a6f669f5d0cdf1b90a1671e9a146be5300cea" + integrity sha512-wnTnFlG+YxQm3vDxpGE57Pj0srRU4sHE/mDkt1qv2YJJSeUAec2ma4WLUnUPeKjyrfntVwe/N6dCXpU+zL3Npg== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-class-properties@^7.12.13", "@babel/plugin-syntax-class-properties@^7.8.3": + version "7.12.13" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-class-properties/-/plugin-syntax-class-properties-7.12.13.tgz#b5c987274c4a3a82b89714796931a6b53544ae10" + integrity sha512-fm4idjKla0YahUNgFNLCB0qySdsoPiZP3iQE3rky0mBUtMZ23yDJ9SJdg6dXTSDnulOVqiF3Hgr9nbXvXTQZYA== + dependencies: + "@babel/helper-plugin-utils" "^7.12.13" + +"@babel/plugin-syntax-class-static-block@^7.14.5": + version "7.14.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-class-static-block/-/plugin-syntax-class-static-block-7.14.5.tgz#195df89b146b4b78b3bf897fd7a257c84659d406" + integrity sha512-b+YyPmr6ldyNnM6sqYeMWE+bgJcJpO6yS4QD7ymxgH34GBPNDM/THBh8iunyvKIZztiwLH4CJZ0RxTk9emgpjw== + dependencies: + "@babel/helper-plugin-utils" "^7.14.5" + +"@babel/plugin-syntax-dynamic-import@^7.8.3": + version "7.8.3" + resolved 
"https://registry.yarnpkg.com/@babel/plugin-syntax-dynamic-import/-/plugin-syntax-dynamic-import-7.8.3.tgz#62bf98b2da3cd21d626154fc96ee5b3cb68eacb3" + integrity sha512-5gdGbFon+PszYzqs83S3E5mpi7/y/8M9eC90MRTZfduQOYW76ig6SOSPNe41IG5LoP3FGBn2N0RjVDSQiS94kQ== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-export-namespace-from@^7.8.3": + version "7.8.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-export-namespace-from/-/plugin-syntax-export-namespace-from-7.8.3.tgz#028964a9ba80dbc094c915c487ad7c4e7a66465a" + integrity sha512-MXf5laXo6c1IbEbegDmzGPwGNTsHZmEy6QGznu5Sh2UCWvueywb2ee+CCE4zQiZstxU9BMoQO9i6zUFSY0Kj0Q== + dependencies: + "@babel/helper-plugin-utils" "^7.8.3" + +"@babel/plugin-syntax-import-assertions@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-import-assertions/-/plugin-syntax-import-assertions-7.18.6.tgz#cd6190500a4fa2fe31990a963ffab4b63e4505e4" + integrity sha512-/DU3RXad9+bZwrgWJQKbr39gYbJpLJHezqEzRzi/BHRlJ9zsQb4CK2CA/5apllXNomwA1qHwzvHl+AdEmC5krQ== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-syntax-import-meta@^7.8.3": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-import-meta/-/plugin-syntax-import-meta-7.10.4.tgz#ee601348c370fa334d2207be158777496521fd51" + integrity sha512-Yqfm+XDx0+Prh3VSeEQCPU81yC+JWZ2pDPFSS4ZdpfZhp4MkFMaDC1UqseovEKwSUpnIL7+vK+Clp7bfh0iD7g== + dependencies: + "@babel/helper-plugin-utils" "^7.10.4" + +"@babel/plugin-syntax-json-strings@^7.8.3": + version "7.8.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-json-strings/-/plugin-syntax-json-strings-7.8.3.tgz#01ca21b668cd8218c9e640cb6dd88c5412b2c96a" + integrity sha512-lY6kdGpWHvjoe2vk4WrAapEuBR69EMxZl+RoGRhrFGNYVK8mOPAW8VfbT/ZgrFbXlDNiiaxQnAtgVCZ6jv30EA== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-jsx@^7.18.6": + version "7.18.6" + resolved 
"https://registry.yarnpkg.com/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.18.6.tgz#a8feef63b010150abd97f1649ec296e849943ca0" + integrity sha512-6mmljtAedFGTWu2p/8WIORGwy+61PLgOMPOdazc7YoJ9ZCWUyFy3A6CpPkRKLKD1ToAesxX8KGEViAiLo9N+7Q== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-syntax-logical-assignment-operators@^7.10.4", "@babel/plugin-syntax-logical-assignment-operators@^7.8.3": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-logical-assignment-operators/-/plugin-syntax-logical-assignment-operators-7.10.4.tgz#ca91ef46303530448b906652bac2e9fe9941f699" + integrity sha512-d8waShlpFDinQ5MtvGU9xDAOzKH47+FFoney2baFIoMr952hKOLp1HR7VszoZvOsV/4+RRszNY7D17ba0te0ig== + dependencies: + "@babel/helper-plugin-utils" "^7.10.4" + +"@babel/plugin-syntax-nullish-coalescing-operator@^7.8.3": + version "7.8.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-nullish-coalescing-operator/-/plugin-syntax-nullish-coalescing-operator-7.8.3.tgz#167ed70368886081f74b5c36c65a88c03b66d1a9" + integrity sha512-aSff4zPII1u2QD7y+F8oDsz19ew4IGEJg9SVW+bqwpwtfFleiQDMdzA/R+UlWDzfnHFCxxleFT0PMIrR36XLNQ== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-numeric-separator@^7.10.4", "@babel/plugin-syntax-numeric-separator@^7.8.3": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-numeric-separator/-/plugin-syntax-numeric-separator-7.10.4.tgz#b9b070b3e33570cd9fd07ba7fa91c0dd37b9af97" + integrity sha512-9H6YdfkcK/uOnY/K7/aA2xpzaAgkQn37yzWUMRK7OaPOqOpGS1+n0H5hxT9AUw9EsSjPW8SVyMJwYRtWs3X3ug== + dependencies: + "@babel/helper-plugin-utils" "^7.10.4" + +"@babel/plugin-syntax-object-rest-spread@^7.8.3": + version "7.8.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.8.3.tgz#60e225edcbd98a640332a2e72dd3e66f1af55871" + integrity 
sha512-XoqMijGZb9y3y2XskN+P1wUGiVwWZ5JmoDRwx5+3GmEplNyVM2s2Dg8ILFQm8rWM48orGy5YpI5Bl8U1y7ydlA== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-optional-catch-binding@^7.8.3": + version "7.8.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-optional-catch-binding/-/plugin-syntax-optional-catch-binding-7.8.3.tgz#6111a265bcfb020eb9efd0fdfd7d26402b9ed6c1" + integrity sha512-6VPD0Pc1lpTqw0aKoeRTMiB+kWhAoT24PA+ksWSBrFtl5SIRVpZlwN3NNPQjehA2E/91FV3RjLWoVTglWcSV3Q== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-optional-chaining@^7.8.3": + version "7.8.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-optional-chaining/-/plugin-syntax-optional-chaining-7.8.3.tgz#4f69c2ab95167e0180cd5336613f8c5788f7d48a" + integrity sha512-KoK9ErH1MBlCPxV0VANkXW2/dw4vlbGDrFgz8bmUsBGYkFRcbRwMh6cIJubdPrkxRwuGdtCk0v/wPTKbQgBjkg== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-private-property-in-object@^7.14.5": + version "7.14.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-private-property-in-object/-/plugin-syntax-private-property-in-object-7.14.5.tgz#0dc6671ec0ea22b6e94a1114f857970cd39de1ad" + integrity sha512-0wVnp9dxJ72ZUJDV27ZfbSj6iHLoytYZmh3rFcxNnvsJF3ktkzLDZPy/mA17HGsaQT3/DQsWYX1f1QGWkCoVUg== + dependencies: + "@babel/helper-plugin-utils" "^7.14.5" + +"@babel/plugin-syntax-top-level-await@^7.14.5", "@babel/plugin-syntax-top-level-await@^7.8.3": + version "7.14.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-top-level-await/-/plugin-syntax-top-level-await-7.14.5.tgz#c1cfdadc35a646240001f06138247b741c34d94c" + integrity sha512-hx++upLv5U1rgYfwe1xBQUhRmU41NEvpUvrp8jkrSCdvGSnM5/qdRMtylJ6PG5OFkBaHkbTAKTnd3/YyESRHFw== + dependencies: + "@babel/helper-plugin-utils" "^7.14.5" + +"@babel/plugin-transform-arrow-functions@^7.18.6": + version "7.18.6" + resolved 
"https://registry.yarnpkg.com/@babel/plugin-transform-arrow-functions/-/plugin-transform-arrow-functions-7.18.6.tgz#19063fcf8771ec7b31d742339dac62433d0611fe" + integrity sha512-9S9X9RUefzrsHZmKMbDXxweEH+YlE8JJEuat9FdvW9Qh1cw7W64jELCtWNkPBPX5En45uy28KGvA/AySqUh8CQ== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-async-to-generator@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-async-to-generator/-/plugin-transform-async-to-generator-7.18.6.tgz#ccda3d1ab9d5ced5265fdb13f1882d5476c71615" + integrity sha512-ARE5wZLKnTgPW7/1ftQmSi1CmkqqHo2DNmtztFhvgtOWSDfq0Cq9/9L+KnZNYSNrydBekhW3rwShduf59RoXag== + dependencies: + "@babel/helper-module-imports" "^7.18.6" + "@babel/helper-plugin-utils" "^7.18.6" + "@babel/helper-remap-async-to-generator" "^7.18.6" + +"@babel/plugin-transform-block-scoped-functions@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-block-scoped-functions/-/plugin-transform-block-scoped-functions-7.18.6.tgz#9187bf4ba302635b9d70d986ad70f038726216a8" + integrity sha512-ExUcOqpPWnliRcPqves5HJcJOvHvIIWfuS4sroBUenPuMdmW+SMHDakmtS7qOo13sVppmUijqeTv7qqGsvURpQ== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-block-scoping@^7.18.9": + version "7.18.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-block-scoping/-/plugin-transform-block-scoping-7.18.9.tgz#f9b7e018ac3f373c81452d6ada8bd5a18928926d" + integrity sha512-5sDIJRV1KtQVEbt/EIBwGy4T01uYIo4KRB3VUqzkhrAIOGx7AoctL9+Ux88btY0zXdDyPJ9mW+bg+v+XEkGmtw== + dependencies: + "@babel/helper-plugin-utils" "^7.18.9" + +"@babel/plugin-transform-classes@^7.18.9": + version "7.18.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-classes/-/plugin-transform-classes-7.18.9.tgz#90818efc5b9746879b869d5ce83eb2aa48bbc3da" + integrity sha512-EkRQxsxoytpTlKJmSPYrsOMjCILacAjtSVkd4gChEe2kXjFCun3yohhW5I7plXJhCemM0gKsaGMcO8tinvCA5g== + 
dependencies: + "@babel/helper-annotate-as-pure" "^7.18.6" + "@babel/helper-environment-visitor" "^7.18.9" + "@babel/helper-function-name" "^7.18.9" + "@babel/helper-optimise-call-expression" "^7.18.6" + "@babel/helper-plugin-utils" "^7.18.9" + "@babel/helper-replace-supers" "^7.18.9" + "@babel/helper-split-export-declaration" "^7.18.6" + globals "^11.1.0" + +"@babel/plugin-transform-computed-properties@^7.18.9": + version "7.18.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-computed-properties/-/plugin-transform-computed-properties-7.18.9.tgz#2357a8224d402dad623caf6259b611e56aec746e" + integrity sha512-+i0ZU1bCDymKakLxn5srGHrsAPRELC2WIbzwjLhHW9SIE1cPYkLCL0NlnXMZaM1vhfgA2+M7hySk42VBvrkBRw== + dependencies: + "@babel/helper-plugin-utils" "^7.18.9" + +"@babel/plugin-transform-destructuring@^7.18.9": + version "7.18.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-destructuring/-/plugin-transform-destructuring-7.18.9.tgz#68906549c021cb231bee1db21d3b5b095f8ee292" + integrity sha512-p5VCYNddPLkZTq4XymQIaIfZNJwT9YsjkPOhkVEqt6QIpQFZVM9IltqqYpOEkJoN1DPznmxUDyZ5CTZs/ZCuHA== + dependencies: + "@babel/helper-plugin-utils" "^7.18.9" + +"@babel/plugin-transform-dotall-regex@^7.18.6", "@babel/plugin-transform-dotall-regex@^7.4.4": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-dotall-regex/-/plugin-transform-dotall-regex-7.18.6.tgz#b286b3e7aae6c7b861e45bed0a2fafd6b1a4fef8" + integrity sha512-6S3jpun1eEbAxq7TdjLotAsl4WpQI9DxfkycRcKrjhQYzU87qpXdknpBg/e+TdcMehqGnLFi7tnFUBR02Vq6wg== + dependencies: + "@babel/helper-create-regexp-features-plugin" "^7.18.6" + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-duplicate-keys@^7.18.9": + version "7.18.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-duplicate-keys/-/plugin-transform-duplicate-keys-7.18.9.tgz#687f15ee3cdad6d85191eb2a372c4528eaa0ae0e" + integrity 
sha512-d2bmXCtZXYc59/0SanQKbiWINadaJXqtvIQIzd4+hNwkWBgyCd5F/2t1kXoUdvPMrxzPvhK6EMQRROxsue+mfw== + dependencies: + "@babel/helper-plugin-utils" "^7.18.9" + +"@babel/plugin-transform-exponentiation-operator@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-exponentiation-operator/-/plugin-transform-exponentiation-operator-7.18.6.tgz#421c705f4521888c65e91fdd1af951bfefd4dacd" + integrity sha512-wzEtc0+2c88FVR34aQmiz56dxEkxr2g8DQb/KfaFa1JYXOFVsbhvAonFN6PwVWj++fKmku8NP80plJ5Et4wqHw== + dependencies: + "@babel/helper-builder-binary-assignment-operator-visitor" "^7.18.6" + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-for-of@^7.18.8": + version "7.18.8" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-for-of/-/plugin-transform-for-of-7.18.8.tgz#6ef8a50b244eb6a0bdbad0c7c61877e4e30097c1" + integrity sha512-yEfTRnjuskWYo0k1mHUqrVWaZwrdq8AYbfrpqULOJOaucGSp4mNMVps+YtA8byoevxS/urwU75vyhQIxcCgiBQ== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-function-name@^7.18.9": + version "7.18.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-function-name/-/plugin-transform-function-name-7.18.9.tgz#cc354f8234e62968946c61a46d6365440fc764e0" + integrity sha512-WvIBoRPaJQ5yVHzcnJFor7oS5Ls0PYixlTYE63lCj2RtdQEl15M68FXQlxnG6wdraJIXRdR7KI+hQ7q/9QjrCQ== + dependencies: + "@babel/helper-compilation-targets" "^7.18.9" + "@babel/helper-function-name" "^7.18.9" + "@babel/helper-plugin-utils" "^7.18.9" + +"@babel/plugin-transform-literals@^7.18.9": + version "7.18.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-literals/-/plugin-transform-literals-7.18.9.tgz#72796fdbef80e56fba3c6a699d54f0de557444bc" + integrity sha512-IFQDSRoTPnrAIrI5zoZv73IFeZu2dhu6irxQjY9rNjTT53VmKg9fenjvoiOWOkJ6mm4jKVPtdMzBY98Fp4Z4cg== + dependencies: + "@babel/helper-plugin-utils" "^7.18.9" + +"@babel/plugin-transform-member-expression-literals@^7.18.6": + version "7.18.6" + 
resolved "https://registry.yarnpkg.com/@babel/plugin-transform-member-expression-literals/-/plugin-transform-member-expression-literals-7.18.6.tgz#ac9fdc1a118620ac49b7e7a5d2dc177a1bfee88e" + integrity sha512-qSF1ihLGO3q+/g48k85tUjD033C29TNTVB2paCwZPVmOsjn9pClvYYrM2VeJpBY2bcNkuny0YUyTNRyRxJ54KA== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-modules-amd@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-amd/-/plugin-transform-modules-amd-7.18.6.tgz#8c91f8c5115d2202f277549848874027d7172d21" + integrity sha512-Pra5aXsmTsOnjM3IajS8rTaLCy++nGM4v3YR4esk5PCsyg9z8NA5oQLwxzMUtDBd8F+UmVza3VxoAaWCbzH1rg== + dependencies: + "@babel/helper-module-transforms" "^7.18.6" + "@babel/helper-plugin-utils" "^7.18.6" + babel-plugin-dynamic-import-node "^2.3.3" + +"@babel/plugin-transform-modules-commonjs@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.18.6.tgz#afd243afba166cca69892e24a8fd8c9f2ca87883" + integrity sha512-Qfv2ZOWikpvmedXQJDSbxNqy7Xr/j2Y8/KfijM0iJyKkBTmWuvCA1yeH1yDM7NJhBW/2aXxeucLj6i80/LAJ/Q== + dependencies: + "@babel/helper-module-transforms" "^7.18.6" + "@babel/helper-plugin-utils" "^7.18.6" + "@babel/helper-simple-access" "^7.18.6" + babel-plugin-dynamic-import-node "^2.3.3" + +"@babel/plugin-transform-modules-systemjs@^7.18.9": + version "7.18.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-systemjs/-/plugin-transform-modules-systemjs-7.18.9.tgz#545df284a7ac6a05125e3e405e536c5853099a06" + integrity sha512-zY/VSIbbqtoRoJKo2cDTewL364jSlZGvn0LKOf9ntbfxOvjfmyrdtEEOAdswOswhZEb8UH3jDkCKHd1sPgsS0A== + dependencies: + "@babel/helper-hoist-variables" "^7.18.6" + "@babel/helper-module-transforms" "^7.18.9" + "@babel/helper-plugin-utils" "^7.18.9" + "@babel/helper-validator-identifier" "^7.18.6" + babel-plugin-dynamic-import-node "^2.3.3" + 
+"@babel/plugin-transform-modules-umd@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-umd/-/plugin-transform-modules-umd-7.18.6.tgz#81d3832d6034b75b54e62821ba58f28ed0aab4b9" + integrity sha512-dcegErExVeXcRqNtkRU/z8WlBLnvD4MRnHgNs3MytRO1Mn1sHRyhbcpYbVMGclAqOjdW+9cfkdZno9dFdfKLfQ== + dependencies: + "@babel/helper-module-transforms" "^7.18.6" + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-named-capturing-groups-regex@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-named-capturing-groups-regex/-/plugin-transform-named-capturing-groups-regex-7.18.6.tgz#c89bfbc7cc6805d692f3a49bc5fc1b630007246d" + integrity sha512-UmEOGF8XgaIqD74bC8g7iV3RYj8lMf0Bw7NJzvnS9qQhM4mg+1WHKotUIdjxgD2RGrgFLZZPCFPFj3P/kVDYhg== + dependencies: + "@babel/helper-create-regexp-features-plugin" "^7.18.6" + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-new-target@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-new-target/-/plugin-transform-new-target-7.18.6.tgz#d128f376ae200477f37c4ddfcc722a8a1b3246a8" + integrity sha512-DjwFA/9Iu3Z+vrAn+8pBUGcjhxKguSMlsFqeCKbhb9BAV756v0krzVK04CRDi/4aqmk8BsHb4a/gFcaA5joXRw== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-object-super@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-object-super/-/plugin-transform-object-super-7.18.6.tgz#fb3c6ccdd15939b6ff7939944b51971ddc35912c" + integrity sha512-uvGz6zk+pZoS1aTZrOvrbj6Pp/kK2mp45t2B+bTDre2UgsZZ8EZLSJtUg7m/no0zOJUWgFONpB7Zv9W2tSaFlA== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + "@babel/helper-replace-supers" "^7.18.6" + +"@babel/plugin-transform-parameters@^7.18.8": + version "7.18.8" + resolved 
"https://registry.yarnpkg.com/@babel/plugin-transform-parameters/-/plugin-transform-parameters-7.18.8.tgz#ee9f1a0ce6d78af58d0956a9378ea3427cccb48a" + integrity sha512-ivfbE3X2Ss+Fj8nnXvKJS6sjRG4gzwPMsP+taZC+ZzEGjAYlvENixmt1sZ5Ca6tWls+BlKSGKPJ6OOXvXCbkFg== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-property-literals@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-property-literals/-/plugin-transform-property-literals-7.18.6.tgz#e22498903a483448e94e032e9bbb9c5ccbfc93a3" + integrity sha512-cYcs6qlgafTud3PAzrrRNbQtfpQ8+y/+M5tKmksS9+M1ckbH6kzY8MrexEM9mcA6JDsukE19iIRvAyYl463sMg== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-react-display-name@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-display-name/-/plugin-transform-react-display-name-7.18.6.tgz#8b1125f919ef36ebdfff061d664e266c666b9415" + integrity sha512-TV4sQ+T013n61uMoygyMRm+xf04Bd5oqFpv2jAEQwSZ8NwQA7zeRPg1LMVg2PWi3zWBz+CLKD+v5bcpZ/BS0aA== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-react-jsx-development@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-jsx-development/-/plugin-transform-react-jsx-development-7.18.6.tgz#dbe5c972811e49c7405b630e4d0d2e1380c0ddc5" + integrity sha512-SA6HEjwYFKF7WDjWcMcMGUimmw/nhNRDWxr+KaLSCrkD/LMDBvWRmHAYgE1HDeF8KUuI8OAu+RT6EOtKxSW2qA== + dependencies: + "@babel/plugin-transform-react-jsx" "^7.18.6" + +"@babel/plugin-transform-react-jsx@^7.18.6": + version "7.18.10" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-jsx/-/plugin-transform-react-jsx-7.18.10.tgz#ea47b2c4197102c196cbd10db9b3bb20daa820f1" + integrity sha512-gCy7Iikrpu3IZjYZolFE4M1Sm+nrh1/6za2Ewj77Z+XirT4TsbJcvOFOyF+fRPwU6AKKK136CZxx6L8AbSFG6A== + dependencies: + "@babel/helper-annotate-as-pure" "^7.18.6" + 
"@babel/helper-module-imports" "^7.18.6" + "@babel/helper-plugin-utils" "^7.18.9" + "@babel/plugin-syntax-jsx" "^7.18.6" + "@babel/types" "^7.18.10" + +"@babel/plugin-transform-react-pure-annotations@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-pure-annotations/-/plugin-transform-react-pure-annotations-7.18.6.tgz#561af267f19f3e5d59291f9950fd7b9663d0d844" + integrity sha512-I8VfEPg9r2TRDdvnHgPepTKvuRomzA8+u+nhY7qSI1fR2hRNebasZEETLyM5mAUr0Ku56OkXJ0I7NHJnO6cJiQ== + dependencies: + "@babel/helper-annotate-as-pure" "^7.18.6" + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-regenerator@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-regenerator/-/plugin-transform-regenerator-7.18.6.tgz#585c66cb84d4b4bf72519a34cfce761b8676ca73" + integrity sha512-poqRI2+qiSdeldcz4wTSTXBRryoq3Gc70ye7m7UD5Ww0nE29IXqMl6r7Nd15WBgRd74vloEMlShtH6CKxVzfmQ== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + regenerator-transform "^0.15.0" + +"@babel/plugin-transform-reserved-words@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-reserved-words/-/plugin-transform-reserved-words-7.18.6.tgz#b1abd8ebf8edaa5f7fe6bbb8d2133d23b6a6f76a" + integrity sha512-oX/4MyMoypzHjFrT1CdivfKZ+XvIPMFXwwxHp/r0Ddy2Vuomt4HDFGmft1TAY2yiTKiNSsh3kjBAzcM8kSdsjA== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-shorthand-properties@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-shorthand-properties/-/plugin-transform-shorthand-properties-7.18.6.tgz#6d6df7983d67b195289be24909e3f12a8f664dc9" + integrity sha512-eCLXXJqv8okzg86ywZJbRn19YJHU4XUa55oz2wbHhaQVn/MM+XhukiT7SYqp/7o00dg52Rj51Ny+Ecw4oyoygw== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-spread@^7.18.9": + version "7.18.9" + resolved 
"https://registry.yarnpkg.com/@babel/plugin-transform-spread/-/plugin-transform-spread-7.18.9.tgz#6ea7a6297740f381c540ac56caf75b05b74fb664" + integrity sha512-39Q814wyoOPtIB/qGopNIL9xDChOE1pNU0ZY5dO0owhiVt/5kFm4li+/bBtwc7QotG0u5EPzqhZdjMtmqBqyQA== + dependencies: + "@babel/helper-plugin-utils" "^7.18.9" + "@babel/helper-skip-transparent-expression-wrappers" "^7.18.9" + +"@babel/plugin-transform-sticky-regex@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-sticky-regex/-/plugin-transform-sticky-regex-7.18.6.tgz#c6706eb2b1524028e317720339583ad0f444adcc" + integrity sha512-kfiDrDQ+PBsQDO85yj1icueWMfGfJFKN1KCkndygtu/C9+XUfydLC8Iv5UYJqRwy4zk8EcplRxEOeLyjq1gm6Q== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-template-literals@^7.18.9": + version "7.18.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-template-literals/-/plugin-transform-template-literals-7.18.9.tgz#04ec6f10acdaa81846689d63fae117dd9c243a5e" + integrity sha512-S8cOWfT82gTezpYOiVaGHrCbhlHgKhQt8XH5ES46P2XWmX92yisoZywf5km75wv5sYcXDUCLMmMxOLCtthDgMA== + dependencies: + "@babel/helper-plugin-utils" "^7.18.9" + +"@babel/plugin-transform-typeof-symbol@^7.18.9": + version "7.18.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-typeof-symbol/-/plugin-transform-typeof-symbol-7.18.9.tgz#c8cea68263e45addcd6afc9091429f80925762c0" + integrity sha512-SRfwTtF11G2aemAZWivL7PD+C9z52v9EvMqH9BuYbabyPuKUvSWks3oCg6041pT925L4zVFqaVBeECwsmlguEw== + dependencies: + "@babel/helper-plugin-utils" "^7.18.9" + +"@babel/plugin-transform-unicode-escapes@^7.18.10": + version "7.18.10" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-unicode-escapes/-/plugin-transform-unicode-escapes-7.18.10.tgz#1ecfb0eda83d09bbcb77c09970c2dd55832aa246" + integrity sha512-kKAdAI+YzPgGY/ftStBFXTI1LZFju38rYThnfMykS+IXy8BVx+res7s2fxf1l8I35DV2T97ezo6+SGrXz6B3iQ== + dependencies: + "@babel/helper-plugin-utils" "^7.18.9" + 
+"@babel/plugin-transform-unicode-regex@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-unicode-regex/-/plugin-transform-unicode-regex-7.18.6.tgz#194317225d8c201bbae103364ffe9e2cea36cdca" + integrity sha512-gE7A6Lt7YLnNOL3Pb9BNeZvi+d8l7tcRrG4+pwJjK9hD2xX4mEvjlQW60G9EEmfXVYRPv9VRQcyegIVHCql/AA== + dependencies: + "@babel/helper-create-regexp-features-plugin" "^7.18.6" + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/preset-env@^7.12.11": + version "7.18.10" + resolved "https://registry.yarnpkg.com/@babel/preset-env/-/preset-env-7.18.10.tgz#83b8dfe70d7eea1aae5a10635ab0a5fe60dfc0f4" + integrity sha512-wVxs1yjFdW3Z/XkNfXKoblxoHgbtUF7/l3PvvP4m02Qz9TZ6uZGxRVYjSQeR87oQmHco9zWitW5J82DJ7sCjvA== + dependencies: + "@babel/compat-data" "^7.18.8" + "@babel/helper-compilation-targets" "^7.18.9" + "@babel/helper-plugin-utils" "^7.18.9" + "@babel/helper-validator-option" "^7.18.6" + "@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression" "^7.18.6" + "@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining" "^7.18.9" + "@babel/plugin-proposal-async-generator-functions" "^7.18.10" + "@babel/plugin-proposal-class-properties" "^7.18.6" + "@babel/plugin-proposal-class-static-block" "^7.18.6" + "@babel/plugin-proposal-dynamic-import" "^7.18.6" + "@babel/plugin-proposal-export-namespace-from" "^7.18.9" + "@babel/plugin-proposal-json-strings" "^7.18.6" + "@babel/plugin-proposal-logical-assignment-operators" "^7.18.9" + "@babel/plugin-proposal-nullish-coalescing-operator" "^7.18.6" + "@babel/plugin-proposal-numeric-separator" "^7.18.6" + "@babel/plugin-proposal-object-rest-spread" "^7.18.9" + "@babel/plugin-proposal-optional-catch-binding" "^7.18.6" + "@babel/plugin-proposal-optional-chaining" "^7.18.9" + "@babel/plugin-proposal-private-methods" "^7.18.6" + "@babel/plugin-proposal-private-property-in-object" "^7.18.6" + "@babel/plugin-proposal-unicode-property-regex" "^7.18.6" + 
"@babel/plugin-syntax-async-generators" "^7.8.4" + "@babel/plugin-syntax-class-properties" "^7.12.13" + "@babel/plugin-syntax-class-static-block" "^7.14.5" + "@babel/plugin-syntax-dynamic-import" "^7.8.3" + "@babel/plugin-syntax-export-namespace-from" "^7.8.3" + "@babel/plugin-syntax-import-assertions" "^7.18.6" + "@babel/plugin-syntax-json-strings" "^7.8.3" + "@babel/plugin-syntax-logical-assignment-operators" "^7.10.4" + "@babel/plugin-syntax-nullish-coalescing-operator" "^7.8.3" + "@babel/plugin-syntax-numeric-separator" "^7.10.4" + "@babel/plugin-syntax-object-rest-spread" "^7.8.3" + "@babel/plugin-syntax-optional-catch-binding" "^7.8.3" + "@babel/plugin-syntax-optional-chaining" "^7.8.3" + "@babel/plugin-syntax-private-property-in-object" "^7.14.5" + "@babel/plugin-syntax-top-level-await" "^7.14.5" + "@babel/plugin-transform-arrow-functions" "^7.18.6" + "@babel/plugin-transform-async-to-generator" "^7.18.6" + "@babel/plugin-transform-block-scoped-functions" "^7.18.6" + "@babel/plugin-transform-block-scoping" "^7.18.9" + "@babel/plugin-transform-classes" "^7.18.9" + "@babel/plugin-transform-computed-properties" "^7.18.9" + "@babel/plugin-transform-destructuring" "^7.18.9" + "@babel/plugin-transform-dotall-regex" "^7.18.6" + "@babel/plugin-transform-duplicate-keys" "^7.18.9" + "@babel/plugin-transform-exponentiation-operator" "^7.18.6" + "@babel/plugin-transform-for-of" "^7.18.8" + "@babel/plugin-transform-function-name" "^7.18.9" + "@babel/plugin-transform-literals" "^7.18.9" + "@babel/plugin-transform-member-expression-literals" "^7.18.6" + "@babel/plugin-transform-modules-amd" "^7.18.6" + "@babel/plugin-transform-modules-commonjs" "^7.18.6" + "@babel/plugin-transform-modules-systemjs" "^7.18.9" + "@babel/plugin-transform-modules-umd" "^7.18.6" + "@babel/plugin-transform-named-capturing-groups-regex" "^7.18.6" + "@babel/plugin-transform-new-target" "^7.18.6" + "@babel/plugin-transform-object-super" "^7.18.6" + "@babel/plugin-transform-parameters" "^7.18.8" + 
"@babel/plugin-transform-property-literals" "^7.18.6" + "@babel/plugin-transform-regenerator" "^7.18.6" + "@babel/plugin-transform-reserved-words" "^7.18.6" + "@babel/plugin-transform-shorthand-properties" "^7.18.6" + "@babel/plugin-transform-spread" "^7.18.9" + "@babel/plugin-transform-sticky-regex" "^7.18.6" + "@babel/plugin-transform-template-literals" "^7.18.9" + "@babel/plugin-transform-typeof-symbol" "^7.18.9" + "@babel/plugin-transform-unicode-escapes" "^7.18.10" + "@babel/plugin-transform-unicode-regex" "^7.18.6" + "@babel/preset-modules" "^0.1.5" + "@babel/types" "^7.18.10" + babel-plugin-polyfill-corejs2 "^0.3.2" + babel-plugin-polyfill-corejs3 "^0.5.3" + babel-plugin-polyfill-regenerator "^0.4.0" + core-js-compat "^3.22.1" + semver "^6.3.0" + +"@babel/preset-modules@^0.1.5": + version "0.1.5" + resolved "https://registry.yarnpkg.com/@babel/preset-modules/-/preset-modules-0.1.5.tgz#ef939d6e7f268827e1841638dc6ff95515e115d9" + integrity sha512-A57th6YRG7oR3cq/yt/Y84MvGgE0eJG2F1JLhKuyG+jFxEgrd/HAMJatiFtmOiZurz+0DkrvbheCLaV5f2JfjA== + dependencies: + "@babel/helper-plugin-utils" "^7.0.0" + "@babel/plugin-proposal-unicode-property-regex" "^7.4.4" + "@babel/plugin-transform-dotall-regex" "^7.4.4" + "@babel/types" "^7.4.4" + esutils "^2.0.2" + +"@babel/preset-react@^7.12.10": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/preset-react/-/preset-react-7.18.6.tgz#979f76d6277048dc19094c217b507f3ad517dd2d" + integrity sha512-zXr6atUmyYdiWRVLOZahakYmOBHtWc2WGCkP8PYTgZi0iJXDY2CN180TdrIW4OGOAdLc7TifzDIvtx6izaRIzg== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + "@babel/helper-validator-option" "^7.18.6" + "@babel/plugin-transform-react-display-name" "^7.18.6" + "@babel/plugin-transform-react-jsx" "^7.18.6" + "@babel/plugin-transform-react-jsx-development" "^7.18.6" + "@babel/plugin-transform-react-pure-annotations" "^7.18.6" + +"@babel/runtime@^7.1.2", "@babel/runtime@^7.12.1", "@babel/runtime@^7.12.13", "@babel/runtime@^7.12.5", 
"@babel/runtime@^7.15.4", "@babel/runtime@^7.16.3", "@babel/runtime@^7.17.2", "@babel/runtime@^7.18.3", "@babel/runtime@^7.5.5", "@babel/runtime@^7.6.2", "@babel/runtime@^7.6.3", "@babel/runtime@^7.7.6", "@babel/runtime@^7.8.4", "@babel/runtime@^7.8.7", "@babel/runtime@^7.9.2": + version "7.18.9" + resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.18.9.tgz#b4fcfce55db3d2e5e080d2490f608a3b9f407f4a" + integrity sha512-lkqXDcvlFT5rvEjiu6+QYO+1GXrEHRo2LOtS7E4GtX5ESIZOgepqsZBVIj6Pv+a6zqsya9VCgiK1KAK4BvJDAw== + dependencies: + regenerator-runtime "^0.13.4" + +"@babel/template@^7.18.10", "@babel/template@^7.18.6", "@babel/template@^7.3.3": + version "7.18.10" + resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.18.10.tgz#6f9134835970d1dbf0835c0d100c9f38de0c5e71" + integrity sha512-TI+rCtooWHr3QJ27kJxfjutghu44DLnasDMwpDqCXVTal9RLp3RSYNh4NdBrRP2cQAoG9A8juOQl6P6oZG4JxA== + dependencies: + "@babel/code-frame" "^7.18.6" + "@babel/parser" "^7.18.10" + "@babel/types" "^7.18.10" + +"@babel/traverse@^7.1.0", "@babel/traverse@^7.18.10", "@babel/traverse@^7.18.9": + version "7.18.10" + resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.18.10.tgz#37ad97d1cb00efa869b91dd5d1950f8a6cf0cb08" + integrity sha512-J7ycxg0/K9XCtLyHf0cz2DqDihonJeIo+z+HEdRe9YuT8TY4A66i+Ab2/xZCEW7Ro60bPCBBfqqboHSamoV3+g== + dependencies: + "@babel/code-frame" "^7.18.6" + "@babel/generator" "^7.18.10" + "@babel/helper-environment-visitor" "^7.18.9" + "@babel/helper-function-name" "^7.18.9" + "@babel/helper-hoist-variables" "^7.18.6" + "@babel/helper-split-export-declaration" "^7.18.6" + "@babel/parser" "^7.18.10" + "@babel/types" "^7.18.10" + debug "^4.1.0" + globals "^11.1.0" + +"@babel/types@^7.0.0", "@babel/types@^7.18.10", "@babel/types@^7.18.6", "@babel/types@^7.18.9", "@babel/types@^7.3.0", "@babel/types@^7.3.3", "@babel/types@^7.4.4": + version "7.18.10" + resolved 
"https://registry.yarnpkg.com/@babel/types/-/types-7.18.10.tgz#4908e81b6b339ca7c6b7a555a5fc29446f26dde6" + integrity sha512-MJvnbEiiNkpjo+LknnmRrqbY1GPUUggjv+wQVjetM/AONoupqRALB7I6jGqNUAZsKcRIEu2J6FRFvsczljjsaQ== + dependencies: + "@babel/helper-string-parser" "^7.18.10" + "@babel/helper-validator-identifier" "^7.18.6" + to-fast-properties "^2.0.0" + +"@bcoe/v8-coverage@^0.2.3": + version "0.2.3" + resolved "https://registry.yarnpkg.com/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz#75a2e8b51cb758a7553d6804a5932d7aace75c39" + integrity sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw== + +"@cnakazawa/watch@^1.0.3": + version "1.0.4" + resolved "https://registry.yarnpkg.com/@cnakazawa/watch/-/watch-1.0.4.tgz#f864ae85004d0fcab6f50be9141c4da368d1656a" + integrity sha512-v9kIhKwjeZThiWrLmj0y17CWoyddASLj9O2yvbZkbvw/N3rWOYy9zkV66ursAoVr0mV15bL8g0c4QZUE6cdDoQ== + dependencies: + exec-sh "^0.3.2" + minimist "^1.2.0" + +"@discoveryjs/json-ext@^0.5.0": + version "0.5.7" + resolved "https://registry.yarnpkg.com/@discoveryjs/json-ext/-/json-ext-0.5.7.tgz#1d572bfbbe14b7704e0ba0f39b74815b84870d70" + integrity sha512-dBVuXR082gk3jsFp7Rd/JI4kytwGHecnCoTtXFb7DB6CNHp4rg5k1bhg0nWdLGLnOV71lmDzGQaLMy8iPLY0pw== + +"@eslint/eslintrc@^0.4.3": + version "0.4.3" + resolved "https://registry.yarnpkg.com/@eslint/eslintrc/-/eslintrc-0.4.3.tgz#9e42981ef035beb3dd49add17acb96e8ff6f394c" + integrity sha512-J6KFFz5QCYUJq3pf0mjEcCJVERbzv71PUIDczuh9JkwGEzced6CO5ADLHB1rbf/+oPBtoPfMYNOpGDzCANlbXw== + dependencies: + ajv "^6.12.4" + debug "^4.1.1" + espree "^7.3.0" + globals "^13.9.0" + ignore "^4.0.6" + import-fresh "^3.2.1" + js-yaml "^3.13.1" + minimatch "^3.0.4" + strip-json-comments "^3.1.1" + +"@humanwhocodes/config-array@^0.5.0": + version "0.5.0" + resolved "https://registry.yarnpkg.com/@humanwhocodes/config-array/-/config-array-0.5.0.tgz#1407967d4c6eecd7388f83acf1eaf4d0c6e58ef9" + integrity 
sha512-FagtKFz74XrTl7y6HCzQpwDfXP0yhxe9lHLD1UZxjvZIcbyRz8zTFF/yYNfSfzU414eDwZ1SrO0Qvtyf+wFMQg== + dependencies: + "@humanwhocodes/object-schema" "^1.2.0" + debug "^4.1.1" + minimatch "^3.0.4" + +"@humanwhocodes/object-schema@^1.2.0": + version "1.2.1" + resolved "https://registry.yarnpkg.com/@humanwhocodes/object-schema/-/object-schema-1.2.1.tgz#b520529ec21d8e5945a1851dfd1c32e94e39ff45" + integrity sha512-ZnQMnLV4e7hDlUvw8H+U8ASL02SS2Gn6+9Ac3wGGLIe7+je2AeAOxPY+izIPJDfFDb7eDjev0Us8MO1iFRN8hA== + +"@istanbuljs/load-nyc-config@^1.0.0": + version "1.1.0" + resolved "https://registry.yarnpkg.com/@istanbuljs/load-nyc-config/-/load-nyc-config-1.1.0.tgz#fd3db1d59ecf7cf121e80650bb86712f9b55eced" + integrity sha512-VjeHSlIzpv/NyD3N0YuHfXOPDIixcA1q2ZV98wsMqcYlPmv2n3Yb2lYP9XMElnaFVXg5A7YLTeLu6V84uQDjmQ== + dependencies: + camelcase "^5.3.1" + find-up "^4.1.0" + get-package-type "^0.1.0" + js-yaml "^3.13.1" + resolve-from "^5.0.0" + +"@istanbuljs/schema@^0.1.2": + version "0.1.3" + resolved "https://registry.yarnpkg.com/@istanbuljs/schema/-/schema-0.1.3.tgz#e45e384e4b8ec16bce2fd903af78450f6bf7ec98" + integrity sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA== + +"@jest/console@^26.6.2": + version "26.6.2" + resolved "https://registry.yarnpkg.com/@jest/console/-/console-26.6.2.tgz#4e04bc464014358b03ab4937805ee36a0aeb98f2" + integrity sha512-IY1R2i2aLsLr7Id3S6p2BA82GNWryt4oSvEXLAKc+L2zdi89dSkE8xC1C+0kpATG4JhBJREnQOH7/zmccM2B0g== + dependencies: + "@jest/types" "^26.6.2" + "@types/node" "*" + chalk "^4.0.0" + jest-message-util "^26.6.2" + jest-util "^26.6.2" + slash "^3.0.0" + +"@jest/core@^26.6.3": + version "26.6.3" + resolved "https://registry.yarnpkg.com/@jest/core/-/core-26.6.3.tgz#7639fcb3833d748a4656ada54bde193051e45fad" + integrity sha512-xvV1kKbhfUqFVuZ8Cyo+JPpipAHHAV3kcDBftiduK8EICXmTFddryy3P7NfZt8Pv37rA9nEJBKCCkglCPt/Xjw== + dependencies: + "@jest/console" "^26.6.2" + "@jest/reporters" "^26.6.2" + "@jest/test-result" 
"^26.6.2" + "@jest/transform" "^26.6.2" + "@jest/types" "^26.6.2" + "@types/node" "*" + ansi-escapes "^4.2.1" + chalk "^4.0.0" + exit "^0.1.2" + graceful-fs "^4.2.4" + jest-changed-files "^26.6.2" + jest-config "^26.6.3" + jest-haste-map "^26.6.2" + jest-message-util "^26.6.2" + jest-regex-util "^26.0.0" + jest-resolve "^26.6.2" + jest-resolve-dependencies "^26.6.3" + jest-runner "^26.6.3" + jest-runtime "^26.6.3" + jest-snapshot "^26.6.2" + jest-util "^26.6.2" + jest-validate "^26.6.2" + jest-watcher "^26.6.2" + micromatch "^4.0.2" + p-each-series "^2.1.0" + rimraf "^3.0.0" + slash "^3.0.0" + strip-ansi "^6.0.0" + +"@jest/environment@^26.6.2": + version "26.6.2" + resolved "https://registry.yarnpkg.com/@jest/environment/-/environment-26.6.2.tgz#ba364cc72e221e79cc8f0a99555bf5d7577cf92c" + integrity sha512-nFy+fHl28zUrRsCeMB61VDThV1pVTtlEokBRgqPrcT1JNq4yRNIyTHfyht6PqtUvY9IsuLGTrbG8kPXjSZIZwA== + dependencies: + "@jest/fake-timers" "^26.6.2" + "@jest/types" "^26.6.2" + "@types/node" "*" + jest-mock "^26.6.2" + +"@jest/fake-timers@^26.6.2": + version "26.6.2" + resolved "https://registry.yarnpkg.com/@jest/fake-timers/-/fake-timers-26.6.2.tgz#459c329bcf70cee4af4d7e3f3e67848123535aad" + integrity sha512-14Uleatt7jdzefLPYM3KLcnUl1ZNikaKq34enpb5XG9i81JpppDb5muZvonvKyrl7ftEHkKS5L5/eB/kxJ+bvA== + dependencies: + "@jest/types" "^26.6.2" + "@sinonjs/fake-timers" "^6.0.1" + "@types/node" "*" + jest-message-util "^26.6.2" + jest-mock "^26.6.2" + jest-util "^26.6.2" + +"@jest/globals@^26.6.2": + version "26.6.2" + resolved "https://registry.yarnpkg.com/@jest/globals/-/globals-26.6.2.tgz#5b613b78a1aa2655ae908eba638cc96a20df720a" + integrity sha512-85Ltnm7HlB/KesBUuALwQ68YTU72w9H2xW9FjZ1eL1U3lhtefjjl5c2MiUbpXt/i6LaPRvoOFJ22yCBSfQ0JIA== + dependencies: + "@jest/environment" "^26.6.2" + "@jest/types" "^26.6.2" + expect "^26.6.2" + +"@jest/reporters@^26.6.2": + version "26.6.2" + resolved 
"https://registry.yarnpkg.com/@jest/reporters/-/reporters-26.6.2.tgz#1f518b99637a5f18307bd3ecf9275f6882a667f6" + integrity sha512-h2bW53APG4HvkOnVMo8q3QXa6pcaNt1HkwVsOPMBV6LD/q9oSpxNSYZQYkAnjdMjrJ86UuYeLo+aEZClV6opnw== + dependencies: + "@bcoe/v8-coverage" "^0.2.3" + "@jest/console" "^26.6.2" + "@jest/test-result" "^26.6.2" + "@jest/transform" "^26.6.2" + "@jest/types" "^26.6.2" + chalk "^4.0.0" + collect-v8-coverage "^1.0.0" + exit "^0.1.2" + glob "^7.1.2" + graceful-fs "^4.2.4" + istanbul-lib-coverage "^3.0.0" + istanbul-lib-instrument "^4.0.3" + istanbul-lib-report "^3.0.0" + istanbul-lib-source-maps "^4.0.0" + istanbul-reports "^3.0.2" + jest-haste-map "^26.6.2" + jest-resolve "^26.6.2" + jest-util "^26.6.2" + jest-worker "^26.6.2" + slash "^3.0.0" + source-map "^0.6.0" + string-length "^4.0.1" + terminal-link "^2.0.0" + v8-to-istanbul "^7.0.0" + optionalDependencies: + node-notifier "^8.0.0" + +"@jest/schemas@^28.1.3": + version "28.1.3" + resolved "https://registry.yarnpkg.com/@jest/schemas/-/schemas-28.1.3.tgz#ad8b86a66f11f33619e3d7e1dcddd7f2d40ff905" + integrity sha512-/l/VWsdt/aBXgjshLWOFyFt3IVdYypu5y2Wn2rOO1un6nkqIn8SLXzgIMYXFyYsRWDyF5EthmKJMIdJvk08grg== + dependencies: + "@sinclair/typebox" "^0.24.1" + +"@jest/source-map@^26.6.2": + version "26.6.2" + resolved "https://registry.yarnpkg.com/@jest/source-map/-/source-map-26.6.2.tgz#29af5e1e2e324cafccc936f218309f54ab69d535" + integrity sha512-YwYcCwAnNmOVsZ8mr3GfnzdXDAl4LaenZP5z+G0c8bzC9/dugL8zRmxZzdoTl4IaS3CryS1uWnROLPFmb6lVvA== + dependencies: + callsites "^3.0.0" + graceful-fs "^4.2.4" + source-map "^0.6.0" + +"@jest/test-result@^26.6.2": + version "26.6.2" + resolved "https://registry.yarnpkg.com/@jest/test-result/-/test-result-26.6.2.tgz#55da58b62df134576cc95476efa5f7949e3f5f18" + integrity sha512-5O7H5c/7YlojphYNrK02LlDIV2GNPYisKwHm2QTKjNZeEzezCbwYs9swJySv2UfPMyZ0VdsmMv7jIlD/IKYQpQ== + dependencies: + "@jest/console" "^26.6.2" + "@jest/types" "^26.6.2" + "@types/istanbul-lib-coverage" "^2.0.0" + 
collect-v8-coverage "^1.0.0" + +"@jest/test-sequencer@^26.6.3": + version "26.6.3" + resolved "https://registry.yarnpkg.com/@jest/test-sequencer/-/test-sequencer-26.6.3.tgz#98e8a45100863886d074205e8ffdc5a7eb582b17" + integrity sha512-YHlVIjP5nfEyjlrSr8t/YdNfU/1XEt7c5b4OxcXCjyRhjzLYu/rO69/WHPuYcbCWkz8kAeZVZp2N2+IOLLEPGw== + dependencies: + "@jest/test-result" "^26.6.2" + graceful-fs "^4.2.4" + jest-haste-map "^26.6.2" + jest-runner "^26.6.3" + jest-runtime "^26.6.3" + +"@jest/transform@^26.6.2": + version "26.6.2" + resolved "https://registry.yarnpkg.com/@jest/transform/-/transform-26.6.2.tgz#5ac57c5fa1ad17b2aae83e73e45813894dcf2e4b" + integrity sha512-E9JjhUgNzvuQ+vVAL21vlyfy12gP0GhazGgJC4h6qUt1jSdUXGWJ1wfu/X7Sd8etSgxV4ovT1pb9v5D6QW4XgA== + dependencies: + "@babel/core" "^7.1.0" + "@jest/types" "^26.6.2" + babel-plugin-istanbul "^6.0.0" + chalk "^4.0.0" + convert-source-map "^1.4.0" + fast-json-stable-stringify "^2.0.0" + graceful-fs "^4.2.4" + jest-haste-map "^26.6.2" + jest-regex-util "^26.0.0" + jest-util "^26.6.2" + micromatch "^4.0.2" + pirates "^4.0.1" + slash "^3.0.0" + source-map "^0.6.1" + write-file-atomic "^3.0.0" + +"@jest/types@^26.6.2": + version "26.6.2" + resolved "https://registry.yarnpkg.com/@jest/types/-/types-26.6.2.tgz#bef5a532030e1d88a2f5a6d933f84e97226ed48e" + integrity sha512-fC6QCp7Sc5sX6g8Tvbmj4XUTbyrik0akgRy03yjXbQaBWWNWGE7SGtJk98m0N8nzegD/7SggrUlivxo5ax4KWQ== + dependencies: + "@types/istanbul-lib-coverage" "^2.0.0" + "@types/istanbul-reports" "^3.0.0" + "@types/node" "*" + "@types/yargs" "^15.0.0" + chalk "^4.0.0" + +"@jridgewell/gen-mapping@^0.1.0": + version "0.1.1" + resolved "https://registry.yarnpkg.com/@jridgewell/gen-mapping/-/gen-mapping-0.1.1.tgz#e5d2e450306a9491e3bd77e323e38d7aff315996" + integrity sha512-sQXCasFk+U8lWYEe66WxRDOE9PjVz4vSM51fTu3Hw+ClTpUSQb718772vH3pyS5pShp6lvQM7SxgIDXXXmOX7w== + dependencies: + "@jridgewell/set-array" "^1.0.0" + "@jridgewell/sourcemap-codec" "^1.4.10" + +"@jridgewell/gen-mapping@^0.3.0", 
"@jridgewell/gen-mapping@^0.3.2": + version "0.3.2" + resolved "https://registry.yarnpkg.com/@jridgewell/gen-mapping/-/gen-mapping-0.3.2.tgz#c1aedc61e853f2bb9f5dfe6d4442d3b565b253b9" + integrity sha512-mh65xKQAzI6iBcFzwv28KVWSmCkdRBWoOh+bYQGW3+6OZvbbN3TqMGo5hqYxQniRcH9F2VZIoJCm4pa3BPDK/A== + dependencies: + "@jridgewell/set-array" "^1.0.1" + "@jridgewell/sourcemap-codec" "^1.4.10" + "@jridgewell/trace-mapping" "^0.3.9" + +"@jridgewell/resolve-uri@^3.0.3": + version "3.1.0" + resolved "https://registry.yarnpkg.com/@jridgewell/resolve-uri/-/resolve-uri-3.1.0.tgz#2203b118c157721addfe69d47b70465463066d78" + integrity sha512-F2msla3tad+Mfht5cJq7LSXcdudKTWCVYUgw6pLFOOHSTtZlj6SWNYAp+AhuqLmWdBO2X5hPrLcu8cVP8fy28w== + +"@jridgewell/set-array@^1.0.0", "@jridgewell/set-array@^1.0.1": + version "1.1.2" + resolved "https://registry.yarnpkg.com/@jridgewell/set-array/-/set-array-1.1.2.tgz#7c6cf998d6d20b914c0a55a91ae928ff25965e72" + integrity sha512-xnkseuNADM0gt2bs+BvhO0p78Mk762YnZdsuzFV018NoG1Sj1SCQvpSqa7XUaTam5vAGasABV9qXASMKnFMwMw== + +"@jridgewell/source-map@^0.3.2": + version "0.3.2" + resolved "https://registry.yarnpkg.com/@jridgewell/source-map/-/source-map-0.3.2.tgz#f45351aaed4527a298512ec72f81040c998580fb" + integrity sha512-m7O9o2uR8k2ObDysZYzdfhb08VuEml5oWGiosa1VdaPZ/A6QyPkAJuwN0Q1lhULOf6B7MtQmHENS743hWtCrgw== + dependencies: + "@jridgewell/gen-mapping" "^0.3.0" + "@jridgewell/trace-mapping" "^0.3.9" + +"@jridgewell/sourcemap-codec@^1.4.10": + version "1.4.14" + resolved "https://registry.yarnpkg.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.14.tgz#add4c98d341472a289190b424efbdb096991bb24" + integrity sha512-XPSJHWmi394fuUuzDnGz1wiKqWfo1yXecHQMRf2l6hztTO+nPru658AyDngaBe7isIxEkRsPR3FZh+s7iVa4Uw== + +"@jridgewell/trace-mapping@^0.3.7", "@jridgewell/trace-mapping@^0.3.9": + version "0.3.14" + resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.14.tgz#b231a081d8f66796e475ad588a1ef473112701ed" + integrity 
sha512-bJWEfQ9lPTvm3SneWwRFVLzrh6nhjwqw7TUFFBEMzwvg7t7PCDenf2lDwqo4NQXzdpgBXyFgDWnQA+2vkruksQ== + dependencies: + "@jridgewell/resolve-uri" "^3.0.3" + "@jridgewell/sourcemap-codec" "^1.4.10" + +"@leichtgewicht/ip-codec@^2.0.1": + version "2.0.4" + resolved "https://registry.yarnpkg.com/@leichtgewicht/ip-codec/-/ip-codec-2.0.4.tgz#b2ac626d6cb9c8718ab459166d4bb405b8ffa78b" + integrity sha512-Hcv+nVC0kZnQ3tD9GVu5xSMR4VVYOteQIr/hwFPVEvPdlXqgGEuRjiheChHgdM+JyqdgNcmzZOX/tnl0JOiI7A== + +"@popperjs/core@^2.11.5": + version "2.11.5" + resolved "https://registry.yarnpkg.com/@popperjs/core/-/core-2.11.5.tgz#db5a11bf66bdab39569719555b0f76e138d7bd64" + integrity sha512-9X2obfABZuDVLCgPK9aX0a/x4jaOEweTTWE2+9sr0Qqqevj2Uv5XorvusThmc9XGYpS9yI+fhh8RTafBtGposw== + +"@react-aria/ssr@^3.2.0": + version "3.3.0" + resolved "https://registry.yarnpkg.com/@react-aria/ssr/-/ssr-3.3.0.tgz#25e81daf0c7a270a4a891159d8d984578e4512d8" + integrity sha512-yNqUDuOVZIUGP81R87BJVi/ZUZp/nYOBXbPsRe7oltJOfErQZD+UezMpw4vM2KRz18cURffvmC8tJ6JTeyDtaQ== + dependencies: + "@babel/runtime" "^7.6.2" + +"@restart/hooks@^0.4.6", "@restart/hooks@^0.4.7": + version "0.4.7" + resolved "https://registry.yarnpkg.com/@restart/hooks/-/hooks-0.4.7.tgz#d79ca6472c01ce04389fc73d4a79af1b5e33cd39" + integrity sha512-ZbjlEHcG+FQtpDPHd7i4FzNNvJf2enAwZfJbpM8CW7BhmOAbsHpZe3tsHwfQUrBuyrxWqPYp2x5UMnilWcY22A== + dependencies: + dequal "^2.0.2" + +"@restart/ui@^1.2.0": + version "1.3.1" + resolved "https://registry.yarnpkg.com/@restart/ui/-/ui-1.3.1.tgz#ae16be26128cc205efb7e0bf93d1f34deb1fe116" + integrity sha512-MYvMs2eeZTHu2dBJHOXKx72vxzEZeWbZx2z1QjeXq62iYjpjIyukBC2ZEy8x+sb9Gl0AiOiHkPXrl1wn95aOGQ== + dependencies: + "@babel/runtime" "^7.18.3" + "@popperjs/core" "^2.11.5" + "@react-aria/ssr" "^3.2.0" + "@restart/hooks" "^0.4.7" + "@types/warning" "^3.0.0" + dequal "^2.0.2" + dom-helpers "^5.2.0" + uncontrollable "^7.2.1" + warning "^4.0.3" + +"@sinclair/typebox@^0.24.1": + version "0.24.26" + resolved 
"https://registry.yarnpkg.com/@sinclair/typebox/-/typebox-0.24.26.tgz#84f9e8c1d93154e734a7947609a1dc7c7a81cc22" + integrity sha512-1ZVIyyS1NXDRVT8GjWD5jULjhDyM3IsIHef2VGUMdnWOlX2tkPjyEX/7K0TGSH2S8EaPhp1ylFdjSjUGQ+gecg== + +"@sinonjs/commons@^1.6.0", "@sinonjs/commons@^1.7.0", "@sinonjs/commons@^1.8.3": + version "1.8.3" + resolved "https://registry.yarnpkg.com/@sinonjs/commons/-/commons-1.8.3.tgz#3802ddd21a50a949b6721ddd72da36e67e7f1b2d" + integrity sha512-xkNcLAn/wZaX14RPlwizcKicDk9G3F8m2nU3L7Ukm5zBgTwiT0wsoFAHx9Jq56fJA1z/7uKGtCRu16sOUCLIHQ== + dependencies: + type-detect "4.0.8" + +"@sinonjs/fake-timers@>=5", "@sinonjs/fake-timers@^9.1.2": + version "9.1.2" + resolved "https://registry.yarnpkg.com/@sinonjs/fake-timers/-/fake-timers-9.1.2.tgz#4eaab737fab77332ab132d396a3c0d364bd0ea8c" + integrity sha512-BPS4ynJW/o92PUR4wgriz2Ud5gpST5vz6GQfMixEDK0Z8ZCUv2M7SkBLykH56T++Xs+8ln9zTGbOvNGIe02/jw== + dependencies: + "@sinonjs/commons" "^1.7.0" + +"@sinonjs/fake-timers@^6.0.1": + version "6.0.1" + resolved "https://registry.yarnpkg.com/@sinonjs/fake-timers/-/fake-timers-6.0.1.tgz#293674fccb3262ac782c7aadfdeca86b10c75c40" + integrity sha512-MZPUxrmFubI36XS1DI3qmI0YdN1gks62JtFZvxR67ljjSNCeK6U08Zx4msEWOXuofgqUt6zPHSi1H9fbjR/NRA== + dependencies: + "@sinonjs/commons" "^1.7.0" + +"@sinonjs/samsam@^6.1.1": + version "6.1.1" + resolved "https://registry.yarnpkg.com/@sinonjs/samsam/-/samsam-6.1.1.tgz#627f7f4cbdb56e6419fa2c1a3e4751ce4f6a00b1" + integrity sha512-cZ7rKJTLiE7u7Wi/v9Hc2fs3Ucc3jrWeMgPHbbTCeVAB2S0wOBbYlkJVeNSL04i7fdhT8wIbDq1zhC/PXTD2SA== + dependencies: + "@sinonjs/commons" "^1.6.0" + lodash.get "^4.4.2" + type-detect "^4.0.8" + +"@sinonjs/text-encoding@^0.7.1": + version "0.7.2" + resolved "https://registry.yarnpkg.com/@sinonjs/text-encoding/-/text-encoding-0.7.2.tgz#5981a8db18b56ba38ef0efb7d995b12aa7b51918" + integrity sha512-sXXKG+uL9IrKqViTtao2Ws6dy0znu9sOaP1di/jKGW1M6VssO8vlpXCQcpZ+jisQ1tTFAC5Jo/EOzFbggBagFQ== + +"@testing-library/dom@^8.0.0": + version "8.16.0" + 
resolved "https://registry.yarnpkg.com/@testing-library/dom/-/dom-8.16.0.tgz#d6fc50250aed17b1035ca1bd64655e342db3936a" + integrity sha512-uxF4zmnLHHDlmW4l+0WDjcgLVwCvH+OVLpD8Dfp+Bjfz85prwxWGbwXgJdLtkgjD0qfOzkJF9SmA6YZPsMYX4w== + dependencies: + "@babel/code-frame" "^7.10.4" + "@babel/runtime" "^7.12.5" + "@types/aria-query" "^4.2.0" + aria-query "^5.0.0" + chalk "^4.1.0" + dom-accessibility-api "^0.5.9" + lz-string "^1.4.4" + pretty-format "^27.0.2" + +"@testing-library/jest-dom@^5.15.1": + version "5.16.4" + resolved "https://registry.yarnpkg.com/@testing-library/jest-dom/-/jest-dom-5.16.4.tgz#938302d7b8b483963a3ae821f1c0808f872245cd" + integrity sha512-Gy+IoFutbMQcky0k+bqqumXZ1cTGswLsFqmNLzNdSKkU9KGV2u9oXhukCbbJ9/LRPKiqwxEE8VpV/+YZlfkPUA== + dependencies: + "@babel/runtime" "^7.9.2" + "@types/testing-library__jest-dom" "^5.9.1" + aria-query "^5.0.0" + chalk "^3.0.0" + css "^3.0.0" + css.escape "^1.5.1" + dom-accessibility-api "^0.5.6" + lodash "^4.17.15" + redent "^3.0.0" + +"@testing-library/react@^12.1.2": + version "12.1.5" + resolved "https://registry.yarnpkg.com/@testing-library/react/-/react-12.1.5.tgz#bb248f72f02a5ac9d949dea07279095fa577963b" + integrity sha512-OfTXCJUFgjd/digLUuPxa0+/3ZxsQmE7ub9kcbW/wi96Bh3o/p5vrETcBGfP17NWPGqeYYl5LTRpwyGoMC4ysg== + dependencies: + "@babel/runtime" "^7.12.5" + "@testing-library/dom" "^8.0.0" + "@types/react-dom" "<18.0.0" + +"@testing-library/user-event@^13.5.0": + version "13.5.0" + resolved "https://registry.yarnpkg.com/@testing-library/user-event/-/user-event-13.5.0.tgz#69d77007f1e124d55314a2b73fd204b333b13295" + integrity sha512-5Kwtbo3Y/NowpkbRuSepbyMFkZmHgD+vPzYB/RJ4oxt5Gj/avFFBYjhw27cqSVPVw/3a67NK1PbiIr9k4Gwmdg== + dependencies: + "@babel/runtime" "^7.12.5" + +"@tootallnate/once@1": + version "1.1.2" + resolved "https://registry.yarnpkg.com/@tootallnate/once/-/once-1.1.2.tgz#ccb91445360179a04e7fe6aff78c00ffc1eeaf82" + integrity 
sha512-RbzJvlNzmRq5c3O09UipeuXno4tA1FE6ikOjxZK0tuxVv3412l64l5t1W5pj4+rJq9vpkm/kwiR07aZXnsKPxw== + +"@types/aria-query@^4.2.0": + version "4.2.2" + resolved "https://registry.yarnpkg.com/@types/aria-query/-/aria-query-4.2.2.tgz#ed4e0ad92306a704f9fb132a0cfcf77486dbe2bc" + integrity sha512-HnYpAE1Y6kRyKM/XkEuiRQhTHvkzMBurTHnpFLYLBGPIylZNPs9jJcuOOYWxPLJCSEtmZT0Y8rHDokKN7rRTig== + +"@types/babel__core@^7.0.0", "@types/babel__core@^7.1.7": + version "7.1.19" + resolved "https://registry.yarnpkg.com/@types/babel__core/-/babel__core-7.1.19.tgz#7b497495b7d1b4812bdb9d02804d0576f43ee460" + integrity sha512-WEOTgRsbYkvA/KCsDwVEGkd7WAr1e3g31VHQ8zy5gul/V1qKullU/BU5I68X5v7V3GnB9eotmom4v5a5gjxorw== + dependencies: + "@babel/parser" "^7.1.0" + "@babel/types" "^7.0.0" + "@types/babel__generator" "*" + "@types/babel__template" "*" + "@types/babel__traverse" "*" + +"@types/babel__generator@*": + version "7.6.4" + resolved "https://registry.yarnpkg.com/@types/babel__generator/-/babel__generator-7.6.4.tgz#1f20ce4c5b1990b37900b63f050182d28c2439b7" + integrity sha512-tFkciB9j2K755yrTALxD44McOrk+gfpIpvC3sxHjRawj6PfnQxrse4Clq5y/Rq+G3mrBurMax/lG8Qn2t9mSsg== + dependencies: + "@babel/types" "^7.0.0" + +"@types/babel__template@*": + version "7.4.1" + resolved "https://registry.yarnpkg.com/@types/babel__template/-/babel__template-7.4.1.tgz#3d1a48fd9d6c0edfd56f2ff578daed48f36c8969" + integrity sha512-azBFKemX6kMg5Io+/rdGT0dkGreboUVR0Cdm3fz9QJWpaQGJRQXl7C+6hOTCZcMll7KFyEQpgbYI2lHdsS4U7g== + dependencies: + "@babel/parser" "^7.1.0" + "@babel/types" "^7.0.0" + +"@types/babel__traverse@*", "@types/babel__traverse@^7.0.4", "@types/babel__traverse@^7.0.6": + version "7.17.1" + resolved "https://registry.yarnpkg.com/@types/babel__traverse/-/babel__traverse-7.17.1.tgz#1a0e73e8c28c7e832656db372b779bfd2ef37314" + integrity sha512-kVzjari1s2YVi77D3w1yuvohV2idweYXMCDzqBiVNN63TcDWrIlTVOYpqVrvbbyOE/IyzBoTKF0fdnLPEORFxA== + dependencies: + "@babel/types" "^7.3.0" + +"@types/body-parser@*": + version "1.19.2" + 
resolved "https://registry.yarnpkg.com/@types/body-parser/-/body-parser-1.19.2.tgz#aea2059e28b7658639081347ac4fab3de166e6f0" + integrity sha512-ALYone6pm6QmwZoAgeyNksccT9Q4AWZQ6PvfwR37GT6r6FWUPguq6sUmNGSMV2Wr761oQoBxwGGa6DR5o1DC9g== + dependencies: + "@types/connect" "*" + "@types/node" "*" + +"@types/bonjour@^3.5.9": + version "3.5.10" + resolved "https://registry.yarnpkg.com/@types/bonjour/-/bonjour-3.5.10.tgz#0f6aadfe00ea414edc86f5d106357cda9701e275" + integrity sha512-p7ienRMiS41Nu2/igbJxxLDWrSZ0WxM8UQgCeO9KhoVF7cOVFkrKsiDr1EsJIla8vV3oEEjGcz11jc5yimhzZw== + dependencies: + "@types/node" "*" + +"@types/connect-history-api-fallback@^1.3.5": + version "1.3.5" + resolved "https://registry.yarnpkg.com/@types/connect-history-api-fallback/-/connect-history-api-fallback-1.3.5.tgz#d1f7a8a09d0ed5a57aee5ae9c18ab9b803205dae" + integrity sha512-h8QJa8xSb1WD4fpKBDcATDNGXghFj6/3GRWG6dhmRcu0RX1Ubasur2Uvx5aeEwlf0MwblEC2bMzzMQntxnw/Cw== + dependencies: + "@types/express-serve-static-core" "*" + "@types/node" "*" + +"@types/connect@*": + version "3.4.35" + resolved "https://registry.yarnpkg.com/@types/connect/-/connect-3.4.35.tgz#5fcf6ae445e4021d1fc2219a4873cc73a3bb2ad1" + integrity sha512-cdeYyv4KWoEgpBISTxWvqYsVy444DOqehiF3fM3ne10AmJ62RSyNkUnxMJXHQWRQQX2eR94m5y1IZyDwBjV9FQ== + dependencies: + "@types/node" "*" + +"@types/eslint-scope@^3.7.3": + version "3.7.4" + resolved "https://registry.yarnpkg.com/@types/eslint-scope/-/eslint-scope-3.7.4.tgz#37fc1223f0786c39627068a12e94d6e6fc61de16" + integrity sha512-9K4zoImiZc3HlIp6AVUDE4CWYx22a+lhSZMYNpbjW04+YF0KWj4pJXnEMjdnFTiQibFFmElcsasJXDbdI/EPhA== + dependencies: + "@types/eslint" "*" + "@types/estree" "*" + +"@types/eslint@*": + version "8.4.5" + resolved "https://registry.yarnpkg.com/@types/eslint/-/eslint-8.4.5.tgz#acdfb7dd36b91cc5d812d7c093811a8f3d9b31e4" + integrity sha512-dhsC09y1gpJWnK+Ff4SGvCuSnk9DaU0BJZSzOwa6GVSg65XtTugLBITDAAzRU5duGBoXBHpdR/9jHGxJjNflJQ== + dependencies: + "@types/estree" "*" + "@types/json-schema" "*" + 
+"@types/estree@*": + version "1.0.0" + resolved "https://registry.yarnpkg.com/@types/estree/-/estree-1.0.0.tgz#5fb2e536c1ae9bf35366eed879e827fa59ca41c2" + integrity sha512-WulqXMDUTYAXCjZnk6JtIHPigp55cVtDgDrO2gHRwhyJto21+1zbVCtOYB2L1F9w4qCQ0rOGWBnBe0FNTiEJIQ== + +"@types/estree@^0.0.51": + version "0.0.51" + resolved "https://registry.yarnpkg.com/@types/estree/-/estree-0.0.51.tgz#cfd70924a25a3fd32b218e5e420e6897e1ac4f40" + integrity sha512-CuPgU6f3eT/XgKKPqKd/gLZV1Xmvf1a2R5POBOGQa6uv82xpls89HU5zKeVoyR8XzHd1RGNOlQlvUe3CFkjWNQ== + +"@types/express-serve-static-core@*", "@types/express-serve-static-core@^4.17.18": + version "4.17.30" + resolved "https://registry.yarnpkg.com/@types/express-serve-static-core/-/express-serve-static-core-4.17.30.tgz#0f2f99617fa8f9696170c46152ccf7500b34ac04" + integrity sha512-gstzbTWro2/nFed1WXtf+TtrpwxH7Ggs4RLYTLbeVgIkUQOI3WG/JKjgeOU1zXDvezllupjrf8OPIdvTbIaVOQ== + dependencies: + "@types/node" "*" + "@types/qs" "*" + "@types/range-parser" "*" + +"@types/express@*", "@types/express@^4.17.13": + version "4.17.13" + resolved "https://registry.yarnpkg.com/@types/express/-/express-4.17.13.tgz#a76e2995728999bab51a33fabce1d705a3709034" + integrity sha512-6bSZTPaTIACxn48l50SR+axgrqm6qXFIxrdAKaG6PaJk3+zuUr35hBlgT7vOmJcum+OEaIBLtHV/qloEAFITeA== + dependencies: + "@types/body-parser" "*" + "@types/express-serve-static-core" "^4.17.18" + "@types/qs" "*" + "@types/serve-static" "*" + +"@types/graceful-fs@^4.1.2": + version "4.1.5" + resolved "https://registry.yarnpkg.com/@types/graceful-fs/-/graceful-fs-4.1.5.tgz#21ffba0d98da4350db64891f92a9e5db3cdb4e15" + integrity sha512-anKkLmZZ+xm4p8JWBf4hElkM4XR+EZeA2M9BAkkTldmcyDY4mbdIJnRghDJH3Ov5ooY7/UAoENtmdMSkaAd7Cw== + dependencies: + "@types/node" "*" + +"@types/hoist-non-react-statics@^3.3.0": + version "3.3.1" + resolved "https://registry.yarnpkg.com/@types/hoist-non-react-statics/-/hoist-non-react-statics-3.3.1.tgz#1124aafe5118cb591977aeb1ceaaed1070eb039f" + integrity 
sha512-iMIqiko6ooLrTh1joXodJK5X9xeEALT1kM5G3ZLhD3hszxBdIEd5C75U834D9mLcINgD4OyZf5uQXjkuYydWvA== + dependencies: + "@types/react" "*" + hoist-non-react-statics "^3.3.0" + +"@types/http-proxy@^1.17.8": + version "1.17.9" + resolved "https://registry.yarnpkg.com/@types/http-proxy/-/http-proxy-1.17.9.tgz#7f0e7931343761efde1e2bf48c40f02f3f75705a" + integrity sha512-QsbSjA/fSk7xB+UXlCT3wHBy5ai9wOcNDWwZAtud+jXhwOM3l+EYZh8Lng4+/6n8uar0J7xILzqftJdJ/Wdfkw== + dependencies: + "@types/node" "*" + +"@types/istanbul-lib-coverage@*", "@types/istanbul-lib-coverage@^2.0.0", "@types/istanbul-lib-coverage@^2.0.1": + version "2.0.4" + resolved "https://registry.yarnpkg.com/@types/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.4.tgz#8467d4b3c087805d63580480890791277ce35c44" + integrity sha512-z/QT1XN4K4KYuslS23k62yDIDLwLFkzxOuMplDtObz0+y7VqJCaO2o+SPwHCvLFZh7xazvvoor2tA/hPz9ee7g== + +"@types/istanbul-lib-report@*": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@types/istanbul-lib-report/-/istanbul-lib-report-3.0.0.tgz#c14c24f18ea8190c118ee7562b7ff99a36552686" + integrity sha512-plGgXAPfVKFoYfa9NpYDAkseG+g6Jr294RqeqcqDixSbU34MZVJRi/P+7Y8GDpzkEwLaGZZOpKIEmeVZNtKsrg== + dependencies: + "@types/istanbul-lib-coverage" "*" + +"@types/istanbul-reports@^3.0.0": + version "3.0.1" + resolved "https://registry.yarnpkg.com/@types/istanbul-reports/-/istanbul-reports-3.0.1.tgz#9153fe98bba2bd565a63add9436d6f0d7f8468ff" + integrity sha512-c3mAZEuK0lvBp8tmuL74XRKn1+y2dcwOUpH7x4WrF6gk1GIgiluDRgMYQtw2OFcBvAJWlt6ASU3tSqxp0Uu0Aw== + dependencies: + "@types/istanbul-lib-report" "*" + +"@types/jest@*": + version "28.1.6" + resolved "https://registry.yarnpkg.com/@types/jest/-/jest-28.1.6.tgz#d6a9cdd38967d2d746861fb5be6b120e38284dd4" + integrity sha512-0RbGAFMfcBJKOmqRazM8L98uokwuwD5F8rHrv/ZMbrZBwVOWZUyPG6VFNscjYr/vjM3Vu4fRrCPbOs42AfemaQ== + dependencies: + jest-matcher-utils "^28.0.0" + pretty-format "^28.0.0" + +"@types/json-schema@*", "@types/json-schema@^7.0.5", "@types/json-schema@^7.0.8", 
"@types/json-schema@^7.0.9": + version "7.0.11" + resolved "https://registry.yarnpkg.com/@types/json-schema/-/json-schema-7.0.11.tgz#d421b6c527a3037f7c84433fd2c4229e016863d3" + integrity sha512-wOuvG1SN4Us4rez+tylwwwCV1psiNVOkJeM3AUWUNWg/jDQY2+HE/444y5gc+jBmRqASOm2Oeh5c1axHobwRKQ== + +"@types/mime@*": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@types/mime/-/mime-3.0.0.tgz#e9a9903894405c6a6551f1774df4e64d9804d69c" + integrity sha512-fccbsHKqFDXClBZTDLA43zl0+TbxyIwyzIzwwhvoJvhNjOErCdeX2xJbURimv2EbSVUGav001PaCJg4mZxMl4w== + +"@types/node@*": + version "18.6.3" + resolved "https://registry.yarnpkg.com/@types/node/-/node-18.6.3.tgz#4e4a95b6fe44014563ceb514b2598b3e623d1c98" + integrity sha512-6qKpDtoaYLM+5+AFChLhHermMQxc3TOEFIDzrZLPRGHPrLEwqFkkT5Kx3ju05g6X7uDPazz3jHbKPX0KzCjntg== + +"@types/normalize-package-data@^2.4.0": + version "2.4.1" + resolved "https://registry.yarnpkg.com/@types/normalize-package-data/-/normalize-package-data-2.4.1.tgz#d3357479a0fdfdd5907fe67e17e0a85c906e1301" + integrity sha512-Gj7cI7z+98M282Tqmp2K5EIsoouUEzbBJhQQzDE3jSIRk6r9gsz0oUokqIUR4u1R3dMHo0pDHM7sNOHyhulypw== + +"@types/prettier@^2.0.0": + version "2.6.4" + resolved "https://registry.yarnpkg.com/@types/prettier/-/prettier-2.6.4.tgz#ad899dad022bab6b5a9f0a0fe67c2f7a4a8950ed" + integrity sha512-fOwvpvQYStpb/zHMx0Cauwywu9yLDmzWiiQBC7gJyq5tYLUXFZvDG7VK1B7WBxxjBJNKFOZ0zLoOQn8vmATbhw== + +"@types/prop-types@*": + version "15.7.5" + resolved "https://registry.yarnpkg.com/@types/prop-types/-/prop-types-15.7.5.tgz#5f19d2b85a98e9558036f6a3cacc8819420f05cf" + integrity sha512-JCB8C6SnDoQf0cNycqd/35A7MjcnK+ZTqE7judS6o7utxUCg6imJg3QK2qzHKszlTjcj2cn+NwMB2i96ubpj7w== + +"@types/qs@*": + version "6.9.7" + resolved "https://registry.yarnpkg.com/@types/qs/-/qs-6.9.7.tgz#63bb7d067db107cc1e457c303bc25d511febf6cb" + integrity sha512-FGa1F62FT09qcrueBA6qYTrJPVDzah9a+493+o2PCXsesWHIn27G98TsSMs3WPNbZIEj4+VJf6saSFpvD+3Zsw== + +"@types/range-parser@*": + version "1.2.4" + resolved 
"https://registry.yarnpkg.com/@types/range-parser/-/range-parser-1.2.4.tgz#cd667bcfdd025213aafb7ca5915a932590acdcdc" + integrity sha512-EEhsLsD6UsDM1yFhAvy0Cjr6VwmpMWqFBCb9w07wVugF7w9nfajxLuVmngTIpgS6svCnm6Vaw+MZhoDCKnOfsw== + +"@types/react-dom@<18.0.0": + version "17.0.17" + resolved "https://registry.yarnpkg.com/@types/react-dom/-/react-dom-17.0.17.tgz#2e3743277a793a96a99f1bf87614598289da68a1" + integrity sha512-VjnqEmqGnasQKV0CWLevqMTXBYG9GbwuE6x3VetERLh0cq2LTptFE73MrQi2S7GkKXCf2GgwItB/melLnxfnsg== + dependencies: + "@types/react" "^17" + +"@types/react-redux@^7.1.20": + version "7.1.24" + resolved "https://registry.yarnpkg.com/@types/react-redux/-/react-redux-7.1.24.tgz#6caaff1603aba17b27d20f8ad073e4c077e975c0" + integrity sha512-7FkurKcS1k0FHZEtdbbgN8Oc6b+stGSfZYjQGicofJ0j4U0qIn/jaSvnP2pLwZKiai3/17xqqxkkrxTgN8UNbQ== + dependencies: + "@types/hoist-non-react-statics" "^3.3.0" + "@types/react" "*" + hoist-non-react-statics "^3.3.0" + redux "^4.0.0" + +"@types/react-transition-group@^4.4.4": + version "4.4.5" + resolved "https://registry.yarnpkg.com/@types/react-transition-group/-/react-transition-group-4.4.5.tgz#aae20dcf773c5aa275d5b9f7cdbca638abc5e416" + integrity sha512-juKD/eiSM3/xZYzjuzH6ZwpP+/lejltmiS3QEzV/vmb/Q8+HfDmxu+Baga8UEMGBqV88Nbg4l2hY/K2DkyaLLA== + dependencies: + "@types/react" "*" + +"@types/react@*", "@types/react@>=16.9.11": + version "18.0.15" + resolved "https://registry.yarnpkg.com/@types/react/-/react-18.0.15.tgz#d355644c26832dc27f3e6cbf0c4f4603fc4ab7fe" + integrity sha512-iz3BtLuIYH1uWdsv6wXYdhozhqj20oD4/Hk2DNXIn1kFsmp9x8d9QB6FnPhfkbhd2PgEONt9Q1x/ebkwjfFLow== + dependencies: + "@types/prop-types" "*" + "@types/scheduler" "*" + csstype "^3.0.2" + +"@types/react@^17": + version "17.0.48" + resolved "https://registry.yarnpkg.com/@types/react/-/react-17.0.48.tgz#a4532a8b91d7b27b8768b6fc0c3bccb760d15a6c" + integrity sha512-zJ6IYlJ8cYYxiJfUaZOQee4lh99mFihBoqkOSEGV+dFi9leROW6+PgstzQ+w3gWTnUfskALtQPGHK6dYmPj+2A== + dependencies: + 
"@types/prop-types" "*" + "@types/scheduler" "*" + csstype "^3.0.2" + +"@types/retry@0.12.0": + version "0.12.0" + resolved "https://registry.yarnpkg.com/@types/retry/-/retry-0.12.0.tgz#2b35eccfcee7d38cd72ad99232fbd58bffb3c84d" + integrity sha512-wWKOClTTiizcZhXnPY4wikVAwmdYHp8q6DmC+EJUzAMsycb7HB32Kh9RN4+0gExjmPmZSAQjgURXIGATPegAvA== + +"@types/scheduler@*": + version "0.16.2" + resolved "https://registry.yarnpkg.com/@types/scheduler/-/scheduler-0.16.2.tgz#1a62f89525723dde24ba1b01b092bf5df8ad4d39" + integrity sha512-hppQEBDmlwhFAXKJX2KnWLYu5yMfi91yazPb2l+lbJiwW+wdo1gNeRA+3RgNSO39WYX2euey41KEwnqesU2Jew== + +"@types/serve-index@^1.9.1": + version "1.9.1" + resolved "https://registry.yarnpkg.com/@types/serve-index/-/serve-index-1.9.1.tgz#1b5e85370a192c01ec6cec4735cf2917337a6278" + integrity sha512-d/Hs3nWDxNL2xAczmOVZNj92YZCS6RGxfBPjKzuu/XirCgXdpKEb88dYNbrYGint6IVWLNP+yonwVAuRC0T2Dg== + dependencies: + "@types/express" "*" + +"@types/serve-static@*", "@types/serve-static@^1.13.10": + version "1.15.0" + resolved "https://registry.yarnpkg.com/@types/serve-static/-/serve-static-1.15.0.tgz#c7930ff61afb334e121a9da780aac0d9b8f34155" + integrity sha512-z5xyF6uh8CbjAu9760KDKsH2FcDxZ2tFCsA4HIMWE6IkiYMXfVoa+4f9KX+FN0ZLsaMw1WNG2ETLA6N+/YA+cg== + dependencies: + "@types/mime" "*" + "@types/node" "*" + +"@types/sockjs@^0.3.33": + version "0.3.33" + resolved "https://registry.yarnpkg.com/@types/sockjs/-/sockjs-0.3.33.tgz#570d3a0b99ac995360e3136fd6045113b1bd236f" + integrity sha512-f0KEEe05NvUnat+boPTZ0dgaLZ4SfSouXUgv5noUiefG2ajgKjmETo9ZJyuqsl7dfl2aHlLJUiki6B4ZYldiiw== + dependencies: + "@types/node" "*" + +"@types/stack-utils@^2.0.0": + version "2.0.1" + resolved "https://registry.yarnpkg.com/@types/stack-utils/-/stack-utils-2.0.1.tgz#20f18294f797f2209b5f65c8e3b5c8e8261d127c" + integrity sha512-Hl219/BT5fLAaz6NDkSuhzasy49dwQS/DSdu4MdggFB8zcXv7vflBI3xp7FEmkmdDkBUI2bPUNeMttp2knYdxw== + +"@types/testing-library__jest-dom@^5.9.1": + version "5.14.5" + resolved 
"https://registry.yarnpkg.com/@types/testing-library__jest-dom/-/testing-library__jest-dom-5.14.5.tgz#d113709c90b3c75fdb127ec338dad7d5f86c974f" + integrity sha512-SBwbxYoyPIvxHbeHxTZX2Pe/74F/tX2/D3mMvzabdeJ25bBojfW0TyB8BHrbq/9zaaKICJZjLP+8r6AeZMFCuQ== + dependencies: + "@types/jest" "*" + +"@types/warning@^3.0.0": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@types/warning/-/warning-3.0.0.tgz#0d2501268ad8f9962b740d387c4654f5f8e23e52" + integrity sha512-t/Tvs5qR47OLOr+4E9ckN8AmP2Tf16gWq+/qA4iUGS/OOyHVO8wv2vjJuX8SNOUTJyWb+2t7wJm6cXILFnOROA== + +"@types/ws@^8.5.1": + version "8.5.3" + resolved "https://registry.yarnpkg.com/@types/ws/-/ws-8.5.3.tgz#7d25a1ffbecd3c4f2d35068d0b283c037003274d" + integrity sha512-6YOoWjruKj1uLf3INHH7D3qTXwFfEsg1kf3c0uDdSBJwfa/llkwIjrAGV7j7mVgGNbzTQ3HiHKKDXl6bJPD97w== + dependencies: + "@types/node" "*" + +"@types/yargs-parser@*": + version "21.0.0" + resolved "https://registry.yarnpkg.com/@types/yargs-parser/-/yargs-parser-21.0.0.tgz#0c60e537fa790f5f9472ed2776c2b71ec117351b" + integrity sha512-iO9ZQHkZxHn4mSakYV0vFHAVDyEOIJQrV2uZ06HxEPcx+mt8swXoZHIbaaJ2crJYFfErySgktuTZ3BeLz+XmFA== + +"@types/yargs@^15.0.0": + version "15.0.14" + resolved "https://registry.yarnpkg.com/@types/yargs/-/yargs-15.0.14.tgz#26d821ddb89e70492160b66d10a0eb6df8f6fb06" + integrity sha512-yEJzHoxf6SyQGhBhIYGXQDSCkJjB6HohDShto7m8vaKg9Yp0Yn8+71J9eakh2bnPg6BfsH9PRMhiRTZnd4eXGQ== + dependencies: + "@types/yargs-parser" "*" + +"@webassemblyjs/ast@1.11.1": + version "1.11.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/ast/-/ast-1.11.1.tgz#2bfd767eae1a6996f432ff7e8d7fc75679c0b6a7" + integrity sha512-ukBh14qFLjxTQNTXocdyksN5QdM28S1CxHt2rdskFyL+xFV7VremuBLVbmCePj+URalXBENx/9Lm7lnhihtCSw== + dependencies: + "@webassemblyjs/helper-numbers" "1.11.1" + "@webassemblyjs/helper-wasm-bytecode" "1.11.1" + +"@webassemblyjs/floating-point-hex-parser@1.11.1": + version "1.11.1" + resolved 
"https://registry.yarnpkg.com/@webassemblyjs/floating-point-hex-parser/-/floating-point-hex-parser-1.11.1.tgz#f6c61a705f0fd7a6aecaa4e8198f23d9dc179e4f" + integrity sha512-iGRfyc5Bq+NnNuX8b5hwBrRjzf0ocrJPI6GWFodBFzmFnyvrQ83SHKhmilCU/8Jv67i4GJZBMhEzltxzcNagtQ== + +"@webassemblyjs/helper-api-error@1.11.1": + version "1.11.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-api-error/-/helper-api-error-1.11.1.tgz#1a63192d8788e5c012800ba6a7a46c705288fd16" + integrity sha512-RlhS8CBCXfRUR/cwo2ho9bkheSXG0+NwooXcc3PAILALf2QLdFyj7KGsKRbVc95hZnhnERon4kW/D3SZpp6Tcg== + +"@webassemblyjs/helper-buffer@1.11.1": + version "1.11.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-buffer/-/helper-buffer-1.11.1.tgz#832a900eb444884cde9a7cad467f81500f5e5ab5" + integrity sha512-gwikF65aDNeeXa8JxXa2BAk+REjSyhrNC9ZwdT0f8jc4dQQeDQ7G4m0f2QCLPJiMTTO6wfDmRmj/pW0PsUvIcA== + +"@webassemblyjs/helper-numbers@1.11.1": + version "1.11.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-numbers/-/helper-numbers-1.11.1.tgz#64d81da219fbbba1e3bd1bfc74f6e8c4e10a62ae" + integrity sha512-vDkbxiB8zfnPdNK9Rajcey5C0w+QJugEglN0of+kmO8l7lDb77AnlKYQF7aarZuCrv+l0UvqL+68gSDr3k9LPQ== + dependencies: + "@webassemblyjs/floating-point-hex-parser" "1.11.1" + "@webassemblyjs/helper-api-error" "1.11.1" + "@xtuc/long" "4.2.2" + +"@webassemblyjs/helper-wasm-bytecode@1.11.1": + version "1.11.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-wasm-bytecode/-/helper-wasm-bytecode-1.11.1.tgz#f328241e41e7b199d0b20c18e88429c4433295e1" + integrity sha512-PvpoOGiJwXeTrSf/qfudJhwlvDQxFgelbMqtq52WWiXC6Xgg1IREdngmPN3bs4RoO83PnL/nFrxucXj1+BX62Q== + +"@webassemblyjs/helper-wasm-section@1.11.1": + version "1.11.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-wasm-section/-/helper-wasm-section-1.11.1.tgz#21ee065a7b635f319e738f0dd73bfbda281c097a" + integrity sha512-10P9No29rYX1j7F3EVPX3JvGPQPae+AomuSTPiF9eBQeChHI6iqjMIwR9JmOJXwpnn/oVGDk7I5IlskuMwU/pg== + 
dependencies: + "@webassemblyjs/ast" "1.11.1" + "@webassemblyjs/helper-buffer" "1.11.1" + "@webassemblyjs/helper-wasm-bytecode" "1.11.1" + "@webassemblyjs/wasm-gen" "1.11.1" + +"@webassemblyjs/ieee754@1.11.1": + version "1.11.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/ieee754/-/ieee754-1.11.1.tgz#963929e9bbd05709e7e12243a099180812992614" + integrity sha512-hJ87QIPtAMKbFq6CGTkZYJivEwZDbQUgYd3qKSadTNOhVY7p+gfP6Sr0lLRVTaG1JjFj+r3YchoqRYxNH3M0GQ== + dependencies: + "@xtuc/ieee754" "^1.2.0" + +"@webassemblyjs/leb128@1.11.1": + version "1.11.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/leb128/-/leb128-1.11.1.tgz#ce814b45574e93d76bae1fb2644ab9cdd9527aa5" + integrity sha512-BJ2P0hNZ0u+Th1YZXJpzW6miwqQUGcIHT1G/sf72gLVD9DZ5AdYTqPNbHZh6K1M5VmKvFXwGSWZADz+qBWxeRw== + dependencies: + "@xtuc/long" "4.2.2" + +"@webassemblyjs/utf8@1.11.1": + version "1.11.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/utf8/-/utf8-1.11.1.tgz#d1f8b764369e7c6e6bae350e854dec9a59f0a3ff" + integrity sha512-9kqcxAEdMhiwQkHpkNiorZzqpGrodQQ2IGrHHxCy+Ozng0ofyMA0lTqiLkVs1uzTRejX+/O0EOT7KxqVPuXosQ== + +"@webassemblyjs/wasm-edit@1.11.1": + version "1.11.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-edit/-/wasm-edit-1.11.1.tgz#ad206ebf4bf95a058ce9880a8c092c5dec8193d6" + integrity sha512-g+RsupUC1aTHfR8CDgnsVRVZFJqdkFHpsHMfJuWQzWU3tvnLC07UqHICfP+4XyL2tnr1amvl1Sdp06TnYCmVkA== + dependencies: + "@webassemblyjs/ast" "1.11.1" + "@webassemblyjs/helper-buffer" "1.11.1" + "@webassemblyjs/helper-wasm-bytecode" "1.11.1" + "@webassemblyjs/helper-wasm-section" "1.11.1" + "@webassemblyjs/wasm-gen" "1.11.1" + "@webassemblyjs/wasm-opt" "1.11.1" + "@webassemblyjs/wasm-parser" "1.11.1" + "@webassemblyjs/wast-printer" "1.11.1" + +"@webassemblyjs/wasm-gen@1.11.1": + version "1.11.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-gen/-/wasm-gen-1.11.1.tgz#86c5ea304849759b7d88c47a32f4f039ae3c8f76" + integrity 
sha512-F7QqKXwwNlMmsulj6+O7r4mmtAlCWfO/0HdgOxSklZfQcDu0TpLiD1mRt/zF25Bk59FIjEuGAIyn5ei4yMfLhA== + dependencies: + "@webassemblyjs/ast" "1.11.1" + "@webassemblyjs/helper-wasm-bytecode" "1.11.1" + "@webassemblyjs/ieee754" "1.11.1" + "@webassemblyjs/leb128" "1.11.1" + "@webassemblyjs/utf8" "1.11.1" + +"@webassemblyjs/wasm-opt@1.11.1": + version "1.11.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-opt/-/wasm-opt-1.11.1.tgz#657b4c2202f4cf3b345f8a4c6461c8c2418985f2" + integrity sha512-VqnkNqnZlU5EB64pp1l7hdm3hmQw7Vgqa0KF/KCNO9sIpI6Fk6brDEiX+iCOYrvMuBWDws0NkTOxYEb85XQHHw== + dependencies: + "@webassemblyjs/ast" "1.11.1" + "@webassemblyjs/helper-buffer" "1.11.1" + "@webassemblyjs/wasm-gen" "1.11.1" + "@webassemblyjs/wasm-parser" "1.11.1" + +"@webassemblyjs/wasm-parser@1.11.1": + version "1.11.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-parser/-/wasm-parser-1.11.1.tgz#86ca734534f417e9bd3c67c7a1c75d8be41fb199" + integrity sha512-rrBujw+dJu32gYB7/Lup6UhdkPx9S9SnobZzRVL7VcBH9Bt9bCBLEuX/YXOOtBsOZ4NQrRykKhffRWHvigQvOA== + dependencies: + "@webassemblyjs/ast" "1.11.1" + "@webassemblyjs/helper-api-error" "1.11.1" + "@webassemblyjs/helper-wasm-bytecode" "1.11.1" + "@webassemblyjs/ieee754" "1.11.1" + "@webassemblyjs/leb128" "1.11.1" + "@webassemblyjs/utf8" "1.11.1" + +"@webassemblyjs/wast-printer@1.11.1": + version "1.11.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/wast-printer/-/wast-printer-1.11.1.tgz#d0c73beda8eec5426f10ae8ef55cee5e7084c2f0" + integrity sha512-IQboUWM4eKzWW+N/jij2sRatKMh99QEelo3Eb2q0qXkvPRISAj8Qxtmw5itwqK+TTkBuUIE45AxYPToqPtL5gg== + dependencies: + "@webassemblyjs/ast" "1.11.1" + "@xtuc/long" "4.2.2" + +"@webpack-cli/configtest@^1.2.0": + version "1.2.0" + resolved "https://registry.yarnpkg.com/@webpack-cli/configtest/-/configtest-1.2.0.tgz#7b20ce1c12533912c3b217ea68262365fa29a6f5" + integrity sha512-4FB8Tj6xyVkyqjj1OaTqCjXYULB9FMkqQ8yGrZjRDrYh0nOE+7Lhs45WioWQQMV+ceFlE368Ukhe6xdvJM9Egg== + 
+"@webpack-cli/info@^1.5.0": + version "1.5.0" + resolved "https://registry.yarnpkg.com/@webpack-cli/info/-/info-1.5.0.tgz#6c78c13c5874852d6e2dd17f08a41f3fe4c261b1" + integrity sha512-e8tSXZpw2hPl2uMJY6fsMswaok5FdlGNRTktvFk2sD8RjH0hE2+XistawJx1vmKteh4NmGmNUrp+Tb2w+udPcQ== + dependencies: + envinfo "^7.7.3" + +"@webpack-cli/serve@^1.7.0": + version "1.7.0" + resolved "https://registry.yarnpkg.com/@webpack-cli/serve/-/serve-1.7.0.tgz#e1993689ac42d2b16e9194376cfb6753f6254db1" + integrity sha512-oxnCNGj88fL+xzV+dacXs44HcDwf1ovs3AuEzvP7mqXw7fQntqIhQ1BRmynh4qEKQSSSRSWVyXRjmTbZIX9V2Q== + +"@wojtekmaj/enzyme-adapter-react-17@^0.6.5": + version "0.6.7" + resolved "https://registry.yarnpkg.com/@wojtekmaj/enzyme-adapter-react-17/-/enzyme-adapter-react-17-0.6.7.tgz#7784bd32f518b186218cebb26c98c852676f30b0" + integrity sha512-B+byiwi/T1bx5hcj9wc0fUL5Hlb5giSXJzcnEfJVl2j6dGV2NJfcxDBYX0WWwIxlzNiFz8kAvlkFWI2y/nscZQ== + dependencies: + "@wojtekmaj/enzyme-adapter-utils" "^0.1.4" + enzyme-shallow-equal "^1.0.0" + has "^1.0.0" + prop-types "^15.7.0" + react-is "^17.0.0" + react-test-renderer "^17.0.0" + +"@wojtekmaj/enzyme-adapter-utils@^0.1.4": + version "0.1.4" + resolved "https://registry.yarnpkg.com/@wojtekmaj/enzyme-adapter-utils/-/enzyme-adapter-utils-0.1.4.tgz#bcd411ad6e368f17dce5425582c2907104cdb1ad" + integrity sha512-ARGIQSIIv3oBia1m5Ihn1VU0FGmft6KPe39SBKTb8p7LSXO23YI4kNtc4M/cKoIY7P+IYdrZcgMObvedyjoSQA== + dependencies: + function.prototype.name "^1.1.0" + has "^1.0.0" + object.fromentries "^2.0.0" + prop-types "^15.7.0" + +"@xtuc/ieee754@^1.2.0": + version "1.2.0" + resolved "https://registry.yarnpkg.com/@xtuc/ieee754/-/ieee754-1.2.0.tgz#eef014a3145ae477a1cbc00cd1e552336dceb790" + integrity sha512-DX8nKgqcGwsc0eJSqYt5lwP4DH5FlHnmuWWBRy7X0NcaGR0ZtuyeESgMwTYVEtxmsNGY+qit4QYT/MIYTOTPeA== + +"@xtuc/long@4.2.2": + version "4.2.2" + resolved "https://registry.yarnpkg.com/@xtuc/long/-/long-4.2.2.tgz#d291c6a4e97989b5c61d9acf396ae4fe133a718d" + integrity 
sha512-NuHqBY1PB/D8xU6s/thBgOAiAP7HOYDQ32+BFZILJ8ivkUkAHQnWfn6WhL79Owj1qmUnoN/YPhktdIoucipkAQ== + +abab@^2.0.3, abab@^2.0.5: + version "2.0.6" + resolved "https://registry.yarnpkg.com/abab/-/abab-2.0.6.tgz#41b80f2c871d19686216b82309231cfd3cb3d291" + integrity sha512-j2afSsaIENvHZN2B8GOpF566vZ5WVk5opAiMTvWgaQT8DkbOqsTfvNAvHoRGU2zzP8cPoqys+xHTRDWW8L+/BA== + +accepts@~1.3.4, accepts@~1.3.5, accepts@~1.3.8: + version "1.3.8" + resolved "https://registry.yarnpkg.com/accepts/-/accepts-1.3.8.tgz#0bf0be125b67014adcb0b0921e62db7bffe16b2e" + integrity sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw== + dependencies: + mime-types "~2.1.34" + negotiator "0.6.3" + +acorn-globals@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/acorn-globals/-/acorn-globals-6.0.0.tgz#46cdd39f0f8ff08a876619b55f5ac8a6dc770b45" + integrity sha512-ZQl7LOWaF5ePqqcX4hLuv/bLXYQNfNWw2c0/yX/TsPRKamzHcTGQnlCjHT3TsmkOUVEPS3crCxiPfdzE/Trlhg== + dependencies: + acorn "^7.1.1" + acorn-walk "^7.1.1" + +acorn-import-assertions@^1.7.6: + version "1.8.0" + resolved "https://registry.yarnpkg.com/acorn-import-assertions/-/acorn-import-assertions-1.8.0.tgz#ba2b5939ce62c238db6d93d81c9b111b29b855e9" + integrity sha512-m7VZ3jwz4eK6A4Vtt8Ew1/mNbP24u0FhdyfA7fSvnJR6LMdfOYnmuIrrJAgrYfYJ10F/otaHTtrtrtmHdMNzEw== + +acorn-jsx@^5.3.1: + version "5.3.2" + resolved "https://registry.yarnpkg.com/acorn-jsx/-/acorn-jsx-5.3.2.tgz#7ed5bb55908b3b2f1bc55c6af1653bada7f07937" + integrity sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ== + +acorn-walk@^7.1.1: + version "7.2.0" + resolved "https://registry.yarnpkg.com/acorn-walk/-/acorn-walk-7.2.0.tgz#0de889a601203909b0fbe07b8938dc21d2e967bc" + integrity sha512-OPdCF6GsMIP+Az+aWfAAOEt2/+iVDKE7oy6lJ098aoe59oAmK76qV6Gw60SbZ8jHuG2wH058GF4pLFbYamYrVA== + +acorn@^7.1.1, acorn@^7.4.0: + version "7.4.1" + resolved 
"https://registry.yarnpkg.com/acorn/-/acorn-7.4.1.tgz#feaed255973d2e77555b83dbc08851a6c63520fa" + integrity sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A== + +acorn@^8.2.4, acorn@^8.5.0, acorn@^8.7.1: + version "8.8.0" + resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.8.0.tgz#88c0187620435c7f6015803f5539dae05a9dbea8" + integrity sha512-QOxyigPVrpZ2GXT+PFyZTl6TtOFc5egxHIP9IlQ+RbupQuX4RkT/Bee4/kQuC02Xkzg84JcT7oLYtDIQxp+v7w== + +agent-base@6: + version "6.0.2" + resolved "https://registry.yarnpkg.com/agent-base/-/agent-base-6.0.2.tgz#49fff58577cfee3f37176feab4c22e00f86d7f77" + integrity sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ== + dependencies: + debug "4" + +ajv-formats@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/ajv-formats/-/ajv-formats-2.1.1.tgz#6e669400659eb74973bbf2e33327180a0996b520" + integrity sha512-Wx0Kx52hxE7C18hkMEggYlEifqWZtYaRgouJor+WMdPnQyEK13vgEWyVNup7SoeeoLMsr4kf5h6dOW11I15MUA== + dependencies: + ajv "^8.0.0" + +ajv-keywords@^3.5.2: + version "3.5.2" + resolved "https://registry.yarnpkg.com/ajv-keywords/-/ajv-keywords-3.5.2.tgz#31f29da5ab6e00d1c2d329acf7b5929614d5014d" + integrity sha512-5p6WTN0DdTGVQk6VjcEju19IgaHudalcfabD7yhDGeA6bcQnmL+CpveLJq/3hvfwd1aof6L386Ougkx6RfyMIQ== + +ajv-keywords@^5.0.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/ajv-keywords/-/ajv-keywords-5.1.0.tgz#69d4d385a4733cdbeab44964a1170a88f87f0e16" + integrity sha512-YCS/JNFAUyr5vAuhk1DWm1CBxRHW9LbJ2ozWeemrIqpbsqKjHVxYPyi5GC0rjZIT5JxJ3virVTS8wk4i/Z+krw== + dependencies: + fast-deep-equal "^3.1.3" + +ajv@^6.10.0, ajv@^6.12.4, ajv@^6.12.5: + version "6.12.6" + resolved "https://registry.yarnpkg.com/ajv/-/ajv-6.12.6.tgz#baf5a62e802b07d977034586f8c3baf5adf26df4" + integrity sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g== + dependencies: + fast-deep-equal "^3.1.1" + fast-json-stable-stringify 
"^2.0.0" + json-schema-traverse "^0.4.1" + uri-js "^4.2.2" + +ajv@^8.0.0, ajv@^8.0.1, ajv@^8.8.0: + version "8.11.0" + resolved "https://registry.yarnpkg.com/ajv/-/ajv-8.11.0.tgz#977e91dd96ca669f54a11e23e378e33b884a565f" + integrity sha512-wGgprdCvMalC0BztXvitD2hC04YffAvtsUn93JbGXYLAtCUO4xd17mCCZQxUOItiBwZvJScWo8NIvQMQ71rdpg== + dependencies: + fast-deep-equal "^3.1.1" + json-schema-traverse "^1.0.0" + require-from-string "^2.0.2" + uri-js "^4.2.2" + +ansi-colors@^4.1.1: + version "4.1.3" + resolved "https://registry.yarnpkg.com/ansi-colors/-/ansi-colors-4.1.3.tgz#37611340eb2243e70cc604cad35d63270d48781b" + integrity sha512-/6w/C21Pm1A7aZitlI5Ni/2J6FFQN8i1Cvz3kHABAAbw93v/NlvKdVOqz7CCWz/3iv/JplRSEEZ83XION15ovw== + +ansi-escapes@^4.2.1: + version "4.3.2" + resolved "https://registry.yarnpkg.com/ansi-escapes/-/ansi-escapes-4.3.2.tgz#6b2291d1db7d98b6521d5f1efa42d0f3a9feb65e" + integrity sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ== + dependencies: + type-fest "^0.21.3" + +ansi-html-community@^0.0.8: + version "0.0.8" + resolved "https://registry.yarnpkg.com/ansi-html-community/-/ansi-html-community-0.0.8.tgz#69fbc4d6ccbe383f9736934ae34c3f8290f1bf41" + integrity sha512-1APHAyr3+PCamwNw3bXCPp4HFLONZt/yIH0sZp0/469KWNTEy+qN5jQ3GVX6DMZ1UXAi34yVwtTeaG/HpBuuzw== + +ansi-regex@^5.0.0, ansi-regex@^5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-5.0.1.tgz#082cb2c89c9fe8659a311a53bd6a4dc5301db304" + integrity sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ== + +ansi-styles@^3.2.1: + version "3.2.1" + resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-3.2.1.tgz#41fbb20243e50b12be0f04b8dedbf07520ce841d" + integrity sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA== + dependencies: + color-convert "^1.9.0" + +ansi-styles@^4.0.0, ansi-styles@^4.1.0: + version "4.3.0" + resolved 
"https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-4.3.0.tgz#edd803628ae71c04c85ae7a0906edad34b648937" + integrity sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg== + dependencies: + color-convert "^2.0.1" + +ansi-styles@^5.0.0: + version "5.2.0" + resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-5.2.0.tgz#07449690ad45777d1924ac2abb2fc8895dba836b" + integrity sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA== + +anymatch@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-2.0.0.tgz#bcb24b4f37934d9aa7ac17b4adaf89e7c76ef2eb" + integrity sha512-5teOsQWABXHHBFP9y3skS5P3d/WfWXpv3FUpy+LorMrNYaT9pI4oLMQX7jzQ2KklNpGpWHzdCXTDT2Y3XGlZBw== + dependencies: + micromatch "^3.1.4" + normalize-path "^2.1.1" + +anymatch@^3.0.3, anymatch@~3.1.2: + version "3.1.2" + resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-3.1.2.tgz#c0557c096af32f106198f4f4e2a383537e378716" + integrity sha512-P43ePfOAIupkguHUycrc4qJ9kz8ZiuOUijaETwX7THt0Y/GNK7v0aa8rY816xWjZ7rJdA5XdMcpVFTKMq+RvWg== + dependencies: + normalize-path "^3.0.0" + picomatch "^2.0.4" + +argparse@^1.0.7: + version "1.0.10" + resolved "https://registry.yarnpkg.com/argparse/-/argparse-1.0.10.tgz#bcd6791ea5ae09725e17e5ad988134cd40b3d911" + integrity sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg== + dependencies: + sprintf-js "~1.0.2" + +aria-query@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/aria-query/-/aria-query-5.0.0.tgz#210c21aaf469613ee8c9a62c7f86525e058db52c" + integrity sha512-V+SM7AbUwJ+EBnB8+DXs0hPZHO0W6pqBcc0dW90OwtVG02PswOu/teuARoLQjdDOH+t9pJgGnW5/Qmouf3gPJg== + +arr-diff@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/arr-diff/-/arr-diff-4.0.0.tgz#d6461074febfec71e7e15235761a329a5dc7c520" + integrity 
sha512-YVIQ82gZPGBebQV/a8dar4AitzCQs0jjXwMPZllpXMaGjXPYVUawSxQrRsjhjupyVxEvbHgUmIhKVlND+j02kA== + +arr-flatten@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/arr-flatten/-/arr-flatten-1.1.0.tgz#36048bbff4e7b47e136644316c99669ea5ae91f1" + integrity sha512-L3hKV5R/p5o81R7O02IGnwpDmkp6E982XhtbuwSe3O4qOtMMMtodicASA1Cny2U+aCXcNpml+m4dPsvsJ3jatg== + +arr-union@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/arr-union/-/arr-union-3.1.0.tgz#e39b09aea9def866a8f206e288af63919bae39c4" + integrity sha512-sKpyeERZ02v1FeCZT8lrfJq5u6goHCtpTAzPwJYe7c8SPFOboNjNg1vz2L4VTn9T4PQxEx13TbXLmYUcS6Ug7Q== + +array-flatten@1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/array-flatten/-/array-flatten-1.1.1.tgz#9a5f699051b1e7073328f2a008968b64ea2955d2" + integrity sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg== + +array-flatten@^2.1.2: + version "2.1.2" + resolved "https://registry.yarnpkg.com/array-flatten/-/array-flatten-2.1.2.tgz#24ef80a28c1a893617e2149b0c6d0d788293b099" + integrity sha512-hNfzcOV8W4NdualtqBFPyVO+54DSJuZGY9qT4pRroB6S9e3iiido2ISIC5h9R2sPJ8H3FHCIiEnsv1lPXO3KtQ== + +array-includes@^3.1.5: + version "3.1.5" + resolved "https://registry.yarnpkg.com/array-includes/-/array-includes-3.1.5.tgz#2c320010db8d31031fd2a5f6b3bbd4b1aad31bdb" + integrity sha512-iSDYZMMyTPkiFasVqfuAQnWAYcvO/SeBSCGKePoEthjp4LEMTe4uLc7b025o4jAZpHhihh8xPo99TNWUWWkGDQ== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.4" + es-abstract "^1.19.5" + get-intrinsic "^1.1.1" + is-string "^1.0.7" + +array-unique@^0.3.2: + version "0.3.2" + resolved "https://registry.yarnpkg.com/array-unique/-/array-unique-0.3.2.tgz#a894b75d4bc4f6cd679ef3244a9fd8f46ae2d428" + integrity sha512-SleRWjh9JUud2wH1hPs9rZBZ33H6T9HOiL0uwGnGx9FpE6wKGyfWugmbkEOIs6qWrZhg0LWeLziLrEwQJhs5mQ== + +array.prototype.filter@^1.0.0: + version "1.0.1" + resolved 
"https://registry.yarnpkg.com/array.prototype.filter/-/array.prototype.filter-1.0.1.tgz#20688792acdb97a09488eaaee9eebbf3966aae21" + integrity sha512-Dk3Ty7N42Odk7PjU/Ci3zT4pLj20YvuVnneG/58ICM6bt4Ij5kZaJTVQ9TSaWaIECX2sFyz4KItkVZqHNnciqw== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + es-abstract "^1.19.0" + es-array-method-boxes-properly "^1.0.0" + is-string "^1.0.7" + +array.prototype.flat@^1.2.3: + version "1.3.0" + resolved "https://registry.yarnpkg.com/array.prototype.flat/-/array.prototype.flat-1.3.0.tgz#0b0c1567bf57b38b56b4c97b8aa72ab45e4adc7b" + integrity sha512-12IUEkHsAhA4DY5s0FPgNXIdc8VRSqD9Zp78a5au9abH/SOBrsp082JOWFNTjkMozh8mqcdiKuaLGhPeYztxSw== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + es-abstract "^1.19.2" + es-shim-unscopables "^1.0.0" + +array.prototype.flatmap@^1.3.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/array.prototype.flatmap/-/array.prototype.flatmap-1.3.0.tgz#a7e8ed4225f4788a70cd910abcf0791e76a5534f" + integrity sha512-PZC9/8TKAIxcWKdyeb77EzULHPrIX/tIZebLJUQOMR1OwYosT8yggdfWScfTBCDj5utONvOuPQQumYsU2ULbkg== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + es-abstract "^1.19.2" + es-shim-unscopables "^1.0.0" + +assign-symbols@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/assign-symbols/-/assign-symbols-1.0.0.tgz#59667f41fadd4f20ccbc2bb96b8d4f7f78ec0367" + integrity sha512-Q+JC7Whu8HhmTdBph/Tq59IoRtoy6KAm5zzPv00WdujX82lbAL8K7WVjne7vdCsAmbF4AYaDOPyO3k0kl8qIrw== + +astral-regex@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/astral-regex/-/astral-regex-2.0.0.tgz#483143c567aeed4785759c0865786dc77d7d2e31" + integrity sha512-Z7tMw1ytTXt5jqMcOP+OQteU1VuNK9Y02uuJtKQ1Sv69jXQKKg5cibLwGJow8yzZP+eAc18EmLGPal0bp36rvQ== + +asynckit@^0.4.0: + version "0.4.0" + resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79" + integrity 
sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q== + +atob@^2.1.2: + version "2.1.2" + resolved "https://registry.yarnpkg.com/atob/-/atob-2.1.2.tgz#6d9517eb9e030d2436666651e86bd9f6f13533c9" + integrity sha512-Wm6ukoaOGJi/73p/cl2GvLjTI5JM1k/O14isD73YML8StrH/7/lRFgmg8nICZgD3bZZvjwCGxtMOD3wWNAu8cg== + +babel-jest@^26.6.3: + version "26.6.3" + resolved "https://registry.yarnpkg.com/babel-jest/-/babel-jest-26.6.3.tgz#d87d25cb0037577a0c89f82e5755c5d293c01056" + integrity sha512-pl4Q+GAVOHwvjrck6jKjvmGhnO3jHX/xuB9d27f+EJZ/6k+6nMuPjorrYp7s++bKKdANwzElBWnLWaObvTnaZA== + dependencies: + "@jest/transform" "^26.6.2" + "@jest/types" "^26.6.2" + "@types/babel__core" "^7.1.7" + babel-plugin-istanbul "^6.0.0" + babel-preset-jest "^26.6.2" + chalk "^4.0.0" + graceful-fs "^4.2.4" + slash "^3.0.0" + +babel-loader@^8.2.1: + version "8.2.5" + resolved "https://registry.yarnpkg.com/babel-loader/-/babel-loader-8.2.5.tgz#d45f585e654d5a5d90f5350a779d7647c5ed512e" + integrity sha512-OSiFfH89LrEMiWd4pLNqGz4CwJDtbs2ZVc+iGu2HrkRfPxId9F2anQj38IxWpmRfsUY0aBZYi1EFcd3mhtRMLQ== + dependencies: + find-cache-dir "^3.3.1" + loader-utils "^2.0.0" + make-dir "^3.1.0" + schema-utils "^2.6.5" + +babel-plugin-dynamic-import-node@^2.3.3: + version "2.3.3" + resolved "https://registry.yarnpkg.com/babel-plugin-dynamic-import-node/-/babel-plugin-dynamic-import-node-2.3.3.tgz#84fda19c976ec5c6defef57f9427b3def66e17a3" + integrity sha512-jZVI+s9Zg3IqA/kdi0i6UDCybUI3aSBLnglhYbSSjKlV7yF1F/5LWv8MakQmvYpnbJDS6fcBL2KzHSxNCMtWSQ== + dependencies: + object.assign "^4.1.0" + +babel-plugin-istanbul@^6.0.0: + version "6.1.1" + resolved "https://registry.yarnpkg.com/babel-plugin-istanbul/-/babel-plugin-istanbul-6.1.1.tgz#fa88ec59232fd9b4e36dbbc540a8ec9a9b47da73" + integrity sha512-Y1IQok9821cC9onCx5otgFfRm7Lm+I+wwxOx738M/WLPZ9Q42m4IG5W0FNX8WLL2gYMZo3JkuXIH2DOpWM+qwA== + dependencies: + "@babel/helper-plugin-utils" "^7.0.0" + "@istanbuljs/load-nyc-config" "^1.0.0" + 
"@istanbuljs/schema" "^0.1.2" + istanbul-lib-instrument "^5.0.4" + test-exclude "^6.0.0" + +babel-plugin-jest-hoist@^26.6.2: + version "26.6.2" + resolved "https://registry.yarnpkg.com/babel-plugin-jest-hoist/-/babel-plugin-jest-hoist-26.6.2.tgz#8185bd030348d254c6d7dd974355e6a28b21e62d" + integrity sha512-PO9t0697lNTmcEHH69mdtYiOIkkOlj9fySqfO3K1eCcdISevLAE0xY59VLLUj0SoiPiTX/JU2CYFpILydUa5Lw== + dependencies: + "@babel/template" "^7.3.3" + "@babel/types" "^7.3.3" + "@types/babel__core" "^7.0.0" + "@types/babel__traverse" "^7.0.6" + +babel-plugin-polyfill-corejs2@^0.3.2: + version "0.3.2" + resolved "https://registry.yarnpkg.com/babel-plugin-polyfill-corejs2/-/babel-plugin-polyfill-corejs2-0.3.2.tgz#e4c31d4c89b56f3cf85b92558954c66b54bd972d" + integrity sha512-LPnodUl3lS0/4wN3Rb+m+UK8s7lj2jcLRrjho4gLw+OJs+I4bvGXshINesY5xx/apM+biTnQ9reDI8yj+0M5+Q== + dependencies: + "@babel/compat-data" "^7.17.7" + "@babel/helper-define-polyfill-provider" "^0.3.2" + semver "^6.1.1" + +babel-plugin-polyfill-corejs3@^0.5.3: + version "0.5.3" + resolved "https://registry.yarnpkg.com/babel-plugin-polyfill-corejs3/-/babel-plugin-polyfill-corejs3-0.5.3.tgz#d7e09c9a899079d71a8b670c6181af56ec19c5c7" + integrity sha512-zKsXDh0XjnrUEW0mxIHLfjBfnXSMr5Q/goMe/fxpQnLm07mcOZiIZHBNWCMx60HmdvjxfXcalac0tfFg0wqxyw== + dependencies: + "@babel/helper-define-polyfill-provider" "^0.3.2" + core-js-compat "^3.21.0" + +babel-plugin-polyfill-regenerator@^0.4.0: + version "0.4.0" + resolved "https://registry.yarnpkg.com/babel-plugin-polyfill-regenerator/-/babel-plugin-polyfill-regenerator-0.4.0.tgz#8f51809b6d5883e07e71548d75966ff7635527fe" + integrity sha512-RW1cnryiADFeHmfLS+WW/G431p1PsW5qdRdz0SDRi7TKcUgc7Oh/uXkT7MZ/+tGsT1BkczEAmD5XjUyJ5SWDTw== + dependencies: + "@babel/helper-define-polyfill-provider" "^0.3.2" + +babel-preset-current-node-syntax@^1.0.0: + version "1.0.1" + resolved 
"https://registry.yarnpkg.com/babel-preset-current-node-syntax/-/babel-preset-current-node-syntax-1.0.1.tgz#b4399239b89b2a011f9ddbe3e4f401fc40cff73b" + integrity sha512-M7LQ0bxarkxQoN+vz5aJPsLBn77n8QgTFmo8WK0/44auK2xlCXrYcUxHFxgU7qW5Yzw/CjmLRK2uJzaCd7LvqQ== + dependencies: + "@babel/plugin-syntax-async-generators" "^7.8.4" + "@babel/plugin-syntax-bigint" "^7.8.3" + "@babel/plugin-syntax-class-properties" "^7.8.3" + "@babel/plugin-syntax-import-meta" "^7.8.3" + "@babel/plugin-syntax-json-strings" "^7.8.3" + "@babel/plugin-syntax-logical-assignment-operators" "^7.8.3" + "@babel/plugin-syntax-nullish-coalescing-operator" "^7.8.3" + "@babel/plugin-syntax-numeric-separator" "^7.8.3" + "@babel/plugin-syntax-object-rest-spread" "^7.8.3" + "@babel/plugin-syntax-optional-catch-binding" "^7.8.3" + "@babel/plugin-syntax-optional-chaining" "^7.8.3" + "@babel/plugin-syntax-top-level-await" "^7.8.3" + +babel-preset-jest@^26.6.2: + version "26.6.2" + resolved "https://registry.yarnpkg.com/babel-preset-jest/-/babel-preset-jest-26.6.2.tgz#747872b1171df032252426586881d62d31798fee" + integrity sha512-YvdtlVm9t3k777c5NPQIv6cxFFFapys25HiUmuSgHwIZhfifweR5c5Sf5nwE3MAbfu327CYSvps8Yx6ANLyleQ== + dependencies: + babel-plugin-jest-hoist "^26.6.2" + babel-preset-current-node-syntax "^1.0.0" + +balanced-match@^1.0.0: + version "1.0.2" + resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.2.tgz#e83e3a7e3f300b34cb9d87f615fa0cbf357690ee" + integrity sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw== + +base@^0.11.1: + version "0.11.2" + resolved "https://registry.yarnpkg.com/base/-/base-0.11.2.tgz#7bde5ced145b6d551a90db87f83c558b4eb48a8f" + integrity sha512-5T6P4xPgpp0YDFvSWwEZ4NoE3aM4QBQXDzmVbraCkFj8zHM+mba8SyqB5DbZWyR7mYHo6Y7BdQo3MoA4m0TeQg== + dependencies: + cache-base "^1.0.1" + class-utils "^0.3.5" + component-emitter "^1.2.1" + define-property "^1.0.0" + isobject "^3.0.1" + mixin-deep "^1.2.0" + pascalcase "^0.1.1" + 
+batch@0.6.1: + version "0.6.1" + resolved "https://registry.yarnpkg.com/batch/-/batch-0.6.1.tgz#dc34314f4e679318093fc760272525f94bf25c16" + integrity sha512-x+VAiMRL6UPkx+kudNvxTl6hB2XNNCG2r+7wixVfIYwu/2HKRXimwQyaumLjMveWvT2Hkd/cAJw+QBMfJ/EKVw== + +big.js@^5.2.2: + version "5.2.2" + resolved "https://registry.yarnpkg.com/big.js/-/big.js-5.2.2.tgz#65f0af382f578bcdc742bd9c281e9cb2d7768328" + integrity sha512-vyL2OymJxmarO8gxMr0mhChsO9QGwhynfuu4+MHTAW6czfq9humCB7rKpUjDd9YUiDPU4mzpyupFSvOClAwbmQ== + +binary-extensions@^2.0.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-2.2.0.tgz#75f502eeaf9ffde42fc98829645be4ea76bd9e2d" + integrity sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA== + +body-parser@1.20.0: + version "1.20.0" + resolved "https://registry.yarnpkg.com/body-parser/-/body-parser-1.20.0.tgz#3de69bd89011c11573d7bfee6a64f11b6bd27cc5" + integrity sha512-DfJ+q6EPcGKZD1QWUjSpqp+Q7bDQTsQIF4zfUAtZ6qk+H/3/QRhg9CEp39ss+/T2vw0+HaidC0ecJj/DRLIaKg== + dependencies: + bytes "3.1.2" + content-type "~1.0.4" + debug "2.6.9" + depd "2.0.0" + destroy "1.2.0" + http-errors "2.0.0" + iconv-lite "0.4.24" + on-finished "2.4.1" + qs "6.10.3" + raw-body "2.5.1" + type-is "~1.6.18" + unpipe "1.0.0" + +bonjour-service@^1.0.11: + version "1.0.13" + resolved "https://registry.yarnpkg.com/bonjour-service/-/bonjour-service-1.0.13.tgz#4ac003dc1626023252d58adf2946f57e5da450c1" + integrity sha512-LWKRU/7EqDUC9CTAQtuZl5HzBALoCYwtLhffW3et7vZMwv3bWLpJf8bRYlMD5OCcDpTfnPgNCV4yo9ZIaJGMiA== + dependencies: + array-flatten "^2.1.2" + dns-equal "^1.0.0" + fast-deep-equal "^3.1.3" + multicast-dns "^7.2.5" + +boolbase@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/boolbase/-/boolbase-1.0.0.tgz#68dff5fbe60c51eb37725ea9e3ed310dcc1e776e" + integrity sha512-JZOSA7Mo9sNGB8+UjSgzdLtokWAky1zbztM3WRLCbZ70/3cTANmQmOdR7y2g+J0e2WXywy1yS468tY+IruqEww== + +bootstrap@^4.5.3: + version "4.6.2" + 
resolved "https://registry.yarnpkg.com/bootstrap/-/bootstrap-4.6.2.tgz#8e0cd61611728a5bf65a3a2b8d6ff6c77d5d7479" + integrity sha512-51Bbp/Uxr9aTuy6ca/8FbFloBUJZLHwnhTcnjIeRn2suQWsWzcuJhGjKDB5eppVte/8oCdOL3VuwxvZDUggwGQ== + +brace-expansion@^1.1.7: + version "1.1.11" + resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.11.tgz#3c7fcbf529d87226f3d2f52b966ff5271eb441dd" + integrity sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA== + dependencies: + balanced-match "^1.0.0" + concat-map "0.0.1" + +braces@^2.3.1: + version "2.3.2" + resolved "https://registry.yarnpkg.com/braces/-/braces-2.3.2.tgz#5979fd3f14cd531565e5fa2df1abfff1dfaee729" + integrity sha512-aNdbnj9P8PjdXU4ybaWLK2IF3jc/EoDYbC7AazW6to3TRsfXxscC9UXOB5iDiEQrkyIbWp2SLQda4+QAa7nc3w== + dependencies: + arr-flatten "^1.1.0" + array-unique "^0.3.2" + extend-shallow "^2.0.1" + fill-range "^4.0.0" + isobject "^3.0.1" + repeat-element "^1.1.2" + snapdragon "^0.8.1" + snapdragon-node "^2.0.1" + split-string "^3.0.2" + to-regex "^3.0.1" + +braces@^3.0.2, braces@~3.0.2: + version "3.0.2" + resolved "https://registry.yarnpkg.com/braces/-/braces-3.0.2.tgz#3454e1a462ee8d599e236df336cd9ea4f8afe107" + integrity sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A== + dependencies: + fill-range "^7.0.1" + +browser-process-hrtime@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/browser-process-hrtime/-/browser-process-hrtime-1.0.0.tgz#3c9b4b7d782c8121e56f10106d84c0d0ffc94626" + integrity sha512-9o5UecI3GhkpM6DrXr69PblIuWxPKk9Y0jHBRhdocZ2y7YECBFCsHm79Pr3OyR2AvjhDkabFJaDJMYRazHgsow== + +browserslist@^4.14.5, browserslist@^4.20.2, browserslist@^4.21.3: + version "4.21.3" + resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.21.3.tgz#5df277694eb3c48bc5c4b05af3e8b7e09c5a6d1a" + integrity sha512-898rgRXLAyRkM1GryrrBHGkqA5hlpkV5MhtZwg9QXeiyLUYs2k00Un05aX5l2/yJIOObYKOpS2JNo8nJDE7fWQ== + 
dependencies: + caniuse-lite "^1.0.30001370" + electron-to-chromium "^1.4.202" + node-releases "^2.0.6" + update-browserslist-db "^1.0.5" + +bser@2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/bser/-/bser-2.1.1.tgz#e6787da20ece9d07998533cfd9de6f5c38f4bc05" + integrity sha512-gQxTNE/GAfIIrmHLUE3oJyp5FO6HRBfhjnw4/wMmA63ZGDJnWBmgY/lyQBpnDUkGmAhbSe39tx2d/iTOAfglwQ== + dependencies: + node-int64 "^0.4.0" + +buffer-from@^1.0.0: + version "1.1.2" + resolved "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.2.tgz#2b146a6fd72e80b4f55d255f35ed59a3a9a41bd5" + integrity sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ== + +bytes@3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/bytes/-/bytes-3.0.0.tgz#d32815404d689699f85a4ea4fa8755dd13a96048" + integrity sha512-pMhOfFDPiv9t5jjIXkHosWmkSyQbvsgEVNkz0ERHbuLh2T/7j4Mqqpz523Fe8MVY89KC6Sh/QfS2sM+SjgFDcw== + +bytes@3.1.2: + version "3.1.2" + resolved "https://registry.yarnpkg.com/bytes/-/bytes-3.1.2.tgz#8b0beeb98605adf1b128fa4386403c009e0221a5" + integrity sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg== + +cache-base@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/cache-base/-/cache-base-1.0.1.tgz#0a7f46416831c8b662ee36fe4e7c59d76f666ab2" + integrity sha512-AKcdTnFSWATd5/GCPRxr2ChwIJ85CeyrEyjRHlKxQ56d4XJMGym0uAiKn0xbLOGOl3+yRpOTi484dVCEc5AUzQ== + dependencies: + collection-visit "^1.0.0" + component-emitter "^1.2.1" + get-value "^2.0.6" + has-value "^1.0.0" + isobject "^3.0.1" + set-value "^2.0.0" + to-object-path "^0.3.0" + union-value "^1.0.0" + unset-value "^1.0.0" + +call-bind@^1.0.0, call-bind@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/call-bind/-/call-bind-1.0.2.tgz#b1d4e89e688119c3c9a903ad30abb2f6a919be3c" + integrity sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA== + dependencies: + function-bind 
"^1.1.1" + get-intrinsic "^1.0.2" + +callsites@^3.0.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/callsites/-/callsites-3.1.0.tgz#b3630abd8943432f54b3f0519238e33cd7df2f73" + integrity sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ== + +camelcase@^5.0.0, camelcase@^5.3.1: + version "5.3.1" + resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-5.3.1.tgz#e3c9b31569e106811df242f715725a1f4c494320" + integrity sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg== + +camelcase@^6.0.0: + version "6.3.0" + resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-6.3.0.tgz#5685b95eb209ac9c0c177467778c9c84df58ba9a" + integrity sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA== + +caniuse-lite@^1.0.30001370: + version "1.0.30001373" + resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001373.tgz#2dc3bc3bfcb5d5a929bec11300883040d7b4b4be" + integrity sha512-pJYArGHrPp3TUqQzFYRmP/lwJlj8RCbVe3Gd3eJQkAV8SAC6b19XS9BjMvRdvaS8RMkaTN8ZhoHP6S1y8zzwEQ== + +capture-exit@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/capture-exit/-/capture-exit-2.0.0.tgz#fb953bfaebeb781f62898239dabb426d08a509a4" + integrity sha512-PiT/hQmTonHhl/HFGN+Lx3JJUznrVYJ3+AQsnthneZbvW7x+f08Tk7yLJTLEOUvBTbduLeeBkxEaYXUOUrRq6g== + dependencies: + rsvp "^4.8.4" + +chalk@^2.0.0: + version "2.4.2" + resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424" + integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ== + dependencies: + ansi-styles "^3.2.1" + escape-string-regexp "^1.0.5" + supports-color "^5.3.0" + +chalk@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/chalk/-/chalk-3.0.0.tgz#3f73c2bf526591f574cc492c51e2456349f844e4" + integrity 
sha512-4D3B6Wf41KOYRFdszmDqMCGq5VV/uMAB273JILmO+3jAlh8X4qDtdtgCR3fxtbLEMzSx22QdhnDcJvu2u1fVwg== + dependencies: + ansi-styles "^4.1.0" + supports-color "^7.1.0" + +chalk@^4.0.0, chalk@^4.1.0: + version "4.1.2" + resolved "https://registry.yarnpkg.com/chalk/-/chalk-4.1.2.tgz#aac4e2b7734a740867aeb16bf02aad556a1e7a01" + integrity sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA== + dependencies: + ansi-styles "^4.1.0" + supports-color "^7.1.0" + +change-emitter@^0.1.2: + version "0.1.6" + resolved "https://registry.yarnpkg.com/change-emitter/-/change-emitter-0.1.6.tgz#e8b2fe3d7f1ab7d69a32199aff91ea6931409515" + integrity sha512-YXzt1cQ4a2jqazhcuSWEOc1K2q8g9H6eWNsyZgi640LDzRWVQ2eDe+Y/kVdftH+vYdPF2rgDb3dLdpxE1jvAxw== + +char-regex@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/char-regex/-/char-regex-1.0.2.tgz#d744358226217f981ed58f479b1d6bcc29545dcf" + integrity sha512-kWWXztvZ5SBQV+eRgKFeh8q5sLuZY2+8WUIzlxWVTg+oGwY14qylx1KbKzHd8P6ZYkAg0xyIDU9JMHhyJMZ1jw== + +cheerio-select@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/cheerio-select/-/cheerio-select-2.1.0.tgz#4d8673286b8126ca2a8e42740d5e3c4884ae21b4" + integrity sha512-9v9kG0LvzrlcungtnJtpGNxY+fzECQKhK4EGJX2vByejiMX84MFNQw4UxPJl3bFbTMw+Dfs37XaIkCwTZfLh4g== + dependencies: + boolbase "^1.0.0" + css-select "^5.1.0" + css-what "^6.1.0" + domelementtype "^2.3.0" + domhandler "^5.0.3" + domutils "^3.0.1" + +cheerio@^1.0.0-rc.3: + version "1.0.0-rc.12" + resolved "https://registry.yarnpkg.com/cheerio/-/cheerio-1.0.0-rc.12.tgz#788bf7466506b1c6bf5fae51d24a2c4d62e47683" + integrity sha512-VqR8m68vM46BNnuZ5NtnGBKIE/DfN0cRIzg9n40EIq9NOv90ayxLBXA8fXC5gquFRGJSTRqBq25Jt2ECLR431Q== + dependencies: + cheerio-select "^2.1.0" + dom-serializer "^2.0.0" + domhandler "^5.0.3" + domutils "^3.0.1" + htmlparser2 "^8.0.1" + parse5 "^7.0.0" + parse5-htmlparser2-tree-adapter "^7.0.0" + +chokidar@^3.5.3: + version "3.5.3" + resolved 
"https://registry.yarnpkg.com/chokidar/-/chokidar-3.5.3.tgz#1cf37c8707b932bd1af1ae22c0432e2acd1903bd" + integrity sha512-Dr3sfKRP6oTcjf2JmUmFJfeVMvXBdegxB0iVQ5eb2V10uFJUCAS8OByZdVAyVb8xXNz3GjjTgj9kLWsZTqE6kw== + dependencies: + anymatch "~3.1.2" + braces "~3.0.2" + glob-parent "~5.1.2" + is-binary-path "~2.1.0" + is-glob "~4.0.1" + normalize-path "~3.0.0" + readdirp "~3.6.0" + optionalDependencies: + fsevents "~2.3.2" + +chrome-trace-event@^1.0.2: + version "1.0.3" + resolved "https://registry.yarnpkg.com/chrome-trace-event/-/chrome-trace-event-1.0.3.tgz#1015eced4741e15d06664a957dbbf50d041e26ac" + integrity sha512-p3KULyQg4S7NIHixdwbGX+nFHkoBiA4YQmyWtjb8XngSKV124nJmRysgAeujbUVb15vh+RvFUfCPqU7rXk+hZg== + +ci-info@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/ci-info/-/ci-info-2.0.0.tgz#67a9e964be31a51e15e5010d58e6f12834002f46" + integrity sha512-5tK7EtrZ0N+OLFMthtqOj4fI2Jeb88C4CAZPu25LDVUgXJ0A3Js4PMGqrn0JU1W0Mh1/Z8wZzYPxqUrXeBboCQ== + +cjs-module-lexer@^0.6.0: + version "0.6.0" + resolved "https://registry.yarnpkg.com/cjs-module-lexer/-/cjs-module-lexer-0.6.0.tgz#4186fcca0eae175970aee870b9fe2d6cf8d5655f" + integrity sha512-uc2Vix1frTfnuzxxu1Hp4ktSvM3QaI4oXl4ZUqL1wjTu/BGki9TrCWoqLTg/drR1KwAEarXuRFCG2Svr1GxPFw== + +class-utils@^0.3.5: + version "0.3.6" + resolved "https://registry.yarnpkg.com/class-utils/-/class-utils-0.3.6.tgz#f93369ae8b9a7ce02fd41faad0ca83033190c463" + integrity sha512-qOhPa/Fj7s6TY8H8esGu5QNpMMQxz79h+urzrNYN6mn+9BnxlDGf5QZ+XeCDsxSjPqsSR56XOZOJmpeurnLMeg== + dependencies: + arr-union "^3.1.0" + define-property "^0.2.5" + isobject "^3.0.0" + static-extend "^0.1.1" + +classnames@^2.3.1: + version "2.3.1" + resolved "https://registry.yarnpkg.com/classnames/-/classnames-2.3.1.tgz#dfcfa3891e306ec1dad105d0e88f4417b8535e8e" + integrity sha512-OlQdbZ7gLfGarSqxesMesDa5uz7KFbID8Kpq/SxIoNGDqY8lSYs0D+hhtBXhcdB3rcbXArFr7vlHheLk1voeNA== + +cliui@^6.0.0: + version "6.0.0" + resolved 
"https://registry.yarnpkg.com/cliui/-/cliui-6.0.0.tgz#511d702c0c4e41ca156d7d0e96021f23e13225b1" + integrity sha512-t6wbgtoCXvAzst7QgXxJYqPt0usEfbgQdftEPbLL/cvv6HPE5VgvqCuAIDR0NgU52ds6rFwqrgakNLrHEjCbrQ== + dependencies: + string-width "^4.2.0" + strip-ansi "^6.0.0" + wrap-ansi "^6.2.0" + +clone-deep@^4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/clone-deep/-/clone-deep-4.0.1.tgz#c19fd9bdbbf85942b4fd979c84dcf7d5f07c2387" + integrity sha512-neHB9xuzh/wk0dIHweyAXv2aPGZIVk3pLMe+/RNzINf17fe0OG96QroktYAUm7SM1PBnzTabaLboqqxDyMU+SQ== + dependencies: + is-plain-object "^2.0.4" + kind-of "^6.0.2" + shallow-clone "^3.0.0" + +co@^4.6.0: + version "4.6.0" + resolved "https://registry.yarnpkg.com/co/-/co-4.6.0.tgz#6ea6bdf3d853ae54ccb8e47bfa0bf3f9031fb184" + integrity sha512-QVb0dM5HvG+uaxitm8wONl7jltx8dqhfU33DcqtOZcLSVIKSDDLDi7+0LbAKiyI8hD9u42m2YxXSkMGWThaecQ== + +collect-v8-coverage@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/collect-v8-coverage/-/collect-v8-coverage-1.0.1.tgz#cc2c8e94fc18bbdffe64d6534570c8a673b27f59" + integrity sha512-iBPtljfCNcTKNAto0KEtDfZ3qzjJvqE3aTGZsbhjSBlorqpXJlaWWtPO35D+ZImoC3KWejX64o+yPGxhWSTzfg== + +collection-visit@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/collection-visit/-/collection-visit-1.0.0.tgz#4bc0373c164bc3291b4d368c829cf1a80a59dca0" + integrity sha512-lNkKvzEeMBBjUGHZ+q6z9pSJla0KWAQPvtzhEV9+iGyQYG+pBpl7xKDhxoNSOZH2hhv0v5k0y2yAM4o4SjoSkw== + dependencies: + map-visit "^1.0.0" + object-visit "^1.0.0" + +color-convert@^1.9.0: + version "1.9.3" + resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-1.9.3.tgz#bb71850690e1f136567de629d2d5471deda4c1e8" + integrity sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg== + dependencies: + color-name "1.1.3" + +color-convert@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-2.0.1.tgz#72d3a68d598c9bdb3af2ad1e84f21d896abd4de3" + 
integrity sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ== + dependencies: + color-name "~1.1.4" + +color-name@1.1.3: + version "1.1.3" + resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.3.tgz#a7d0558bd89c42f795dd42328f740831ca53bc25" + integrity sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw== + +color-name@~1.1.4: + version "1.1.4" + resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.4.tgz#c2a09a87acbde69543de6f63fa3995c826c536a2" + integrity sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA== + +colorette@^2.0.10, colorette@^2.0.14: + version "2.0.19" + resolved "https://registry.yarnpkg.com/colorette/-/colorette-2.0.19.tgz#cdf044f47ad41a0f4b56b3a0d5b4e6e1a2d5a798" + integrity sha512-3tlv/dIP7FWvj3BsbHrGLJ6l/oKh1O3TcgBqMn+yyCagOxc23fyzDS6HypQbgxWbkpDnf52p1LuR4eWDQ/K9WQ== + +combined-stream@^1.0.8: + version "1.0.8" + resolved "https://registry.yarnpkg.com/combined-stream/-/combined-stream-1.0.8.tgz#c3d45a8b34fd730631a110a8a2520682b31d5a7f" + integrity sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg== + dependencies: + delayed-stream "~1.0.0" + +commander@^2.19.0, commander@^2.20.0: + version "2.20.3" + resolved "https://registry.yarnpkg.com/commander/-/commander-2.20.3.tgz#fd485e84c03eb4881c20722ba48035e8531aeb33" + integrity sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ== + +commander@^7.0.0: + version "7.2.0" + resolved "https://registry.yarnpkg.com/commander/-/commander-7.2.0.tgz#a36cb57d0b501ce108e4d20559a150a391d97ab7" + integrity sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw== + +commondir@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/commondir/-/commondir-1.0.1.tgz#ddd800da0c66127393cca5950ea968a3aaf1253b" + integrity 
sha512-W9pAhw0ja1Edb5GVdIF1mjZw/ASI0AlShXM83UUGe2DVr5TdAPEA1OA8m/g8zWp9x6On7gqufY+FatDbC3MDQg== + +component-emitter@^1.2.1: + version "1.3.0" + resolved "https://registry.yarnpkg.com/component-emitter/-/component-emitter-1.3.0.tgz#16e4070fba8ae29b679f2215853ee181ab2eabc0" + integrity sha512-Rd3se6QB+sO1TwqZjscQrurpEPIfO0/yYnSin6Q/rD3mOutHvUrCAhJub3r90uNb+SESBuE0QYoB90YdfatsRg== + +compressible@~2.0.16: + version "2.0.18" + resolved "https://registry.yarnpkg.com/compressible/-/compressible-2.0.18.tgz#af53cca6b070d4c3c0750fbd77286a6d7cc46fba" + integrity sha512-AF3r7P5dWxL8MxyITRMlORQNaOA2IkAFaTr4k7BUumjPtRpGDTZpl0Pb1XCO6JeDCBdp126Cgs9sMxqSjgYyRg== + dependencies: + mime-db ">= 1.43.0 < 2" + +compression@^1.7.4: + version "1.7.4" + resolved "https://registry.yarnpkg.com/compression/-/compression-1.7.4.tgz#95523eff170ca57c29a0ca41e6fe131f41e5bb8f" + integrity sha512-jaSIDzP9pZVS4ZfQ+TzvtiWhdpFhE2RDHz8QJkpX9SIpLq88VueF5jJw6t+6CUQcAoA6t+x89MLrWAqpfDE8iQ== + dependencies: + accepts "~1.3.5" + bytes "3.0.0" + compressible "~2.0.16" + debug "2.6.9" + on-headers "~1.0.2" + safe-buffer "5.1.2" + vary "~1.1.2" + +concat-map@0.0.1: + version "0.0.1" + resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b" + integrity sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg== + +connect-history-api-fallback@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/connect-history-api-fallback/-/connect-history-api-fallback-2.0.0.tgz#647264845251a0daf25b97ce87834cace0f5f1c8" + integrity sha512-U73+6lQFmfiNPrYbXqr6kZ1i1wiRqXnp2nhMsINseWXO8lDau0LGEffJ8kQi4EjLZympVgRdvqjAgiZ1tgzDDA== + +content-disposition@0.5.4: + version "0.5.4" + resolved "https://registry.yarnpkg.com/content-disposition/-/content-disposition-0.5.4.tgz#8b82b4efac82512a02bb0b1dcec9d2c5e8eb5bfe" + integrity sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ== + 
dependencies: + safe-buffer "5.2.1" + +content-type@~1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/content-type/-/content-type-1.0.4.tgz#e138cc75e040c727b1966fe5e5f8c9aee256fe3b" + integrity sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA== + +convert-source-map@^1.4.0, convert-source-map@^1.6.0, convert-source-map@^1.7.0: + version "1.8.0" + resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.8.0.tgz#f3373c32d21b4d780dd8004514684fb791ca4369" + integrity sha512-+OQdjP49zViI/6i7nIJpA8rAl4sV/JdPfU9nZs3VqOwGIgizICvuN2ru6fMd+4llL0tar18UYJXfZ/TWtmhUjA== + dependencies: + safe-buffer "~5.1.1" + +cookie-signature@1.0.6: + version "1.0.6" + resolved "https://registry.yarnpkg.com/cookie-signature/-/cookie-signature-1.0.6.tgz#e303a882b342cc3ee8ca513a79999734dab3ae2c" + integrity sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ== + +cookie@0.5.0: + version "0.5.0" + resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.5.0.tgz#d1f5d71adec6558c58f389987c366aa47e994f8b" + integrity sha512-YZ3GUyn/o8gfKJlnlX7g7xq4gyO6OSuhGPKaaGssGB2qgDUS0gPgtTvoyZLTt9Ab6dC4hfc9dV5arkvc/OCmrw== + +copy-descriptor@^0.1.0: + version "0.1.1" + resolved "https://registry.yarnpkg.com/copy-descriptor/-/copy-descriptor-0.1.1.tgz#676f6eb3c39997c2ee1ac3a924fd6124748f578d" + integrity sha512-XgZ0pFcakEUlbwQEVNg3+QAis1FyTL3Qel9FYy8pSkQqoG3PNoT0bOCQtOXcOkur21r2Eq2kI+IE+gsmAEVlYw== + +core-js-compat@^3.21.0, core-js-compat@^3.22.1: + version "3.24.1" + resolved "https://registry.yarnpkg.com/core-js-compat/-/core-js-compat-3.24.1.tgz#d1af84a17e18dfdd401ee39da9996f9a7ba887de" + integrity sha512-XhdNAGeRnTpp8xbD+sR/HFDK9CbeeeqXT6TuofXh3urqEevzkWmLRgrVoykodsw8okqo2pu1BOmuCKrHx63zdw== + dependencies: + browserslist "^4.21.3" + semver "7.0.0" + +core-util-is@~1.0.0: + version "1.0.3" + resolved 
"https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.3.tgz#a6042d3634c2b27e9328f837b965fac83808db85" + integrity sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ== + +cross-spawn@^6.0.0: + version "6.0.5" + resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-6.0.5.tgz#4a5ec7c64dfae22c3a14124dbacdee846d80cbc4" + integrity sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ== + dependencies: + nice-try "^1.0.4" + path-key "^2.0.1" + semver "^5.5.0" + shebang-command "^1.2.0" + which "^1.2.9" + +cross-spawn@^7.0.0, cross-spawn@^7.0.2, cross-spawn@^7.0.3: + version "7.0.3" + resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-7.0.3.tgz#f73a85b9d5d41d045551c177e2882d4ac85728a6" + integrity sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w== + dependencies: + path-key "^3.1.0" + shebang-command "^2.0.0" + which "^2.0.1" + +css-loader@^5.0.1: + version "5.2.7" + resolved "https://registry.yarnpkg.com/css-loader/-/css-loader-5.2.7.tgz#9b9f111edf6fb2be5dc62525644cbc9c232064ae" + integrity sha512-Q7mOvpBNBG7YrVGMxRxcBJZFL75o+cH2abNASdibkj/fffYD8qWbInZrD0S9ccI6vZclF3DsHE7njGlLtaHbhg== + dependencies: + icss-utils "^5.1.0" + loader-utils "^2.0.0" + postcss "^8.2.15" + postcss-modules-extract-imports "^3.0.0" + postcss-modules-local-by-default "^4.0.0" + postcss-modules-scope "^3.0.0" + postcss-modules-values "^4.0.0" + postcss-value-parser "^4.1.0" + schema-utils "^3.0.0" + semver "^7.3.5" + +css-select@^5.1.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/css-select/-/css-select-5.1.0.tgz#b8ebd6554c3637ccc76688804ad3f6a6fdaea8a6" + integrity sha512-nwoRF1rvRRnnCqqY7updORDsuqKzqYJ28+oSMaJMMgOauh3fvwHqMS7EZpIPqK8GL+g9mKxF1vP/ZjSeNjEVHg== + dependencies: + boolbase "^1.0.0" + css-what "^6.1.0" + domhandler "^5.0.2" + domutils "^3.0.1" + nth-check "^2.0.1" + +css-what@^6.1.0: + version "6.1.0" + resolved 
"https://registry.yarnpkg.com/css-what/-/css-what-6.1.0.tgz#fb5effcf76f1ddea2c81bdfaa4de44e79bac70f4" + integrity sha512-HTUrgRJ7r4dsZKU6GjmpfRK1O76h97Z8MfS1G0FozR+oF2kG6Vfe8JE6zwrkbxigziPHinCJ+gCPjA9EaBDtRw== + +css.escape@^1.5.1: + version "1.5.1" + resolved "https://registry.yarnpkg.com/css.escape/-/css.escape-1.5.1.tgz#42e27d4fa04ae32f931a4b4d4191fa9cddee97cb" + integrity sha512-YUifsXXuknHlUsmlgyY0PKzgPOr7/FjCePfHNt0jxm83wHZi44VDMQ7/fGNkjY3/jV1MC+1CmZbaHzugyeRtpg== + +css@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/css/-/css-3.0.0.tgz#4447a4d58fdd03367c516ca9f64ae365cee4aa5d" + integrity sha512-DG9pFfwOrzc+hawpmqX/dHYHJG+Bsdb0klhyi1sDneOgGOXy9wQIC8hzyVp1e4NRYDBdxcylvywPkkXCHAzTyQ== + dependencies: + inherits "^2.0.4" + source-map "^0.6.1" + source-map-resolve "^0.6.0" + +cssesc@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/cssesc/-/cssesc-3.0.0.tgz#37741919903b868565e1c09ea747445cd18983ee" + integrity sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg== + +cssom@^0.4.4: + version "0.4.4" + resolved "https://registry.yarnpkg.com/cssom/-/cssom-0.4.4.tgz#5a66cf93d2d0b661d80bf6a44fb65f5c2e4e0a10" + integrity sha512-p3pvU7r1MyyqbTk+WbNJIgJjG2VmTIaB10rI93LzVPrmDJKkzKYMtxxyAvQXR/NS6otuzveI7+7BBq3SjBS2mw== + +cssom@~0.3.6: + version "0.3.8" + resolved "https://registry.yarnpkg.com/cssom/-/cssom-0.3.8.tgz#9f1276f5b2b463f2114d3f2c75250af8c1a36f4a" + integrity sha512-b0tGHbfegbhPJpxpiBPU2sCkigAqtM9O121le6bbOlgyV+NyGyCmVfJ6QW9eRjz8CpNfWEOYBIMIGRYkLwsIYg== + +cssstyle@^2.3.0: + version "2.3.0" + resolved "https://registry.yarnpkg.com/cssstyle/-/cssstyle-2.3.0.tgz#ff665a0ddbdc31864b09647f34163443d90b0852" + integrity sha512-AZL67abkUzIuvcHqk7c09cezpGNcxUxU4Ioi/05xHk4DQeTkWmGYftIE6ctU6AEt+Gn4n1lDStOtj7FKycP71A== + dependencies: + cssom "~0.3.6" + +csstype@^3.0.2: + version "3.1.0" + resolved "https://registry.yarnpkg.com/csstype/-/csstype-3.1.0.tgz#4ddcac3718d787cf9df0d1b7d15033925c8f29f2" 
+ integrity sha512-uX1KG+x9h5hIJsaKR9xHUeUraxf8IODOwq9JLNPq6BwB04a/xgpq3rcx47l5BZu5zBPlgD342tdke3Hom/nJRA== + +data-urls@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/data-urls/-/data-urls-2.0.0.tgz#156485a72963a970f5d5821aaf642bef2bf2db9b" + integrity sha512-X5eWTSXO/BJmpdIKCRuKUgSCgAN0OwliVK3yPKbwIWU1Tdw5BRajxlzMidvh+gwko9AfQ9zIj52pzF91Q3YAvQ== + dependencies: + abab "^2.0.3" + whatwg-mimetype "^2.3.0" + whatwg-url "^8.0.0" + +debug@2.6.9, debug@^2.2.0, debug@^2.3.3: + version "2.6.9" + resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f" + integrity sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA== + dependencies: + ms "2.0.0" + +debug@4, debug@^4.0.1, debug@^4.1.0, debug@^4.1.1: + version "4.3.4" + resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.4.tgz#1319f6579357f2338d3337d2cdd4914bb5dcc865" + integrity sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ== + dependencies: + ms "2.1.2" + +decamelize@^1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/decamelize/-/decamelize-1.2.0.tgz#f6534d15148269b20352e7bee26f501f9a191290" + integrity sha512-z2S+W9X73hAUUki+N+9Za2lBlun89zigOyGrsax+KUQ6wKW4ZoWpEYBkGhQjwAjjDCkWxhY0VKEhk8wzY7F5cA== + +decimal.js@^10.2.1: + version "10.3.1" + resolved "https://registry.yarnpkg.com/decimal.js/-/decimal.js-10.3.1.tgz#d8c3a444a9c6774ba60ca6ad7261c3a94fd5e783" + integrity sha512-V0pfhfr8suzyPGOx3nmq4aHqabehUZn6Ch9kyFpV79TGDTWFmHqUqXdabR7QHqxzrYolF4+tVmJhUG4OURg5dQ== + +decode-uri-component@^0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/decode-uri-component/-/decode-uri-component-0.2.0.tgz#eb3913333458775cb84cd1a1fae062106bb87545" + integrity sha512-hjf+xovcEn31w/EUYdTXQh/8smFL/dzYjohQGEIgjyNavaJfBY2p5F527Bo1VPATxv0VYTUC2bOcXvqFwk78Og== + +deep-is@^0.1.3, deep-is@~0.1.3: + version "0.1.4" + resolved 
"https://registry.yarnpkg.com/deep-is/-/deep-is-0.1.4.tgz#a6f2dce612fadd2ef1f519b73551f17e85199831" + integrity sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ== + +deepmerge@^4.2.2: + version "4.2.2" + resolved "https://registry.yarnpkg.com/deepmerge/-/deepmerge-4.2.2.tgz#44d2ea3679b8f4d4ffba33f03d865fc1e7bf4955" + integrity sha512-FJ3UgI4gIl+PHZm53knsuSFpE+nESMr7M4v9QcgB7S63Kj/6WqMiFQJpBBYz1Pt+66bZpP3Q7Lye0Oo9MPKEdg== + +default-gateway@^6.0.3: + version "6.0.3" + resolved "https://registry.yarnpkg.com/default-gateway/-/default-gateway-6.0.3.tgz#819494c888053bdb743edbf343d6cdf7f2943a71" + integrity sha512-fwSOJsbbNzZ/CUFpqFBqYfYNLj1NbMPm8MMCIzHjC83iSJRBEGmDUxU+WP661BaBQImeC2yHwXtz+P/O9o+XEg== + dependencies: + execa "^5.0.0" + +define-lazy-prop@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/define-lazy-prop/-/define-lazy-prop-2.0.0.tgz#3f7ae421129bcaaac9bc74905c98a0009ec9ee7f" + integrity sha512-Ds09qNh8yw3khSjiJjiUInaGX9xlqZDY7JVryGxdxV7NPeuqQfplOpQ66yJFZut3jLa5zOwkXw1g9EI2uKh4Og== + +define-properties@^1.1.3, define-properties@^1.1.4: + version "1.1.4" + resolved "https://registry.yarnpkg.com/define-properties/-/define-properties-1.1.4.tgz#0b14d7bd7fbeb2f3572c3a7eda80ea5d57fb05b1" + integrity sha512-uckOqKcfaVvtBdsVkdPv3XjveQJsNQqmhXgRi8uhvWWuPYZCNlzT8qAyblUgNoXdHdjMTzAqeGjAoli8f+bzPA== + dependencies: + has-property-descriptors "^1.0.0" + object-keys "^1.1.1" + +define-property@^0.2.5: + version "0.2.5" + resolved "https://registry.yarnpkg.com/define-property/-/define-property-0.2.5.tgz#c35b1ef918ec3c990f9a5bc57be04aacec5c8116" + integrity sha512-Rr7ADjQZenceVOAKop6ALkkRAmH1A4Gx9hV/7ZujPUN2rkATqFO0JZLZInbAjpZYoJ1gUx8MRMQVkYemcbMSTA== + dependencies: + is-descriptor "^0.1.0" + +define-property@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/define-property/-/define-property-1.0.0.tgz#769ebaaf3f4a63aad3af9e8d304c9bbe79bfb0e6" + integrity 
sha512-cZTYKFWspt9jZsMscWo8sc/5lbPC9Q0N5nBLgb+Yd915iL3udB1uFgS3B8YCx66UVHq018DAVFoee7x+gxggeA== + dependencies: + is-descriptor "^1.0.0" + +define-property@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/define-property/-/define-property-2.0.2.tgz#d459689e8d654ba77e02a817f8710d702cb16e9d" + integrity sha512-jwK2UV4cnPpbcG7+VRARKTZPUWowwXA8bzH5NP6ud0oeAxyYPuGZUAC7hMugpCdz4BeSZl2Dl9k66CHJ/46ZYQ== + dependencies: + is-descriptor "^1.0.2" + isobject "^3.0.1" + +delayed-stream@~1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619" + integrity sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ== + +depd@2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/depd/-/depd-2.0.0.tgz#b696163cc757560d09cf22cc8fad1571b79e76df" + integrity sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw== + +depd@~1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/depd/-/depd-1.1.2.tgz#9bcd52e14c097763e749b274c4346ed2e560b5a9" + integrity sha512-7emPTl6Dpo6JRXOXjLRxck+FlLRX5847cLKEn00PLAgc3g2hTZZgr+e4c2v6QpSmLeFP3n5yUo7ft6avBK/5jQ== + +dequal@^2.0.2: + version "2.0.3" + resolved "https://registry.yarnpkg.com/dequal/-/dequal-2.0.3.tgz#2644214f1997d39ed0ee0ece72335490a7ac67be" + integrity sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA== + +destroy@1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/destroy/-/destroy-1.2.0.tgz#4803735509ad8be552934c67df614f94e66fa015" + integrity sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg== + +detect-newline@^3.0.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/detect-newline/-/detect-newline-3.1.0.tgz#576f5dfc63ae1a192ff192d8ad3af6308991b651" + integrity 
sha512-TLz+x/vEXm/Y7P7wn1EJFNLxYpUD4TgMosxY6fAVJUnJMbupHBOncxyWUG9OpTaH9EBD7uFI5LfEgmMOc54DsA== + +detect-node@^2.0.4: + version "2.1.0" + resolved "https://registry.yarnpkg.com/detect-node/-/detect-node-2.1.0.tgz#c9c70775a49c3d03bc2c06d9a73be550f978f8b1" + integrity sha512-T0NIuQpnTvFDATNuHN5roPwSBG83rFsuO+MXXH9/3N1eFbn4wcPjttvjMLEPWJ0RGUYgQE7cGgS3tNxbqCGM7g== + +diff-sequences@^26.6.2: + version "26.6.2" + resolved "https://registry.yarnpkg.com/diff-sequences/-/diff-sequences-26.6.2.tgz#48ba99157de1923412eed41db6b6d4aa9ca7c0b1" + integrity sha512-Mv/TDa3nZ9sbc5soK+OoA74BsS3mL37yixCvUAQkiuA4Wz6YtwP/K47n2rv2ovzHZvoiQeA5FTQOschKkEwB0Q== + +diff-sequences@^28.1.1: + version "28.1.1" + resolved "https://registry.yarnpkg.com/diff-sequences/-/diff-sequences-28.1.1.tgz#9989dc731266dc2903457a70e996f3a041913ac6" + integrity sha512-FU0iFaH/E23a+a718l8Qa/19bF9p06kgE0KipMOMadwa3SjnaElKzPaUC0vnibs6/B/9ni97s61mcejk8W1fQw== + +diff@^5.0.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/diff/-/diff-5.1.0.tgz#bc52d298c5ea8df9194800224445ed43ffc87e40" + integrity sha512-D+mk+qE8VC/PAUrlAU34N+VfXev0ghe5ywmpqrawphmVZc1bEfn56uo9qpyGp1p4xpzOHkSW4ztBd6L7Xx4ACw== + +discontinuous-range@1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/discontinuous-range/-/discontinuous-range-1.0.0.tgz#e38331f0844bba49b9a9cb71c771585aab1bc65a" + integrity sha512-c68LpLbO+7kP/b1Hr1qs8/BJ09F5khZGTxqxZuhzxpmwJKOgRFHJWIb9/KmqnqHhLdO55aOxFH/EGBvUQbL/RQ== + +dns-equal@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/dns-equal/-/dns-equal-1.0.0.tgz#b39e7f1da6eb0a75ba9c17324b34753c47e0654d" + integrity sha512-z+paD6YUQsk+AbGCEM4PrOXSss5gd66QfcVBFTKR/HpFL9jCqikS94HYwKww6fQyO7IxrIIyUu+g0Ka9tUS2Cg== + +dns-packet@^5.2.2: + version "5.4.0" + resolved "https://registry.yarnpkg.com/dns-packet/-/dns-packet-5.4.0.tgz#1f88477cf9f27e78a213fb6d118ae38e759a879b" + integrity sha512-EgqGeaBB8hLiHLZtp/IbaDQTL8pZ0+IvwzSHA6d7VyMDM+B9hgddEMa9xjK5oYnw0ci0JQ6g2XCD7/f6cafU6g== + 
dependencies: + "@leichtgewicht/ip-codec" "^2.0.1" + +doctrine@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/doctrine/-/doctrine-2.1.0.tgz#5cd01fc101621b42c4cd7f5d1a66243716d3f39d" + integrity sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw== + dependencies: + esutils "^2.0.2" + +doctrine@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/doctrine/-/doctrine-3.0.0.tgz#addebead72a6574db783639dc87a121773973961" + integrity sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w== + dependencies: + esutils "^2.0.2" + +dom-accessibility-api@^0.5.6, dom-accessibility-api@^0.5.9: + version "0.5.14" + resolved "https://registry.yarnpkg.com/dom-accessibility-api/-/dom-accessibility-api-0.5.14.tgz#56082f71b1dc7aac69d83c4285eef39c15d93f56" + integrity sha512-NMt+m9zFMPZe0JcY9gN224Qvk6qLIdqex29clBvc/y75ZBX9YA9wNK3frsYvu2DI1xcCIwxwnX+TlsJ2DSOADg== + +dom-helpers@^5.0.1, dom-helpers@^5.2.0, dom-helpers@^5.2.1: + version "5.2.1" + resolved "https://registry.yarnpkg.com/dom-helpers/-/dom-helpers-5.2.1.tgz#d9400536b2bf8225ad98fe052e029451ac40e902" + integrity sha512-nRCa7CK3VTrM2NmGkIy4cbK7IZlgBE/PYMn55rrXefr5xXDP0LdtfPnblFDoVdcAfslJ7or6iqAUnx0CCGIWQA== + dependencies: + "@babel/runtime" "^7.8.7" + csstype "^3.0.2" + +dom-serializer@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/dom-serializer/-/dom-serializer-2.0.0.tgz#e41b802e1eedf9f6cae183ce5e622d789d7d8e53" + integrity sha512-wIkAryiqt/nV5EQKqQpo3SToSOV9J0DnbJqwK7Wv/Trc92zIAYZ4FlMu+JPFW1DfGFt81ZTCGgDEabffXeLyJg== + dependencies: + domelementtype "^2.3.0" + domhandler "^5.0.2" + entities "^4.2.0" + +domelementtype@^2.3.0: + version "2.3.0" + resolved "https://registry.yarnpkg.com/domelementtype/-/domelementtype-2.3.0.tgz#5c45e8e869952626331d7aab326d01daf65d589d" + integrity sha512-OLETBj6w0OsagBwdXnPdN0cnMfF9opN69co+7ZrbfPGrdpPVNBUj02spi6B1N7wChLQiPn4CSH/zJvXw56gmHw== + 
+domexception@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/domexception/-/domexception-2.0.1.tgz#fb44aefba793e1574b0af6aed2801d057529f304" + integrity sha512-yxJ2mFy/sibVQlu5qHjOkf9J3K6zgmCxgJ94u2EdvDOV09H+32LtRswEcUsmUWN72pVLOEnTSRaIVVzVQgS0dg== + dependencies: + webidl-conversions "^5.0.0" + +domhandler@^5.0.1, domhandler@^5.0.2, domhandler@^5.0.3: + version "5.0.3" + resolved "https://registry.yarnpkg.com/domhandler/-/domhandler-5.0.3.tgz#cc385f7f751f1d1fc650c21374804254538c7d31" + integrity sha512-cgwlv/1iFQiFnU96XXgROh8xTeetsnJiDsTc7TYCLFd9+/WNkIqPTxiM/8pSd8VIrhXGTf1Ny1q1hquVqDJB5w== + dependencies: + domelementtype "^2.3.0" + +domutils@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/domutils/-/domutils-3.0.1.tgz#696b3875238338cb186b6c0612bd4901c89a4f1c" + integrity sha512-z08c1l761iKhDFtfXO04C7kTdPBLi41zwOZl00WS8b5eiaebNpY00HKbztwBq+e3vyqWNwWF3mP9YLUeqIrF+Q== + dependencies: + dom-serializer "^2.0.0" + domelementtype "^2.3.0" + domhandler "^5.0.1" + +ee-first@1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/ee-first/-/ee-first-1.1.1.tgz#590c61156b0ae2f4f0255732a158b266bc56b21d" + integrity sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow== + +electron-to-chromium@^1.4.202: + version "1.4.208" + resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.4.208.tgz#ecb5b47c8cc212a43172ffc5ce50178a638a5d74" + integrity sha512-diMr4t69FigAGUk2KovP0bygEtN/9AkqEVkzjEp0cu+zFFbZMVvwACpTTfuj1mAmFR5kNoSW8wGKDFWIvmThiQ== + +emittery@^0.7.1: + version "0.7.2" + resolved "https://registry.yarnpkg.com/emittery/-/emittery-0.7.2.tgz#25595908e13af0f5674ab419396e2fb394cdfa82" + integrity sha512-A8OG5SR/ij3SsJdWDJdkkSYUjQdCUx6APQXem0SaEePBSRg4eymGYwBkKo1Y6DU+af/Jn2dBQqDBvjnr9Vi8nQ== + +emoji-regex@^8.0.0: + version "8.0.0" + resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-8.0.0.tgz#e818fd69ce5ccfcb404594f842963bf53164cc37" + integrity 
sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A== + +emojis-list@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/emojis-list/-/emojis-list-3.0.0.tgz#5570662046ad29e2e916e71aae260abdff4f6a78" + integrity sha512-/kyM18EfinwXZbno9FyUGeFh87KC8HRQBQGildHZbEuRyWFOmv1U10o9BBp8XVZDVNNuQKyIGIu5ZYAAXJ0V2Q== + +encodeurl@~1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/encodeurl/-/encodeurl-1.0.2.tgz#ad3ff4c86ec2d029322f5a02c3a9a606c95b3f59" + integrity sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w== + +end-of-stream@^1.1.0: + version "1.4.4" + resolved "https://registry.yarnpkg.com/end-of-stream/-/end-of-stream-1.4.4.tgz#5ae64a5f45057baf3626ec14da0ca5e4b2431eb0" + integrity sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q== + dependencies: + once "^1.4.0" + +enhanced-resolve@^5.10.0: + version "5.10.0" + resolved "https://registry.yarnpkg.com/enhanced-resolve/-/enhanced-resolve-5.10.0.tgz#0dc579c3bb2a1032e357ac45b8f3a6f3ad4fb1e6" + integrity sha512-T0yTFjdpldGY8PmuXXR0PyQ1ufZpEGiHVrp7zHKB7jdR4qlmZHhONVM5AQOAWXuF/w3dnHbEQVrNptJgt7F+cQ== + dependencies: + graceful-fs "^4.2.4" + tapable "^2.2.0" + +enquirer@^2.3.5: + version "2.3.6" + resolved "https://registry.yarnpkg.com/enquirer/-/enquirer-2.3.6.tgz#2a7fe5dd634a1e4125a975ec994ff5456dc3734d" + integrity sha512-yjNnPr315/FjS4zIsUxYguYUPP2e1NK4d7E7ZOLiyYCcbFBiTMyID+2wvm2w6+pZ/odMA7cRkjhsPbltwBOrLg== + dependencies: + ansi-colors "^4.1.1" + +entities@^4.2.0, entities@^4.3.0: + version "4.3.1" + resolved "https://registry.yarnpkg.com/entities/-/entities-4.3.1.tgz#c34062a94c865c322f9d67b4384e4169bcede6a4" + integrity sha512-o4q/dYJlmyjP2zfnaWDUC6A3BQFmVTX+tZPezK7k0GLSU9QYCauscf5Y+qcEPzKL+EixVouYDgLQK5H9GrLpkg== + +envinfo@^7.7.3: + version "7.8.1" + resolved "https://registry.yarnpkg.com/envinfo/-/envinfo-7.8.1.tgz#06377e3e5f4d379fea7ac592d5ad8927e0c4d475" + 
integrity sha512-/o+BXHmB7ocbHEAs6F2EnG0ogybVVUdkRunTT2glZU9XAaGmhqskrvKwqXuDfNjEO0LZKWdejEEpnq8aM0tOaw== + +enzyme-shallow-equal@^1.0.0, enzyme-shallow-equal@^1.0.1: + version "1.0.4" + resolved "https://registry.yarnpkg.com/enzyme-shallow-equal/-/enzyme-shallow-equal-1.0.4.tgz#b9256cb25a5f430f9bfe073a84808c1d74fced2e" + integrity sha512-MttIwB8kKxypwHvRynuC3ahyNc+cFbR8mjVIltnmzQ0uKGqmsfO4bfBuLxb0beLNPhjblUEYvEbsg+VSygvF1Q== + dependencies: + has "^1.0.3" + object-is "^1.1.2" + +enzyme@^3.11.0: + version "3.11.0" + resolved "https://registry.yarnpkg.com/enzyme/-/enzyme-3.11.0.tgz#71d680c580fe9349f6f5ac6c775bc3e6b7a79c28" + integrity sha512-Dw8/Gs4vRjxY6/6i9wU0V+utmQO9kvh9XLnz3LIudviOnVYDEe2ec+0k+NQoMamn1VrjKgCUOWj5jG/5M5M0Qw== + dependencies: + array.prototype.flat "^1.2.3" + cheerio "^1.0.0-rc.3" + enzyme-shallow-equal "^1.0.1" + function.prototype.name "^1.1.2" + has "^1.0.3" + html-element-map "^1.2.0" + is-boolean-object "^1.0.1" + is-callable "^1.1.5" + is-number-object "^1.0.4" + is-regex "^1.0.5" + is-string "^1.0.5" + is-subset "^0.1.1" + lodash.escape "^4.0.1" + lodash.isequal "^4.5.0" + object-inspect "^1.7.0" + object-is "^1.0.2" + object.assign "^4.1.0" + object.entries "^1.1.1" + object.values "^1.1.1" + raf "^3.4.1" + rst-selector-parser "^2.2.3" + string.prototype.trim "^1.2.1" + +error-ex@^1.3.1: + version "1.3.2" + resolved "https://registry.yarnpkg.com/error-ex/-/error-ex-1.3.2.tgz#b4ac40648107fdcdcfae242f428bea8a14d4f1bf" + integrity sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g== + dependencies: + is-arrayish "^0.2.1" + +es-abstract@^1.19.0, es-abstract@^1.19.1, es-abstract@^1.19.2, es-abstract@^1.19.5: + version "1.20.1" + resolved "https://registry.yarnpkg.com/es-abstract/-/es-abstract-1.20.1.tgz#027292cd6ef44bd12b1913b828116f54787d1814" + integrity sha512-WEm2oBhfoI2sImeM4OF2zE2V3BYdSF+KnSi9Sidz51fQHd7+JuF8Xgcj9/0o+OWeIeIS/MiuNnlruQrJf16GQA== + dependencies: + call-bind "^1.0.2" + 
es-to-primitive "^1.2.1" + function-bind "^1.1.1" + function.prototype.name "^1.1.5" + get-intrinsic "^1.1.1" + get-symbol-description "^1.0.0" + has "^1.0.3" + has-property-descriptors "^1.0.0" + has-symbols "^1.0.3" + internal-slot "^1.0.3" + is-callable "^1.2.4" + is-negative-zero "^2.0.2" + is-regex "^1.1.4" + is-shared-array-buffer "^1.0.2" + is-string "^1.0.7" + is-weakref "^1.0.2" + object-inspect "^1.12.0" + object-keys "^1.1.1" + object.assign "^4.1.2" + regexp.prototype.flags "^1.4.3" + string.prototype.trimend "^1.0.5" + string.prototype.trimstart "^1.0.5" + unbox-primitive "^1.0.2" + +es-array-method-boxes-properly@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/es-array-method-boxes-properly/-/es-array-method-boxes-properly-1.0.0.tgz#873f3e84418de4ee19c5be752990b2e44718d09e" + integrity sha512-wd6JXUmyHmt8T5a2xreUwKcGPq6f1f+WwIJkijUqiGcJz1qqnZgP6XIK+QyIWU5lT7imeNxUll48bziG+TSYcA== + +es-module-lexer@^0.9.0: + version "0.9.3" + resolved "https://registry.yarnpkg.com/es-module-lexer/-/es-module-lexer-0.9.3.tgz#6f13db00cc38417137daf74366f535c8eb438f19" + integrity sha512-1HQ2M2sPtxwnvOvT1ZClHyQDiggdNjURWpY2we6aMKCQiUVxTmVs2UYPLIrD84sS+kMdUwfBSylbJPwNnBrnHQ== + +es-shim-unscopables@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/es-shim-unscopables/-/es-shim-unscopables-1.0.0.tgz#702e632193201e3edf8713635d083d378e510241" + integrity sha512-Jm6GPcCdC30eMLbZ2x8z2WuRwAws3zTBBKuusffYVUrNj/GVSUAZ+xKMaUpfNDR5IbyNA5LJbaecoUVbmUcB1w== + dependencies: + has "^1.0.3" + +es-to-primitive@^1.2.1: + version "1.2.1" + resolved "https://registry.yarnpkg.com/es-to-primitive/-/es-to-primitive-1.2.1.tgz#e55cd4c9cdc188bcefb03b366c736323fc5c898a" + integrity sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA== + dependencies: + is-callable "^1.1.4" + is-date-object "^1.0.1" + is-symbol "^1.0.2" + +escalade@^3.1.1: + version "3.1.1" + resolved 
"https://registry.yarnpkg.com/escalade/-/escalade-3.1.1.tgz#d8cfdc7000965c5a0174b4a82eaa5c0552742e40" + integrity sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw== + +escape-html@~1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/escape-html/-/escape-html-1.0.3.tgz#0258eae4d3d0c0974de1c169188ef0051d1d1988" + integrity sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow== + +escape-string-regexp@^1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4" + integrity sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg== + +escape-string-regexp@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz#a30304e99daa32e23b2fd20f51babd07cffca344" + integrity sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w== + +escape-string-regexp@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz#14ba83a5d373e3d311e5afca29cf5bfad965bf34" + integrity sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA== + +escodegen@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/escodegen/-/escodegen-2.0.0.tgz#5e32b12833e8aa8fa35e1bf0befa89380484c7dd" + integrity sha512-mmHKys/C8BFUGI+MAWNcSYoORYLMdPzjrknd2Vc+bUsjN5bXcr8EhrNB+UTqfL1y3I9c4fw2ihgtMPQLBRiQxw== + dependencies: + esprima "^4.0.1" + estraverse "^5.2.0" + esutils "^2.0.2" + optionator "^0.8.1" + optionalDependencies: + source-map "~0.6.1" + +eslint-plugin-prettier@^3.3.1: + version "3.4.1" + resolved "https://registry.yarnpkg.com/eslint-plugin-prettier/-/eslint-plugin-prettier-3.4.1.tgz#e9ddb200efb6f3d05ffe83b1665a716af4a387e5" + integrity 
sha512-htg25EUYUeIhKHXjOinK4BgCcDwtLHjqaxCDsMy5nbnUMkKFvIhMVCp+5GFUXQ4Nr8lBsPqtGAqBenbpFqAA2g== + dependencies: + prettier-linter-helpers "^1.0.0" + +eslint-plugin-react@^7.22.0: + version "7.30.1" + resolved "https://registry.yarnpkg.com/eslint-plugin-react/-/eslint-plugin-react-7.30.1.tgz#2be4ab23ce09b5949c6631413ba64b2810fd3e22" + integrity sha512-NbEvI9jtqO46yJA3wcRF9Mo0lF9T/jhdHqhCHXiXtD+Zcb98812wvokjWpU7Q4QH5edo6dmqrukxVvWWXHlsUg== + dependencies: + array-includes "^3.1.5" + array.prototype.flatmap "^1.3.0" + doctrine "^2.1.0" + estraverse "^5.3.0" + jsx-ast-utils "^2.4.1 || ^3.0.0" + minimatch "^3.1.2" + object.entries "^1.1.5" + object.fromentries "^2.0.5" + object.hasown "^1.1.1" + object.values "^1.1.5" + prop-types "^15.8.1" + resolve "^2.0.0-next.3" + semver "^6.3.0" + string.prototype.matchall "^4.0.7" + +eslint-plugin-unused-imports@^1.1.1: + version "1.1.5" + resolved "https://registry.yarnpkg.com/eslint-plugin-unused-imports/-/eslint-plugin-unused-imports-1.1.5.tgz#a2b992ef0faf6c6c75c3815cc47bde76739513c2" + integrity sha512-TeV8l8zkLQrq9LBeYFCQmYVIXMjfHgdRQLw7dEZp4ZB3PeR10Y5Uif11heCsHRmhdRIYMoewr1d9ouUHLbLHew== + dependencies: + eslint-rule-composer "^0.3.0" + +eslint-rule-composer@^0.3.0: + version "0.3.0" + resolved "https://registry.yarnpkg.com/eslint-rule-composer/-/eslint-rule-composer-0.3.0.tgz#79320c927b0c5c0d3d3d2b76c8b4a488f25bbaf9" + integrity sha512-bt+Sh8CtDmn2OajxvNO+BX7Wn4CIWMpTRm3MaiKPCQcnnlm0CS2mhui6QaoeQugs+3Kj2ESKEEGJUdVafwhiCg== + +eslint-scope@5.1.1, eslint-scope@^5.1.1: + version "5.1.1" + resolved "https://registry.yarnpkg.com/eslint-scope/-/eslint-scope-5.1.1.tgz#e786e59a66cb92b3f6c1fb0d508aab174848f48c" + integrity sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw== + dependencies: + esrecurse "^4.3.0" + estraverse "^4.1.1" + +eslint-utils@^2.1.0: + version "2.1.0" + resolved 
"https://registry.yarnpkg.com/eslint-utils/-/eslint-utils-2.1.0.tgz#d2de5e03424e707dc10c74068ddedae708741b27" + integrity sha512-w94dQYoauyvlDc43XnGB8lU3Zt713vNChgt4EWwhXAP2XkBvndfxF0AgIqKOOasjPIPzj9JqgwkwbCYD0/V3Zg== + dependencies: + eslint-visitor-keys "^1.1.0" + +eslint-visitor-keys@^1.1.0, eslint-visitor-keys@^1.3.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-1.3.0.tgz#30ebd1ef7c2fdff01c3a4f151044af25fab0523e" + integrity sha512-6J72N8UNa462wa/KFODt/PJ3IU60SDpC3QXC1Hjc1BXXpfL2C9R5+AU7jhe0F6GREqVMh4Juu+NY7xn+6dipUQ== + +eslint-visitor-keys@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-2.1.0.tgz#f65328259305927392c938ed44eb0a5c9b2bd303" + integrity sha512-0rSmRBzXgDzIsD6mGdJgevzgezI534Cer5L/vyMX0kHzT/jiB43jRhd9YUlMGYLQy2zprNmoT8qasCGtY+QaKw== + +eslint@^7.18.0: + version "7.32.0" + resolved "https://registry.yarnpkg.com/eslint/-/eslint-7.32.0.tgz#c6d328a14be3fb08c8d1d21e12c02fdb7a2a812d" + integrity sha512-VHZ8gX+EDfz+97jGcgyGCyRia/dPOd6Xh9yPv8Bl1+SoaIwD+a/vlrOmGRUyOYu7MwUhc7CxqeaDZU13S4+EpA== + dependencies: + "@babel/code-frame" "7.12.11" + "@eslint/eslintrc" "^0.4.3" + "@humanwhocodes/config-array" "^0.5.0" + ajv "^6.10.0" + chalk "^4.0.0" + cross-spawn "^7.0.2" + debug "^4.0.1" + doctrine "^3.0.0" + enquirer "^2.3.5" + escape-string-regexp "^4.0.0" + eslint-scope "^5.1.1" + eslint-utils "^2.1.0" + eslint-visitor-keys "^2.0.0" + espree "^7.3.1" + esquery "^1.4.0" + esutils "^2.0.2" + fast-deep-equal "^3.1.3" + file-entry-cache "^6.0.1" + functional-red-black-tree "^1.0.1" + glob-parent "^5.1.2" + globals "^13.6.0" + ignore "^4.0.6" + import-fresh "^3.0.0" + imurmurhash "^0.1.4" + is-glob "^4.0.0" + js-yaml "^3.13.1" + json-stable-stringify-without-jsonify "^1.0.1" + levn "^0.4.1" + lodash.merge "^4.6.2" + minimatch "^3.0.4" + natural-compare "^1.4.0" + optionator "^0.9.1" + progress "^2.0.0" + regexpp "^3.1.0" + semver "^7.2.1" + strip-ansi 
"^6.0.0" + strip-json-comments "^3.1.0" + table "^6.0.9" + text-table "^0.2.0" + v8-compile-cache "^2.0.3" + +espree@^7.3.0, espree@^7.3.1: + version "7.3.1" + resolved "https://registry.yarnpkg.com/espree/-/espree-7.3.1.tgz#f2df330b752c6f55019f8bd89b7660039c1bbbb6" + integrity sha512-v3JCNCE64umkFpmkFGqzVKsOT0tN1Zr+ueqLZfpV1Ob8e+CEgPWa+OxCoGH3tnhimMKIaBm4m/vaRpJ/krRz2g== + dependencies: + acorn "^7.4.0" + acorn-jsx "^5.3.1" + eslint-visitor-keys "^1.3.0" + +esprima@^4.0.0, esprima@^4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/esprima/-/esprima-4.0.1.tgz#13b04cdb3e6c5d19df91ab6987a8695619b0aa71" + integrity sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A== + +esquery@^1.4.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/esquery/-/esquery-1.4.0.tgz#2148ffc38b82e8c7057dfed48425b3e61f0f24a5" + integrity sha512-cCDispWt5vHHtwMY2YrAQ4ibFkAL8RbH5YGBnZBc90MolvvfkkQcJro/aZiAQUlQ3qgrYS6D6v8Gc5G5CQsc9w== + dependencies: + estraverse "^5.1.0" + +esrecurse@^4.3.0: + version "4.3.0" + resolved "https://registry.yarnpkg.com/esrecurse/-/esrecurse-4.3.0.tgz#7ad7964d679abb28bee72cec63758b1c5d2c9921" + integrity sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag== + dependencies: + estraverse "^5.2.0" + +estraverse@^4.1.1: + version "4.3.0" + resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-4.3.0.tgz#398ad3f3c5a24948be7725e83d11a7de28cdbd1d" + integrity sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw== + +estraverse@^5.1.0, estraverse@^5.2.0, estraverse@^5.3.0: + version "5.3.0" + resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-5.3.0.tgz#2eea5290702f26ab8fe5370370ff86c965d21123" + integrity sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA== + +esutils@^2.0.2: + version "2.0.3" + resolved 
"https://registry.yarnpkg.com/esutils/-/esutils-2.0.3.tgz#74d2eb4de0b8da1293711910d50775b9b710ef64" + integrity sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g== + +etag@~1.8.1: + version "1.8.1" + resolved "https://registry.yarnpkg.com/etag/-/etag-1.8.1.tgz#41ae2eeb65efa62268aebfea83ac7d79299b0887" + integrity sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg== + +eventemitter3@^4.0.0: + version "4.0.7" + resolved "https://registry.yarnpkg.com/eventemitter3/-/eventemitter3-4.0.7.tgz#2de9b68f6528d5644ef5c59526a1b4a07306169f" + integrity sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw== + +events@^3.2.0: + version "3.3.0" + resolved "https://registry.yarnpkg.com/events/-/events-3.3.0.tgz#31a95ad0a924e2d2c419a813aeb2c4e878ea7400" + integrity sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q== + +exec-sh@^0.3.2: + version "0.3.6" + resolved "https://registry.yarnpkg.com/exec-sh/-/exec-sh-0.3.6.tgz#ff264f9e325519a60cb5e273692943483cca63bc" + integrity sha512-nQn+hI3yp+oD0huYhKwvYI32+JFeq+XkNcD1GAo3Y/MjxsfVGmrrzrnzjWiNY6f+pUCP440fThsFh5gZrRAU/w== + +execa@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/execa/-/execa-1.0.0.tgz#c6236a5bb4df6d6f15e88e7f017798216749ddd8" + integrity sha512-adbxcyWV46qiHyvSp50TKt05tB4tK3HcmF7/nxfAdhnox83seTDbwnaqKO4sXRy7roHAIFqJP/Rw/AuEbX61LA== + dependencies: + cross-spawn "^6.0.0" + get-stream "^4.0.0" + is-stream "^1.1.0" + npm-run-path "^2.0.0" + p-finally "^1.0.0" + signal-exit "^3.0.0" + strip-eof "^1.0.0" + +execa@^4.0.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/execa/-/execa-4.1.0.tgz#4e5491ad1572f2f17a77d388c6c857135b22847a" + integrity sha512-j5W0//W7f8UxAn8hXVnwG8tLwdiUy4FJLcSupCg6maBYZDpyBvTApK7KyuI4bKj8KOh1r2YH+6ucuYtJv1bTZA== + dependencies: + cross-spawn "^7.0.0" + get-stream "^5.0.0" + human-signals "^1.1.1" + 
is-stream "^2.0.0" + merge-stream "^2.0.0" + npm-run-path "^4.0.0" + onetime "^5.1.0" + signal-exit "^3.0.2" + strip-final-newline "^2.0.0" + +execa@^5.0.0: + version "5.1.1" + resolved "https://registry.yarnpkg.com/execa/-/execa-5.1.1.tgz#f80ad9cbf4298f7bd1d4c9555c21e93741c411dd" + integrity sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg== + dependencies: + cross-spawn "^7.0.3" + get-stream "^6.0.0" + human-signals "^2.1.0" + is-stream "^2.0.0" + merge-stream "^2.0.0" + npm-run-path "^4.0.1" + onetime "^5.1.2" + signal-exit "^3.0.3" + strip-final-newline "^2.0.0" + +exit@^0.1.2: + version "0.1.2" + resolved "https://registry.yarnpkg.com/exit/-/exit-0.1.2.tgz#0632638f8d877cc82107d30a0fff1a17cba1cd0c" + integrity sha512-Zk/eNKV2zbjpKzrsQ+n1G6poVbErQxJ0LBOJXaKZ1EViLzH+hrLu9cdXI4zw9dBQJslwBEpbQ2P1oS7nDxs6jQ== + +expand-brackets@^2.1.4: + version "2.1.4" + resolved "https://registry.yarnpkg.com/expand-brackets/-/expand-brackets-2.1.4.tgz#b77735e315ce30f6b6eff0f83b04151a22449622" + integrity sha512-w/ozOKR9Obk3qoWeY/WDi6MFta9AoMR+zud60mdnbniMcBxRuFJyDt2LdX/14A1UABeqk+Uk+LDfUpvoGKppZA== + dependencies: + debug "^2.3.3" + define-property "^0.2.5" + extend-shallow "^2.0.1" + posix-character-classes "^0.1.0" + regex-not "^1.0.0" + snapdragon "^0.8.1" + to-regex "^3.0.1" + +expect@^26.6.2: + version "26.6.2" + resolved "https://registry.yarnpkg.com/expect/-/expect-26.6.2.tgz#c6b996bf26bf3fe18b67b2d0f51fc981ba934417" + integrity sha512-9/hlOBkQl2l/PLHJx6JjoDF6xPKcJEsUlWKb23rKE7KzeDqUZKXKNMW27KIue5JMdBV9HgmoJPcc8HtO85t9IA== + dependencies: + "@jest/types" "^26.6.2" + ansi-styles "^4.0.0" + jest-get-type "^26.3.0" + jest-matcher-utils "^26.6.2" + jest-message-util "^26.6.2" + jest-regex-util "^26.0.0" + +express@^4.17.3: + version "4.18.1" + resolved "https://registry.yarnpkg.com/express/-/express-4.18.1.tgz#7797de8b9c72c857b9cd0e14a5eea80666267caf" + integrity 
sha512-zZBcOX9TfehHQhtupq57OF8lFZ3UZi08Y97dwFCkD8p9d/d2Y3M+ykKcwaMDEL+4qyUolgBDX6AblpR3fL212Q== + dependencies: + accepts "~1.3.8" + array-flatten "1.1.1" + body-parser "1.20.0" + content-disposition "0.5.4" + content-type "~1.0.4" + cookie "0.5.0" + cookie-signature "1.0.6" + debug "2.6.9" + depd "2.0.0" + encodeurl "~1.0.2" + escape-html "~1.0.3" + etag "~1.8.1" + finalhandler "1.2.0" + fresh "0.5.2" + http-errors "2.0.0" + merge-descriptors "1.0.1" + methods "~1.1.2" + on-finished "2.4.1" + parseurl "~1.3.3" + path-to-regexp "0.1.7" + proxy-addr "~2.0.7" + qs "6.10.3" + range-parser "~1.2.1" + safe-buffer "5.2.1" + send "0.18.0" + serve-static "1.15.0" + setprototypeof "1.2.0" + statuses "2.0.1" + type-is "~1.6.18" + utils-merge "1.0.1" + vary "~1.1.2" + +extend-shallow@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/extend-shallow/-/extend-shallow-2.0.1.tgz#51af7d614ad9a9f610ea1bafbb989d6b1c56890f" + integrity sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug== + dependencies: + is-extendable "^0.1.0" + +extend-shallow@^3.0.0, extend-shallow@^3.0.2: + version "3.0.2" + resolved "https://registry.yarnpkg.com/extend-shallow/-/extend-shallow-3.0.2.tgz#26a71aaf073b39fb2127172746131c2704028db8" + integrity sha512-BwY5b5Ql4+qZoefgMj2NUmx+tehVTH/Kf4k1ZEtOHNFcm2wSxMRo992l6X3TIgni2eZVTZ85xMOjF31fwZAj6Q== + dependencies: + assign-symbols "^1.0.0" + is-extendable "^1.0.1" + +extglob@^2.0.4: + version "2.0.4" + resolved "https://registry.yarnpkg.com/extglob/-/extglob-2.0.4.tgz#ad00fe4dc612a9232e8718711dc5cb5ab0285543" + integrity sha512-Nmb6QXkELsuBr24CJSkilo6UHHgbekK5UiZgfE6UHD3Eb27YC6oD+bhcT+tJ6cl8dmsgdQxnWlcry8ksBIBLpw== + dependencies: + array-unique "^0.3.2" + define-property "^1.0.0" + expand-brackets "^2.1.4" + extend-shallow "^2.0.1" + fragment-cache "^0.2.1" + regex-not "^1.0.0" + snapdragon "^0.8.1" + to-regex "^3.0.1" + +fast-deep-equal@^3.1.1, fast-deep-equal@^3.1.3: + version "3.1.3" + resolved 
"https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz#3a7d56b559d6cbc3eb512325244e619a65c6c525" + integrity sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q== + +fast-diff@^1.1.2: + version "1.2.0" + resolved "https://registry.yarnpkg.com/fast-diff/-/fast-diff-1.2.0.tgz#73ee11982d86caaf7959828d519cfe927fac5f03" + integrity sha512-xJuoT5+L99XlZ8twedaRf6Ax2TgQVxvgZOYoPKqZufmJib0tL2tegPBOZb1pVNgIhlqDlA0eO0c3wBvQcmzx4w== + +fast-json-stable-stringify@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz#874bf69c6f404c2b5d99c481341399fd55892633" + integrity sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw== + +fast-levenshtein@^2.0.6, fast-levenshtein@~2.0.6: + version "2.0.6" + resolved "https://registry.yarnpkg.com/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz#3d8a5c66883a16a30ca8643e851f19baa7797917" + integrity sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw== + +fastest-levenshtein@^1.0.12: + version "1.0.14" + resolved "https://registry.yarnpkg.com/fastest-levenshtein/-/fastest-levenshtein-1.0.14.tgz#9054384e4b7a78c88d01a4432dc18871af0ac859" + integrity sha512-tFfWHjnuUfKE186Tfgr+jtaFc0mZTApEgKDOeyN+FwOqRkO/zK/3h1AiRd8u8CY53owL3CUmGr/oI9p/RdyLTA== + +faye-websocket@^0.11.3: + version "0.11.4" + resolved "https://registry.yarnpkg.com/faye-websocket/-/faye-websocket-0.11.4.tgz#7f0d9275cfdd86a1c963dc8b65fcc451edcbb1da" + integrity sha512-CzbClwlXAuiRQAlUyfqPgvPoNKTckTPGfwZV4ZdAhVcP2lh9KUxJg2b5GkE7XbjKQ3YJnQ9z6D9ntLAlB+tP8g== + dependencies: + websocket-driver ">=0.5.1" + +fb-watchman@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/fb-watchman/-/fb-watchman-2.0.1.tgz#fc84fb39d2709cf3ff6d743706157bb5708a8a85" + integrity sha512-DkPJKQeY6kKwmuMretBhr7G6Vodr7bFwDYTXIkfG1gjvNpaxBTQV3PbXg6bR1c1UP4jPOX0jHUbbHANL9vRjVg== + 
dependencies: + bser "2.1.1" + +file-entry-cache@^6.0.1: + version "6.0.1" + resolved "https://registry.yarnpkg.com/file-entry-cache/-/file-entry-cache-6.0.1.tgz#211b2dd9659cb0394b073e7323ac3c933d522027" + integrity sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg== + dependencies: + flat-cache "^3.0.4" + +file-loader@^6.2.0: + version "6.2.0" + resolved "https://registry.yarnpkg.com/file-loader/-/file-loader-6.2.0.tgz#baef7cf8e1840df325e4390b4484879480eebe4d" + integrity sha512-qo3glqyTa61Ytg4u73GultjHGjdRyig3tG6lPtyX/jOEJvHif9uB0/OCI2Kif6ctF3caQTW2G5gym21oAsI4pw== + dependencies: + loader-utils "^2.0.0" + schema-utils "^3.0.0" + +fill-range@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-4.0.0.tgz#d544811d428f98eb06a63dc402d2403c328c38f7" + integrity sha512-VcpLTWqWDiTerugjj8e3+esbg+skS3M9e54UuR3iCeIDMXCLTsAH8hTSzDQU/X6/6t3eYkOKoZSef2PlU6U1XQ== + dependencies: + extend-shallow "^2.0.1" + is-number "^3.0.0" + repeat-string "^1.6.1" + to-regex-range "^2.1.0" + +fill-range@^7.0.1: + version "7.0.1" + resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-7.0.1.tgz#1919a6a7c75fe38b2c7c77e5198535da9acdda40" + integrity sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ== + dependencies: + to-regex-range "^5.0.1" + +finalhandler@1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/finalhandler/-/finalhandler-1.2.0.tgz#7d23fe5731b207b4640e4fcd00aec1f9207a7b32" + integrity sha512-5uXcUVftlQMFnWC9qu/svkWv3GTd2PfUhK/3PLkYNAe7FbqJMt3515HaxE6eRL74GdsriiwujiawdaB1BpEISg== + dependencies: + debug "2.6.9" + encodeurl "~1.0.2" + escape-html "~1.0.3" + on-finished "2.4.1" + parseurl "~1.3.3" + statuses "2.0.1" + unpipe "~1.0.0" + +find-cache-dir@^3.3.1: + version "3.3.2" + resolved "https://registry.yarnpkg.com/find-cache-dir/-/find-cache-dir-3.3.2.tgz#b30c5b6eff0730731aea9bbd9dbecbd80256d64b" + integrity 
sha512-wXZV5emFEjrridIgED11OoUKLxiYjAcqot/NJdAkOhlJ+vGzwhOAfcG5OX1jP+S0PcjEn8bdMJv+g2jwQ3Onig== + dependencies: + commondir "^1.0.1" + make-dir "^3.0.2" + pkg-dir "^4.1.0" + +find-up@^4.0.0, find-up@^4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/find-up/-/find-up-4.1.0.tgz#97afe7d6cdc0bc5928584b7c8d7b16e8a9aa5d19" + integrity sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw== + dependencies: + locate-path "^5.0.0" + path-exists "^4.0.0" + +flat-cache@^3.0.4: + version "3.0.4" + resolved "https://registry.yarnpkg.com/flat-cache/-/flat-cache-3.0.4.tgz#61b0338302b2fe9f957dcc32fc2a87f1c3048b11" + integrity sha512-dm9s5Pw7Jc0GvMYbshN6zchCA9RgQlzzEZX3vylR9IqFfS8XciblUXOKfW6SiuJ0e13eDYZoZV5wdrev7P3Nwg== + dependencies: + flatted "^3.1.0" + rimraf "^3.0.2" + +flatted@^3.1.0: + version "3.2.6" + resolved "https://registry.yarnpkg.com/flatted/-/flatted-3.2.6.tgz#022e9218c637f9f3fc9c35ab9c9193f05add60b2" + integrity sha512-0sQoMh9s0BYsm+12Huy/rkKxVu4R1+r96YX5cG44rHV0pQ6iC3Q+mkoMFaGWObMFYQxCVT+ssG1ksneA2MI9KQ== + +follow-redirects@^1.0.0: + version "1.15.1" + resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.1.tgz#0ca6a452306c9b276e4d3127483e29575e207ad5" + integrity sha512-yLAMQs+k0b2m7cVxpS1VKJVvoz7SS9Td1zss3XRwXj+ZDH00RJgnuLx7E44wx02kQLrdM3aOOy+FpzS7+8OizA== + +for-in@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/for-in/-/for-in-1.0.2.tgz#81068d295a8142ec0ac726c6e2200c30fb6d5e80" + integrity sha512-7EwmXrOjyL+ChxMhmG5lnW9MPt1aIeZEwKhQzoBUdTV0N3zuwWDZYVJatDvZ2OyzPUvdIAZDsCetk3coyMfcnQ== + +form-data@^3.0.0: + version "3.0.1" + resolved "https://registry.yarnpkg.com/form-data/-/form-data-3.0.1.tgz#ebd53791b78356a99af9a300d4282c4d5eb9755f" + integrity sha512-RHkBKtLWUVwd7SqRIvCZMEvAMoGUp0XU+seQiZejj0COz3RI3hWP4sCv3gZWWLjJTd7rGwcsF5eKZGii0r/hbg== + dependencies: + asynckit "^0.4.0" + combined-stream "^1.0.8" + mime-types "^2.1.12" + +forwarded@0.2.0: + version 
"0.2.0" + resolved "https://registry.yarnpkg.com/forwarded/-/forwarded-0.2.0.tgz#2269936428aad4c15c7ebe9779a84bf0b2a81811" + integrity sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow== + +fragment-cache@^0.2.1: + version "0.2.1" + resolved "https://registry.yarnpkg.com/fragment-cache/-/fragment-cache-0.2.1.tgz#4290fad27f13e89be7f33799c6bc5a0abfff0d19" + integrity sha512-GMBAbW9antB8iZRHLoGw0b3HANt57diZYFO/HL1JGIC1MjKrdmhxvrJbupnVvpys0zsz7yBApXdQyfepKly2kA== + dependencies: + map-cache "^0.2.2" + +fresh@0.5.2: + version "0.5.2" + resolved "https://registry.yarnpkg.com/fresh/-/fresh-0.5.2.tgz#3d8cadd90d976569fa835ab1f8e4b23a105605a7" + integrity sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q== + +fs-monkey@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/fs-monkey/-/fs-monkey-1.0.3.tgz#ae3ac92d53bb328efe0e9a1d9541f6ad8d48e2d3" + integrity sha512-cybjIfiiE+pTWicSCLFHSrXZ6EilF30oh91FDP9S2B051prEa7QWfrVTQm10/dDpswBDXZugPa1Ogu8Yh+HV0Q== + +fs.realpath@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" + integrity sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw== + +fsevents@^2.1.2, fsevents@~2.3.2: + version "2.3.2" + resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-2.3.2.tgz#8a526f78b8fdf4623b709e0b975c52c24c02fd1a" + integrity sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA== + +function-bind@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d" + integrity sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A== + +function.prototype.name@^1.1.0, function.prototype.name@^1.1.2, function.prototype.name@^1.1.5: + version "1.1.5" + 
resolved "https://registry.yarnpkg.com/function.prototype.name/-/function.prototype.name-1.1.5.tgz#cce0505fe1ffb80503e6f9e46cc64e46a12a9621" + integrity sha512-uN7m/BzVKQnCUF/iW8jYea67v++2u7m5UgENbHRtdDVclOUP+FMPlCNdmk0h/ysGyo2tavMJEDqJAkJdRa1vMA== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + es-abstract "^1.19.0" + functions-have-names "^1.2.2" + +functional-red-black-tree@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/functional-red-black-tree/-/functional-red-black-tree-1.0.1.tgz#1b0ab3bd553b2a0d6399d29c0e3ea0b252078327" + integrity sha512-dsKNQNdj6xA3T+QlADDA7mOSlX0qiMINjn0cgr+eGHGsbSHzTabcIogz2+p/iqP1Xs6EP/sS2SbqH+brGTbq0g== + +functions-have-names@^1.2.2: + version "1.2.3" + resolved "https://registry.yarnpkg.com/functions-have-names/-/functions-have-names-1.2.3.tgz#0404fe4ee2ba2f607f0e0ec3c80bae994133b834" + integrity sha512-xckBUXyTIqT97tq2x2AMb+g163b5JFysYk0x4qxNFwbfQkmNZoiRHb6sPzI9/QV33WeuvVYBUIiD4NzNIyqaRQ== + +gensync@^1.0.0-beta.2: + version "1.0.0-beta.2" + resolved "https://registry.yarnpkg.com/gensync/-/gensync-1.0.0-beta.2.tgz#32a6ee76c3d7f52d46b2b1ae5d93fea8580a25e0" + integrity sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg== + +get-caller-file@^2.0.1: + version "2.0.5" + resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-2.0.5.tgz#4f94412a82db32f36e3b0b9741f8a97feb031f7e" + integrity sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg== + +get-intrinsic@^1.0.2, get-intrinsic@^1.1.0, get-intrinsic@^1.1.1: + version "1.1.2" + resolved "https://registry.yarnpkg.com/get-intrinsic/-/get-intrinsic-1.1.2.tgz#336975123e05ad0b7ba41f152ee4aadbea6cf598" + integrity sha512-Jfm3OyCxHh9DJyc28qGk+JmfkpO41A4XkneDSujN9MDXrm4oDKdHvndhZ2dN94+ERNfkYJWDclW6k2L/ZGHjXA== + dependencies: + function-bind "^1.1.1" + has "^1.0.3" + has-symbols "^1.0.3" + +get-package-type@^0.1.0: + version "0.1.0" + resolved 
"https://registry.yarnpkg.com/get-package-type/-/get-package-type-0.1.0.tgz#8de2d803cff44df3bc6c456e6668b36c3926e11a" + integrity sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q== + +get-stream@^4.0.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-4.1.0.tgz#c1b255575f3dc21d59bfc79cd3d2b46b1c3a54b5" + integrity sha512-GMat4EJ5161kIy2HevLlr4luNjBgvmj413KaQA7jt4V8B4RDsfpHk7WQ9GVqfYyyx8OS/L66Kox+rJRNklLK7w== + dependencies: + pump "^3.0.0" + +get-stream@^5.0.0: + version "5.2.0" + resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-5.2.0.tgz#4966a1795ee5ace65e706c4b7beb71257d6e22d3" + integrity sha512-nBF+F1rAZVCu/p7rjzgA+Yb4lfYXrpl7a6VmJrU8wF9I1CKvP/QwPNZHnOlwbTkY6dvtFIzFMSyQXbLoTQPRpA== + dependencies: + pump "^3.0.0" + +get-stream@^6.0.0: + version "6.0.1" + resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-6.0.1.tgz#a262d8eef67aced57c2852ad6167526a43cbf7b7" + integrity sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg== + +get-symbol-description@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/get-symbol-description/-/get-symbol-description-1.0.0.tgz#7fdb81c900101fbd564dd5f1a30af5aadc1e58d6" + integrity sha512-2EmdH1YvIQiZpltCNgkuiUnyukzxM/R6NDJX31Ke3BG1Nq5b0S2PhX59UKi9vZpPDQVdqn+1IcaAwnzTT5vCjw== + dependencies: + call-bind "^1.0.2" + get-intrinsic "^1.1.1" + +get-value@^2.0.3, get-value@^2.0.6: + version "2.0.6" + resolved "https://registry.yarnpkg.com/get-value/-/get-value-2.0.6.tgz#dc15ca1c672387ca76bd37ac0a395ba2042a2c28" + integrity sha512-Ln0UQDlxH1BapMu3GPtf7CuYNwRZf2gwCuPqbyG6pB8WfmFpzqcy4xtAaAMUhnNqjMKTiCPZG2oMT3YSx8U2NA== + +glob-parent@^5.1.2, glob-parent@~5.1.2: + version "5.1.2" + resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-5.1.2.tgz#869832c58034fe68a4093c17dc15e8340d8401c4" + integrity 
sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow== + dependencies: + is-glob "^4.0.1" + +glob-to-regexp@^0.4.1: + version "0.4.1" + resolved "https://registry.yarnpkg.com/glob-to-regexp/-/glob-to-regexp-0.4.1.tgz#c75297087c851b9a578bd217dd59a92f59fe546e" + integrity sha512-lkX1HJXwyMcprw/5YUZc2s7DrpAiHB21/V+E1rHUrVNokkvB6bqMzT0VfV6/86ZNabt1k14YOIaT7nDvOX3Iiw== + +glob@^7.1.1, glob@^7.1.2, glob@^7.1.3, glob@^7.1.4: + version "7.2.3" + resolved "https://registry.yarnpkg.com/glob/-/glob-7.2.3.tgz#b8df0fb802bbfa8e89bd1d938b4e16578ed44f2b" + integrity sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q== + dependencies: + fs.realpath "^1.0.0" + inflight "^1.0.4" + inherits "2" + minimatch "^3.1.1" + once "^1.3.0" + path-is-absolute "^1.0.0" + +globals@^11.1.0: + version "11.12.0" + resolved "https://registry.yarnpkg.com/globals/-/globals-11.12.0.tgz#ab8795338868a0babd8525758018c2a7eb95c42e" + integrity sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA== + +globals@^13.6.0, globals@^13.9.0: + version "13.17.0" + resolved "https://registry.yarnpkg.com/globals/-/globals-13.17.0.tgz#902eb1e680a41da93945adbdcb5a9f361ba69bd4" + integrity sha512-1C+6nQRb1GwGMKm2dH/E7enFAMxGTmGI7/dEdhy/DNelv85w9B72t3uc5frtMNXIbzrarJJ/lTCjcaZwbLJmyw== + dependencies: + type-fest "^0.20.2" + +goober@^2.0.30: + version "2.1.10" + resolved "https://registry.yarnpkg.com/goober/-/goober-2.1.10.tgz#058def43ba1e3b06f973dbb372a4978aa42f1049" + integrity sha512-7PpuQMH10jaTWm33sQgBQvz45pHR8N4l3Cu3WMGEWmHShAcTuuP7I+5/DwKo39fwti5A80WAjvqgz6SSlgWmGA== + +graceful-fs@^4.1.2, graceful-fs@^4.2.4, graceful-fs@^4.2.6, graceful-fs@^4.2.9: + version "4.2.10" + resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.10.tgz#147d3a006da4ca3ce14728c7aefc287c367d7a6c" + integrity sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA== + 
+growly@^1.3.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/growly/-/growly-1.3.0.tgz#f10748cbe76af964b7c96c93c6bcc28af120c081" + integrity sha512-+xGQY0YyAWCnqy7Cd++hc2JqMYzlm0dG30Jd0beaA64sROr8C4nt8Yc9V5Ro3avlSUDTN0ulqP/VBKi1/lLygw== + +handle-thing@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/handle-thing/-/handle-thing-2.0.1.tgz#857f79ce359580c340d43081cc648970d0bb234e" + integrity sha512-9Qn4yBxelxoh2Ow62nP+Ka/kMnOXRi8BXnRaUwezLNhqelnN49xKz4F/dPP8OYLxLxq6JDtZb2i9XznUQbNPTg== + +harmony-reflect@^1.4.6: + version "1.6.2" + resolved "https://registry.yarnpkg.com/harmony-reflect/-/harmony-reflect-1.6.2.tgz#31ecbd32e648a34d030d86adb67d4d47547fe710" + integrity sha512-HIp/n38R9kQjDEziXyDTuW3vvoxxyxjxFzXLrBr18uB47GnSt+G9D29fqrpM5ZkspMcPICud3XsBJQ4Y2URg8g== + +has-bigints@^1.0.1, has-bigints@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/has-bigints/-/has-bigints-1.0.2.tgz#0871bd3e3d51626f6ca0966668ba35d5602d6eaa" + integrity sha512-tSvCKtBr9lkF0Ex0aQiP9N+OpV4zi2r/Nee5VkRDbaqv35RLYMzbwQfFSZZH0kR+Rd6302UJZ2p/bJCEoR3VoQ== + +has-flag@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-3.0.0.tgz#b5d454dc2199ae225699f3467e5a07f3b955bafd" + integrity sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw== + +has-flag@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-4.0.0.tgz#944771fd9c81c81265c4d6941860da06bb59479b" + integrity sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ== + +has-property-descriptors@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/has-property-descriptors/-/has-property-descriptors-1.0.0.tgz#610708600606d36961ed04c196193b6a607fa861" + integrity sha512-62DVLZGoiEBDHQyqG4w9xCuZ7eJEwNmJRWw2VY84Oedb7WFcA27fiEVe8oUQx9hAUJ4ekurquucTGwsyO1XGdQ== + dependencies: + get-intrinsic "^1.1.1" + +has-symbols@^1.0.1, has-symbols@^1.0.2, 
has-symbols@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/has-symbols/-/has-symbols-1.0.3.tgz#bb7b2c4349251dce87b125f7bdf874aa7c8b39f8" + integrity sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A== + +has-tostringtag@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/has-tostringtag/-/has-tostringtag-1.0.0.tgz#7e133818a7d394734f941e73c3d3f9291e658b25" + integrity sha512-kFjcSNhnlGV1kyoGk7OXKSawH5JOb/LzUc5w9B02hOTO0dfFRjbHQKvg1d6cf3HbeUmtU9VbbV3qzZ2Teh97WQ== + dependencies: + has-symbols "^1.0.2" + +has-value@^0.3.1: + version "0.3.1" + resolved "https://registry.yarnpkg.com/has-value/-/has-value-0.3.1.tgz#7b1f58bada62ca827ec0a2078025654845995e1f" + integrity sha512-gpG936j8/MzaeID5Yif+577c17TxaDmhuyVgSwtnL/q8UUTySg8Mecb+8Cf1otgLoD7DDH75axp86ER7LFsf3Q== + dependencies: + get-value "^2.0.3" + has-values "^0.1.4" + isobject "^2.0.0" + +has-value@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/has-value/-/has-value-1.0.0.tgz#18b281da585b1c5c51def24c930ed29a0be6b177" + integrity sha512-IBXk4GTsLYdQ7Rvt+GRBrFSVEkmuOUy4re0Xjd9kJSUQpnTrWR4/y9RpfexN9vkAPMFuQoeWKwqzPozRTlasGw== + dependencies: + get-value "^2.0.6" + has-values "^1.0.0" + isobject "^3.0.0" + +has-values@^0.1.4: + version "0.1.4" + resolved "https://registry.yarnpkg.com/has-values/-/has-values-0.1.4.tgz#6d61de95d91dfca9b9a02089ad384bff8f62b771" + integrity sha512-J8S0cEdWuQbqD9//tlZxiMuMNmxB8PlEwvYwuxsTmR1G5RXUePEX/SJn7aD0GMLieuZYSwNH0cQuJGwnYunXRQ== + +has-values@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/has-values/-/has-values-1.0.0.tgz#95b0b63fec2146619a6fe57fe75628d5a39efe4f" + integrity sha512-ODYZC64uqzmtfGMEAX/FvZiRyWLpAC3vYnNunURUnkGVTS+mI0smVsWaPydRBsE3g+ok7h960jChO8mFcWlHaQ== + dependencies: + is-number "^3.0.0" + kind-of "^4.0.0" + +has@^1.0.0, has@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/has/-/has-1.0.3.tgz#722d7cbfc1f6aa8241f16dd814e011e1f41e8796" 
+ integrity sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw== + dependencies: + function-bind "^1.1.1" + +history@^4.9.0: + version "4.10.1" + resolved "https://registry.yarnpkg.com/history/-/history-4.10.1.tgz#33371a65e3a83b267434e2b3f3b1b4c58aad4cf3" + integrity sha512-36nwAD620w12kuzPAsyINPWJqlNbij+hpK1k9XRloDtym8mxzGYl2c17LnV6IAGB2Dmg4tEa7G7DlawS0+qjew== + dependencies: + "@babel/runtime" "^7.1.2" + loose-envify "^1.2.0" + resolve-pathname "^3.0.0" + tiny-invariant "^1.0.2" + tiny-warning "^1.0.0" + value-equal "^1.0.1" + +history@^5.0.0: + version "5.3.0" + resolved "https://registry.yarnpkg.com/history/-/history-5.3.0.tgz#1548abaa245ba47992f063a0783db91ef201c73b" + integrity sha512-ZqaKwjjrAYUYfLG+htGaIIZ4nioX2L70ZUMIFysS3xvBsSG4x/n1V6TXV3N8ZYNuFGlDirFg32T7B6WOUPDYcQ== + dependencies: + "@babel/runtime" "^7.7.6" + +hoist-non-react-statics@^2.5.5: + version "2.5.5" + resolved "https://registry.yarnpkg.com/hoist-non-react-statics/-/hoist-non-react-statics-2.5.5.tgz#c5903cf409c0dfd908f388e619d86b9c1174cb47" + integrity sha512-rqcy4pJo55FTTLWt+bU8ukscqHeE/e9KWvsOW2b/a3afxQZhwkQdT1rPPCJ0rYXdj4vNcasY8zHTH+jF/qStxw== + +hoist-non-react-statics@^3.1.0, hoist-non-react-statics@^3.3.0, hoist-non-react-statics@^3.3.2: + version "3.3.2" + resolved "https://registry.yarnpkg.com/hoist-non-react-statics/-/hoist-non-react-statics-3.3.2.tgz#ece0acaf71d62c2969c2ec59feff42a4b1a85b45" + integrity sha512-/gGivxi8JPKWNm/W0jSmzcMPpfpPLc3dY/6GxhX2hQ9iGj3aDfklV4ET7NjKpSinLpJ5vafa9iiGIEZg10SfBw== + dependencies: + react-is "^16.7.0" + +hosted-git-info@^2.1.4: + version "2.8.9" + resolved "https://registry.yarnpkg.com/hosted-git-info/-/hosted-git-info-2.8.9.tgz#dffc0bf9a21c02209090f2aa69429e1414daf3f9" + integrity sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw== + +hpack.js@^2.1.6: + version "2.1.6" + resolved 
"https://registry.yarnpkg.com/hpack.js/-/hpack.js-2.1.6.tgz#87774c0949e513f42e84575b3c45681fade2a0b2" + integrity sha512-zJxVehUdMGIKsRaNt7apO2Gqp0BdqW5yaiGHXXmbpvxgBYVZnAql+BJb4RO5ad2MgpbZKn5G6nMnegrH1FcNYQ== + dependencies: + inherits "^2.0.1" + obuf "^1.0.0" + readable-stream "^2.0.1" + wbuf "^1.1.0" + +html-element-map@^1.2.0: + version "1.3.1" + resolved "https://registry.yarnpkg.com/html-element-map/-/html-element-map-1.3.1.tgz#44b2cbcfa7be7aa4ff59779e47e51012e1c73c08" + integrity sha512-6XMlxrAFX4UEEGxctfFnmrFaaZFNf9i5fNuV5wZ3WWQ4FVaNP1aX1LkX9j2mfEx1NpjeE/rL3nmgEn23GdFmrg== + dependencies: + array.prototype.filter "^1.0.0" + call-bind "^1.0.2" + +html-encoding-sniffer@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/html-encoding-sniffer/-/html-encoding-sniffer-2.0.1.tgz#42a6dc4fd33f00281176e8b23759ca4e4fa185f3" + integrity sha512-D5JbOMBIR/TVZkubHT+OyT2705QvogUW4IBn6nHd756OwieSF9aDYFj4dv6HHEVGYbHaLETa3WggZYWWMyy3ZQ== + dependencies: + whatwg-encoding "^1.0.5" + +html-entities@^2.3.2: + version "2.3.3" + resolved "https://registry.yarnpkg.com/html-entities/-/html-entities-2.3.3.tgz#117d7626bece327fc8baace8868fa6f5ef856e46" + integrity sha512-DV5Ln36z34NNTDgnz0EWGBLZENelNAtkiFA4kyNOG2tDI6Mz1uSWiq1wAKdyjnJwyDiDO7Fa2SO1CTxPXL8VxA== + +html-escaper@^2.0.0: + version "2.0.2" + resolved "https://registry.yarnpkg.com/html-escaper/-/html-escaper-2.0.2.tgz#dfd60027da36a36dfcbe236262c00a5822681453" + integrity sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg== + +htmlparser2@^8.0.1: + version "8.0.1" + resolved "https://registry.yarnpkg.com/htmlparser2/-/htmlparser2-8.0.1.tgz#abaa985474fcefe269bc761a779b544d7196d010" + integrity sha512-4lVbmc1diZC7GUJQtRQ5yBAeUCL1exyMwmForWkRLnwyzWBFxN633SALPMGYaWZvKe9j1pRZJpauvmxENSp/EA== + dependencies: + domelementtype "^2.3.0" + domhandler "^5.0.2" + domutils "^3.0.1" + entities "^4.3.0" + +http-deceiver@^1.2.7: + version "1.2.7" + resolved 
"https://registry.yarnpkg.com/http-deceiver/-/http-deceiver-1.2.7.tgz#fa7168944ab9a519d337cb0bec7284dc3e723d87" + integrity sha512-LmpOGxTfbpgtGVxJrj5k7asXHCgNZp5nLfp+hWc8QQRqtb7fUy6kRY3BO1h9ddF6yIPYUARgxGOwB42DnxIaNw== + +http-errors@2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/http-errors/-/http-errors-2.0.0.tgz#b7774a1486ef73cf7667ac9ae0858c012c57b9d3" + integrity sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ== + dependencies: + depd "2.0.0" + inherits "2.0.4" + setprototypeof "1.2.0" + statuses "2.0.1" + toidentifier "1.0.1" + +http-errors@~1.6.2: + version "1.6.3" + resolved "https://registry.yarnpkg.com/http-errors/-/http-errors-1.6.3.tgz#8b55680bb4be283a0b5bf4ea2e38580be1d9320d" + integrity sha512-lks+lVC8dgGyh97jxvxeYTWQFvh4uw4yC12gVl63Cg30sjPX4wuGcdkICVXDAESr6OJGjqGA8Iz5mkeN6zlD7A== + dependencies: + depd "~1.1.2" + inherits "2.0.3" + setprototypeof "1.1.0" + statuses ">= 1.4.0 < 2" + +http-parser-js@>=0.5.1: + version "0.5.8" + resolved "https://registry.yarnpkg.com/http-parser-js/-/http-parser-js-0.5.8.tgz#af23090d9ac4e24573de6f6aecc9d84a48bf20e3" + integrity sha512-SGeBX54F94Wgu5RH3X5jsDtf4eHyRogWX1XGT3b4HuW3tQPM4AaBzoUji/4AAJNXCEOWZ5O0DgZmJw1947gD5Q== + +http-proxy-agent@^4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/http-proxy-agent/-/http-proxy-agent-4.0.1.tgz#8a8c8ef7f5932ccf953c296ca8291b95aa74aa3a" + integrity sha512-k0zdNgqWTGA6aeIRVpvfVob4fL52dTfaehylg0Y4UvSySvOq/Y+BOyPrgpUrA7HylqvU8vIZGsRuXmspskV0Tg== + dependencies: + "@tootallnate/once" "1" + agent-base "6" + debug "4" + +http-proxy-middleware@^2.0.3: + version "2.0.6" + resolved "https://registry.yarnpkg.com/http-proxy-middleware/-/http-proxy-middleware-2.0.6.tgz#e1a4dd6979572c7ab5a4e4b55095d1f32a74963f" + integrity sha512-ya/UeJ6HVBYxrgYotAZo1KvPWlgB48kUJLDePFeneHsVujFaW5WNj2NgWCAE//B1Dl02BIfYlpNgBy8Kf8Rjmw== + dependencies: + "@types/http-proxy" "^1.17.8" + http-proxy "^1.18.1" + is-glob "^4.0.1" + 
is-plain-obj "^3.0.0" + micromatch "^4.0.2" + +http-proxy@^1.18.1: + version "1.18.1" + resolved "https://registry.yarnpkg.com/http-proxy/-/http-proxy-1.18.1.tgz#401541f0534884bbf95260334e72f88ee3976549" + integrity sha512-7mz/721AbnJwIVbnaSv1Cz3Am0ZLT/UBwkC92VlxhXv/k/BBQfM2fXElQNC27BVGr0uwUpplYPQM9LnaBMR5NQ== + dependencies: + eventemitter3 "^4.0.0" + follow-redirects "^1.0.0" + requires-port "^1.0.0" + +https-proxy-agent@^5.0.0: + version "5.0.1" + resolved "https://registry.yarnpkg.com/https-proxy-agent/-/https-proxy-agent-5.0.1.tgz#c59ef224a04fe8b754f3db0063a25ea30d0005d6" + integrity sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA== + dependencies: + agent-base "6" + debug "4" + +human-signals@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/human-signals/-/human-signals-1.1.1.tgz#c5b1cd14f50aeae09ab6c59fe63ba3395fe4dfa3" + integrity sha512-SEQu7vl8KjNL2eoGBLF3+wAjpsNfA9XMlXAYj/3EdaNfAlxKthD1xjEQfGOUhllCGGJVNY34bRr6lPINhNjyZw== + +human-signals@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/human-signals/-/human-signals-2.1.0.tgz#dc91fcba42e4d06e4abaed33b3e7a3c02f514ea0" + integrity sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw== + +iconv-lite@0.4.24: + version "0.4.24" + resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.4.24.tgz#2022b4b25fbddc21d2f524974a474aafe733908b" + integrity sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA== + dependencies: + safer-buffer ">= 2.1.2 < 3" + +icss-utils@^5.0.0, icss-utils@^5.1.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/icss-utils/-/icss-utils-5.1.0.tgz#c6be6858abd013d768e98366ae47e25d5887b1ae" + integrity sha512-soFhflCVWLfRNOPU3iv5Z9VUdT44xFRbzjLsEzSr5AQmgqPMTHdU3PMT1Cf1ssx8fLNJDA1juftYl+PUcv3MqA== + +identity-obj-proxy@^3.0.0: + version "3.0.0" + resolved 
"https://registry.yarnpkg.com/identity-obj-proxy/-/identity-obj-proxy-3.0.0.tgz#94d2bda96084453ef36fbc5aaec37e0f79f1fc14" + integrity sha512-00n6YnVHKrinT9t0d9+5yZC6UBNJANpYEQvL2LlX6Ab9lnmxzIRcEmTPuyGScvl1+jKuCICX1Z0Ab1pPKKdikA== + dependencies: + harmony-reflect "^1.4.6" + +ignore@^4.0.6: + version "4.0.6" + resolved "https://registry.yarnpkg.com/ignore/-/ignore-4.0.6.tgz#750e3db5862087b4737ebac8207ffd1ef27b25fc" + integrity sha512-cyFDKrqc/YdcWFniJhzI42+AzS+gNwmUzOSFcRCQYwySuBBBy/KjuxWLZ/FHEH6Moq1NizMOBWyTcv8O4OZIMg== + +import-fresh@^3.0.0, import-fresh@^3.2.1: + version "3.3.0" + resolved "https://registry.yarnpkg.com/import-fresh/-/import-fresh-3.3.0.tgz#37162c25fcb9ebaa2e6e53d5b4d88ce17d9e0c2b" + integrity sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw== + dependencies: + parent-module "^1.0.0" + resolve-from "^4.0.0" + +import-local@^3.0.2: + version "3.1.0" + resolved "https://registry.yarnpkg.com/import-local/-/import-local-3.1.0.tgz#b4479df8a5fd44f6cdce24070675676063c95cb4" + integrity sha512-ASB07uLtnDs1o6EHjKpX34BKYDSqnFerfTOJL2HvMqF70LnxpjkzDB8J44oT9pu4AMPkQwf8jl6szgvNd2tRIg== + dependencies: + pkg-dir "^4.2.0" + resolve-cwd "^3.0.0" + +imurmurhash@^0.1.4: + version "0.1.4" + resolved "https://registry.yarnpkg.com/imurmurhash/-/imurmurhash-0.1.4.tgz#9218b9b2b928a238b13dc4fb6b6d576f231453ea" + integrity sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA== + +indent-string@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/indent-string/-/indent-string-4.0.0.tgz#624f8f4497d619b2d9768531d58f4122854d7251" + integrity sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg== + +inflight@^1.0.4: + version "1.0.6" + resolved "https://registry.yarnpkg.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9" + integrity 
sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA== + dependencies: + once "^1.3.0" + wrappy "1" + +inherits@2, inherits@2.0.4, inherits@^2.0.1, inherits@^2.0.3, inherits@^2.0.4, inherits@~2.0.3: + version "2.0.4" + resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c" + integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== + +inherits@2.0.3: + version "2.0.3" + resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.3.tgz#633c2c83e3da42a502f52466022480f4208261de" + integrity sha512-x00IRNXNy63jwGkJmzPigoySHbaqpNuzKbBOmzK+g2OdZpQ9w+sxCN+VSB3ja7IAge2OP2qpfxTjeNcyjmW1uw== + +internal-slot@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/internal-slot/-/internal-slot-1.0.3.tgz#7347e307deeea2faac2ac6205d4bc7d34967f59c" + integrity sha512-O0DB1JC/sPyZl7cIo78n5dR7eUSwwpYPiXRhTzNxZVAMUuB8vlnRFyLxdrVToks6XPLVnFfbzaVd5WLjhgg+vA== + dependencies: + get-intrinsic "^1.1.0" + has "^1.0.3" + side-channel "^1.0.4" + +interpret@^2.2.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/interpret/-/interpret-2.2.0.tgz#1a78a0b5965c40a5416d007ad6f50ad27c417df9" + integrity sha512-Ju0Bz/cEia55xDwUWEa8+olFpCiQoypjnQySseKtmjNrnps3P+xfpUmGr90T7yjlVJmOtybRvPXhKMbHr+fWnw== + +invariant@^2.2.4: + version "2.2.4" + resolved "https://registry.yarnpkg.com/invariant/-/invariant-2.2.4.tgz#610f3c92c9359ce1db616e538008d23ff35158e6" + integrity sha512-phJfQVBuaJM5raOpJjSfkiD6BpbCE4Ns//LaXl6wGYtUBY83nWS6Rf9tXm2e8VaK60JEjYldbPif/A2B1C2gNA== + dependencies: + loose-envify "^1.0.0" + +ipaddr.js@1.9.1: + version "1.9.1" + resolved "https://registry.yarnpkg.com/ipaddr.js/-/ipaddr.js-1.9.1.tgz#bff38543eeb8984825079ff3a2a8e6cbd46781b3" + integrity sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g== + +ipaddr.js@^2.0.1: + version "2.0.1" + resolved 
"https://registry.yarnpkg.com/ipaddr.js/-/ipaddr.js-2.0.1.tgz#eca256a7a877e917aeb368b0a7497ddf42ef81c0" + integrity sha512-1qTgH9NG+IIJ4yfKs2e6Pp1bZg8wbDbKHT21HrLIeYBTRLgMYKnMTPAuI3Lcs61nfx5h1xlXnbJtH1kX5/d/ng== + +is-accessor-descriptor@^0.1.6: + version "0.1.6" + resolved "https://registry.yarnpkg.com/is-accessor-descriptor/-/is-accessor-descriptor-0.1.6.tgz#a9e12cb3ae8d876727eeef3843f8a0897b5c98d6" + integrity sha512-e1BM1qnDbMRG3ll2U9dSK0UMHuWOs3pY3AtcFsmvwPtKL3MML/Q86i+GilLfvqEs4GW+ExB91tQ3Ig9noDIZ+A== + dependencies: + kind-of "^3.0.2" + +is-accessor-descriptor@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-accessor-descriptor/-/is-accessor-descriptor-1.0.0.tgz#169c2f6d3df1f992618072365c9b0ea1f6878656" + integrity sha512-m5hnHTkcVsPfqx3AKlyttIPb7J+XykHvJP2B9bZDjlhLIoEq4XoK64Vg7boZlVWYK6LUY94dYPEE7Lh0ZkZKcQ== + dependencies: + kind-of "^6.0.0" + +is-arrayish@^0.2.1: + version "0.2.1" + resolved "https://registry.yarnpkg.com/is-arrayish/-/is-arrayish-0.2.1.tgz#77c99840527aa8ecb1a8ba697b80645a7a926a9d" + integrity sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg== + +is-bigint@^1.0.1: + version "1.0.4" + resolved "https://registry.yarnpkg.com/is-bigint/-/is-bigint-1.0.4.tgz#08147a1875bc2b32005d41ccd8291dffc6691df3" + integrity sha512-zB9CruMamjym81i2JZ3UMn54PKGsQzsJeo6xvN3HJJ4CAsQNB6iRutp2To77OfCNuoxspsIhzaPoO1zyCEhFOg== + dependencies: + has-bigints "^1.0.1" + +is-binary-path@~2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/is-binary-path/-/is-binary-path-2.1.0.tgz#ea1f7f3b80f064236e83470f86c09c254fb45b09" + integrity sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw== + dependencies: + binary-extensions "^2.0.0" + +is-boolean-object@^1.0.1, is-boolean-object@^1.1.0: + version "1.1.2" + resolved "https://registry.yarnpkg.com/is-boolean-object/-/is-boolean-object-1.1.2.tgz#5c6dc200246dd9321ae4b885a114bb1f75f63719" + integrity 
sha512-gDYaKHJmnj4aWxyj6YHyXVpdQawtVLHU5cb+eztPGczf6cjuTdwve5ZIEfgXqH4e57An1D1AKf8CZ3kYrQRqYA== + dependencies: + call-bind "^1.0.2" + has-tostringtag "^1.0.0" + +is-buffer@^1.1.5: + version "1.1.6" + resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-1.1.6.tgz#efaa2ea9daa0d7ab2ea13a97b2b8ad51fefbe8be" + integrity sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w== + +is-callable@^1.1.4, is-callable@^1.1.5, is-callable@^1.2.4: + version "1.2.4" + resolved "https://registry.yarnpkg.com/is-callable/-/is-callable-1.2.4.tgz#47301d58dd0259407865547853df6d61fe471945" + integrity sha512-nsuwtxZfMX67Oryl9LCQ+upnC0Z0BgpwntpS89m1H/TLF0zNfzfLMV/9Wa/6MZsj0acpEjAO0KF1xT6ZdLl95w== + +is-ci@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/is-ci/-/is-ci-2.0.0.tgz#6bc6334181810e04b5c22b3d589fdca55026404c" + integrity sha512-YfJT7rkpQB0updsdHLGWrvhBJfcfzNNawYDNIyQXJz0IViGf75O8EBPKSdvw2rF+LGCsX4FZ8tcr3b19LcZq4w== + dependencies: + ci-info "^2.0.0" + +is-core-module@^2.9.0: + version "2.9.0" + resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.9.0.tgz#e1c34429cd51c6dd9e09e0799e396e27b19a9c69" + integrity sha512-+5FPy5PnwmO3lvfMb0AsoPaBG+5KHUI0wYFXOtYPnVVVspTFUuMZNfNaNVRt3FZadstu2c8x23vykRW/NBoU6A== + dependencies: + has "^1.0.3" + +is-data-descriptor@^0.1.4: + version "0.1.4" + resolved "https://registry.yarnpkg.com/is-data-descriptor/-/is-data-descriptor-0.1.4.tgz#0b5ee648388e2c860282e793f1856fec3f301b56" + integrity sha512-+w9D5ulSoBNlmw9OHn3U2v51SyoCd0he+bB3xMl62oijhrspxowjU+AIcDY0N3iEJbUEkB15IlMASQsxYigvXg== + dependencies: + kind-of "^3.0.2" + +is-data-descriptor@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-data-descriptor/-/is-data-descriptor-1.0.0.tgz#d84876321d0e7add03990406abbbbd36ba9268c7" + integrity sha512-jbRXy1FmtAoCjQkVmIVYwuuqDFUbaOeDjmed1tOGPrsMhtJA4rD9tkgA0F1qJ3gRFRXcHYVkdeaP50Q5rE/jLQ== + dependencies: + kind-of "^6.0.0" + +is-date-object@^1.0.1: + 
version "1.0.5" + resolved "https://registry.yarnpkg.com/is-date-object/-/is-date-object-1.0.5.tgz#0841d5536e724c25597bf6ea62e1bd38298df31f" + integrity sha512-9YQaSxsAiSwcvS33MBk3wTCVnWK+HhF8VZR2jRxehM16QcVOdHqPn4VPHmRK4lSr38n9JriurInLcP90xsYNfQ== + dependencies: + has-tostringtag "^1.0.0" + +is-descriptor@^0.1.0: + version "0.1.6" + resolved "https://registry.yarnpkg.com/is-descriptor/-/is-descriptor-0.1.6.tgz#366d8240dde487ca51823b1ab9f07a10a78251ca" + integrity sha512-avDYr0SB3DwO9zsMov0gKCESFYqCnE4hq/4z3TdUlukEy5t9C0YRq7HLrsN52NAcqXKaepeCD0n+B0arnVG3Hg== + dependencies: + is-accessor-descriptor "^0.1.6" + is-data-descriptor "^0.1.4" + kind-of "^5.0.0" + +is-descriptor@^1.0.0, is-descriptor@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/is-descriptor/-/is-descriptor-1.0.2.tgz#3b159746a66604b04f8c81524ba365c5f14d86ec" + integrity sha512-2eis5WqQGV7peooDyLmNEPUrps9+SXX5c9pL3xEB+4e9HnGuDa7mB7kHxHw4CbqS9k1T2hOH3miL8n8WtiYVtg== + dependencies: + is-accessor-descriptor "^1.0.0" + is-data-descriptor "^1.0.0" + kind-of "^6.0.2" + +is-docker@^2.0.0, is-docker@^2.1.1: + version "2.2.1" + resolved "https://registry.yarnpkg.com/is-docker/-/is-docker-2.2.1.tgz#33eeabe23cfe86f14bde4408a02c0cfb853acdaa" + integrity sha512-F+i2BKsFrH66iaUFc0woD8sLy8getkwTwtOBjvs56Cx4CgJDeKQeqfz8wAYiSb8JOprWhHH5p77PbmYCvvUuXQ== + +is-extendable@^0.1.0, is-extendable@^0.1.1: + version "0.1.1" + resolved "https://registry.yarnpkg.com/is-extendable/-/is-extendable-0.1.1.tgz#62b110e289a471418e3ec36a617d472e301dfc89" + integrity sha512-5BMULNob1vgFX6EjQw5izWDxrecWK9AM72rugNr0TFldMOi0fj6Jk+zeKIt0xGj4cEfQIJth4w3OKWOJ4f+AFw== + +is-extendable@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/is-extendable/-/is-extendable-1.0.1.tgz#a7470f9e426733d81bd81e1155264e3a3507cab4" + integrity sha512-arnXMxT1hhoKo9k1LZdmlNyJdDDfy2v0fXjFlmok4+i8ul/6WlbVge9bhM74OpNPQPMGUToDtz+KXa1PneJxOA== + dependencies: + is-plain-object "^2.0.4" + +is-extglob@^2.1.1: + version "2.1.1" + 
resolved "https://registry.yarnpkg.com/is-extglob/-/is-extglob-2.1.1.tgz#a88c02535791f02ed37c76a1b9ea9773c833f8c2" + integrity sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ== + +is-fullwidth-code-point@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz#f116f8064fe90b3f7844a38997c0b75051269f1d" + integrity sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg== + +is-generator-fn@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/is-generator-fn/-/is-generator-fn-2.1.0.tgz#7d140adc389aaf3011a8f2a2a4cfa6faadffb118" + integrity sha512-cTIB4yPYL/Grw0EaSzASzg6bBy9gqCofvWN8okThAYIxKJZC+udlRAmGbM0XLeniEJSs8uEgHPGuHSe1XsOLSQ== + +is-glob@^4.0.0, is-glob@^4.0.1, is-glob@~4.0.1: + version "4.0.3" + resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.3.tgz#64f61e42cbbb2eec2071a9dac0b28ba1e65d5084" + integrity sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg== + dependencies: + is-extglob "^2.1.1" + +is-negative-zero@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/is-negative-zero/-/is-negative-zero-2.0.2.tgz#7bf6f03a28003b8b3965de3ac26f664d765f3150" + integrity sha512-dqJvarLawXsFbNDeJW7zAz8ItJ9cd28YufuuFzh0G8pNHjJMnY08Dv7sYX2uF5UpQOwieAeOExEYAWWfu7ZZUA== + +is-number-object@^1.0.4: + version "1.0.7" + resolved "https://registry.yarnpkg.com/is-number-object/-/is-number-object-1.0.7.tgz#59d50ada4c45251784e9904f5246c742f07a42fc" + integrity sha512-k1U0IRzLMo7ZlYIfzRu23Oh6MiIFasgpb9X76eqfFZAqwH44UI4KTBvBYIZ1dSL9ZzChTB9ShHfLkR4pdW5krQ== + dependencies: + has-tostringtag "^1.0.0" + +is-number@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/is-number/-/is-number-3.0.0.tgz#24fd6201a4782cf50561c810276afc7d12d71195" + integrity 
sha512-4cboCqIpliH+mAvFNegjZQ4kgKc3ZUhQVr3HvWbSh5q3WH2v82ct+T2Y1hdU5Gdtorx/cLifQjqCbL7bpznLTg== + dependencies: + kind-of "^3.0.2" + +is-number@^7.0.0: + version "7.0.0" + resolved "https://registry.yarnpkg.com/is-number/-/is-number-7.0.0.tgz#7535345b896734d5f80c4d06c50955527a14f12b" + integrity sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng== + +is-plain-obj@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/is-plain-obj/-/is-plain-obj-3.0.0.tgz#af6f2ea14ac5a646183a5bbdb5baabbc156ad9d7" + integrity sha512-gwsOE28k+23GP1B6vFl1oVh/WOzmawBrKwo5Ev6wMKzPkaXaCDIQKzLnvsA42DRlbVTWorkgTKIviAKCWkfUwA== + +is-plain-object@^2.0.3, is-plain-object@^2.0.4: + version "2.0.4" + resolved "https://registry.yarnpkg.com/is-plain-object/-/is-plain-object-2.0.4.tgz#2c163b3fafb1b606d9d17928f05c2a1c38e07677" + integrity sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og== + dependencies: + isobject "^3.0.1" + +is-potential-custom-element-name@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/is-potential-custom-element-name/-/is-potential-custom-element-name-1.0.1.tgz#171ed6f19e3ac554394edf78caa05784a45bebb5" + integrity sha512-bCYeRA2rVibKZd+s2625gGnGF/t7DSqDs4dP7CrLA1m7jKWz6pps0LpYLJN8Q64HtmPKJ1hrN3nzPNKFEKOUiQ== + +is-regex@^1.0.5, is-regex@^1.1.4: + version "1.1.4" + resolved "https://registry.yarnpkg.com/is-regex/-/is-regex-1.1.4.tgz#eef5663cd59fa4c0ae339505323df6854bb15958" + integrity sha512-kvRdxDsxZjhzUX07ZnLydzS1TU/TJlTUHHY4YLL87e37oUA49DfkLqgy+VjFocowy29cKvcSiu+kIv728jTTVg== + dependencies: + call-bind "^1.0.2" + has-tostringtag "^1.0.0" + +is-shared-array-buffer@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/is-shared-array-buffer/-/is-shared-array-buffer-1.0.2.tgz#8f259c573b60b6a32d4058a1a07430c0a7344c79" + integrity sha512-sqN2UDu1/0y6uvXyStCOzyhAjCSlHceFoMKJW8W9EU9cvic/QdsZ0kEU93HEy3IUEFZIiH/3w+AH/UQbPHNdhA== + dependencies: + 
call-bind "^1.0.2" + +is-stream@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-1.1.0.tgz#12d4a3dd4e68e0b79ceb8dbc84173ae80d91ca44" + integrity sha512-uQPm8kcs47jx38atAcWTVxyltQYoPT68y9aWYdV6yWXSyW8mzSat0TL6CiWdZeCdF3KrAvpVtnHbTv4RN+rqdQ== + +is-stream@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-2.0.1.tgz#fac1e3d53b97ad5a9d0ae9cef2389f5810a5c077" + integrity sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg== + +is-string@^1.0.5, is-string@^1.0.7: + version "1.0.7" + resolved "https://registry.yarnpkg.com/is-string/-/is-string-1.0.7.tgz#0dd12bf2006f255bb58f695110eff7491eebc0fd" + integrity sha512-tE2UXzivje6ofPW7l23cjDOMa09gb7xlAqG6jG5ej6uPV32TlWP3NKPigtaGeHNu9fohccRYvIiZMfOOnOYUtg== + dependencies: + has-tostringtag "^1.0.0" + +is-subset@^0.1.1: + version "0.1.1" + resolved "https://registry.yarnpkg.com/is-subset/-/is-subset-0.1.1.tgz#8a59117d932de1de00f245fcdd39ce43f1e939a6" + integrity sha512-6Ybun0IkarhmEqxXCNw/C0bna6Zb/TkfUX9UbwJtK6ObwAVCxmAP308WWTHviM/zAqXk05cdhYsUsZeGQh99iw== + +is-symbol@^1.0.2, is-symbol@^1.0.3: + version "1.0.4" + resolved "https://registry.yarnpkg.com/is-symbol/-/is-symbol-1.0.4.tgz#a6dac93b635b063ca6872236de88910a57af139c" + integrity sha512-C/CPBqKWnvdcxqIARxyOh4v1UUEOCHpgDa0WYgpKDFMszcrPcffg5uhwSgPCLD2WWxmq6isisz87tzT01tuGhg== + dependencies: + has-symbols "^1.0.2" + +is-typedarray@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-typedarray/-/is-typedarray-1.0.0.tgz#e479c80858df0c1b11ddda6940f96011fcda4a9a" + integrity sha512-cyA56iCMHAh5CdzjJIa4aohJyeO1YbwLi3Jc35MmRU6poroFjIGZzUzupGiRPOjgHg9TLu43xbpwXk523fMxKA== + +is-weakref@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/is-weakref/-/is-weakref-1.0.2.tgz#9529f383a9338205e89765e0392efc2f100f06f2" + integrity sha512-qctsuLZmIQ0+vSSMfoVvyFe2+GSEvnmZ2ezTup1SBse9+twCCeial6EEi3Nc2KFcf6+qz2FBPnjXsk8xhKSaPQ== + 
dependencies: + call-bind "^1.0.2" + +is-windows@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/is-windows/-/is-windows-1.0.2.tgz#d1850eb9791ecd18e6182ce12a30f396634bb19d" + integrity sha512-eXK1UInq2bPmjyX6e3VHIzMLobc4J94i4AWn+Hpq3OU5KkrRC96OAcR3PRJ/pGu6m8TRnBHP9dkXQVsT/COVIA== + +is-wsl@^2.2.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/is-wsl/-/is-wsl-2.2.0.tgz#74a4c76e77ca9fd3f932f290c17ea326cd157271" + integrity sha512-fKzAra0rGJUUBwGBgNkHZuToZcn+TtXHpeCgmkMJMMYx1sQDYaCSyjJBSCa2nH1DGm7s3n1oBnohoVTBaN7Lww== + dependencies: + is-docker "^2.0.0" + +isarray@0.0.1: + version "0.0.1" + resolved "https://registry.yarnpkg.com/isarray/-/isarray-0.0.1.tgz#8a18acfca9a8f4177e09abfc6038939b05d1eedf" + integrity sha512-D2S+3GLxWH+uhrNEcoh/fnmYeP8E8/zHl644d/jdA0g2uyXvy3sb0qxotE+ne0LtccHknQzWwZEzhak7oJ0COQ== + +isarray@1.0.0, isarray@~1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11" + integrity sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ== + +isexe@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10" + integrity sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw== + +isobject@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/isobject/-/isobject-2.1.0.tgz#f065561096a3f1da2ef46272f815c840d87e0c89" + integrity sha512-+OUdGJlgjOBZDfxnDjYYG6zp487z0JGNQq3cYQYg5f5hKR+syHMsaztzGeml/4kGG55CSpKSpWTY+jYGgsHLgA== + dependencies: + isarray "1.0.0" + +isobject@^3.0.0, isobject@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/isobject/-/isobject-3.0.1.tgz#4e431e92b11a9731636aa1f9c8d1ccbcfdab78df" + integrity sha512-WhB9zCku7EGTj/HQQRz5aUQEUeoQZH2bWcltRErOpymJ4boYE6wL9Tbr23krRPSZ+C5zqNSrSw+Cc7sZZ4b7vg== + +istanbul-lib-coverage@^3.0.0, istanbul-lib-coverage@^3.2.0: + 
version "3.2.0" + resolved "https://registry.yarnpkg.com/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.0.tgz#189e7909d0a39fa5a3dfad5b03f71947770191d3" + integrity sha512-eOeJ5BHCmHYvQK7xt9GkdHuzuCGS1Y6g9Gvnx3Ym33fz/HpLRYxiS0wHNr+m/MBC8B647Xt608vCDEvhl9c6Mw== + +istanbul-lib-instrument@^4.0.3: + version "4.0.3" + resolved "https://registry.yarnpkg.com/istanbul-lib-instrument/-/istanbul-lib-instrument-4.0.3.tgz#873c6fff897450118222774696a3f28902d77c1d" + integrity sha512-BXgQl9kf4WTCPCCpmFGoJkz/+uhvm7h7PFKUYxh7qarQd3ER33vHG//qaE8eN25l07YqZPpHXU9I09l/RD5aGQ== + dependencies: + "@babel/core" "^7.7.5" + "@istanbuljs/schema" "^0.1.2" + istanbul-lib-coverage "^3.0.0" + semver "^6.3.0" + +istanbul-lib-instrument@^5.0.4: + version "5.2.0" + resolved "https://registry.yarnpkg.com/istanbul-lib-instrument/-/istanbul-lib-instrument-5.2.0.tgz#31d18bdd127f825dd02ea7bfdfd906f8ab840e9f" + integrity sha512-6Lthe1hqXHBNsqvgDzGO6l03XNeu3CrG4RqQ1KM9+l5+jNGpEJfIELx1NS3SEHmJQA8np/u+E4EPRKRiu6m19A== + dependencies: + "@babel/core" "^7.12.3" + "@babel/parser" "^7.14.7" + "@istanbuljs/schema" "^0.1.2" + istanbul-lib-coverage "^3.2.0" + semver "^6.3.0" + +istanbul-lib-report@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/istanbul-lib-report/-/istanbul-lib-report-3.0.0.tgz#7518fe52ea44de372f460a76b5ecda9ffb73d8a6" + integrity sha512-wcdi+uAKzfiGT2abPpKZ0hSU1rGQjUQnLvtY5MpQ7QCTahD3VODhcu4wcfY1YtkGaDD5yuydOLINXsfbus9ROw== + dependencies: + istanbul-lib-coverage "^3.0.0" + make-dir "^3.0.0" + supports-color "^7.1.0" + +istanbul-lib-source-maps@^4.0.0: + version "4.0.1" + resolved "https://registry.yarnpkg.com/istanbul-lib-source-maps/-/istanbul-lib-source-maps-4.0.1.tgz#895f3a709fcfba34c6de5a42939022f3e4358551" + integrity sha512-n3s8EwkdFIJCG3BPKBYvskgXGoy88ARzvegkitk60NxRdwltLOTaH7CUiMRXvwYorl0Q712iEjcWB+fK/MrWVw== + dependencies: + debug "^4.1.1" + istanbul-lib-coverage "^3.0.0" + source-map "^0.6.1" + +istanbul-reports@^3.0.2: + version "3.1.5" + resolved 
"https://registry.yarnpkg.com/istanbul-reports/-/istanbul-reports-3.1.5.tgz#cc9a6ab25cb25659810e4785ed9d9fb742578bae" + integrity sha512-nUsEMa9pBt/NOHqbcbeJEgqIlY/K7rVWUX6Lql2orY5e9roQOthbR3vtY4zzf2orPELg80fnxxk9zUyPlgwD1w== + dependencies: + html-escaper "^2.0.0" + istanbul-lib-report "^3.0.0" + +jest-changed-files@^26.6.2: + version "26.6.2" + resolved "https://registry.yarnpkg.com/jest-changed-files/-/jest-changed-files-26.6.2.tgz#f6198479e1cc66f22f9ae1e22acaa0b429c042d0" + integrity sha512-fDS7szLcY9sCtIip8Fjry9oGf3I2ht/QT21bAHm5Dmf0mD4X3ReNUf17y+bO6fR8WgbIZTlbyG1ak/53cbRzKQ== + dependencies: + "@jest/types" "^26.6.2" + execa "^4.0.0" + throat "^5.0.0" + +jest-cli@^26.6.3: + version "26.6.3" + resolved "https://registry.yarnpkg.com/jest-cli/-/jest-cli-26.6.3.tgz#43117cfef24bc4cd691a174a8796a532e135e92a" + integrity sha512-GF9noBSa9t08pSyl3CY4frMrqp+aQXFGFkf5hEPbh/pIUFYWMK6ZLTfbmadxJVcJrdRoChlWQsA2VkJcDFK8hg== + dependencies: + "@jest/core" "^26.6.3" + "@jest/test-result" "^26.6.2" + "@jest/types" "^26.6.2" + chalk "^4.0.0" + exit "^0.1.2" + graceful-fs "^4.2.4" + import-local "^3.0.2" + is-ci "^2.0.0" + jest-config "^26.6.3" + jest-util "^26.6.2" + jest-validate "^26.6.2" + prompts "^2.0.1" + yargs "^15.4.1" + +jest-config@^26.6.3: + version "26.6.3" + resolved "https://registry.yarnpkg.com/jest-config/-/jest-config-26.6.3.tgz#64f41444eef9eb03dc51d5c53b75c8c71f645349" + integrity sha512-t5qdIj/bCj2j7NFVHb2nFB4aUdfucDn3JRKgrZnplb8nieAirAzRSHP8uDEd+qV6ygzg9Pz4YG7UTJf94LPSyg== + dependencies: + "@babel/core" "^7.1.0" + "@jest/test-sequencer" "^26.6.3" + "@jest/types" "^26.6.2" + babel-jest "^26.6.3" + chalk "^4.0.0" + deepmerge "^4.2.2" + glob "^7.1.1" + graceful-fs "^4.2.4" + jest-environment-jsdom "^26.6.2" + jest-environment-node "^26.6.2" + jest-get-type "^26.3.0" + jest-jasmine2 "^26.6.3" + jest-regex-util "^26.0.0" + jest-resolve "^26.6.2" + jest-util "^26.6.2" + jest-validate "^26.6.2" + micromatch "^4.0.2" + pretty-format "^26.6.2" + +jest-diff@^26.6.2: + 
version "26.6.2" + resolved "https://registry.yarnpkg.com/jest-diff/-/jest-diff-26.6.2.tgz#1aa7468b52c3a68d7d5c5fdcdfcd5e49bd164394" + integrity sha512-6m+9Z3Gv9wN0WFVasqjCL/06+EFCMTqDEUl/b87HYK2rAPTyfz4ZIuSlPhY51PIQRWx5TaxeF1qmXKe9gfN3sA== + dependencies: + chalk "^4.0.0" + diff-sequences "^26.6.2" + jest-get-type "^26.3.0" + pretty-format "^26.6.2" + +jest-diff@^28.1.3: + version "28.1.3" + resolved "https://registry.yarnpkg.com/jest-diff/-/jest-diff-28.1.3.tgz#948a192d86f4e7a64c5264ad4da4877133d8792f" + integrity sha512-8RqP1B/OXzjjTWkqMX67iqgwBVJRgCyKD3L9nq+6ZqJMdvjE8RgHktqZ6jNrkdMT+dJuYNI3rhQpxaz7drJHfw== + dependencies: + chalk "^4.0.0" + diff-sequences "^28.1.1" + jest-get-type "^28.0.2" + pretty-format "^28.1.3" + +jest-docblock@^26.0.0: + version "26.0.0" + resolved "https://registry.yarnpkg.com/jest-docblock/-/jest-docblock-26.0.0.tgz#3e2fa20899fc928cb13bd0ff68bd3711a36889b5" + integrity sha512-RDZ4Iz3QbtRWycd8bUEPxQsTlYazfYn/h5R65Fc6gOfwozFhoImx+affzky/FFBuqISPTqjXomoIGJVKBWoo0w== + dependencies: + detect-newline "^3.0.0" + +jest-each@^26.6.2: + version "26.6.2" + resolved "https://registry.yarnpkg.com/jest-each/-/jest-each-26.6.2.tgz#02526438a77a67401c8a6382dfe5999952c167cb" + integrity sha512-Mer/f0KaATbjl8MCJ+0GEpNdqmnVmDYqCTJYTvoo7rqmRiDllmp2AYN+06F93nXcY3ur9ShIjS+CO/uD+BbH4A== + dependencies: + "@jest/types" "^26.6.2" + chalk "^4.0.0" + jest-get-type "^26.3.0" + jest-util "^26.6.2" + pretty-format "^26.6.2" + +jest-environment-jsdom@^26.6.2: + version "26.6.2" + resolved "https://registry.yarnpkg.com/jest-environment-jsdom/-/jest-environment-jsdom-26.6.2.tgz#78d09fe9cf019a357009b9b7e1f101d23bd1da3e" + integrity sha512-jgPqCruTlt3Kwqg5/WVFyHIOJHsiAvhcp2qiR2QQstuG9yWox5+iHpU3ZrcBxW14T4fe5Z68jAfLRh7joCSP2Q== + dependencies: + "@jest/environment" "^26.6.2" + "@jest/fake-timers" "^26.6.2" + "@jest/types" "^26.6.2" + "@types/node" "*" + jest-mock "^26.6.2" + jest-util "^26.6.2" + jsdom "^16.4.0" + +jest-environment-node@^26.6.2: + version "26.6.2" + 
resolved "https://registry.yarnpkg.com/jest-environment-node/-/jest-environment-node-26.6.2.tgz#824e4c7fb4944646356f11ac75b229b0035f2b0c" + integrity sha512-zhtMio3Exty18dy8ee8eJ9kjnRyZC1N4C1Nt/VShN1apyXc8rWGtJ9lI7vqiWcyyXS4BVSEn9lxAM2D+07/Tag== + dependencies: + "@jest/environment" "^26.6.2" + "@jest/fake-timers" "^26.6.2" + "@jest/types" "^26.6.2" + "@types/node" "*" + jest-mock "^26.6.2" + jest-util "^26.6.2" + +jest-get-type@^26.3.0: + version "26.3.0" + resolved "https://registry.yarnpkg.com/jest-get-type/-/jest-get-type-26.3.0.tgz#e97dc3c3f53c2b406ca7afaed4493b1d099199e0" + integrity sha512-TpfaviN1R2pQWkIihlfEanwOXK0zcxrKEE4MlU6Tn7keoXdN6/3gK/xl0yEh8DOunn5pOVGKf8hB4R9gVh04ig== + +jest-get-type@^28.0.2: + version "28.0.2" + resolved "https://registry.yarnpkg.com/jest-get-type/-/jest-get-type-28.0.2.tgz#34622e628e4fdcd793d46db8a242227901fcf203" + integrity sha512-ioj2w9/DxSYHfOm5lJKCdcAmPJzQXmbM/Url3rhlghrPvT3tt+7a/+oXc9azkKmLvoiXjtV83bEWqi+vs5nlPA== + +jest-haste-map@^26.6.2: + version "26.6.2" + resolved "https://registry.yarnpkg.com/jest-haste-map/-/jest-haste-map-26.6.2.tgz#dd7e60fe7dc0e9f911a23d79c5ff7fb5c2cafeaa" + integrity sha512-easWIJXIw71B2RdR8kgqpjQrbMRWQBgiBwXYEhtGUTaX+doCjBheluShdDMeR8IMfJiTqH4+zfhtg29apJf/8w== + dependencies: + "@jest/types" "^26.6.2" + "@types/graceful-fs" "^4.1.2" + "@types/node" "*" + anymatch "^3.0.3" + fb-watchman "^2.0.0" + graceful-fs "^4.2.4" + jest-regex-util "^26.0.0" + jest-serializer "^26.6.2" + jest-util "^26.6.2" + jest-worker "^26.6.2" + micromatch "^4.0.2" + sane "^4.0.3" + walker "^1.0.7" + optionalDependencies: + fsevents "^2.1.2" + +jest-jasmine2@^26.6.3: + version "26.6.3" + resolved "https://registry.yarnpkg.com/jest-jasmine2/-/jest-jasmine2-26.6.3.tgz#adc3cf915deacb5212c93b9f3547cd12958f2edd" + integrity sha512-kPKUrQtc8aYwBV7CqBg5pu+tmYXlvFlSFYn18ev4gPFtrRzB15N2gW/Roew3187q2w2eHuu0MU9TJz6w0/nPEg== + dependencies: + "@babel/traverse" "^7.1.0" + "@jest/environment" "^26.6.2" + "@jest/source-map" "^26.6.2" + 
"@jest/test-result" "^26.6.2" + "@jest/types" "^26.6.2" + "@types/node" "*" + chalk "^4.0.0" + co "^4.6.0" + expect "^26.6.2" + is-generator-fn "^2.0.0" + jest-each "^26.6.2" + jest-matcher-utils "^26.6.2" + jest-message-util "^26.6.2" + jest-runtime "^26.6.3" + jest-snapshot "^26.6.2" + jest-util "^26.6.2" + pretty-format "^26.6.2" + throat "^5.0.0" + +jest-leak-detector@^26.6.2: + version "26.6.2" + resolved "https://registry.yarnpkg.com/jest-leak-detector/-/jest-leak-detector-26.6.2.tgz#7717cf118b92238f2eba65054c8a0c9c653a91af" + integrity sha512-i4xlXpsVSMeKvg2cEKdfhh0H39qlJlP5Ex1yQxwF9ubahboQYMgTtz5oML35AVA3B4Eu+YsmwaiKVev9KCvLxg== + dependencies: + jest-get-type "^26.3.0" + pretty-format "^26.6.2" + +jest-matcher-utils@^26.6.2: + version "26.6.2" + resolved "https://registry.yarnpkg.com/jest-matcher-utils/-/jest-matcher-utils-26.6.2.tgz#8e6fd6e863c8b2d31ac6472eeb237bc595e53e7a" + integrity sha512-llnc8vQgYcNqDrqRDXWwMr9i7rS5XFiCwvh6DTP7Jqa2mqpcCBBlpCbn+trkG0KNhPu/h8rzyBkriOtBstvWhw== + dependencies: + chalk "^4.0.0" + jest-diff "^26.6.2" + jest-get-type "^26.3.0" + pretty-format "^26.6.2" + +jest-matcher-utils@^28.0.0: + version "28.1.3" + resolved "https://registry.yarnpkg.com/jest-matcher-utils/-/jest-matcher-utils-28.1.3.tgz#5a77f1c129dd5ba3b4d7fc20728806c78893146e" + integrity sha512-kQeJ7qHemKfbzKoGjHHrRKH6atgxMk8Enkk2iPQ3XwO6oE/KYD8lMYOziCkeSB9G4adPM4nR1DE8Tf5JeWH6Bw== + dependencies: + chalk "^4.0.0" + jest-diff "^28.1.3" + jest-get-type "^28.0.2" + pretty-format "^28.1.3" + +jest-message-util@^26.6.2: + version "26.6.2" + resolved "https://registry.yarnpkg.com/jest-message-util/-/jest-message-util-26.6.2.tgz#58173744ad6fc0506b5d21150b9be56ef001ca07" + integrity sha512-rGiLePzQ3AzwUshu2+Rn+UMFk0pHN58sOG+IaJbk5Jxuqo3NYO1U2/MIR4S1sKgsoYSXSzdtSa0TgrmtUwEbmA== + dependencies: + "@babel/code-frame" "^7.0.0" + "@jest/types" "^26.6.2" + "@types/stack-utils" "^2.0.0" + chalk "^4.0.0" + graceful-fs "^4.2.4" + micromatch "^4.0.2" + pretty-format "^26.6.2" + 
slash "^3.0.0" + stack-utils "^2.0.2" + +jest-mock@^26.6.2: + version "26.6.2" + resolved "https://registry.yarnpkg.com/jest-mock/-/jest-mock-26.6.2.tgz#d6cb712b041ed47fe0d9b6fc3474bc6543feb302" + integrity sha512-YyFjePHHp1LzpzYcmgqkJ0nm0gg/lJx2aZFzFy1S6eUqNjXsOqTK10zNRff2dNfssgokjkG65OlWNcIlgd3zew== + dependencies: + "@jest/types" "^26.6.2" + "@types/node" "*" + +jest-pnp-resolver@^1.2.2: + version "1.2.2" + resolved "https://registry.yarnpkg.com/jest-pnp-resolver/-/jest-pnp-resolver-1.2.2.tgz#b704ac0ae028a89108a4d040b3f919dfddc8e33c" + integrity sha512-olV41bKSMm8BdnuMsewT4jqlZ8+3TCARAXjZGT9jcoSnrfUnRCqnMoF9XEeoWjbzObpqF9dRhHQj0Xb9QdF6/w== + +jest-regex-util@^26.0.0: + version "26.0.0" + resolved "https://registry.yarnpkg.com/jest-regex-util/-/jest-regex-util-26.0.0.tgz#d25e7184b36e39fd466c3bc41be0971e821fee28" + integrity sha512-Gv3ZIs/nA48/Zvjrl34bf+oD76JHiGDUxNOVgUjh3j890sblXryjY4rss71fPtD/njchl6PSE2hIhvyWa1eT0A== + +jest-resolve-dependencies@^26.6.3: + version "26.6.3" + resolved "https://registry.yarnpkg.com/jest-resolve-dependencies/-/jest-resolve-dependencies-26.6.3.tgz#6680859ee5d22ee5dcd961fe4871f59f4c784fb6" + integrity sha512-pVwUjJkxbhe4RY8QEWzN3vns2kqyuldKpxlxJlzEYfKSvY6/bMvxoFrYYzUO1Gx28yKWN37qyV7rIoIp2h8fTg== + dependencies: + "@jest/types" "^26.6.2" + jest-regex-util "^26.0.0" + jest-snapshot "^26.6.2" + +jest-resolve@^26.6.2: + version "26.6.2" + resolved "https://registry.yarnpkg.com/jest-resolve/-/jest-resolve-26.6.2.tgz#a3ab1517217f469b504f1b56603c5bb541fbb507" + integrity sha512-sOxsZOq25mT1wRsfHcbtkInS+Ek7Q8jCHUB0ZUTP0tc/c41QHriU/NunqMfCUWsL4H3MHpvQD4QR9kSYhS7UvQ== + dependencies: + "@jest/types" "^26.6.2" + chalk "^4.0.0" + graceful-fs "^4.2.4" + jest-pnp-resolver "^1.2.2" + jest-util "^26.6.2" + read-pkg-up "^7.0.1" + resolve "^1.18.1" + slash "^3.0.0" + +jest-runner@^26.6.3: + version "26.6.3" + resolved "https://registry.yarnpkg.com/jest-runner/-/jest-runner-26.6.3.tgz#2d1fed3d46e10f233fd1dbd3bfaa3fe8924be159" + integrity 
sha512-atgKpRHnaA2OvByG/HpGA4g6CSPS/1LK0jK3gATJAoptC1ojltpmVlYC3TYgdmGp+GLuhzpH30Gvs36szSL2JQ== + dependencies: + "@jest/console" "^26.6.2" + "@jest/environment" "^26.6.2" + "@jest/test-result" "^26.6.2" + "@jest/types" "^26.6.2" + "@types/node" "*" + chalk "^4.0.0" + emittery "^0.7.1" + exit "^0.1.2" + graceful-fs "^4.2.4" + jest-config "^26.6.3" + jest-docblock "^26.0.0" + jest-haste-map "^26.6.2" + jest-leak-detector "^26.6.2" + jest-message-util "^26.6.2" + jest-resolve "^26.6.2" + jest-runtime "^26.6.3" + jest-util "^26.6.2" + jest-worker "^26.6.2" + source-map-support "^0.5.6" + throat "^5.0.0" + +jest-runtime@^26.6.3: + version "26.6.3" + resolved "https://registry.yarnpkg.com/jest-runtime/-/jest-runtime-26.6.3.tgz#4f64efbcfac398331b74b4b3c82d27d401b8fa2b" + integrity sha512-lrzyR3N8sacTAMeonbqpnSka1dHNux2uk0qqDXVkMv2c/A3wYnvQ4EXuI013Y6+gSKSCxdaczvf4HF0mVXHRdw== + dependencies: + "@jest/console" "^26.6.2" + "@jest/environment" "^26.6.2" + "@jest/fake-timers" "^26.6.2" + "@jest/globals" "^26.6.2" + "@jest/source-map" "^26.6.2" + "@jest/test-result" "^26.6.2" + "@jest/transform" "^26.6.2" + "@jest/types" "^26.6.2" + "@types/yargs" "^15.0.0" + chalk "^4.0.0" + cjs-module-lexer "^0.6.0" + collect-v8-coverage "^1.0.0" + exit "^0.1.2" + glob "^7.1.3" + graceful-fs "^4.2.4" + jest-config "^26.6.3" + jest-haste-map "^26.6.2" + jest-message-util "^26.6.2" + jest-mock "^26.6.2" + jest-regex-util "^26.0.0" + jest-resolve "^26.6.2" + jest-snapshot "^26.6.2" + jest-util "^26.6.2" + jest-validate "^26.6.2" + slash "^3.0.0" + strip-bom "^4.0.0" + yargs "^15.4.1" + +jest-serializer@^26.6.2: + version "26.6.2" + resolved "https://registry.yarnpkg.com/jest-serializer/-/jest-serializer-26.6.2.tgz#d139aafd46957d3a448f3a6cdabe2919ba0742d1" + integrity sha512-S5wqyz0DXnNJPd/xfIzZ5Xnp1HrJWBczg8mMfMpN78OJ5eDxXyf+Ygld9wX1DnUWbIbhM1YDY95NjR4CBXkb2g== + dependencies: + "@types/node" "*" + graceful-fs "^4.2.4" + +jest-snapshot@^26.6.2: + version "26.6.2" + resolved 
"https://registry.yarnpkg.com/jest-snapshot/-/jest-snapshot-26.6.2.tgz#f3b0af1acb223316850bd14e1beea9837fb39c84" + integrity sha512-OLhxz05EzUtsAmOMzuupt1lHYXCNib0ECyuZ/PZOx9TrZcC8vL0x+DUG3TL+GLX3yHG45e6YGjIm0XwDc3q3og== + dependencies: + "@babel/types" "^7.0.0" + "@jest/types" "^26.6.2" + "@types/babel__traverse" "^7.0.4" + "@types/prettier" "^2.0.0" + chalk "^4.0.0" + expect "^26.6.2" + graceful-fs "^4.2.4" + jest-diff "^26.6.2" + jest-get-type "^26.3.0" + jest-haste-map "^26.6.2" + jest-matcher-utils "^26.6.2" + jest-message-util "^26.6.2" + jest-resolve "^26.6.2" + natural-compare "^1.4.0" + pretty-format "^26.6.2" + semver "^7.3.2" + +jest-util@^26.6.2: + version "26.6.2" + resolved "https://registry.yarnpkg.com/jest-util/-/jest-util-26.6.2.tgz#907535dbe4d5a6cb4c47ac9b926f6af29576cbc1" + integrity sha512-MDW0fKfsn0OI7MS7Euz6h8HNDXVQ0gaM9uW6RjfDmd1DAFcaxX9OqIakHIqhbnmF08Cf2DLDG+ulq8YQQ0Lp0Q== + dependencies: + "@jest/types" "^26.6.2" + "@types/node" "*" + chalk "^4.0.0" + graceful-fs "^4.2.4" + is-ci "^2.0.0" + micromatch "^4.0.2" + +jest-validate@^26.6.2: + version "26.6.2" + resolved "https://registry.yarnpkg.com/jest-validate/-/jest-validate-26.6.2.tgz#23d380971587150467342911c3d7b4ac57ab20ec" + integrity sha512-NEYZ9Aeyj0i5rQqbq+tpIOom0YS1u2MVu6+euBsvpgIme+FOfRmoC4R5p0JiAUpaFvFy24xgrpMknarR/93XjQ== + dependencies: + "@jest/types" "^26.6.2" + camelcase "^6.0.0" + chalk "^4.0.0" + jest-get-type "^26.3.0" + leven "^3.1.0" + pretty-format "^26.6.2" + +jest-watcher@^26.6.2: + version "26.6.2" + resolved "https://registry.yarnpkg.com/jest-watcher/-/jest-watcher-26.6.2.tgz#a5b683b8f9d68dbcb1d7dae32172d2cca0592975" + integrity sha512-WKJob0P/Em2csiVthsI68p6aGKTIcsfjH9Gsx1f0A3Italz43e3ho0geSAVsmj09RWOELP1AZ/DXyJgOgDKxXQ== + dependencies: + "@jest/test-result" "^26.6.2" + "@jest/types" "^26.6.2" + "@types/node" "*" + ansi-escapes "^4.2.1" + chalk "^4.0.0" + jest-util "^26.6.2" + string-length "^4.0.1" + +jest-worker@^26.6.2: + version "26.6.2" + resolved 
"https://registry.yarnpkg.com/jest-worker/-/jest-worker-26.6.2.tgz#7f72cbc4d643c365e27b9fd775f9d0eaa9c7a8ed" + integrity sha512-KWYVV1c4i+jbMpaBC+U++4Va0cp8OisU185o73T1vo99hqi7w8tSJfUXYswwqqrjzwxa6KpRK54WhPvwf5w6PQ== + dependencies: + "@types/node" "*" + merge-stream "^2.0.0" + supports-color "^7.0.0" + +jest-worker@^27.4.5: + version "27.5.1" + resolved "https://registry.yarnpkg.com/jest-worker/-/jest-worker-27.5.1.tgz#8d146f0900e8973b106b6f73cc1e9a8cb86f8db0" + integrity sha512-7vuh85V5cdDofPyxn58nrPjBktZo0u9x1g8WtjQol+jZDaE+fhN+cIvTj11GndBnMnyfrUOG1sZQxCdjKh+DKg== + dependencies: + "@types/node" "*" + merge-stream "^2.0.0" + supports-color "^8.0.0" + +jest@^26.6.3: + version "26.6.3" + resolved "https://registry.yarnpkg.com/jest/-/jest-26.6.3.tgz#40e8fdbe48f00dfa1f0ce8121ca74b88ac9148ef" + integrity sha512-lGS5PXGAzR4RF7V5+XObhqz2KZIDUA1yD0DG6pBVmy10eh0ZIXQImRuzocsI/N2XZ1GrLFwTS27In2i2jlpq1Q== + dependencies: + "@jest/core" "^26.6.3" + import-local "^3.0.2" + jest-cli "^26.6.3" + +"js-tokens@^3.0.0 || ^4.0.0", js-tokens@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-4.0.0.tgz#19203fb59991df98e3a287050d4647cdeaf32499" + integrity sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ== + +js-yaml@^3.13.1: + version "3.14.1" + resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.14.1.tgz#dae812fdb3825fa306609a8717383c50c36a0537" + integrity sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g== + dependencies: + argparse "^1.0.7" + esprima "^4.0.0" + +jsdom@^16.4.0: + version "16.7.0" + resolved "https://registry.yarnpkg.com/jsdom/-/jsdom-16.7.0.tgz#918ae71965424b197c819f8183a754e18977b710" + integrity sha512-u9Smc2G1USStM+s/x1ru5Sxrl6mPYCbByG1U/hUmqaVsm4tbNyS7CicOSRyuGQYZhTu0h84qkZZQ/I+dzizSVw== + dependencies: + abab "^2.0.5" + acorn "^8.2.4" + acorn-globals "^6.0.0" + cssom "^0.4.4" + cssstyle "^2.3.0" + data-urls "^2.0.0" + decimal.js 
"^10.2.1" + domexception "^2.0.1" + escodegen "^2.0.0" + form-data "^3.0.0" + html-encoding-sniffer "^2.0.1" + http-proxy-agent "^4.0.1" + https-proxy-agent "^5.0.0" + is-potential-custom-element-name "^1.0.1" + nwsapi "^2.2.0" + parse5 "6.0.1" + saxes "^5.0.1" + symbol-tree "^3.2.4" + tough-cookie "^4.0.0" + w3c-hr-time "^1.0.2" + w3c-xmlserializer "^2.0.0" + webidl-conversions "^6.1.0" + whatwg-encoding "^1.0.5" + whatwg-mimetype "^2.3.0" + whatwg-url "^8.5.0" + ws "^7.4.6" + xml-name-validator "^3.0.0" + +jsesc@^2.5.1: + version "2.5.2" + resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-2.5.2.tgz#80564d2e483dacf6e8ef209650a67df3f0c283a4" + integrity sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA== + +jsesc@~0.5.0: + version "0.5.0" + resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-0.5.0.tgz#e7dee66e35d6fc16f710fe91d5cf69f70f08911d" + integrity sha512-uZz5UnB7u4T9LvwmFqXii7pZSouaRPorGs5who1Ip7VO0wxanFvBL7GkM6dTHlgX+jhBApRetaWpnDabOeTcnA== + +json-parse-even-better-errors@^2.3.0, json-parse-even-better-errors@^2.3.1: + version "2.3.1" + resolved "https://registry.yarnpkg.com/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz#7c47805a94319928e05777405dc12e1f7a4ee02d" + integrity sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w== + +json-schema-traverse@^0.4.1: + version "0.4.1" + resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz#69f6a87d9513ab8bb8fe63bdb0979c448e684660" + integrity sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg== + +json-schema-traverse@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz#ae7bcb3656ab77a73ba5c49bf654f38e6b6860e2" + integrity sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug== + 
+json-stable-stringify-without-jsonify@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz#9db7b59496ad3f3cfef30a75142d2d930ad72651" + integrity sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw== + +json5@^2.1.2, json5@^2.2.1: + version "2.2.1" + resolved "https://registry.yarnpkg.com/json5/-/json5-2.2.1.tgz#655d50ed1e6f95ad1a3caababd2b0efda10b395c" + integrity sha512-1hqLFMSrGHRHxav9q9gNjJ5EXznIxGVO09xQRrwplcS8qs28pZ8s8hupZAmqDwZUmVZ2Qb2jnyPOWcDH8m8dlA== + +"jsx-ast-utils@^2.4.1 || ^3.0.0": + version "3.3.2" + resolved "https://registry.yarnpkg.com/jsx-ast-utils/-/jsx-ast-utils-3.3.2.tgz#afe5efe4332cd3515c065072bd4d6b0aa22152bd" + integrity sha512-4ZCADZHRkno244xlNnn4AOG6sRQ7iBZ5BbgZ4vW4y5IZw7cVUD1PPeblm1xx/nfmMxPdt/LHsXZW8z/j58+l9Q== + dependencies: + array-includes "^3.1.5" + object.assign "^4.1.2" + +just-extend@^4.0.2: + version "4.2.1" + resolved "https://registry.yarnpkg.com/just-extend/-/just-extend-4.2.1.tgz#ef5e589afb61e5d66b24eca749409a8939a8c744" + integrity sha512-g3UB796vUFIY90VIv/WX3L2c8CS2MdWUww3CNrYmqza1Fg0DURc2K/O4YrnklBdQarSJ/y8JnJYDGc+1iumQjg== + +kind-of@^3.0.2, kind-of@^3.0.3, kind-of@^3.2.0: + version "3.2.2" + resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-3.2.2.tgz#31ea21a734bab9bbb0f32466d893aea51e4a3c64" + integrity sha512-NOW9QQXMoZGg/oqnVNoNTTIFEIid1627WCffUBJEdMxYApq7mNE7CpzucIPc+ZQg25Phej7IJSmX3hO+oblOtQ== + dependencies: + is-buffer "^1.1.5" + +kind-of@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-4.0.0.tgz#20813df3d712928b207378691a45066fae72dd57" + integrity sha512-24XsCxmEbRwEDbz/qz3stgin8TTzZ1ESR56OMCN0ujYg+vRutNSiOj9bHH9u85DKgXguraugV5sFuvbD4FW/hw== + dependencies: + is-buffer "^1.1.5" + +kind-of@^5.0.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-5.1.0.tgz#729c91e2d857b7a419a1f9aa65685c4c33f5845d" + 
integrity sha512-NGEErnH6F2vUuXDh+OlbcKW7/wOcfdRHaZ7VWtqCztfHri/++YKmP51OdWeGPuqCOba6kk2OTe5d02VmTB80Pw== + +kind-of@^6.0.0, kind-of@^6.0.2: + version "6.0.3" + resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-6.0.3.tgz#07c05034a6c349fa06e24fa35aa76db4580ce4dd" + integrity sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw== + +kleur@^3.0.3: + version "3.0.3" + resolved "https://registry.yarnpkg.com/kleur/-/kleur-3.0.3.tgz#a79c9ecc86ee1ce3fa6206d1216c501f147fc07e" + integrity sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w== + +leven@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/leven/-/leven-3.1.0.tgz#77891de834064cccba82ae7842bb6b14a13ed7f2" + integrity sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A== + +levn@^0.4.1: + version "0.4.1" + resolved "https://registry.yarnpkg.com/levn/-/levn-0.4.1.tgz#ae4562c007473b932a6200d403268dd2fffc6ade" + integrity sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ== + dependencies: + prelude-ls "^1.2.1" + type-check "~0.4.0" + +levn@~0.3.0: + version "0.3.0" + resolved "https://registry.yarnpkg.com/levn/-/levn-0.3.0.tgz#3b09924edf9f083c0490fdd4c0bc4421e04764ee" + integrity sha512-0OO4y2iOHix2W6ujICbKIaEQXvFQHue65vUG3pb5EUomzPI90z9hsA1VsO/dbIIpC53J8gxM9Q4Oho0jrCM/yA== + dependencies: + prelude-ls "~1.1.2" + type-check "~0.3.2" + +lines-and-columns@^1.1.6: + version "1.2.4" + resolved "https://registry.yarnpkg.com/lines-and-columns/-/lines-and-columns-1.2.4.tgz#eca284f75d2965079309dc0ad9255abb2ebc1632" + integrity sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg== + +loader-runner@^4.2.0: + version "4.3.0" + resolved "https://registry.yarnpkg.com/loader-runner/-/loader-runner-4.3.0.tgz#c1b4a163b99f614830353b16755e7149ac2314e1" + integrity 
sha512-3R/1M+yS3j5ou80Me59j7F9IMs4PXs3VqRrm0TU3AbKPxlmpoY1TNscJV/oGJXo8qCatFGTfDbY6W6ipGOYXfg== + +loader-utils@^2.0.0: + version "2.0.4" + resolved "https://registry.yarnpkg.com/loader-utils/-/loader-utils-2.0.4.tgz#8b5cb38b5c34a9a018ee1fc0e6a066d1dfcc528c" + integrity sha512-xXqpXoINfFhgua9xiqD8fPFHgkoq1mmmpE92WlDbm9rNRd/EbRb+Gqf908T2DMfuHjjJlksiK2RbHVOdD/MqSw== + dependencies: + big.js "^5.2.2" + emojis-list "^3.0.0" + json5 "^2.1.2" + +locate-path@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-5.0.0.tgz#1afba396afd676a6d42504d0a67a3a7eb9f62aa0" + integrity sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g== + dependencies: + p-locate "^4.1.0" + +lodash.debounce@^4.0.8: + version "4.0.8" + resolved "https://registry.yarnpkg.com/lodash.debounce/-/lodash.debounce-4.0.8.tgz#82d79bff30a67c4005ffd5e2515300ad9ca4d7af" + integrity sha512-FT1yDzDYEoYWhnSGnpE/4Kj1fLZkDFyqRb7fNt6FdYOSxlUWAtp42Eh6Wb0rGIv/m9Bgo7x4GhQbm5Ys4SG5ow== + +lodash.escape@^4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/lodash.escape/-/lodash.escape-4.0.1.tgz#c9044690c21e04294beaa517712fded1fa88de98" + integrity sha512-nXEOnb/jK9g0DYMr1/Xvq6l5xMD7GDG55+GSYIYmS0G4tBk/hURD4JR9WCavs04t33WmJx9kCyp9vJ+mr4BOUw== + +lodash.flattendeep@^4.4.0: + version "4.4.0" + resolved "https://registry.yarnpkg.com/lodash.flattendeep/-/lodash.flattendeep-4.4.0.tgz#fb030917f86a3134e5bc9bec0d69e0013ddfedb2" + integrity sha512-uHaJFihxmJcEX3kT4I23ABqKKalJ/zDrDg0lsFtc1h+3uw49SIJ5beyhx5ExVRti3AvKoOJngIj7xz3oylPdWQ== + +lodash.get@^4.4.2: + version "4.4.2" + resolved "https://registry.yarnpkg.com/lodash.get/-/lodash.get-4.4.2.tgz#2d177f652fa31e939b4438d5341499dfa3825e99" + integrity sha512-z+Uw/vLuy6gQe8cfaFWD7p0wVv8fJl3mbzXh33RS+0oW2wvUqiRXiQ69gLWSLpgB5/6sU+r6BlQR0MBILadqTQ== + +lodash.isequal@^4.5.0: + version "4.5.0" + resolved 
"https://registry.yarnpkg.com/lodash.isequal/-/lodash.isequal-4.5.0.tgz#415c4478f2bcc30120c22ce10ed3226f7d3e18e0" + integrity sha512-pDo3lu8Jhfjqls6GkMgpahsF9kCyayhgykjyLMNFTKWrpVdAQtYyB4muAMWozBB4ig/dtWAmsMxLEI8wuz+DYQ== + +lodash.merge@^4.6.2: + version "4.6.2" + resolved "https://registry.yarnpkg.com/lodash.merge/-/lodash.merge-4.6.2.tgz#558aa53b43b661e1925a0afdfa36a9a1085fe57a" + integrity sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ== + +lodash.truncate@^4.4.2: + version "4.4.2" + resolved "https://registry.yarnpkg.com/lodash.truncate/-/lodash.truncate-4.4.2.tgz#5a350da0b1113b837ecfffd5812cbe58d6eae193" + integrity sha512-jttmRe7bRse52OsWIMDLaXxWqRAmtIUccAQ3garviCqJjafXOfNMO0yMfNpdD6zbGaTU0P5Nz7e7gAT6cKmJRw== + +lodash@^4.17.15, lodash@^4.7.0: + version "4.17.21" + resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c" + integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg== + +loose-envify@^1.0.0, loose-envify@^1.1.0, loose-envify@^1.2.0, loose-envify@^1.3.1, loose-envify@^1.4.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/loose-envify/-/loose-envify-1.4.0.tgz#71ee51fa7be4caec1a63839f7e682d8132d30caf" + integrity sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q== + dependencies: + js-tokens "^3.0.0 || ^4.0.0" + +lru-cache@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-6.0.0.tgz#6d6fe6570ebd96aaf90fcad1dafa3b2566db3a94" + integrity sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA== + dependencies: + yallist "^4.0.0" + +lz-string@^1.4.4: + version "1.4.4" + resolved "https://registry.yarnpkg.com/lz-string/-/lz-string-1.4.4.tgz#c0d8eaf36059f705796e1e344811cf4c498d3a26" + integrity 
sha512-0ckx7ZHRPqb0oUm8zNr+90mtf9DQB60H1wMCjBtfi62Kl3a7JbHob6gA2bC+xRvZoOL+1hzUK8jeuEIQE8svEQ== + +make-dir@^3.0.0, make-dir@^3.0.2, make-dir@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/make-dir/-/make-dir-3.1.0.tgz#415e967046b3a7f1d185277d84aa58203726a13f" + integrity sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw== + dependencies: + semver "^6.0.0" + +makeerror@1.0.12: + version "1.0.12" + resolved "https://registry.yarnpkg.com/makeerror/-/makeerror-1.0.12.tgz#3e5dd2079a82e812e983cc6610c4a2cb0eaa801a" + integrity sha512-JmqCvUhmt43madlpFzG4BQzG2Z3m6tvQDNKdClZnO3VbIudJYmxsT0FNJMeiB2+JTSlTQTSbU8QdesVmwJcmLg== + dependencies: + tmpl "1.0.5" + +map-cache@^0.2.2: + version "0.2.2" + resolved "https://registry.yarnpkg.com/map-cache/-/map-cache-0.2.2.tgz#c32abd0bd6525d9b051645bb4f26ac5dc98a0dbf" + integrity sha512-8y/eV9QQZCiyn1SprXSrCmqJN0yNRATe+PO8ztwqrvrbdRLA3eYJF0yaR0YayLWkMbsQSKWS9N2gPcGEc4UsZg== + +map-visit@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/map-visit/-/map-visit-1.0.0.tgz#ecdca8f13144e660f1b5bd41f12f3479d98dfb8f" + integrity sha512-4y7uGv8bd2WdM9vpQsiQNo41Ln1NvhvDRuVt0k2JZQ+ezN2uaQes7lZeZ+QQUHOLQAtDaBJ+7wCbi+ab/KFs+w== + dependencies: + object-visit "^1.0.0" + +media-typer@0.3.0: + version "0.3.0" + resolved "https://registry.yarnpkg.com/media-typer/-/media-typer-0.3.0.tgz#8710d7af0aa626f8fffa1ce00168545263255748" + integrity sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ== + +memfs@^3.4.3: + version "3.4.7" + resolved "https://registry.yarnpkg.com/memfs/-/memfs-3.4.7.tgz#e5252ad2242a724f938cb937e3c4f7ceb1f70e5a" + integrity sha512-ygaiUSNalBX85388uskeCyhSAoOSgzBbtVCr9jA2RROssFL9Q19/ZXFqS+2Th2sr1ewNIWgFdLzLC3Yl1Zv+lw== + dependencies: + fs-monkey "^1.0.3" + +merge-descriptors@1.0.1: + version "1.0.1" + resolved 
"https://registry.yarnpkg.com/merge-descriptors/-/merge-descriptors-1.0.1.tgz#b00aaa556dd8b44568150ec9d1b953f3f90cbb61" + integrity sha512-cCi6g3/Zr1iqQi6ySbseM1Xvooa98N0w31jzUYrXPX2xqObmFGHJ0tQ5u74H3mVh7wLouTseZyYIq39g8cNp1w== + +merge-stream@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/merge-stream/-/merge-stream-2.0.0.tgz#52823629a14dd00c9770fb6ad47dc6310f2c1f60" + integrity sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w== + +methods@~1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/methods/-/methods-1.1.2.tgz#5529a4d67654134edcc5266656835b0f851afcee" + integrity sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w== + +micromatch@^3.1.4: + version "3.1.10" + resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-3.1.10.tgz#70859bc95c9840952f359a068a3fc49f9ecfac23" + integrity sha512-MWikgl9n9M3w+bpsY3He8L+w9eF9338xRl8IAO5viDizwSzziFEyUzo2xrrloB64ADbTf8uA8vRqqttDTOmccg== + dependencies: + arr-diff "^4.0.0" + array-unique "^0.3.2" + braces "^2.3.1" + define-property "^2.0.2" + extend-shallow "^3.0.2" + extglob "^2.0.4" + fragment-cache "^0.2.1" + kind-of "^6.0.2" + nanomatch "^1.2.9" + object.pick "^1.3.0" + regex-not "^1.0.0" + snapdragon "^0.8.1" + to-regex "^3.0.2" + +micromatch@^4.0.2: + version "4.0.5" + resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-4.0.5.tgz#bc8999a7cbbf77cdc89f132f6e467051b49090c6" + integrity sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA== + dependencies: + braces "^3.0.2" + picomatch "^2.3.1" + +mime-db@1.52.0, "mime-db@>= 1.43.0 < 2": + version "1.52.0" + resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.52.0.tgz#bbabcdc02859f4987301c856e3387ce5ec43bf70" + integrity sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg== + +mime-types@^2.1.12, mime-types@^2.1.27, mime-types@^2.1.31, 
mime-types@~2.1.17, mime-types@~2.1.24, mime-types@~2.1.34: + version "2.1.35" + resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.35.tgz#381a871b62a734450660ae3deee44813f70d959a" + integrity sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw== + dependencies: + mime-db "1.52.0" + +mime@1.6.0: + version "1.6.0" + resolved "https://registry.yarnpkg.com/mime/-/mime-1.6.0.tgz#32cd9e5c64553bd58d19a568af452acff04981b1" + integrity sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg== + +mimic-fn@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-2.1.0.tgz#7ed2c2ccccaf84d3ffcb7a69b57711fc2083401b" + integrity sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg== + +min-indent@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/min-indent/-/min-indent-1.0.1.tgz#a63f681673b30571fbe8bc25686ae746eefa9869" + integrity sha512-I9jwMn07Sy/IwOj3zVkVik2JTvgpaykDZEigL6Rx6N9LbMywwUSMtxET+7lVoDLLd3O3IXwJwvuuns8UB/HeAg== + +mini-create-react-context@^0.4.0: + version "0.4.1" + resolved "https://registry.yarnpkg.com/mini-create-react-context/-/mini-create-react-context-0.4.1.tgz#072171561bfdc922da08a60c2197a497cc2d1d5e" + integrity sha512-YWCYEmd5CQeHGSAKrYvXgmzzkrvssZcuuQDDeqkT+PziKGMgE+0MCCtcKbROzocGBG1meBLl2FotlRwf4gAzbQ== + dependencies: + "@babel/runtime" "^7.12.1" + tiny-warning "^1.0.3" + +minimalistic-assert@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/minimalistic-assert/-/minimalistic-assert-1.0.1.tgz#2e194de044626d4a10e7f7fbc00ce73e83e4d5c7" + integrity sha512-UtJcAD4yEaGtjPezWuO9wC4nwUnVH/8/Im3yEHQP4b67cXlD/Qr9hdITCU1xDbSEXg2XKNaP8jsReV7vQd00/A== + +minimatch@^3.0.4, minimatch@^3.1.1, minimatch@^3.1.2: + version "3.1.2" + resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.1.2.tgz#19cd194bfd3e428f049a70817c038d89ab4be35b" + integrity 
sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw== + dependencies: + brace-expansion "^1.1.7" + +minimist@^1.1.1, minimist@^1.2.0: + version "1.2.6" + resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.6.tgz#8637a5b759ea0d6e98702cfb3a9283323c93af44" + integrity sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q== + +mixin-deep@^1.2.0: + version "1.3.2" + resolved "https://registry.yarnpkg.com/mixin-deep/-/mixin-deep-1.3.2.tgz#1120b43dc359a785dce65b55b82e257ccf479566" + integrity sha512-WRoDn//mXBiJ1H40rqa3vH0toePwSsGb45iInWlTySa+Uu4k3tYUSxa2v1KqAiLtvlrSzaExqS1gtk96A9zvEA== + dependencies: + for-in "^1.0.2" + is-extendable "^1.0.1" + +moo@^0.5.0: + version "0.5.1" + resolved "https://registry.yarnpkg.com/moo/-/moo-0.5.1.tgz#7aae7f384b9b09f620b6abf6f74ebbcd1b65dbc4" + integrity sha512-I1mnb5xn4fO80BH9BLcF0yLypy2UKl+Cb01Fu0hJRkJjlCRtxZMWkTdAtDd5ZqCOxtCkhmRwyI57vWT+1iZ67w== + +ms@2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8" + integrity sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A== + +ms@2.1.2: + version "2.1.2" + resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009" + integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w== + +ms@2.1.3: + version "2.1.3" + resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.3.tgz#574c8138ce1d2b5861f0b44579dbadd60c6615b2" + integrity sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA== + +multicast-dns@^7.2.5: + version "7.2.5" + resolved "https://registry.yarnpkg.com/multicast-dns/-/multicast-dns-7.2.5.tgz#77eb46057f4d7adbd16d9290fa7299f6fa64cced" + integrity sha512-2eznPJP8z2BFLX50tf0LuODrpINqP1RVIm/CObbTcBRITQgmC/TjcREF1NeTBzIcR5XO/ukWo+YHOjBbFwIupg== + dependencies: + 
dns-packet "^5.2.2" + thunky "^1.0.2" + +nanoid@^3.3.4: + version "3.3.4" + resolved "https://registry.yarnpkg.com/nanoid/-/nanoid-3.3.4.tgz#730b67e3cd09e2deacf03c027c81c9d9dbc5e8ab" + integrity sha512-MqBkQh/OHTS2egovRtLk45wEyNXwF+cokD+1YPf9u5VfJiRdAiRwB2froX5Co9Rh20xs4siNPm8naNotSD6RBw== + +nanomatch@^1.2.9: + version "1.2.13" + resolved "https://registry.yarnpkg.com/nanomatch/-/nanomatch-1.2.13.tgz#b87a8aa4fc0de8fe6be88895b38983ff265bd119" + integrity sha512-fpoe2T0RbHwBTBUOftAfBPaDEi06ufaUai0mE6Yn1kacc3SnTErfb/h+X94VXzI64rKFHYImXSvdwGGCmwOqCA== + dependencies: + arr-diff "^4.0.0" + array-unique "^0.3.2" + define-property "^2.0.2" + extend-shallow "^3.0.2" + fragment-cache "^0.2.1" + is-windows "^1.0.2" + kind-of "^6.0.2" + object.pick "^1.3.0" + regex-not "^1.0.0" + snapdragon "^0.8.1" + to-regex "^3.0.1" + +natural-compare@^1.4.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/natural-compare/-/natural-compare-1.4.0.tgz#4abebfeed7541f2c27acfb29bdbbd15c8d5ba4f7" + integrity sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw== + +nearley@^2.7.10: + version "2.20.1" + resolved "https://registry.yarnpkg.com/nearley/-/nearley-2.20.1.tgz#246cd33eff0d012faf197ff6774d7ac78acdd474" + integrity sha512-+Mc8UaAebFzgV+KpI5n7DasuuQCHA89dmwm7JXw3TV43ukfNQ9DnBH3Mdb2g/I4Fdxc26pwimBWvjIw0UAILSQ== + dependencies: + commander "^2.19.0" + moo "^0.5.0" + railroad-diagrams "^1.0.0" + randexp "0.4.6" + +negotiator@0.6.3: + version "0.6.3" + resolved "https://registry.yarnpkg.com/negotiator/-/negotiator-0.6.3.tgz#58e323a72fedc0d6f9cd4d31fe49f51479590ccd" + integrity sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg== + +neo-async@^2.6.2: + version "2.6.2" + resolved "https://registry.yarnpkg.com/neo-async/-/neo-async-2.6.2.tgz#b4aafb93e3aeb2d8174ca53cf163ab7d7308305f" + integrity sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw== + 
+nice-try@^1.0.4: + version "1.0.5" + resolved "https://registry.yarnpkg.com/nice-try/-/nice-try-1.0.5.tgz#a3378a7696ce7d223e88fc9b764bd7ef1089e366" + integrity sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ== + +nise@^5.1.1: + version "5.1.1" + resolved "https://registry.yarnpkg.com/nise/-/nise-5.1.1.tgz#ac4237e0d785ecfcb83e20f389185975da5c31f3" + integrity sha512-yr5kW2THW1AkxVmCnKEh4nbYkJdB3I7LUkiUgOvEkOp414mc2UMaHMA7pjq1nYowhdoJZGwEKGaQVbxfpWj10A== + dependencies: + "@sinonjs/commons" "^1.8.3" + "@sinonjs/fake-timers" ">=5" + "@sinonjs/text-encoding" "^0.7.1" + just-extend "^4.0.2" + path-to-regexp "^1.7.0" + +node-forge@^1: + version "1.3.1" + resolved "https://registry.yarnpkg.com/node-forge/-/node-forge-1.3.1.tgz#be8da2af243b2417d5f646a770663a92b7e9ded3" + integrity sha512-dPEtOeMvF9VMcYV/1Wb8CPoVAXtp6MKMlcbAt4ddqmGqUJ6fQZFXkNZNkNlfevtNkGtaSoXf/vNNNSvgrdXwtA== + +node-int64@^0.4.0: + version "0.4.0" + resolved "https://registry.yarnpkg.com/node-int64/-/node-int64-0.4.0.tgz#87a9065cdb355d3182d8f94ce11188b825c68a3b" + integrity sha512-O5lz91xSOeoXP6DulyHfllpq+Eg00MWitZIbtPfoSEvqIHdl5gfcY6hYzDWnj0qD5tz52PI08u9qUvSVeUBeHw== + +node-notifier@^8.0.0: + version "8.0.2" + resolved "https://registry.yarnpkg.com/node-notifier/-/node-notifier-8.0.2.tgz#f3167a38ef0d2c8a866a83e318c1ba0efeb702c5" + integrity sha512-oJP/9NAdd9+x2Q+rfphB2RJCHjod70RcRLjosiPMMu5gjIfwVnOUGq2nbTjTUbmy0DJ/tFIVT30+Qe3nzl4TJg== + dependencies: + growly "^1.3.0" + is-wsl "^2.2.0" + semver "^7.3.2" + shellwords "^0.1.1" + uuid "^8.3.0" + which "^2.0.2" + +node-releases@^2.0.6: + version "2.0.6" + resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-2.0.6.tgz#8a7088c63a55e493845683ebf3c828d8c51c5503" + integrity sha512-PiVXnNuFm5+iYkLBNeq5211hvO38y63T0i2KKh2KnUs3RpzJ+JtODFjkD8yjLwnDkTYF1eKXheUwdssR+NRZdg== + +normalize-package-data@^2.5.0: + version "2.5.0" + resolved 
"https://registry.yarnpkg.com/normalize-package-data/-/normalize-package-data-2.5.0.tgz#e66db1838b200c1dfc233225d12cb36520e234a8" + integrity sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA== + dependencies: + hosted-git-info "^2.1.4" + resolve "^1.10.0" + semver "2 || 3 || 4 || 5" + validate-npm-package-license "^3.0.1" + +normalize-path@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-2.1.1.tgz#1ab28b556e198363a8c1a6f7e6fa20137fe6aed9" + integrity sha512-3pKJwH184Xo/lnH6oyP1q2pMd7HcypqqmRs91/6/i2CGtWwIKGCkOOMTm/zXbgTEWHw1uNpNi/igc3ePOYHb6w== + dependencies: + remove-trailing-separator "^1.0.1" + +normalize-path@^3.0.0, normalize-path@~3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-3.0.0.tgz#0dcd69ff23a1c9b11fd0978316644a0388216a65" + integrity sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA== + +npm-run-path@^2.0.0: + version "2.0.2" + resolved "https://registry.yarnpkg.com/npm-run-path/-/npm-run-path-2.0.2.tgz#35a9232dfa35d7067b4cb2ddf2357b1871536c5f" + integrity sha512-lJxZYlT4DW/bRUtFh1MQIWqmLwQfAxnqWG4HhEdjMlkrJYnJn0Jrr2u3mgxqaWsdiBc76TYkTG/mhrnYTuzfHw== + dependencies: + path-key "^2.0.0" + +npm-run-path@^4.0.0, npm-run-path@^4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/npm-run-path/-/npm-run-path-4.0.1.tgz#b7ecd1e5ed53da8e37a55e1c2269e0b97ed748ea" + integrity sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw== + dependencies: + path-key "^3.0.0" + +nth-check@^2.0.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/nth-check/-/nth-check-2.1.1.tgz#c9eab428effce36cd6b92c924bdb000ef1f1ed1d" + integrity sha512-lqjrjmaOoAnWfMmBPL+XNnynZh2+swxiX3WUE0s4yEHI6m+AwrK2UZOimIRl3X/4QctVqS8AiZjFqyOGrMXb/w== + dependencies: + boolbase "^1.0.0" + +nwsapi@^2.2.0: + version "2.2.1" + resolved 
"https://registry.yarnpkg.com/nwsapi/-/nwsapi-2.2.1.tgz#10a9f268fbf4c461249ebcfe38e359aa36e2577c" + integrity sha512-JYOWTeFoS0Z93587vRJgASD5Ut11fYl5NyihP3KrYBvMe1FRRs6RN7m20SA/16GM4P6hTnZjT+UmDOt38UeXNg== + +object-assign@^4.1.1: + version "4.1.1" + resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863" + integrity sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg== + +object-copy@^0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/object-copy/-/object-copy-0.1.0.tgz#7e7d858b781bd7c991a41ba975ed3812754e998c" + integrity sha512-79LYn6VAb63zgtmAteVOWo9Vdj71ZVBy3Pbse+VqxDpEP83XuujMrGqHIwAXJ5I/aM0zU7dIyIAhifVTPrNItQ== + dependencies: + copy-descriptor "^0.1.0" + define-property "^0.2.5" + kind-of "^3.0.3" + +object-inspect@^1.12.0, object-inspect@^1.7.0, object-inspect@^1.9.0: + version "1.12.2" + resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.12.2.tgz#c0641f26394532f28ab8d796ab954e43c009a8ea" + integrity sha512-z+cPxW0QGUp0mcqcsgQyLVRDoXFQbXOwBaqyF7VIgI4TWNQsDHrBpUQslRmIfAoYWdYzs6UlKJtB2XJpTaNSpQ== + +object-is@^1.0.2, object-is@^1.1.2: + version "1.1.5" + resolved "https://registry.yarnpkg.com/object-is/-/object-is-1.1.5.tgz#b9deeaa5fc7f1846a0faecdceec138e5778f53ac" + integrity sha512-3cyDsyHgtmi7I7DfSSI2LDp6SK2lwvtbg0p0R1e0RvTqF5ceGx+K2dfSjm1bKDMVCFEDAQvy+o8c6a7VujOddw== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + +object-keys@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/object-keys/-/object-keys-1.1.1.tgz#1c47f272df277f3b1daf061677d9c82e2322c60e" + integrity sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA== + +object-visit@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/object-visit/-/object-visit-1.0.1.tgz#f79c4493af0c5377b59fe39d395e41042dd045bb" + integrity 
sha512-GBaMwwAVK9qbQN3Scdo0OyvgPW7l3lnaVMj84uTOZlswkX0KpF6fyDBJhtTthf7pymztoN36/KEr1DyhF96zEA== + dependencies: + isobject "^3.0.0" + +object.assign@^4.1.0, object.assign@^4.1.2: + version "4.1.2" + resolved "https://registry.yarnpkg.com/object.assign/-/object.assign-4.1.2.tgz#0ed54a342eceb37b38ff76eb831a0e788cb63940" + integrity sha512-ixT2L5THXsApyiUPYKmW+2EHpXXe5Ii3M+f4e+aJFAHao5amFRW6J0OO6c/LU8Be47utCx2GL89hxGB6XSmKuQ== + dependencies: + call-bind "^1.0.0" + define-properties "^1.1.3" + has-symbols "^1.0.1" + object-keys "^1.1.1" + +object.entries@^1.1.1, object.entries@^1.1.5: + version "1.1.5" + resolved "https://registry.yarnpkg.com/object.entries/-/object.entries-1.1.5.tgz#e1acdd17c4de2cd96d5a08487cfb9db84d881861" + integrity sha512-TyxmjUoZggd4OrrU1W66FMDG6CuqJxsFvymeyXI51+vQLN67zYfZseptRge703kKQdo4uccgAKebXFcRCzk4+g== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + es-abstract "^1.19.1" + +object.fromentries@^2.0.0, object.fromentries@^2.0.5: + version "2.0.5" + resolved "https://registry.yarnpkg.com/object.fromentries/-/object.fromentries-2.0.5.tgz#7b37b205109c21e741e605727fe8b0ad5fa08251" + integrity sha512-CAyG5mWQRRiBU57Re4FKoTBjXfDoNwdFVH2Y1tS9PqCsfUTymAohOkEMSG3aRNKmv4lV3O7p1et7c187q6bynw== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + es-abstract "^1.19.1" + +object.hasown@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/object.hasown/-/object.hasown-1.1.1.tgz#ad1eecc60d03f49460600430d97f23882cf592a3" + integrity sha512-LYLe4tivNQzq4JdaWW6WO3HMZZJWzkkH8fnI6EebWl0VZth2wL2Lovm74ep2/gZzlaTdV62JZHEqHQ2yVn8Q/A== + dependencies: + define-properties "^1.1.4" + es-abstract "^1.19.5" + +object.pick@^1.3.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/object.pick/-/object.pick-1.3.0.tgz#87a10ac4c1694bd2e1cbf53591a66141fb5dd747" + integrity sha512-tqa/UMy/CCoYmj+H5qc07qvSL9dqcs/WZENZ1JbtWBlATP+iVOe778gE6MSijnyCnORzDuX6hU+LA4SZ09YjFQ== + dependencies: + isobject "^3.0.1" + 
+object.values@^1.1.1, object.values@^1.1.5: + version "1.1.5" + resolved "https://registry.yarnpkg.com/object.values/-/object.values-1.1.5.tgz#959f63e3ce9ef108720333082131e4a459b716ac" + integrity sha512-QUZRW0ilQ3PnPpbNtgdNV1PDbEqLIiSFB3l+EnGtBQ/8SUTLj1PZwtQHABZtLgwpJZTSZhuGLOGk57Drx2IvYg== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + es-abstract "^1.19.1" + +obuf@^1.0.0, obuf@^1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/obuf/-/obuf-1.1.2.tgz#09bea3343d41859ebd446292d11c9d4db619084e" + integrity sha512-PX1wu0AmAdPqOL1mWhqmlOd8kOIZQwGZw6rh7uby9fTc5lhaOWFLX3I6R1hrF9k3zUY40e6igsLGkDXK92LJNg== + +on-finished@2.4.1: + version "2.4.1" + resolved "https://registry.yarnpkg.com/on-finished/-/on-finished-2.4.1.tgz#58c8c44116e54845ad57f14ab10b03533184ac3f" + integrity sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg== + dependencies: + ee-first "1.1.1" + +on-headers@~1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/on-headers/-/on-headers-1.0.2.tgz#772b0ae6aaa525c399e489adfad90c403eb3c28f" + integrity sha512-pZAE+FJLoyITytdqK0U5s+FIpjN0JP3OzFi/u8Rx+EV5/W+JTWGXG8xFzevE7AjBfDqHv/8vL8qQsIhHnqRkrA== + +once@^1.3.0, once@^1.3.1, once@^1.4.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1" + integrity sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w== + dependencies: + wrappy "1" + +onetime@^5.1.0, onetime@^5.1.2: + version "5.1.2" + resolved "https://registry.yarnpkg.com/onetime/-/onetime-5.1.2.tgz#d0e96ebb56b07476df1dd9c4806e5237985ca45e" + integrity sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg== + dependencies: + mimic-fn "^2.1.0" + +open@^8.0.9: + version "8.4.0" + resolved "https://registry.yarnpkg.com/open/-/open-8.4.0.tgz#345321ae18f8138f82565a910fdc6b39e8c244f8" + integrity 
sha512-XgFPPM+B28FtCCgSb9I+s9szOC1vZRSwgWsRUA5ylIxRTgKozqjOCrVOqGsYABPYK5qnfqClxZTFBa8PKt2v6Q== + dependencies: + define-lazy-prop "^2.0.0" + is-docker "^2.1.1" + is-wsl "^2.2.0" + +optionator@^0.8.1: + version "0.8.3" + resolved "https://registry.yarnpkg.com/optionator/-/optionator-0.8.3.tgz#84fa1d036fe9d3c7e21d99884b601167ec8fb495" + integrity sha512-+IW9pACdk3XWmmTXG8m3upGUJst5XRGzxMRjXzAuJ1XnIFNvfhjjIuYkDvysnPQ7qzqVzLt78BCruntqRhWQbA== + dependencies: + deep-is "~0.1.3" + fast-levenshtein "~2.0.6" + levn "~0.3.0" + prelude-ls "~1.1.2" + type-check "~0.3.2" + word-wrap "~1.2.3" + +optionator@^0.9.1: + version "0.9.1" + resolved "https://registry.yarnpkg.com/optionator/-/optionator-0.9.1.tgz#4f236a6373dae0566a6d43e1326674f50c291499" + integrity sha512-74RlY5FCnhq4jRxVUPKDaRwrVNXMqsGsiW6AJw4XK8hmtm10wC0ypZBLw5IIp85NZMr91+qd1RvvENwg7jjRFw== + dependencies: + deep-is "^0.1.3" + fast-levenshtein "^2.0.6" + levn "^0.4.1" + prelude-ls "^1.2.1" + type-check "^0.4.0" + word-wrap "^1.2.3" + +p-each-series@^2.1.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/p-each-series/-/p-each-series-2.2.0.tgz#105ab0357ce72b202a8a8b94933672657b5e2a9a" + integrity sha512-ycIL2+1V32th+8scbpTvyHNaHe02z0sjgh91XXjAk+ZeXoPN4Z46DVUnzdso0aX4KckKw0FNNFHdjZ2UsZvxiA== + +p-finally@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/p-finally/-/p-finally-1.0.0.tgz#3fbcfb15b899a44123b34b6dcc18b724336a2cae" + integrity sha512-LICb2p9CB7FS+0eR1oqWnHhp0FljGLZCWBE9aix0Uye9W8LTQPwMTYVGWQWIw9RdQiDg4+epXQODwIYJtSJaow== + +p-limit@^2.2.0: + version "2.3.0" + resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-2.3.0.tgz#3dd33c647a214fdfffd835933eb086da0dc21db1" + integrity sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w== + dependencies: + p-try "^2.0.0" + +p-locate@^4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-4.1.0.tgz#a3428bb7088b3a60292f66919278b7c297ad4f07" + integrity 
sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A== + dependencies: + p-limit "^2.2.0" + +p-retry@^4.5.0: + version "4.6.2" + resolved "https://registry.yarnpkg.com/p-retry/-/p-retry-4.6.2.tgz#9baae7184057edd4e17231cee04264106e092a16" + integrity sha512-312Id396EbJdvRONlngUx0NydfrIQ5lsYu0znKVUzVvArzEIt08V1qhtyESbGVd1FGX7UKtiFp5uwKZdM8wIuQ== + dependencies: + "@types/retry" "0.12.0" + retry "^0.13.1" + +p-try@^2.0.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/p-try/-/p-try-2.2.0.tgz#cb2868540e313d61de58fafbe35ce9004d5540e6" + integrity sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ== + +parent-module@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/parent-module/-/parent-module-1.0.1.tgz#691d2709e78c79fae3a156622452d00762caaaa2" + integrity sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g== + dependencies: + callsites "^3.0.0" + +parse-json@^5.0.0: + version "5.2.0" + resolved "https://registry.yarnpkg.com/parse-json/-/parse-json-5.2.0.tgz#c76fc66dee54231c962b22bcc8a72cf2f99753cd" + integrity sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg== + dependencies: + "@babel/code-frame" "^7.0.0" + error-ex "^1.3.1" + json-parse-even-better-errors "^2.3.0" + lines-and-columns "^1.1.6" + +parse5-htmlparser2-tree-adapter@^7.0.0: + version "7.0.0" + resolved "https://registry.yarnpkg.com/parse5-htmlparser2-tree-adapter/-/parse5-htmlparser2-tree-adapter-7.0.0.tgz#23c2cc233bcf09bb7beba8b8a69d46b08c62c2f1" + integrity sha512-B77tOZrqqfUfnVcOrUvfdLbz4pu4RopLD/4vmu3HUPswwTA8OH0EMW9BlWR2B0RCoiZRAHEUu7IxeP1Pd1UU+g== + dependencies: + domhandler "^5.0.2" + parse5 "^7.0.0" + +parse5@6.0.1: + version "6.0.1" + resolved "https://registry.yarnpkg.com/parse5/-/parse5-6.0.1.tgz#e1a1c085c569b3dc08321184f19a39cc27f7c30b" + integrity 
sha512-Ofn/CTFzRGTTxwpNEs9PP93gXShHcTq255nzRYSKe8AkVpZY7e1fpmTfOyoIvjP5HG7Z2ZM7VS9PPhQGW2pOpw== + +parse5@^7.0.0: + version "7.0.0" + resolved "https://registry.yarnpkg.com/parse5/-/parse5-7.0.0.tgz#51f74a5257f5fcc536389e8c2d0b3802e1bfa91a" + integrity sha512-y/t8IXSPWTuRZqXc0ajH/UwDj4mnqLEbSttNbThcFhGrZuOyoyvNBO85PBp2jQa55wY9d07PBNjsK8ZP3K5U6g== + dependencies: + entities "^4.3.0" + +parseurl@~1.3.2, parseurl@~1.3.3: + version "1.3.3" + resolved "https://registry.yarnpkg.com/parseurl/-/parseurl-1.3.3.tgz#9da19e7bee8d12dff0513ed5b76957793bc2e8d4" + integrity sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ== + +pascalcase@^0.1.1: + version "0.1.1" + resolved "https://registry.yarnpkg.com/pascalcase/-/pascalcase-0.1.1.tgz#b363e55e8006ca6fe21784d2db22bd15d7917f14" + integrity sha512-XHXfu/yOQRy9vYOtUDVMN60OEJjW013GoObG1o+xwQTpB9eYJX/BjXMsdW13ZDPruFhYYn0AG22w0xgQMwl3Nw== + +path-exists@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-4.0.0.tgz#513bdbe2d3b95d7762e8c1137efa195c6c61b5b3" + integrity sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w== + +path-is-absolute@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f" + integrity sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg== + +path-key@^2.0.0, path-key@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/path-key/-/path-key-2.0.1.tgz#411cadb574c5a140d3a4b1910d40d80cc9f40b40" + integrity sha512-fEHGKCSmUSDPv4uoj8AlD+joPlq3peND+HRYyxFz4KPw4z926S/b8rIuFs2FYJg3BwsxJf6A9/3eIdLaYC+9Dw== + +path-key@^3.0.0, path-key@^3.1.0: + version "3.1.1" + resolved "https://registry.yarnpkg.com/path-key/-/path-key-3.1.1.tgz#581f6ade658cbba65a0d3380de7753295054f375" + integrity 
sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q== + +path-parse@^1.0.7: + version "1.0.7" + resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.7.tgz#fbc114b60ca42b30d9daf5858e4bd68bbedb6735" + integrity sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw== + +path-to-regexp@0.1.7: + version "0.1.7" + resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-0.1.7.tgz#df604178005f522f15eb4490e7247a1bfaa67f8c" + integrity sha512-5DFkuoqlv1uYQKxy8omFBeJPQcdoE07Kv2sferDCrAq1ohOU+MSDswDIbnx3YAM60qIOnYa53wBhXW0EbMonrQ== + +path-to-regexp@^1.7.0: + version "1.8.0" + resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-1.8.0.tgz#887b3ba9d84393e87a0a0b9f4cb756198b53548a" + integrity sha512-n43JRhlUKUAlibEJhPeir1ncUID16QnEjNpwzNdO3Lm4ywrBpBZ5oLD0I6br9evr1Y9JTqwRtAh7JLoOzAQdVA== + dependencies: + isarray "0.0.1" + +performance-now@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/performance-now/-/performance-now-2.1.0.tgz#6309f4e0e5fa913ec1c69307ae364b4b377c9e7b" + integrity sha512-7EAHlyLHI56VEIdK57uwHdHKIaAGbnXPiw0yWbarQZOKaKpvUIgW0jWRVLiatnM+XXlSwsanIBH/hzGMJulMow== + +picocolors@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/picocolors/-/picocolors-1.0.0.tgz#cb5bdc74ff3f51892236eaf79d68bc44564ab81c" + integrity sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ== + +picomatch@^2.0.4, picomatch@^2.2.1, picomatch@^2.3.1: + version "2.3.1" + resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.3.1.tgz#3ba3833733646d9d3e4995946c1365a67fb07a42" + integrity sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA== + +pirates@^4.0.1: + version "4.0.5" + resolved "https://registry.yarnpkg.com/pirates/-/pirates-4.0.5.tgz#feec352ea5c3268fb23a37c702ab1699f35a5f3b" + integrity 
sha512-8V9+HQPupnaXMA23c5hvl69zXvTwTzyAYasnkb0Tts4XvO4CliqONMOnvlq26rkhLC3nWDFBJf73LU1e1VZLaQ== + +pkg-dir@^4.1.0, pkg-dir@^4.2.0: + version "4.2.0" + resolved "https://registry.yarnpkg.com/pkg-dir/-/pkg-dir-4.2.0.tgz#f099133df7ede422e81d1d8448270eeb3e4261f3" + integrity sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ== + dependencies: + find-up "^4.0.0" + +posix-character-classes@^0.1.0: + version "0.1.1" + resolved "https://registry.yarnpkg.com/posix-character-classes/-/posix-character-classes-0.1.1.tgz#01eac0fe3b5af71a2a6c02feabb8c1fef7e00eab" + integrity sha512-xTgYBc3fuo7Yt7JbiuFxSYGToMoz8fLoE6TC9Wx1P/u+LfeThMOAqmuyECnlBaaJb+u1m9hHiXUEtwW4OzfUJg== + +postcss-modules-extract-imports@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/postcss-modules-extract-imports/-/postcss-modules-extract-imports-3.0.0.tgz#cda1f047c0ae80c97dbe28c3e76a43b88025741d" + integrity sha512-bdHleFnP3kZ4NYDhuGlVK+CMrQ/pqUm8bx/oGL93K6gVwiclvX5x0n76fYMKuIGKzlABOy13zsvqjb0f92TEXw== + +postcss-modules-local-by-default@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/postcss-modules-local-by-default/-/postcss-modules-local-by-default-4.0.0.tgz#ebbb54fae1598eecfdf691a02b3ff3b390a5a51c" + integrity sha512-sT7ihtmGSF9yhm6ggikHdV0hlziDTX7oFoXtuVWeDd3hHObNkcHRo9V3yg7vCAY7cONyxJC/XXCmmiHHcvX7bQ== + dependencies: + icss-utils "^5.0.0" + postcss-selector-parser "^6.0.2" + postcss-value-parser "^4.1.0" + +postcss-modules-scope@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/postcss-modules-scope/-/postcss-modules-scope-3.0.0.tgz#9ef3151456d3bbfa120ca44898dfca6f2fa01f06" + integrity sha512-hncihwFA2yPath8oZ15PZqvWGkWf+XUfQgUGamS4LqoP1anQLOsOJw0vr7J7IwLpoY9fatA2qiGUGmuZL0Iqlg== + dependencies: + postcss-selector-parser "^6.0.4" + +postcss-modules-values@^4.0.0: + version "4.0.0" + resolved 
"https://registry.yarnpkg.com/postcss-modules-values/-/postcss-modules-values-4.0.0.tgz#d7c5e7e68c3bb3c9b27cbf48ca0bb3ffb4602c9c" + integrity sha512-RDxHkAiEGI78gS2ofyvCsu7iycRv7oqw5xMWn9iMoR0N/7mf9D50ecQqUo5BZ9Zh2vH4bCUR/ktCqbB9m8vJjQ== + dependencies: + icss-utils "^5.0.0" + +postcss-selector-parser@^6.0.2, postcss-selector-parser@^6.0.4: + version "6.0.10" + resolved "https://registry.yarnpkg.com/postcss-selector-parser/-/postcss-selector-parser-6.0.10.tgz#79b61e2c0d1bfc2602d549e11d0876256f8df88d" + integrity sha512-IQ7TZdoaqbT+LCpShg46jnZVlhWD2w6iQYAcYXfHARZ7X1t/UGhhceQDs5X0cGqKvYlHNOuv7Oa1xmb0oQuA3w== + dependencies: + cssesc "^3.0.0" + util-deprecate "^1.0.2" + +postcss-value-parser@^4.1.0: + version "4.2.0" + resolved "https://registry.yarnpkg.com/postcss-value-parser/-/postcss-value-parser-4.2.0.tgz#723c09920836ba6d3e5af019f92bc0971c02e514" + integrity sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ== + +postcss@^8.2.15: + version "8.4.14" + resolved "https://registry.yarnpkg.com/postcss/-/postcss-8.4.14.tgz#ee9274d5622b4858c1007a74d76e42e56fd21caf" + integrity sha512-E398TUmfAYFPBSdzgeieK2Y1+1cpdxJx8yXbK/m57nRhKSmk1GB2tO4lbLBtlkfPQTDKfe4Xqv1ASWPpayPEig== + dependencies: + nanoid "^3.3.4" + picocolors "^1.0.0" + source-map-js "^1.0.2" + +prelude-ls@^1.2.1: + version "1.2.1" + resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.2.1.tgz#debc6489d7a6e6b0e7611888cec880337d316396" + integrity sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g== + +prelude-ls@~1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.1.2.tgz#21932a549f5e52ffd9a827f570e04be62a97da54" + integrity sha512-ESF23V4SKG6lVSGZgYNpbsiaAkdab6ZgOxe52p7+Kid3W3u3bxR4Vfd/o21dmN7jSt0IwgZ4v5MUd26FEtXE9w== + +prettier-linter-helpers@^1.0.0: + version "1.0.0" + resolved 
"https://registry.yarnpkg.com/prettier-linter-helpers/-/prettier-linter-helpers-1.0.0.tgz#d23d41fe1375646de2d0104d3454a3008802cf7b" + integrity sha512-GbK2cP9nraSSUF9N2XwUwqfzlAFlMNYYl+ShE/V+H8a9uNl/oUqB1w2EL54Jh0OlyRSd8RfWYJ3coVS4TROP2w== + dependencies: + fast-diff "^1.1.2" + +prettier@^2.2.1: + version "2.7.1" + resolved "https://registry.yarnpkg.com/prettier/-/prettier-2.7.1.tgz#e235806850d057f97bb08368a4f7d899f7760c64" + integrity sha512-ujppO+MkdPqoVINuDFDRLClm7D78qbDt0/NR+wp5FqEZOoTNAjPHWj17QRhu7geIHJfcNhRk1XVQmF8Bp3ye+g== + +pretty-format@^26.6.2: + version "26.6.2" + resolved "https://registry.yarnpkg.com/pretty-format/-/pretty-format-26.6.2.tgz#e35c2705f14cb7fe2fe94fa078345b444120fc93" + integrity sha512-7AeGuCYNGmycyQbCqd/3PWH4eOoX/OiCa0uphp57NVTeAGdJGaAliecxwBDHYQCIvrW7aDBZCYeNTP/WX69mkg== + dependencies: + "@jest/types" "^26.6.2" + ansi-regex "^5.0.0" + ansi-styles "^4.0.0" + react-is "^17.0.1" + +pretty-format@^27.0.2: + version "27.5.1" + resolved "https://registry.yarnpkg.com/pretty-format/-/pretty-format-27.5.1.tgz#2181879fdea51a7a5851fb39d920faa63f01d88e" + integrity sha512-Qb1gy5OrP5+zDf2Bvnzdl3jsTf1qXVMazbvCoKhtKqVs4/YK4ozX4gKQJJVyNe+cajNPn0KoC0MC3FUmaHWEmQ== + dependencies: + ansi-regex "^5.0.1" + ansi-styles "^5.0.0" + react-is "^17.0.1" + +pretty-format@^28.0.0, pretty-format@^28.1.3: + version "28.1.3" + resolved "https://registry.yarnpkg.com/pretty-format/-/pretty-format-28.1.3.tgz#c9fba8cedf99ce50963a11b27d982a9ae90970d5" + integrity sha512-8gFb/To0OmxHR9+ZTb14Df2vNxdGCX8g1xWGUTqUw5TiZvcQf5sHKObd5UcPyLLyowNwDAMTF3XWOG1B6mxl1Q== + dependencies: + "@jest/schemas" "^28.1.3" + ansi-regex "^5.0.1" + ansi-styles "^5.0.0" + react-is "^18.0.0" + +process-nextick-args@~2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/process-nextick-args/-/process-nextick-args-2.0.1.tgz#7820d9b16120cc55ca9ae7792680ae7dba6d7fe2" + integrity sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag== + 
+progress@^2.0.0: + version "2.0.3" + resolved "https://registry.yarnpkg.com/progress/-/progress-2.0.3.tgz#7e8cf8d8f5b8f239c1bc68beb4eb78567d572ef8" + integrity sha512-7PiHtLll5LdnKIMw100I+8xJXR5gW2QwWYkT6iJva0bXitZKa/XMrSbdmg3r2Xnaidz9Qumd0VPaMrZlF9V9sA== + +prompts@^2.0.1: + version "2.4.2" + resolved "https://registry.yarnpkg.com/prompts/-/prompts-2.4.2.tgz#7b57e73b3a48029ad10ebd44f74b01722a4cb069" + integrity sha512-NxNv/kLguCA7p3jE8oL2aEBsrJWgAakBpgmgK6lpPWV+WuOmY6r2/zbAVnP+T8bQlA0nzHXSJSJW0Hq7ylaD2Q== + dependencies: + kleur "^3.0.3" + sisteransi "^1.0.5" + +prop-types-extra@^1.1.0: + version "1.1.1" + resolved "https://registry.yarnpkg.com/prop-types-extra/-/prop-types-extra-1.1.1.tgz#58c3b74cbfbb95d304625975aa2f0848329a010b" + integrity sha512-59+AHNnHYCdiC+vMwY52WmvP5dM3QLeoumYuEyceQDi9aEhtwN9zIQ2ZNo25sMyXnbh32h+P1ezDsUpUH3JAew== + dependencies: + react-is "^16.3.2" + warning "^4.0.0" + +prop-types@^15.6.2, prop-types@^15.7.0, prop-types@^15.7.2, prop-types@^15.8.1: + version "15.8.1" + resolved "https://registry.yarnpkg.com/prop-types/-/prop-types-15.8.1.tgz#67d87bf1a694f48435cf332c24af10214a3140b5" + integrity sha512-oj87CgZICdulUohogVAR7AjlC0327U4el4L6eAvOqCeudMDVU0NThNaV+b9Df4dXgSP1gXMTnPdhfe/2qDH5cg== + dependencies: + loose-envify "^1.4.0" + object-assign "^4.1.1" + react-is "^16.13.1" + +proxy-addr@~2.0.7: + version "2.0.7" + resolved "https://registry.yarnpkg.com/proxy-addr/-/proxy-addr-2.0.7.tgz#f19fe69ceab311eeb94b42e70e8c2070f9ba1025" + integrity sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg== + dependencies: + forwarded "0.2.0" + ipaddr.js "1.9.1" + +psl@^1.1.33: + version "1.9.0" + resolved "https://registry.yarnpkg.com/psl/-/psl-1.9.0.tgz#d0df2a137f00794565fcaf3b2c00cd09f8d5a5a7" + integrity sha512-E/ZsdU4HLs/68gYzgGTkMicWTLPdAftJLfJFlLUAAKZGkStNU72sZjT66SnMDVOfOWY/YAoiD7Jxa9iHvngcag== + +pump@^3.0.0: + version "3.0.0" + resolved 
"https://registry.yarnpkg.com/pump/-/pump-3.0.0.tgz#b4a2116815bde2f4e1ea602354e8c75565107a64" + integrity sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww== + dependencies: + end-of-stream "^1.1.0" + once "^1.3.1" + +punycode@^2.1.0, punycode@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.1.1.tgz#b58b010ac40c22c5657616c8d2c2c02c7bf479ec" + integrity sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A== + +qs@6.10.3: + version "6.10.3" + resolved "https://registry.yarnpkg.com/qs/-/qs-6.10.3.tgz#d6cde1b2ffca87b5aa57889816c5f81535e22e8e" + integrity sha512-wr7M2E0OFRfIfJZjKGieI8lBKb7fRCH4Fv5KNPEs7gJ8jadvotdsS08PzOKR7opXhZ/Xkjtt3WF9g38drmyRqQ== + dependencies: + side-channel "^1.0.4" + +raf@^3.4.1: + version "3.4.1" + resolved "https://registry.yarnpkg.com/raf/-/raf-3.4.1.tgz#0742e99a4a6552f445d73e3ee0328af0ff1ede39" + integrity sha512-Sq4CW4QhwOHE8ucn6J34MqtZCeWFP2aQSmrlroYgqAV1PjStIhJXxYuTgUIfkEk7zTLjmIjLmU5q+fbD1NnOJA== + dependencies: + performance-now "^2.1.0" + +railroad-diagrams@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/railroad-diagrams/-/railroad-diagrams-1.0.0.tgz#eb7e6267548ddedfb899c1b90e57374559cddb7e" + integrity sha512-cz93DjNeLY0idrCNOH6PviZGRN9GJhsdm9hpn1YCS879fj4W+x5IFJhhkRZcwVgMmFF7R82UA/7Oh+R8lLZg6A== + +randexp@0.4.6: + version "0.4.6" + resolved "https://registry.yarnpkg.com/randexp/-/randexp-0.4.6.tgz#e986ad5e5e31dae13ddd6f7b3019aa7c87f60ca3" + integrity sha512-80WNmd9DA0tmZrw9qQa62GPPWfuXJknrmVmLcxvq4uZBdYqb1wYoKTmnlGUchvVWe0XiLupYkBoXVOxz3C8DYQ== + dependencies: + discontinuous-range "1.0.0" + ret "~0.1.10" + +randombytes@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/randombytes/-/randombytes-2.1.0.tgz#df6f84372f0270dc65cdf6291349ab7a473d4f2a" + integrity sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ== + dependencies: + 
safe-buffer "^5.1.0" + +range-parser@^1.2.1, range-parser@~1.2.1: + version "1.2.1" + resolved "https://registry.yarnpkg.com/range-parser/-/range-parser-1.2.1.tgz#3cf37023d199e1c24d1a55b84800c2f3e6468031" + integrity sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg== + +raw-body@2.5.1: + version "2.5.1" + resolved "https://registry.yarnpkg.com/raw-body/-/raw-body-2.5.1.tgz#fe1b1628b181b700215e5fd42389f98b71392857" + integrity sha512-qqJBtEyVgS0ZmPGdCFPWJ3FreoqvG4MVQln/kCgF7Olq95IbOp0/BWyMwbdtn4VTvkM8Y7khCQ2Xgk/tcrCXig== + dependencies: + bytes "3.1.2" + http-errors "2.0.0" + iconv-lite "0.4.24" + unpipe "1.0.0" + +react-bootstrap@^2.1.1: + version "2.4.0" + resolved "https://registry.yarnpkg.com/react-bootstrap/-/react-bootstrap-2.4.0.tgz#99bf9656e2e7a23ae1ae135d18fd5ad7c344b416" + integrity sha512-dn599jNK1Fg5GGjJH+lQQDwELVzigh/MdusKpB/0el+sCjsO5MZDH5gRMmBjRhC+vb7VlCDr6OXffPIDSkNMLw== + dependencies: + "@babel/runtime" "^7.17.2" + "@restart/hooks" "^0.4.6" + "@restart/ui" "^1.2.0" + "@types/react-transition-group" "^4.4.4" + classnames "^2.3.1" + dom-helpers "^5.2.1" + invariant "^2.2.4" + prop-types "^15.8.1" + prop-types-extra "^1.1.0" + react-transition-group "^4.4.2" + uncontrollable "^7.2.1" + warning "^4.0.3" + +react-dom@17.0.2, react-dom@^17.0.1: + version "17.0.2" + resolved "https://registry.yarnpkg.com/react-dom/-/react-dom-17.0.2.tgz#ecffb6845e3ad8dbfcdc498f0d0a939736502c23" + integrity sha512-s4h96KtLDUQlsENhMn1ar8t2bEa+q/YAtj8pPPdIjPDGBDIVNsrD9aXNWqspUe6AzKCIG0C1HZZLqLV7qpOBGA== + dependencies: + loose-envify "^1.1.0" + object-assign "^4.1.1" + scheduler "^0.20.2" + +react-icons@^4.1.0: + version "4.4.0" + resolved "https://registry.yarnpkg.com/react-icons/-/react-icons-4.4.0.tgz#a13a8a20c254854e1ec9aecef28a95cdf24ef703" + integrity sha512-fSbvHeVYo/B5/L4VhB7sBA1i2tS8MkT0Hb9t2H1AVPkwGfVHLJCqyr2Py9dKMxsyM63Eng1GkdZfbWj+Fmv8Rg== + +"react-is@^16.12.0 || ^17.0.0 || ^18.0.0", react-is@^18.0.0: + version 
"18.2.0" + resolved "https://registry.yarnpkg.com/react-is/-/react-is-18.2.0.tgz#199431eeaaa2e09f86427efbb4f1473edb47609b" + integrity sha512-xWGDIW6x921xtzPkhiULtthJHoJvBbF3q26fzloPCK0hsvxtPVelvftw3zjbHWSkR2km9Z+4uxbDDK/6Zw9B8w== + +react-is@^16.13.1, react-is@^16.3.2, react-is@^16.6.0, react-is@^16.7.0: + version "16.13.1" + resolved "https://registry.yarnpkg.com/react-is/-/react-is-16.13.1.tgz#789729a4dc36de2999dc156dd6c1d9c18cea56a4" + integrity sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ== + +react-is@^17.0.0, react-is@^17.0.1, react-is@^17.0.2: + version "17.0.2" + resolved "https://registry.yarnpkg.com/react-is/-/react-is-17.0.2.tgz#e691d4a8e9c789365655539ab372762b0efb54f0" + integrity sha512-w2GsyukL62IJnlaff/nRegPQR94C/XXamvMWmSHRJ4y7Ts/4ocGRmTHvOs8PSE6pB3dWOrD/nueuU5sduBsQ4w== + +react-lifecycles-compat@^3.0.4: + version "3.0.4" + resolved "https://registry.yarnpkg.com/react-lifecycles-compat/-/react-lifecycles-compat-3.0.4.tgz#4f1a273afdfc8f3488a8c516bfda78f872352362" + integrity sha512-fBASbA6LnOU9dOU2eW7aQ8xmYBSXUIWr+UmF9b1efZBazGNO+rcXT/icdKnYm2pTwcRylVUYwW7H1PHfLekVzA== + +react-multi-select-component@^3.0.7: + version "3.1.7" + resolved "https://registry.yarnpkg.com/react-multi-select-component/-/react-multi-select-component-3.1.7.tgz#4d68fa57f20b02c596c20d8ef61cc3c304ade766" + integrity sha512-ycb+qCc8I+KLjlgVvo/I5Gsali8qw4vLUjfIMorgpt/RUY0+0aOtqxOAc4ojwQAe6tNKee23IRyCCQvU4tyBFA== + dependencies: + goober "^2.0.30" + +react-object-table-viewer@^1.0.7: + version "1.0.7" + resolved "https://registry.yarnpkg.com/react-object-table-viewer/-/react-object-table-viewer-1.0.7.tgz#31816021fa4526641c6b66bd9433ec9b78c2e472" + integrity sha512-OezCet8+BmEdJJHO5WGPFPRWXxw4Ls6HsV4Uh1kRPlmRXLOTNqWt/ZHmH8NhTl1BA9HkdhEegKVqc2b61wDMLg== + dependencies: + react "^17.0.2" + react-dom "17.0.2" + +react-redux@^7.2.2: + version "7.2.8" + resolved 
"https://registry.yarnpkg.com/react-redux/-/react-redux-7.2.8.tgz#a894068315e65de5b1b68899f9c6ee0923dd28de" + integrity sha512-6+uDjhs3PSIclqoCk0kd6iX74gzrGc3W5zcAjbrFgEdIjRSQObdIwfx80unTkVUYvbQ95Y8Av3OvFHq1w5EOUw== + dependencies: + "@babel/runtime" "^7.15.4" + "@types/react-redux" "^7.1.20" + hoist-non-react-statics "^3.3.2" + loose-envify "^1.4.0" + prop-types "^15.7.2" + react-is "^17.0.2" + +react-router-dom@^5.2.0: + version "5.3.3" + resolved "https://registry.yarnpkg.com/react-router-dom/-/react-router-dom-5.3.3.tgz#8779fc28e6691d07afcaf98406d3812fe6f11199" + integrity sha512-Ov0tGPMBgqmbu5CDmN++tv2HQ9HlWDuWIIqn4b88gjlAN5IHI+4ZUZRcpz9Hl0azFIwihbLDYw1OiHGRo7ZIng== + dependencies: + "@babel/runtime" "^7.12.13" + history "^4.9.0" + loose-envify "^1.3.1" + prop-types "^15.6.2" + react-router "5.3.3" + tiny-invariant "^1.0.2" + tiny-warning "^1.0.0" + +react-router@5.3.3, react-router@^5.2.0: + version "5.3.3" + resolved "https://registry.yarnpkg.com/react-router/-/react-router-5.3.3.tgz#8e3841f4089e728cf82a429d92cdcaa5e4a3a288" + integrity sha512-mzQGUvS3bM84TnbtMYR8ZjKnuPJ71IjSzR+DE6UkUqvN4czWIqEs17yLL8xkAycv4ev0AiN+IGrWu88vJs/p2w== + dependencies: + "@babel/runtime" "^7.12.13" + history "^4.9.0" + hoist-non-react-statics "^3.1.0" + loose-envify "^1.3.1" + mini-create-react-context "^0.4.0" + path-to-regexp "^1.7.0" + prop-types "^15.6.2" + react-is "^16.6.0" + tiny-invariant "^1.0.2" + tiny-warning "^1.0.0" + +react-shallow-renderer@^16.13.1: + version "16.15.0" + resolved "https://registry.yarnpkg.com/react-shallow-renderer/-/react-shallow-renderer-16.15.0.tgz#48fb2cf9b23d23cde96708fe5273a7d3446f4457" + integrity sha512-oScf2FqQ9LFVQgA73vr86xl2NaOIX73rh+YFqcOp68CWj56tSfgtGKrEbyhCj0rSijyG9M1CYprTh39fBi5hzA== + dependencies: + object-assign "^4.1.1" + react-is "^16.12.0 || ^17.0.0 || ^18.0.0" + +react-test-renderer@^17.0.0: + version "17.0.2" + resolved 
"https://registry.yarnpkg.com/react-test-renderer/-/react-test-renderer-17.0.2.tgz#4cd4ae5ef1ad5670fc0ef776e8cc7e1231d9866c" + integrity sha512-yaQ9cB89c17PUb0x6UfWRs7kQCorVdHlutU1boVPEsB8IDZH6n9tHxMacc3y0JoXOJUsZb/t/Mb8FUWMKaM7iQ== + dependencies: + object-assign "^4.1.1" + react-is "^17.0.2" + react-shallow-renderer "^16.13.1" + scheduler "^0.20.2" + +react-transition-group@^4.4.2: + version "4.4.5" + resolved "https://registry.yarnpkg.com/react-transition-group/-/react-transition-group-4.4.5.tgz#e53d4e3f3344da8521489fbef8f2581d42becdd1" + integrity sha512-pZcd1MCJoiKiBR2NRxeCRg13uCXbydPnmB4EOeRrY7480qNWO8IIgQG6zlDkm6uRMsURXPuKq0GWtiM59a5Q6g== + dependencies: + "@babel/runtime" "^7.5.5" + dom-helpers "^5.0.1" + loose-envify "^1.4.0" + prop-types "^15.6.2" + +react@^17.0.1, react@^17.0.2: + version "17.0.2" + resolved "https://registry.yarnpkg.com/react/-/react-17.0.2.tgz#d0b5cc516d29eb3eee383f75b62864cfb6800037" + integrity sha512-gnhPt75i/dq/z3/6q/0asP78D0u592D5L1pd7M8P+dck6Fu/jJeL6iVVK23fptSUZj8Vjf++7wXA8UNclGQcbA== + dependencies: + loose-envify "^1.1.0" + object-assign "^4.1.1" + +read-pkg-up@^7.0.1: + version "7.0.1" + resolved "https://registry.yarnpkg.com/read-pkg-up/-/read-pkg-up-7.0.1.tgz#f3a6135758459733ae2b95638056e1854e7ef507" + integrity sha512-zK0TB7Xd6JpCLmlLmufqykGE+/TlOePD6qKClNW7hHDKFh/J7/7gCWGR7joEQEW1bKq3a3yUZSObOoWLFQ4ohg== + dependencies: + find-up "^4.1.0" + read-pkg "^5.2.0" + type-fest "^0.8.1" + +read-pkg@^5.2.0: + version "5.2.0" + resolved "https://registry.yarnpkg.com/read-pkg/-/read-pkg-5.2.0.tgz#7bf295438ca5a33e56cd30e053b34ee7250c93cc" + integrity sha512-Ug69mNOpfvKDAc2Q8DRpMjjzdtrnv9HcSMX+4VsZxD1aZ6ZzrIE7rlzXBtWTyhULSMKg076AW6WR5iZpD0JiOg== + dependencies: + "@types/normalize-package-data" "^2.4.0" + normalize-package-data "^2.5.0" + parse-json "^5.0.0" + type-fest "^0.6.0" + +readable-stream@^2.0.1: + version "2.3.7" + resolved 
"https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.7.tgz#1eca1cf711aef814c04f62252a36a62f6cb23b57" + integrity sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw== + dependencies: + core-util-is "~1.0.0" + inherits "~2.0.3" + isarray "~1.0.0" + process-nextick-args "~2.0.0" + safe-buffer "~5.1.1" + string_decoder "~1.1.1" + util-deprecate "~1.0.1" + +readable-stream@^3.0.6: + version "3.6.0" + resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.6.0.tgz#337bbda3adc0706bd3e024426a286d4b4b2c9198" + integrity sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA== + dependencies: + inherits "^2.0.3" + string_decoder "^1.1.1" + util-deprecate "^1.0.1" + +readdirp@~3.6.0: + version "3.6.0" + resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-3.6.0.tgz#74a370bd857116e245b29cc97340cd431a02a6c7" + integrity sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA== + dependencies: + picomatch "^2.2.1" + +rechoir@^0.7.0: + version "0.7.1" + resolved "https://registry.yarnpkg.com/rechoir/-/rechoir-0.7.1.tgz#9478a96a1ca135b5e88fc027f03ee92d6c645686" + integrity sha512-/njmZ8s1wVeR6pjTZ+0nCnv8SpZNRMT2D1RLOJQESlYFDBvwpTA4KWJpZ+sBJ4+vhjILRcK7JIFdGCdxEAAitg== + dependencies: + resolve "^1.9.0" + +"recompose@npm:react-recompose@^0.31.2": + version "0.31.2" + resolved "https://registry.yarnpkg.com/react-recompose/-/react-recompose-0.31.2.tgz#57c9b783fc94598d530fa661e22591fe83d607ad" + integrity sha512-xojvQpjssgIzb/Pfsz9oMT/9Aq1r769sU5F9q6PNv604sOFDL46D5OGGM9Qymy3dqWj5MjzLvAoLZzyA44QnGw== + dependencies: + "@babel/runtime" "^7.16.3" + change-emitter "^0.1.2" + hoist-non-react-statics "^2.5.5" + react-lifecycles-compat "^3.0.4" + symbol-observable "^1.2.0" + +redent@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/redent/-/redent-3.0.0.tgz#e557b7998316bb53c9f1f56fa626352c6963059f" + integrity 
sha512-6tDA8g98We0zd0GvVeMT9arEOnTw9qM03L9cJXaCjrip1OO764RDBLBfrB4cwzNGDj5OA5ioymC9GkizgWJDUg== + dependencies: + indent-string "^4.0.0" + strip-indent "^3.0.0" + +redux@^4.0.0, redux@^4.0.5: + version "4.2.0" + resolved "https://registry.yarnpkg.com/redux/-/redux-4.2.0.tgz#46f10d6e29b6666df758780437651eeb2b969f13" + integrity sha512-oSBmcKKIuIR4ME29/AeNUnl5L+hvBq7OaJWzaptTQJAntaPvxIJqfnjbaEiCzzaIz+XmVILfqAM3Ob0aXLPfjA== + dependencies: + "@babel/runtime" "^7.9.2" + +regenerate-unicode-properties@^10.0.1: + version "10.0.1" + resolved "https://registry.yarnpkg.com/regenerate-unicode-properties/-/regenerate-unicode-properties-10.0.1.tgz#7f442732aa7934a3740c779bb9b3340dccc1fb56" + integrity sha512-vn5DU6yg6h8hP/2OkQo3K7uVILvY4iu0oI4t3HFa81UPkhGJwkRwM10JEc3upjdhHjs/k8GJY1sRBhk5sr69Bw== + dependencies: + regenerate "^1.4.2" + +regenerate@^1.4.2: + version "1.4.2" + resolved "https://registry.yarnpkg.com/regenerate/-/regenerate-1.4.2.tgz#b9346d8827e8f5a32f7ba29637d398b69014848a" + integrity sha512-zrceR/XhGYU/d/opr2EKO7aRHUeiBI8qjtfHqADTwZd6Szfy16la6kqD0MIUs5z5hx6AaKa+PixpPrR289+I0A== + +regenerator-runtime@^0.13.4, regenerator-runtime@^0.13.9: + version "0.13.9" + resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.13.9.tgz#8925742a98ffd90814988d7566ad30ca3b263b52" + integrity sha512-p3VT+cOEgxFsRRA9X4lkI1E+k2/CtnKtU4gcxyaCUreilL/vqI6CdZ3wxVUx3UOUg+gnUOQQcRI7BmSI656MYA== + +regenerator-transform@^0.15.0: + version "0.15.0" + resolved "https://registry.yarnpkg.com/regenerator-transform/-/regenerator-transform-0.15.0.tgz#cbd9ead5d77fae1a48d957cf889ad0586adb6537" + integrity sha512-LsrGtPmbYg19bcPHwdtmXwbW+TqNvtY4riE3P83foeHRroMbH6/2ddFBfab3t7kbzc7v7p4wbkIecHImqt0QNg== + dependencies: + "@babel/runtime" "^7.8.4" + +regex-not@^1.0.0, regex-not@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/regex-not/-/regex-not-1.0.2.tgz#1f4ece27e00b0b65e0247a6810e6a85d83a5752c" + integrity 
sha512-J6SDjUgDxQj5NusnOtdFxDwN/+HWykR8GELwctJ7mdqhcyy1xEc4SRFHUXvxTp661YaVKAjfRLZ9cCqS6tn32A== + dependencies: + extend-shallow "^3.0.2" + safe-regex "^1.1.0" + +regexp.prototype.flags@^1.4.1, regexp.prototype.flags@^1.4.3: + version "1.4.3" + resolved "https://registry.yarnpkg.com/regexp.prototype.flags/-/regexp.prototype.flags-1.4.3.tgz#87cab30f80f66660181a3bb7bf5981a872b367ac" + integrity sha512-fjggEOO3slI6Wvgjwflkc4NFRCTZAu5CnNfBd5qOMYhWdn67nJBBu34/TkD++eeFmd8C9r9jfXJ27+nSiRkSUA== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + functions-have-names "^1.2.2" + +regexpp@^3.1.0: + version "3.2.0" + resolved "https://registry.yarnpkg.com/regexpp/-/regexpp-3.2.0.tgz#0425a2768d8f23bad70ca4b90461fa2f1213e1b2" + integrity sha512-pq2bWo9mVD43nbts2wGv17XLiNLya+GklZ8kaDLV2Z08gDCsGpnKn9BFMepvWuHCbyVvY7J5o5+BVvoQbmlJLg== + +regexpu-core@^5.1.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/regexpu-core/-/regexpu-core-5.1.0.tgz#2f8504c3fd0ebe11215783a41541e21c79942c6d" + integrity sha512-bb6hk+xWd2PEOkj5It46A16zFMs2mv86Iwpdu94la4S3sJ7C973h2dHpYKwIBGaWSO7cIRJ+UX0IeMaWcO4qwA== + dependencies: + regenerate "^1.4.2" + regenerate-unicode-properties "^10.0.1" + regjsgen "^0.6.0" + regjsparser "^0.8.2" + unicode-match-property-ecmascript "^2.0.0" + unicode-match-property-value-ecmascript "^2.0.0" + +regjsgen@^0.6.0: + version "0.6.0" + resolved "https://registry.yarnpkg.com/regjsgen/-/regjsgen-0.6.0.tgz#83414c5354afd7d6627b16af5f10f41c4e71808d" + integrity sha512-ozE883Uigtqj3bx7OhL1KNbCzGyW2NQZPl6Hs09WTvCuZD5sTI4JY58bkbQWa/Y9hxIsvJ3M8Nbf7j54IqeZbA== + +regjsparser@^0.8.2: + version "0.8.4" + resolved "https://registry.yarnpkg.com/regjsparser/-/regjsparser-0.8.4.tgz#8a14285ffcc5de78c5b95d62bbf413b6bc132d5f" + integrity sha512-J3LABycON/VNEu3abOviqGHuB/LOtOQj8SKmfP9anY5GfAVw/SPjwzSjxGjbZXIxbGfqTHtJw58C2Li/WkStmA== + dependencies: + jsesc "~0.5.0" + +remove-trailing-separator@^1.0.1: + version "1.1.0" + resolved 
"https://registry.yarnpkg.com/remove-trailing-separator/-/remove-trailing-separator-1.1.0.tgz#c24bce2a283adad5bc3f58e0d48249b92379d8ef" + integrity sha512-/hS+Y0u3aOfIETiaiirUFwDBDzmXPvO+jAfKTitUngIPzdKc6Z0LoFjM/CK5PL4C+eKwHohlHAb6H0VFfmmUsw== + +repeat-element@^1.1.2: + version "1.1.4" + resolved "https://registry.yarnpkg.com/repeat-element/-/repeat-element-1.1.4.tgz#be681520847ab58c7568ac75fbfad28ed42d39e9" + integrity sha512-LFiNfRcSu7KK3evMyYOuCzv3L10TW7yC1G2/+StMjK8Y6Vqd2MG7r/Qjw4ghtuCOjFvlnms/iMmLqpvW/ES/WQ== + +repeat-string@^1.6.1: + version "1.6.1" + resolved "https://registry.yarnpkg.com/repeat-string/-/repeat-string-1.6.1.tgz#8dcae470e1c88abc2d600fff4a776286da75e637" + integrity sha512-PV0dzCYDNfRi1jCDbJzpW7jNNDRuCOG/jI5ctQcGKt/clZD+YcPS3yIlWuTJMmESC8aevCFmWJy5wjAFgNqN6w== + +require-directory@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/require-directory/-/require-directory-2.1.1.tgz#8c64ad5fd30dab1c976e2344ffe7f792a6a6df42" + integrity sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q== + +require-from-string@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/require-from-string/-/require-from-string-2.0.2.tgz#89a7fdd938261267318eafe14f9c32e598c36909" + integrity sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw== + +require-main-filename@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/require-main-filename/-/require-main-filename-2.0.0.tgz#d0b329ecc7cc0f61649f62215be69af54aa8989b" + integrity sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg== + +requires-port@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/requires-port/-/requires-port-1.0.0.tgz#925d2601d39ac485e091cf0da5c6e694dc3dcaff" + integrity sha512-KigOCHcocU3XODJxsu8i/j8T9tzT4adHiecwORRQ0ZZFcp7ahwXuRU1m+yuO90C5ZUyGeGfocHDI14M3L3yDAQ== + +resolve-cwd@^3.0.0: + version "3.0.0" + resolved 
"https://registry.yarnpkg.com/resolve-cwd/-/resolve-cwd-3.0.0.tgz#0f0075f1bb2544766cf73ba6a6e2adfebcb13f2d" + integrity sha512-OrZaX2Mb+rJCpH/6CpSqt9xFVpN++x01XnN2ie9g6P5/3xelLAkXWVADpdz1IHD/KFfEXyE6V0U01OQ3UO2rEg== + dependencies: + resolve-from "^5.0.0" + +resolve-from@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-4.0.0.tgz#4abcd852ad32dd7baabfe9b40e00a36db5f392e6" + integrity sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g== + +resolve-from@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-5.0.0.tgz#c35225843df8f776df21c57557bc087e9dfdfc69" + integrity sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw== + +resolve-pathname@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/resolve-pathname/-/resolve-pathname-3.0.0.tgz#99d02224d3cf263689becbb393bc560313025dcd" + integrity sha512-C7rARubxI8bXFNB/hqcp/4iUeIXJhJZvFPFPiSPRnhU5UPxzMFIl+2E6yY6c4k9giDJAhtV+enfA+G89N6Csng== + +resolve-url@^0.2.1: + version "0.2.1" + resolved "https://registry.yarnpkg.com/resolve-url/-/resolve-url-0.2.1.tgz#2c637fe77c893afd2a663fe21aa9080068e2052a" + integrity sha512-ZuF55hVUQaaczgOIwqWzkEcEidmlD/xl44x1UZnhOXcYuFN2S6+rcxpG+C1N3So0wvNI3DmJICUFfu2SxhBmvg== + +resolve@^1.10.0, resolve@^1.14.2, resolve@^1.18.1, resolve@^1.9.0: + version "1.22.1" + resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.22.1.tgz#27cb2ebb53f91abb49470a928bba7558066ac177" + integrity sha512-nBpuuYuY5jFsli/JIs1oldw6fOQCBioohqWZg/2hiaOybXOft4lonv85uDOKXdf8rhyK159cxU5cDcK/NKk8zw== + dependencies: + is-core-module "^2.9.0" + path-parse "^1.0.7" + supports-preserve-symlinks-flag "^1.0.0" + +resolve@^2.0.0-next.3: + version "2.0.0-next.4" + resolved "https://registry.yarnpkg.com/resolve/-/resolve-2.0.0-next.4.tgz#3d37a113d6429f496ec4752d2a2e58efb1fd4660" + integrity 
sha512-iMDbmAWtfU+MHpxt/I5iWI7cY6YVEZUQ3MBgPQ++XD1PELuJHIl82xBmObyP2KyQmkNB2dsqF7seoQQiAn5yDQ== + dependencies: + is-core-module "^2.9.0" + path-parse "^1.0.7" + supports-preserve-symlinks-flag "^1.0.0" + +ret@~0.1.10: + version "0.1.15" + resolved "https://registry.yarnpkg.com/ret/-/ret-0.1.15.tgz#b8a4825d5bdb1fc3f6f53c2bc33f81388681c7bc" + integrity sha512-TTlYpa+OL+vMMNG24xSlQGEJ3B/RzEfUlLct7b5G/ytav+wPrplCpVMFuwzXbkecJrb6IYo1iFb0S9v37754mg== + +retry@^0.13.1: + version "0.13.1" + resolved "https://registry.yarnpkg.com/retry/-/retry-0.13.1.tgz#185b1587acf67919d63b357349e03537b2484658" + integrity sha512-XQBQ3I8W1Cge0Seh+6gjj03LbmRFWuoszgK9ooCpwYIrhhoO80pfq4cUkU5DkknwfOfFteRwlZ56PYOGYyFWdg== + +rimraf@^3.0.0, rimraf@^3.0.2: + version "3.0.2" + resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-3.0.2.tgz#f1a5402ba6220ad52cc1282bac1ae3aa49fd061a" + integrity sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA== + dependencies: + glob "^7.1.3" + +rst-selector-parser@^2.2.3: + version "2.2.3" + resolved "https://registry.yarnpkg.com/rst-selector-parser/-/rst-selector-parser-2.2.3.tgz#81b230ea2fcc6066c89e3472de794285d9b03d91" + integrity sha512-nDG1rZeP6oFTLN6yNDV/uiAvs1+FS/KlrEwh7+y7dpuApDBy6bI2HTBcc0/V8lv9OTqfyD34eF7au2pm8aBbhA== + dependencies: + lodash.flattendeep "^4.4.0" + nearley "^2.7.10" + +rsvp@^4.8.4: + version "4.8.5" + resolved "https://registry.yarnpkg.com/rsvp/-/rsvp-4.8.5.tgz#c8f155311d167f68f21e168df71ec5b083113734" + integrity sha512-nfMOlASu9OnRJo1mbEk2cz0D56a1MBNrJ7orjRZQG10XDyuvwksKbuXNp6qa+kbn839HwjwhBzhFmdsaEAfauA== + +safe-buffer@5.1.2, safe-buffer@~5.1.0, safe-buffer@~5.1.1: + version "5.1.2" + resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d" + integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g== + +safe-buffer@5.2.1, safe-buffer@>=5.1.0, safe-buffer@^5.1.0, safe-buffer@~5.2.0: + 
version "5.2.1" + resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6" + integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ== + +safe-regex@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/safe-regex/-/safe-regex-1.1.0.tgz#40a3669f3b077d1e943d44629e157dd48023bf2e" + integrity sha512-aJXcif4xnaNUzvUuC5gcb46oTS7zvg4jpMTnuqtrEPlR3vFr4pxtdTwaF1Qs3Enjn9HK+ZlwQui+a7z0SywIzg== + dependencies: + ret "~0.1.10" + +"safer-buffer@>= 2.1.2 < 3": + version "2.1.2" + resolved "https://registry.yarnpkg.com/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a" + integrity sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg== + +sane@^4.0.3: + version "4.1.0" + resolved "https://registry.yarnpkg.com/sane/-/sane-4.1.0.tgz#ed881fd922733a6c461bc189dc2b6c006f3ffded" + integrity sha512-hhbzAgTIX8O7SHfp2c8/kREfEn4qO/9q8C9beyY6+tvZ87EpoZ3i1RIEvp27YBswnNbY9mWd6paKVmKbAgLfZA== + dependencies: + "@cnakazawa/watch" "^1.0.3" + anymatch "^2.0.0" + capture-exit "^2.0.0" + exec-sh "^0.3.2" + execa "^1.0.0" + fb-watchman "^2.0.0" + micromatch "^3.1.4" + minimist "^1.1.1" + walker "~1.0.5" + +saxes@^5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/saxes/-/saxes-5.0.1.tgz#eebab953fa3b7608dbe94e5dadb15c888fa6696d" + integrity sha512-5LBh1Tls8c9xgGjw3QrMwETmTMVk0oFgvrFSvWx62llR2hcEInrKNZ2GZCCuuy2lvWrdl5jhbpeqc5hRYKFOcw== + dependencies: + xmlchars "^2.2.0" + +scheduler@^0.20.2: + version "0.20.2" + resolved "https://registry.yarnpkg.com/scheduler/-/scheduler-0.20.2.tgz#4baee39436e34aa93b4874bddcbf0fe8b8b50e91" + integrity sha512-2eWfGgAqqWFGqtdMmcL5zCMK1U8KlXv8SQFGglL3CEtd0aDVDWgeF/YoCmvln55m5zSk3J/20hTaSBeSObsQDQ== + dependencies: + loose-envify "^1.1.0" + object-assign "^4.1.1" + +schema-utils@^2.6.5: + version "2.7.1" + resolved 
"https://registry.yarnpkg.com/schema-utils/-/schema-utils-2.7.1.tgz#1ca4f32d1b24c590c203b8e7a50bf0ea4cd394d7" + integrity sha512-SHiNtMOUGWBQJwzISiVYKu82GiV4QYGePp3odlY1tuKO7gPtphAT5R/py0fA6xtbgLL/RvtJZnU9b8s0F1q0Xg== + dependencies: + "@types/json-schema" "^7.0.5" + ajv "^6.12.4" + ajv-keywords "^3.5.2" + +schema-utils@^3.0.0, schema-utils@^3.1.0, schema-utils@^3.1.1: + version "3.1.1" + resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-3.1.1.tgz#bc74c4b6b6995c1d88f76a8b77bea7219e0c8281" + integrity sha512-Y5PQxS4ITlC+EahLuXaY86TXfR7Dc5lw294alXOq86JAHCihAIZfqv8nNCWvaEJvaC51uN9hbLGeV0cFBdH+Fw== + dependencies: + "@types/json-schema" "^7.0.8" + ajv "^6.12.5" + ajv-keywords "^3.5.2" + +schema-utils@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-4.0.0.tgz#60331e9e3ae78ec5d16353c467c34b3a0a1d3df7" + integrity sha512-1edyXKgh6XnJsJSQ8mKWXnN/BVaIbFMLpouRUrXgVq7WYne5kw3MW7UPhO44uRXQSIpTSXoJbmrR2X0w9kUTyg== + dependencies: + "@types/json-schema" "^7.0.9" + ajv "^8.8.0" + ajv-formats "^2.1.1" + ajv-keywords "^5.0.0" + +select-hose@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/select-hose/-/select-hose-2.0.0.tgz#625d8658f865af43ec962bfc376a37359a4994ca" + integrity sha512-mEugaLK+YfkijB4fx0e6kImuJdCIt2LxCRcbEYPqRGCs4F2ogyfZU5IAZRdjCP8JPq2AtdNoC/Dux63d9Kiryg== + +selfsigned@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/selfsigned/-/selfsigned-2.0.1.tgz#8b2df7fa56bf014d19b6007655fff209c0ef0a56" + integrity sha512-LmME957M1zOsUhG+67rAjKfiWFox3SBxE/yymatMZsAx+oMrJ0YQ8AToOnyCm7xbeg2ep37IHLxdu0o2MavQOQ== + dependencies: + node-forge "^1" + +"semver@2 || 3 || 4 || 5", semver@^5.5.0: + version "5.7.1" + resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.1.tgz#a954f931aeba508d307bbf069eff0c01c96116f7" + integrity sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ== + +semver@7.0.0: + version "7.0.0" + resolved 
"https://registry.yarnpkg.com/semver/-/semver-7.0.0.tgz#5f3ca35761e47e05b206c6daff2cf814f0316b8e" + integrity sha512-+GB6zVA9LWh6zovYQLALHwv5rb2PHGlJi3lfiqIHxR0uuwCgefcOJc59v9fv1w8GbStwxuuqqAjI9NMAOOgq1A== + +semver@^6.0.0, semver@^6.1.1, semver@^6.1.2, semver@^6.3.0: + version "6.3.0" + resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.0.tgz#ee0a64c8af5e8ceea67687b133761e1becbd1d3d" + integrity sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw== + +semver@^7.2.1, semver@^7.3.2, semver@^7.3.5: + version "7.3.7" + resolved "https://registry.yarnpkg.com/semver/-/semver-7.3.7.tgz#12c5b649afdbf9049707796e22a4028814ce523f" + integrity sha512-QlYTucUYOews+WeEujDoEGziz4K6c47V/Bd+LjSSYcA94p+DmINdf7ncaUinThfvZyu13lN9OY1XDxt8C0Tw0g== + dependencies: + lru-cache "^6.0.0" + +send@0.18.0: + version "0.18.0" + resolved "https://registry.yarnpkg.com/send/-/send-0.18.0.tgz#670167cc654b05f5aa4a767f9113bb371bc706be" + integrity sha512-qqWzuOjSFOuqPjFe4NOsMLafToQQwBSOEpS+FwEt3A2V3vKubTquT3vmLTQpFgMXp8AlFWFuP1qKaJZOtPpVXg== + dependencies: + debug "2.6.9" + depd "2.0.0" + destroy "1.2.0" + encodeurl "~1.0.2" + escape-html "~1.0.3" + etag "~1.8.1" + fresh "0.5.2" + http-errors "2.0.0" + mime "1.6.0" + ms "2.1.3" + on-finished "2.4.1" + range-parser "~1.2.1" + statuses "2.0.1" + +serialize-javascript@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/serialize-javascript/-/serialize-javascript-6.0.0.tgz#efae5d88f45d7924141da8b5c3a7a7e663fefeb8" + integrity sha512-Qr3TosvguFt8ePWqsvRfrKyQXIiW+nGbYpy8XK24NQHE83caxWt+mIymTT19DGFbNWNLfEwsrkSmN64lVWB9ag== + dependencies: + randombytes "^2.1.0" + +serve-index@^1.9.1: + version "1.9.1" + resolved "https://registry.yarnpkg.com/serve-index/-/serve-index-1.9.1.tgz#d3768d69b1e7d82e5ce050fff5b453bea12a9239" + integrity sha512-pXHfKNP4qujrtteMrSBb0rc8HJ9Ms/GrXwcUtUtD5s4ewDJI8bT3Cz2zTVRMKtri49pLx2e0Ya8ziP5Ya2pZZw== + dependencies: + accepts "~1.3.4" + batch "0.6.1" + debug "2.6.9" + 
escape-html "~1.0.3" + http-errors "~1.6.2" + mime-types "~2.1.17" + parseurl "~1.3.2" + +serve-static@1.15.0: + version "1.15.0" + resolved "https://registry.yarnpkg.com/serve-static/-/serve-static-1.15.0.tgz#faaef08cffe0a1a62f60cad0c4e513cff0ac9540" + integrity sha512-XGuRDNjXUijsUL0vl6nSD7cwURuzEgglbOaFuZM9g3kwDXOWVTck0jLzjPzGD+TazWbboZYu52/9/XPdUgne9g== + dependencies: + encodeurl "~1.0.2" + escape-html "~1.0.3" + parseurl "~1.3.3" + send "0.18.0" + +set-blocking@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/set-blocking/-/set-blocking-2.0.0.tgz#045f9782d011ae9a6803ddd382b24392b3d890f7" + integrity sha512-KiKBS8AnWGEyLzofFfmvKwpdPzqiy16LvQfK3yv/fVH7Bj13/wl3JSR1J+rfgRE9q7xUJK4qvgS8raSOeLUehw== + +set-value@^2.0.0, set-value@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/set-value/-/set-value-2.0.1.tgz#a18d40530e6f07de4228c7defe4227af8cad005b" + integrity sha512-JxHc1weCN68wRY0fhCoXpyK55m/XPHafOmK4UWD7m2CI14GMcFypt4w/0+NV5f/ZMby2F6S2wwA7fgynh9gWSw== + dependencies: + extend-shallow "^2.0.1" + is-extendable "^0.1.1" + is-plain-object "^2.0.3" + split-string "^3.0.1" + +setprototypeof@1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/setprototypeof/-/setprototypeof-1.1.0.tgz#d0bd85536887b6fe7c0d818cb962d9d91c54e656" + integrity sha512-BvE/TwpZX4FXExxOxZyRGQQv651MSwmWKZGqvmPcRIjDqWub67kTKuIMx43cZZrS/cBBzwBcNDWoFxt2XEFIpQ== + +setprototypeof@1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/setprototypeof/-/setprototypeof-1.2.0.tgz#66c9a24a73f9fc28cbe66b09fed3d33dcaf1b424" + integrity sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw== + +shallow-clone@^3.0.0: + version "3.0.1" + resolved "https://registry.yarnpkg.com/shallow-clone/-/shallow-clone-3.0.1.tgz#8f2981ad92531f55035b01fb230769a40e02efa3" + integrity sha512-/6KqX+GVUdqPuPPd2LxDDxzX6CAbjJehAAOKlNpqqUpAqPM6HeL8f+o3a+JsyGjn2lv0WY8UsTgUJjU9Ok55NA== + dependencies: + kind-of "^6.0.2" + 
+shebang-command@^1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-1.2.0.tgz#44aac65b695b03398968c39f363fee5deafdf1ea" + integrity sha512-EV3L1+UQWGor21OmnvojK36mhg+TyIKDh3iFBKBohr5xeXIhNBcx8oWdgkTEEQ+BEFFYdLRuqMfd5L84N1V5Vg== + dependencies: + shebang-regex "^1.0.0" + +shebang-command@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-2.0.0.tgz#ccd0af4f8835fbdc265b82461aaf0c36663f34ea" + integrity sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA== + dependencies: + shebang-regex "^3.0.0" + +shebang-regex@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-1.0.0.tgz#da42f49740c0b42db2ca9728571cb190c98efea3" + integrity sha512-wpoSFAxys6b2a2wHZ1XpDSgD7N9iVjg29Ph9uV/uaP9Ex/KXlkTZTeddxDPSYQpgvzKLGJke2UU0AzoGCjNIvQ== + +shebang-regex@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-3.0.0.tgz#ae16f1644d873ecad843b0307b143362d4c42172" + integrity sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A== + +shellwords@^0.1.1: + version "0.1.1" + resolved "https://registry.yarnpkg.com/shellwords/-/shellwords-0.1.1.tgz#d6b9181c1a48d397324c84871efbcfc73fc0654b" + integrity sha512-vFwSUfQvqybiICwZY5+DAWIPLKsWO31Q91JSKl3UYv+K5c2QRPzn0qzec6QPu1Qc9eHYItiP3NdJqNVqetYAww== + +side-channel@^1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/side-channel/-/side-channel-1.0.4.tgz#efce5c8fdc104ee751b25c58d4290011fa5ea2cf" + integrity sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw== + dependencies: + call-bind "^1.0.0" + get-intrinsic "^1.0.2" + object-inspect "^1.9.0" + +signal-exit@^3.0.0, signal-exit@^3.0.2, signal-exit@^3.0.3: + version "3.0.7" + resolved 
"https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.7.tgz#a9a1767f8af84155114eaabd73f99273c8f59ad9" + integrity sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ== + +sinon@^13.0.1: + version "13.0.2" + resolved "https://registry.yarnpkg.com/sinon/-/sinon-13.0.2.tgz#c6a8ddd655dc1415bbdc5ebf0e5b287806850c3a" + integrity sha512-KvOrztAVqzSJWMDoxM4vM+GPys1df2VBoXm+YciyB/OLMamfS3VXh3oGh5WtrAGSzrgczNWFFY22oKb7Fi5eeA== + dependencies: + "@sinonjs/commons" "^1.8.3" + "@sinonjs/fake-timers" "^9.1.2" + "@sinonjs/samsam" "^6.1.1" + diff "^5.0.0" + nise "^5.1.1" + supports-color "^7.2.0" + +sisteransi@^1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/sisteransi/-/sisteransi-1.0.5.tgz#134d681297756437cc05ca01370d3a7a571075ed" + integrity sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg== + +slash@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/slash/-/slash-3.0.0.tgz#6539be870c165adbd5240220dbe361f1bc4d4634" + integrity sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q== + +slice-ansi@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/slice-ansi/-/slice-ansi-4.0.0.tgz#500e8dd0fd55b05815086255b3195adf2a45fe6b" + integrity sha512-qMCMfhY040cVHT43K9BFygqYbUPFZKHOg7K73mtTWJRb8pyP3fzf4Ixd5SzdEJQ6MRUg/WBnOLxghZtKKurENQ== + dependencies: + ansi-styles "^4.0.0" + astral-regex "^2.0.0" + is-fullwidth-code-point "^3.0.0" + +snapdragon-node@^2.0.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/snapdragon-node/-/snapdragon-node-2.1.1.tgz#6c175f86ff14bdb0724563e8f3c1b021a286853b" + integrity sha512-O27l4xaMYt/RSQ5TR3vpWCAB5Kb/czIcqUFOM/C4fYcLnbZUc1PkjTAMjof2pBWaSTwOUd6qUHcFGVGj7aIwnw== + dependencies: + define-property "^1.0.0" + isobject "^3.0.0" + snapdragon-util "^3.0.1" + +snapdragon-util@^3.0.1: + version "3.0.1" + resolved 
"https://registry.yarnpkg.com/snapdragon-util/-/snapdragon-util-3.0.1.tgz#f956479486f2acd79700693f6f7b805e45ab56e2" + integrity sha512-mbKkMdQKsjX4BAL4bRYTj21edOf8cN7XHdYUJEe+Zn99hVEYcMvKPct1IqNe7+AZPirn8BCDOQBHQZknqmKlZQ== + dependencies: + kind-of "^3.2.0" + +snapdragon@^0.8.1: + version "0.8.2" + resolved "https://registry.yarnpkg.com/snapdragon/-/snapdragon-0.8.2.tgz#64922e7c565b0e14204ba1aa7d6964278d25182d" + integrity sha512-FtyOnWN/wCHTVXOMwvSv26d+ko5vWlIDD6zoUJ7LW8vh+ZBC8QdljveRP+crNrtBwioEUWy/4dMtbBjA4ioNlg== + dependencies: + base "^0.11.1" + debug "^2.2.0" + define-property "^0.2.5" + extend-shallow "^2.0.1" + map-cache "^0.2.2" + source-map "^0.5.6" + source-map-resolve "^0.5.0" + use "^3.1.0" + +sockjs@^0.3.24: + version "0.3.24" + resolved "https://registry.yarnpkg.com/sockjs/-/sockjs-0.3.24.tgz#c9bc8995f33a111bea0395ec30aa3206bdb5ccce" + integrity sha512-GJgLTZ7vYb/JtPSSZ10hsOYIvEYsjbNU+zPdIHcUaWVNUEPivzxku31865sSSud0Da0W4lEeOPlmw93zLQchuQ== + dependencies: + faye-websocket "^0.11.3" + uuid "^8.3.2" + websocket-driver "^0.7.4" + +source-map-js@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/source-map-js/-/source-map-js-1.0.2.tgz#adbc361d9c62df380125e7f161f71c826f1e490c" + integrity sha512-R0XvVJ9WusLiqTCEiGCmICCMplcCkIwwR11mOSD9CR5u+IXYdiseeEuXCVAjS54zqwkLcPNnmU4OeJ6tUrWhDw== + +source-map-resolve@^0.5.0: + version "0.5.3" + resolved "https://registry.yarnpkg.com/source-map-resolve/-/source-map-resolve-0.5.3.tgz#190866bece7553e1f8f267a2ee82c606b5509a1a" + integrity sha512-Htz+RnsXWk5+P2slx5Jh3Q66vhQj1Cllm0zvnaY98+NFx+Dv2CF/f5O/t8x+KaNdrdIAsruNzoh/KpialbqAnw== + dependencies: + atob "^2.1.2" + decode-uri-component "^0.2.0" + resolve-url "^0.2.1" + source-map-url "^0.4.0" + urix "^0.1.0" + +source-map-resolve@^0.6.0: + version "0.6.0" + resolved "https://registry.yarnpkg.com/source-map-resolve/-/source-map-resolve-0.6.0.tgz#3d9df87e236b53f16d01e58150fc7711138e5ed2" + integrity 
sha512-KXBr9d/fO/bWo97NXsPIAW1bFSBOuCnjbNTBMO7N59hsv5i9yzRDfcYwwt0l04+VqnKC+EwzvJZIP/qkuMgR/w== + dependencies: + atob "^2.1.2" + decode-uri-component "^0.2.0" + +source-map-support@^0.5.6, source-map-support@~0.5.20: + version "0.5.21" + resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.21.tgz#04fe7c7f9e1ed2d662233c28cb2b35b9f63f6e4f" + integrity sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w== + dependencies: + buffer-from "^1.0.0" + source-map "^0.6.0" + +source-map-url@^0.4.0: + version "0.4.1" + resolved "https://registry.yarnpkg.com/source-map-url/-/source-map-url-0.4.1.tgz#0af66605a745a5a2f91cf1bbf8a7afbc283dec56" + integrity sha512-cPiFOTLUKvJFIg4SKVScy4ilPPW6rFgMgfuZJPNoDuMs3nC1HbMUycBoJw77xFIp6z1UJQJOfx6C9GMH80DiTw== + +source-map@^0.5.6: + version "0.5.7" + resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.5.7.tgz#8a039d2d1021d22d1ea14c80d8ea468ba2ef3fcc" + integrity sha512-LbrmJOMUSdEVxIKvdcJzQC+nQhe8FUZQTXQy6+I75skNgn3OoQ0DZA8YnFa7gp8tqtL3KPf1kmo0R5DoApeSGQ== + +source-map@^0.6.0, source-map@^0.6.1, source-map@~0.6.1: + version "0.6.1" + resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.6.1.tgz#74722af32e9614e9c287a8d0bbde48b5e2f1a263" + integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g== + +source-map@^0.7.3: + version "0.7.4" + resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.7.4.tgz#a9bbe705c9d8846f4e08ff6765acf0f1b0898656" + integrity sha512-l3BikUxvPOcn5E74dZiq5BGsTb5yEwhaTSzccU6t4sDOH8NWJCstKO5QT2CvtFoK6F0saL7p9xHAqHOlCPJygA== + +spdx-correct@^3.0.0: + version "3.1.1" + resolved "https://registry.yarnpkg.com/spdx-correct/-/spdx-correct-3.1.1.tgz#dece81ac9c1e6713e5f7d1b6f17d468fa53d89a9" + integrity sha512-cOYcUWwhCuHCXi49RhFRCyJEK3iPj1Ziz9DpViV3tbZOwXD49QzIN3MpOLJNxh2qwq2lJJZaKMVw9qNi4jTC0w== + dependencies: + spdx-expression-parse "^3.0.0" + spdx-license-ids "^3.0.0" + 
+spdx-exceptions@^2.1.0: + version "2.3.0" + resolved "https://registry.yarnpkg.com/spdx-exceptions/-/spdx-exceptions-2.3.0.tgz#3f28ce1a77a00372683eade4a433183527a2163d" + integrity sha512-/tTrYOC7PPI1nUAgx34hUpqXuyJG+DTHJTnIULG4rDygi4xu/tfgmq1e1cIRwRzwZgo4NLySi+ricLkZkw4i5A== + +spdx-expression-parse@^3.0.0: + version "3.0.1" + resolved "https://registry.yarnpkg.com/spdx-expression-parse/-/spdx-expression-parse-3.0.1.tgz#cf70f50482eefdc98e3ce0a6833e4a53ceeba679" + integrity sha512-cbqHunsQWnJNE6KhVSMsMeH5H/L9EpymbzqTQ3uLwNCLZ1Q481oWaofqH7nO6V07xlXwY6PhQdQ2IedWx/ZK4Q== + dependencies: + spdx-exceptions "^2.1.0" + spdx-license-ids "^3.0.0" + +spdx-license-ids@^3.0.0: + version "3.0.11" + resolved "https://registry.yarnpkg.com/spdx-license-ids/-/spdx-license-ids-3.0.11.tgz#50c0d8c40a14ec1bf449bae69a0ea4685a9d9f95" + integrity sha512-Ctl2BrFiM0X3MANYgj3CkygxhRmr9mi6xhejbdO960nF6EDJApTYpn0BQnDKlnNBULKiCN1n3w9EBkHK8ZWg+g== + +spdy-transport@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/spdy-transport/-/spdy-transport-3.0.0.tgz#00d4863a6400ad75df93361a1608605e5dcdcf31" + integrity sha512-hsLVFE5SjA6TCisWeJXFKniGGOpBgMLmerfO2aCyCU5s7nJ/rpAepqmFifv/GCbSbueEeAJJnmSQ2rKC/g8Fcw== + dependencies: + debug "^4.1.0" + detect-node "^2.0.4" + hpack.js "^2.1.6" + obuf "^1.1.2" + readable-stream "^3.0.6" + wbuf "^1.7.3" + +spdy@^4.0.2: + version "4.0.2" + resolved "https://registry.yarnpkg.com/spdy/-/spdy-4.0.2.tgz#b74f466203a3eda452c02492b91fb9e84a27677b" + integrity sha512-r46gZQZQV+Kl9oItvl1JZZqJKGr+oEkB08A6BzkiR7593/7IbtuncXHd2YoYeTsG4157ZssMu9KYvUHLcjcDoA== + dependencies: + debug "^4.1.0" + handle-thing "^2.0.0" + http-deceiver "^1.2.7" + select-hose "^2.0.0" + spdy-transport "^3.0.0" + +split-string@^3.0.1, split-string@^3.0.2: + version "3.1.0" + resolved "https://registry.yarnpkg.com/split-string/-/split-string-3.1.0.tgz#7cb09dda3a86585705c64b39a6466038682e8fe2" + integrity 
sha512-NzNVhJDYpwceVVii8/Hu6DKfD2G+NrQHlS/V/qgv763EYudVwEcMQNxd2lh+0VrUByXN/oJkl5grOhYWvQUYiw== + dependencies: + extend-shallow "^3.0.0" + +sprintf-js@~1.0.2: + version "1.0.3" + resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c" + integrity sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g== + +stack-utils@^2.0.2: + version "2.0.5" + resolved "https://registry.yarnpkg.com/stack-utils/-/stack-utils-2.0.5.tgz#d25265fca995154659dbbfba3b49254778d2fdd5" + integrity sha512-xrQcmYhOsn/1kX+Vraq+7j4oE2j/6BFscZ0etmYg81xuM8Gq0022Pxb8+IqgOFUIaxHs0KaSb7T1+OegiNrNFA== + dependencies: + escape-string-regexp "^2.0.0" + +static-extend@^0.1.1: + version "0.1.2" + resolved "https://registry.yarnpkg.com/static-extend/-/static-extend-0.1.2.tgz#60809c39cbff55337226fd5e0b520f341f1fb5c6" + integrity sha512-72E9+uLc27Mt718pMHt9VMNiAL4LMsmDbBva8mxWUCkT07fSzEGMYUCk0XWY6lp0j6RBAG4cJ3mWuZv2OE3s0g== + dependencies: + define-property "^0.2.5" + object-copy "^0.1.0" + +statuses@2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/statuses/-/statuses-2.0.1.tgz#55cb000ccf1d48728bd23c685a063998cf1a1b63" + integrity sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ== + +"statuses@>= 1.4.0 < 2": + version "1.5.0" + resolved "https://registry.yarnpkg.com/statuses/-/statuses-1.5.0.tgz#161c7dac177659fd9811f43771fa99381478628c" + integrity sha512-OpZ3zP+jT1PI7I8nemJX4AKmAX070ZkYPVWV/AaKTJl+tXCTGyVdC1a4SL8RUQYEwk/f34ZX8UTykN68FwrqAA== + +string-length@^4.0.1: + version "4.0.2" + resolved "https://registry.yarnpkg.com/string-length/-/string-length-4.0.2.tgz#a8a8dc7bd5c1a82b9b3c8b87e125f66871b6e57a" + integrity sha512-+l6rNN5fYHNhZZy41RXsYptCjA2Igmq4EG7kZAYFQI1E1VTXarr6ZPXBg6eq7Y6eK4FEhY6AJlyuFIb/v/S0VQ== + dependencies: + char-regex "^1.0.2" + strip-ansi "^6.0.0" + +string-width@^4.1.0, string-width@^4.2.0, string-width@^4.2.3: + version 
"4.2.3" + resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010" + integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g== + dependencies: + emoji-regex "^8.0.0" + is-fullwidth-code-point "^3.0.0" + strip-ansi "^6.0.1" + +string.prototype.matchall@^4.0.7: + version "4.0.7" + resolved "https://registry.yarnpkg.com/string.prototype.matchall/-/string.prototype.matchall-4.0.7.tgz#8e6ecb0d8a1fb1fda470d81acecb2dba057a481d" + integrity sha512-f48okCX7JiwVi1NXCVWcFnZgADDC/n2vePlQ/KUCNqCikLLilQvwjMO8+BHVKvgzH0JB0J9LEPgxOGT02RoETg== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + es-abstract "^1.19.1" + get-intrinsic "^1.1.1" + has-symbols "^1.0.3" + internal-slot "^1.0.3" + regexp.prototype.flags "^1.4.1" + side-channel "^1.0.4" + +string.prototype.trim@^1.2.1: + version "1.2.6" + resolved "https://registry.yarnpkg.com/string.prototype.trim/-/string.prototype.trim-1.2.6.tgz#824960787db37a9e24711802ed0c1d1c0254f83e" + integrity sha512-8lMR2m+U0VJTPp6JjvJTtGyc4FIGq9CdRt7O9p6T0e6K4vjU+OP+SQJpbe/SBmRcCUIvNUnjsbmY6lnMp8MhsQ== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.4" + es-abstract "^1.19.5" + +string.prototype.trimend@^1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/string.prototype.trimend/-/string.prototype.trimend-1.0.5.tgz#914a65baaab25fbdd4ee291ca7dde57e869cb8d0" + integrity sha512-I7RGvmjV4pJ7O3kdf+LXFpVfdNOxtCW/2C8f6jNiW4+PQchwxkCDzlk1/7p+Wl4bqFIZeF47qAHXLuHHWKAxog== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.4" + es-abstract "^1.19.5" + +string.prototype.trimstart@^1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/string.prototype.trimstart/-/string.prototype.trimstart-1.0.5.tgz#5466d93ba58cfa2134839f81d7f42437e8c01fef" + integrity sha512-THx16TJCGlsN0o6dl2o6ncWUsdgnLRSA23rRE5pyGBw/mLr3Ej/R2LaqCtgP8VNMGZsvMWnf9ooZPyY2bHvUFg== + dependencies: + call-bind "^1.0.2" 
+ define-properties "^1.1.4" + es-abstract "^1.19.5" + +string_decoder@^1.1.1: + version "1.3.0" + resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.3.0.tgz#42f114594a46cf1a8e30b0a84f56c78c3edac21e" + integrity sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA== + dependencies: + safe-buffer "~5.2.0" + +string_decoder@~1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.1.1.tgz#9cf1611ba62685d7030ae9e4ba34149c3af03fc8" + integrity sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg== + dependencies: + safe-buffer "~5.1.0" + +strip-ansi@^6.0.0, strip-ansi@^6.0.1: + version "6.0.1" + resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9" + integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A== + dependencies: + ansi-regex "^5.0.1" + +strip-bom@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-4.0.0.tgz#9c3505c1db45bcedca3d9cf7a16f5c5aa3901878" + integrity sha512-3xurFv5tEgii33Zi8Jtp55wEIILR9eh34FAW00PZf+JnSsTmV/ioewSgQl97JHvgjoRGwPShsWm+IdrxB35d0w== + +strip-eof@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/strip-eof/-/strip-eof-1.0.0.tgz#bb43ff5598a6eb05d89b59fcd129c983313606bf" + integrity sha512-7FCwGGmx8mD5xQd3RPUvnSpUXHM3BWuzjtpD4TXsfcZ9EL4azvVVUscFYwD9nx8Kh+uCBC00XBtAykoMHwTh8Q== + +strip-final-newline@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/strip-final-newline/-/strip-final-newline-2.0.0.tgz#89b852fb2fcbe936f6f4b3187afb0a12c1ab58ad" + integrity sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA== + +strip-indent@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/strip-indent/-/strip-indent-3.0.0.tgz#c32e1cee940b6b3432c771bc2c54bcce73cd3001" + integrity 
sha512-laJTa3Jb+VQpaC6DseHhF7dXVqHTfJPCRDaEbid/drOhgitgYku/letMUqOXFoWV0zIIUbjpdH2t+tYj4bQMRQ== + dependencies: + min-indent "^1.0.0" + +strip-json-comments@^3.1.0, strip-json-comments@^3.1.1: + version "3.1.1" + resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-3.1.1.tgz#31f1281b3832630434831c310c01cccda8cbe006" + integrity sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig== + +style-loader@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/style-loader/-/style-loader-2.0.0.tgz#9669602fd4690740eaaec137799a03addbbc393c" + integrity sha512-Z0gYUJmzZ6ZdRUqpg1r8GsaFKypE+3xAzuFeMuoHgjc9KZv3wMyCRjQIWEbhoFSq7+7yoHXySDJyyWQaPajeiQ== + dependencies: + loader-utils "^2.0.0" + schema-utils "^3.0.0" + +supports-color@^5.3.0: + version "5.5.0" + resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-5.5.0.tgz#e2e69a44ac8772f78a1ec0b35b689df6530efc8f" + integrity sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow== + dependencies: + has-flag "^3.0.0" + +supports-color@^7.0.0, supports-color@^7.1.0, supports-color@^7.2.0: + version "7.2.0" + resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-7.2.0.tgz#1b7dcdcb32b8138801b3e478ba6a51caa89648da" + integrity sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw== + dependencies: + has-flag "^4.0.0" + +supports-color@^8.0.0: + version "8.1.1" + resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-8.1.1.tgz#cd6fc17e28500cff56c1b86c0a7fd4a54a73005c" + integrity sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q== + dependencies: + has-flag "^4.0.0" + +supports-hyperlinks@^2.0.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/supports-hyperlinks/-/supports-hyperlinks-2.2.0.tgz#4f77b42488765891774b70c79babd87f9bd594bb" + integrity 
sha512-6sXEzV5+I5j8Bmq9/vUphGRM/RJNT9SCURJLjwfOg51heRtguGWDzcaBlgAzKhQa0EVNpPEKzQuBwZ8S8WaCeQ== + dependencies: + has-flag "^4.0.0" + supports-color "^7.0.0" + +supports-preserve-symlinks-flag@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz#6eda4bd344a3c94aea376d4cc31bc77311039e09" + integrity sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w== + +symbol-observable@^1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/symbol-observable/-/symbol-observable-1.2.0.tgz#c22688aed4eab3cdc2dfeacbb561660560a00804" + integrity sha512-e900nM8RRtGhlV36KGEU9k65K3mPb1WV70OdjfxlG2EAuM1noi/E/BaW/uMhL7bPEssK8QV57vN3esixjUvcXQ== + +symbol-tree@^3.2.4: + version "3.2.4" + resolved "https://registry.yarnpkg.com/symbol-tree/-/symbol-tree-3.2.4.tgz#430637d248ba77e078883951fb9aa0eed7c63fa2" + integrity sha512-9QNk5KwDF+Bvz+PyObkmSYjI5ksVUYtjW7AU22r2NKcfLJcXp96hkDWU3+XndOsUb+AQ9QhfzfCT2O+CNWT5Tw== + +table@^6.0.9: + version "6.8.0" + resolved "https://registry.yarnpkg.com/table/-/table-6.8.0.tgz#87e28f14fa4321c3377ba286f07b79b281a3b3ca" + integrity sha512-s/fitrbVeEyHKFa7mFdkuQMWlH1Wgw/yEXMt5xACT4ZpzWFluehAxRtUUQKPuWhaLAWhFcVx6w3oC8VKaUfPGA== + dependencies: + ajv "^8.0.1" + lodash.truncate "^4.4.2" + slice-ansi "^4.0.0" + string-width "^4.2.3" + strip-ansi "^6.0.1" + +tapable@^2.1.1, tapable@^2.2.0: + version "2.2.1" + resolved "https://registry.yarnpkg.com/tapable/-/tapable-2.2.1.tgz#1967a73ef4060a82f12ab96af86d52fdb76eeca0" + integrity sha512-GNzQvQTOIP6RyTfE2Qxb8ZVlNmw0n88vp1szwWRimP02mnTsx3Wtn5qRdqY9w2XduFNUgvOwhNnQsjwCp+kqaQ== + +terminal-link@^2.0.0: + version "2.1.1" + resolved "https://registry.yarnpkg.com/terminal-link/-/terminal-link-2.1.1.tgz#14a64a27ab3c0df933ea546fba55f2d078edc994" + integrity sha512-un0FmiRUQNr5PJqy9kP7c40F5BOfpGlYTrxonDChEZB7pzZxRNp/bt+ymiy9/npwXya9KH99nJ/GXFIiUkYGFQ== + dependencies: + ansi-escapes 
"^4.2.1" + supports-hyperlinks "^2.0.0" + +terser-webpack-plugin@^5.1.3: + version "5.3.3" + resolved "https://registry.yarnpkg.com/terser-webpack-plugin/-/terser-webpack-plugin-5.3.3.tgz#8033db876dd5875487213e87c627bca323e5ed90" + integrity sha512-Fx60G5HNYknNTNQnzQ1VePRuu89ZVYWfjRAeT5rITuCY/1b08s49e5kSQwHDirKZWuoKOBRFS98EUUoZ9kLEwQ== + dependencies: + "@jridgewell/trace-mapping" "^0.3.7" + jest-worker "^27.4.5" + schema-utils "^3.1.1" + serialize-javascript "^6.0.0" + terser "^5.7.2" + +terser@^5.7.2: + version "5.14.2" + resolved "https://registry.yarnpkg.com/terser/-/terser-5.14.2.tgz#9ac9f22b06994d736174f4091aa368db896f1c10" + integrity sha512-oL0rGeM/WFQCUd0y2QrWxYnq7tfSuKBiqTjRPWrRgB46WD/kiwHwF8T23z78H6Q6kGCuuHcPB+KULHRdxvVGQA== + dependencies: + "@jridgewell/source-map" "^0.3.2" + acorn "^8.5.0" + commander "^2.20.0" + source-map-support "~0.5.20" + +test-exclude@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/test-exclude/-/test-exclude-6.0.0.tgz#04a8698661d805ea6fa293b6cb9e63ac044ef15e" + integrity sha512-cAGWPIyOHU6zlmg88jwm7VRyXnMN7iV68OGAbYDk/Mh/xC/pzVPlQtY6ngoIH/5/tciuhGfvESU8GrHrcxD56w== + dependencies: + "@istanbuljs/schema" "^0.1.2" + glob "^7.1.4" + minimatch "^3.0.4" + +text-table@^0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/text-table/-/text-table-0.2.0.tgz#7f5ee823ae805207c00af2df4a84ec3fcfa570b4" + integrity sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw== + +throat@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/throat/-/throat-5.0.0.tgz#c5199235803aad18754a667d659b5e72ce16764b" + integrity sha512-fcwX4mndzpLQKBS1DVYhGAcYaYt7vsHNIvQV+WXMvnow5cgjPphq5CaayLaGsjRdSCKZFNGt7/GYAuXaNOiYCA== + +thunky@^1.0.2: + version "1.1.0" + resolved "https://registry.yarnpkg.com/thunky/-/thunky-1.1.0.tgz#5abaf714a9405db0504732bbccd2cedd9ef9537d" + integrity sha512-eHY7nBftgThBqOyHGVN+l8gF0BucP09fMo0oO/Lb0w1OF80dJv+lDVpXG60WMQvkcxAkNybKsrEIE3ZtKGmPrA== + 
+tiny-invariant@^1.0.2: + version "1.2.0" + resolved "https://registry.yarnpkg.com/tiny-invariant/-/tiny-invariant-1.2.0.tgz#a1141f86b672a9148c72e978a19a73b9b94a15a9" + integrity sha512-1Uhn/aqw5C6RI4KejVeTg6mIS7IqxnLJ8Mv2tV5rTc0qWobay7pDUz6Wi392Cnc8ak1H0F2cjoRzb2/AW4+Fvg== + +tiny-warning@^1.0.0, tiny-warning@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/tiny-warning/-/tiny-warning-1.0.3.tgz#94a30db453df4c643d0fd566060d60a875d84754" + integrity sha512-lBN9zLN/oAf68o3zNXYrdCt1kP8WsiGW8Oo2ka41b2IM5JL/S1CTyX1rW0mb/zSuJun0ZUrDxx4sqvYS2FWzPA== + +tmpl@1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/tmpl/-/tmpl-1.0.5.tgz#8683e0b902bb9c20c4f726e3c0b69f36518c07cc" + integrity sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw== + +to-fast-properties@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/to-fast-properties/-/to-fast-properties-2.0.0.tgz#dc5e698cbd079265bc73e0377681a4e4e83f616e" + integrity sha512-/OaKK0xYrs3DmxRYqL/yDc+FxFUVYhDlXMhRmv3z915w2HF1tnN1omB354j8VUGO/hbRzyD6Y3sA7v7GS/ceog== + +to-object-path@^0.3.0: + version "0.3.0" + resolved "https://registry.yarnpkg.com/to-object-path/-/to-object-path-0.3.0.tgz#297588b7b0e7e0ac08e04e672f85c1f4999e17af" + integrity sha512-9mWHdnGRuh3onocaHzukyvCZhzvr6tiflAy/JRFXcJX0TjgfWA9pk9t8CMbzmBE4Jfw58pXbkngtBtqYxzNEyg== + dependencies: + kind-of "^3.0.2" + +to-regex-range@^2.1.0: + version "2.1.1" + resolved "https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-2.1.1.tgz#7c80c17b9dfebe599e27367e0d4dd5590141db38" + integrity sha512-ZZWNfCjUokXXDGXFpZehJIkZqq91BcULFq/Pi7M5i4JnxXdhMKAK682z8bCW3o8Hj1wuuzoKcW3DfVzaP6VuNg== + dependencies: + is-number "^3.0.0" + repeat-string "^1.6.1" + +to-regex-range@^5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-5.0.1.tgz#1648c44aae7c8d988a326018ed72f5b4dd0392e4" + integrity 
sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ== + dependencies: + is-number "^7.0.0" + +to-regex@^3.0.1, to-regex@^3.0.2: + version "3.0.2" + resolved "https://registry.yarnpkg.com/to-regex/-/to-regex-3.0.2.tgz#13cfdd9b336552f30b51f33a8ae1b42a7a7599ce" + integrity sha512-FWtleNAtZ/Ki2qtqej2CXTOayOH9bHDQF+Q48VpWyDXjbYxA4Yz8iDB31zXOBUlOHHKidDbqGVrTUvQMPmBGBw== + dependencies: + define-property "^2.0.2" + extend-shallow "^3.0.2" + regex-not "^1.0.2" + safe-regex "^1.1.0" + +toidentifier@1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/toidentifier/-/toidentifier-1.0.1.tgz#3be34321a88a820ed1bd80dfaa33e479fbb8dd35" + integrity sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA== + +tough-cookie@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-4.0.0.tgz#d822234eeca882f991f0f908824ad2622ddbece4" + integrity sha512-tHdtEpQCMrc1YLrMaqXXcj6AxhYi/xgit6mZu1+EDWUn+qhUf8wMQoFIy9NXuq23zAwtcB0t/MjACGR18pcRbg== + dependencies: + psl "^1.1.33" + punycode "^2.1.1" + universalify "^0.1.2" + +tr46@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/tr46/-/tr46-2.1.0.tgz#fa87aa81ca5d5941da8cbf1f9b749dc969a4e240" + integrity sha512-15Ih7phfcdP5YxqiB+iDtLoaTz4Nd35+IiAv0kQ5FNKHzXgdWqPoTIqEDDJmXceQt4JZk6lVPT8lnDlPpGDppw== + dependencies: + punycode "^2.1.1" + +type-check@^0.4.0, type-check@~0.4.0: + version "0.4.0" + resolved "https://registry.yarnpkg.com/type-check/-/type-check-0.4.0.tgz#07b8203bfa7056c0657050e3ccd2c37730bab8f1" + integrity sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew== + dependencies: + prelude-ls "^1.2.1" + +type-check@~0.3.2: + version "0.3.2" + resolved "https://registry.yarnpkg.com/type-check/-/type-check-0.3.2.tgz#5884cab512cf1d355e3fb784f30804b2b520db72" + integrity 
sha512-ZCmOJdvOWDBYJlzAoFkC+Q0+bUyEOS1ltgp1MGU03fqHG+dbi9tBFU2Rd9QKiDZFAYrhPh2JUf7rZRIuHRKtOg== + dependencies: + prelude-ls "~1.1.2" + +type-detect@4.0.8, type-detect@^4.0.8: + version "4.0.8" + resolved "https://registry.yarnpkg.com/type-detect/-/type-detect-4.0.8.tgz#7646fb5f18871cfbb7749e69bd39a6388eb7450c" + integrity sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g== + +type-fest@^0.20.2: + version "0.20.2" + resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.20.2.tgz#1bf207f4b28f91583666cb5fbd327887301cd5f4" + integrity sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ== + +type-fest@^0.21.3: + version "0.21.3" + resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.21.3.tgz#d260a24b0198436e133fa26a524a6d65fa3b2e37" + integrity sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w== + +type-fest@^0.6.0: + version "0.6.0" + resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.6.0.tgz#8d2a2370d3df886eb5c90ada1c5bf6188acf838b" + integrity sha512-q+MB8nYR1KDLrgr4G5yemftpMC7/QLqVndBmEEdqzmNj5dcFOO4Oo8qlwZE3ULT3+Zim1F8Kq4cBnikNhlCMlg== + +type-fest@^0.8.1: + version "0.8.1" + resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.8.1.tgz#09e249ebde851d3b1e48d27c105444667f17b83d" + integrity sha512-4dbzIzqvjtgiM5rw1k5rEHtBANKmdudhGyBEajN01fEyhaAIhsoKNy6y7+IN93IfpFtwY9iqi7kD+xwKhQsNJA== + +type-is@~1.6.18: + version "1.6.18" + resolved "https://registry.yarnpkg.com/type-is/-/type-is-1.6.18.tgz#4e552cd05df09467dcbc4ef739de89f2cf37c131" + integrity sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g== + dependencies: + media-typer "0.3.0" + mime-types "~2.1.24" + +typedarray-to-buffer@^3.1.5: + version "3.1.5" + resolved "https://registry.yarnpkg.com/typedarray-to-buffer/-/typedarray-to-buffer-3.1.5.tgz#a97ee7a9ff42691b9f783ff1bc5112fe3fca9080" + integrity 
sha512-zdu8XMNEDepKKR+XYOXAVPtWui0ly0NtohUscw+UmaHiAWT8hrV1rr//H6V+0DvJ3OQ19S979M0laLfX8rm82Q== + dependencies: + is-typedarray "^1.0.0" + +unbox-primitive@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/unbox-primitive/-/unbox-primitive-1.0.2.tgz#29032021057d5e6cdbd08c5129c226dff8ed6f9e" + integrity sha512-61pPlCD9h51VoreyJ0BReideM3MDKMKnh6+V9L08331ipq6Q8OFXZYiqP6n/tbHx4s5I9uRhcye6BrbkizkBDw== + dependencies: + call-bind "^1.0.2" + has-bigints "^1.0.2" + has-symbols "^1.0.3" + which-boxed-primitive "^1.0.2" + +uncontrollable@^7.2.1: + version "7.2.1" + resolved "https://registry.yarnpkg.com/uncontrollable/-/uncontrollable-7.2.1.tgz#1fa70ba0c57a14d5f78905d533cf63916dc75738" + integrity sha512-svtcfoTADIB0nT9nltgjujTi7BzVmwjZClOmskKu/E8FW9BXzg9os8OLr4f8Dlnk0rYWJIWr4wv9eKUXiQvQwQ== + dependencies: + "@babel/runtime" "^7.6.3" + "@types/react" ">=16.9.11" + invariant "^2.2.4" + react-lifecycles-compat "^3.0.4" + +unicode-canonical-property-names-ecmascript@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/unicode-canonical-property-names-ecmascript/-/unicode-canonical-property-names-ecmascript-2.0.0.tgz#301acdc525631670d39f6146e0e77ff6bbdebddc" + integrity sha512-yY5PpDlfVIU5+y/BSCxAJRBIS1Zc2dDG3Ujq+sR0U+JjUevW2JhocOF+soROYDSaAezOzOKuyyixhD6mBknSmQ== + +unicode-match-property-ecmascript@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/unicode-match-property-ecmascript/-/unicode-match-property-ecmascript-2.0.0.tgz#54fd16e0ecb167cf04cf1f756bdcc92eba7976c3" + integrity sha512-5kaZCrbp5mmbz5ulBkDkbY0SsPOjKqVS35VpL9ulMPfSl0J0Xsm+9Evphv9CoIZFwre7aJoa94AY6seMKGVN5Q== + dependencies: + unicode-canonical-property-names-ecmascript "^2.0.0" + unicode-property-aliases-ecmascript "^2.0.0" + +unicode-match-property-value-ecmascript@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/unicode-match-property-value-ecmascript/-/unicode-match-property-value-ecmascript-2.0.0.tgz#1a01aa57247c14c568b89775a54938788189a714" 
+ integrity sha512-7Yhkc0Ye+t4PNYzOGKedDhXbYIBe1XEQYQxOPyhcXNMJ0WCABqqj6ckydd6pWRZTHV4GuCPKdBAUiMc60tsKVw== + +unicode-property-aliases-ecmascript@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/unicode-property-aliases-ecmascript/-/unicode-property-aliases-ecmascript-2.0.0.tgz#0a36cb9a585c4f6abd51ad1deddb285c165297c8" + integrity sha512-5Zfuy9q/DFr4tfO7ZPeVXb1aPoeQSdeFMLpYuFebehDAhbuevLs5yxSZmIFN1tP5F9Wl4IpJrYojg85/zgyZHQ== + +union-value@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/union-value/-/union-value-1.0.1.tgz#0b6fe7b835aecda61c6ea4d4f02c14221e109847" + integrity sha512-tJfXmxMeWYnczCVs7XAEvIV7ieppALdyepWMkHkwciRpZraG/xwT+s2JN8+pr1+8jCRf80FFzvr+MpQeeoF4Xg== + dependencies: + arr-union "^3.1.0" + get-value "^2.0.6" + is-extendable "^0.1.1" + set-value "^2.0.1" + +universalify@^0.1.2: + version "0.1.2" + resolved "https://registry.yarnpkg.com/universalify/-/universalify-0.1.2.tgz#b646f69be3942dabcecc9d6639c80dc105efaa66" + integrity sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg== + +unpipe@1.0.0, unpipe@~1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/unpipe/-/unpipe-1.0.0.tgz#b2bf4ee8514aae6165b4817829d21b2ef49904ec" + integrity sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ== + +unset-value@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/unset-value/-/unset-value-1.0.0.tgz#8376873f7d2335179ffb1e6fc3a8ed0dfc8ab559" + integrity sha512-PcA2tsuGSF9cnySLHTLSh2qrQiJ70mn+r+Glzxv2TWZblxsxCC52BDlZoPCsz7STd9pN7EZetkWZBAvk4cgZdQ== + dependencies: + has-value "^0.3.1" + isobject "^3.0.0" + +update-browserslist-db@^1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/update-browserslist-db/-/update-browserslist-db-1.0.5.tgz#be06a5eedd62f107b7c19eb5bcefb194411abf38" + integrity sha512-dteFFpCyvuDdr9S/ff1ISkKt/9YZxKjI9WlRR99c180GaztJtRa/fn18FdxGVKVsnPY7/a/FDN68mcvUmP4U7Q== + dependencies: 
+ escalade "^3.1.1" + picocolors "^1.0.0" + +uri-js@^4.2.2: + version "4.4.1" + resolved "https://registry.yarnpkg.com/uri-js/-/uri-js-4.4.1.tgz#9b1a52595225859e55f669d928f88c6c57f2a77e" + integrity sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg== + dependencies: + punycode "^2.1.0" + +urix@^0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/urix/-/urix-0.1.0.tgz#da937f7a62e21fec1fd18d49b35c2935067a6c72" + integrity sha512-Am1ousAhSLBeB9cG/7k7r2R0zj50uDRlZHPGbazid5s9rlF1F/QKYObEKSIunSjIOkJZqwRRLpvewjEkM7pSqg== + +use@^3.1.0: + version "3.1.1" + resolved "https://registry.yarnpkg.com/use/-/use-3.1.1.tgz#d50c8cac79a19fbc20f2911f56eb973f4e10070f" + integrity sha512-cwESVXlO3url9YWlFW/TA9cshCEhtu7IKJ/p5soJ/gGpj7vbvFrAY/eIioQ6Dw23KjZhYgiIo8HOs1nQ2vr/oQ== + +util-deprecate@^1.0.1, util-deprecate@^1.0.2, util-deprecate@~1.0.1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf" + integrity sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw== + +utils-merge@1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/utils-merge/-/utils-merge-1.0.1.tgz#9f95710f50a267947b2ccc124741c1028427e713" + integrity sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA== + +uuid@^8.3.0, uuid@^8.3.2: + version "8.3.2" + resolved "https://registry.yarnpkg.com/uuid/-/uuid-8.3.2.tgz#80d5b5ced271bb9af6c445f21a1a04c606cefbe2" + integrity sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg== + +v8-compile-cache@^2.0.3: + version "2.3.0" + resolved "https://registry.yarnpkg.com/v8-compile-cache/-/v8-compile-cache-2.3.0.tgz#2de19618c66dc247dcfb6f99338035d8245a2cee" + integrity sha512-l8lCEmLcLYZh4nbunNZvQCJc5pv7+RCwa8q/LdUx8u7lsWvPDKmpodJAJNwkAhJC//dFY48KuIEmjtd4RViDrA== + +v8-to-istanbul@^7.0.0: + version "7.1.2" 
+ resolved "https://registry.yarnpkg.com/v8-to-istanbul/-/v8-to-istanbul-7.1.2.tgz#30898d1a7fa0c84d225a2c1434fb958f290883c1" + integrity sha512-TxNb7YEUwkLXCQYeudi6lgQ/SZrzNO4kMdlqVxaZPUIUjCv6iSSypUQX70kNBSERpQ8fk48+d61FXk+tgqcWow== + dependencies: + "@types/istanbul-lib-coverage" "^2.0.1" + convert-source-map "^1.6.0" + source-map "^0.7.3" + +validate-npm-package-license@^3.0.1: + version "3.0.4" + resolved "https://registry.yarnpkg.com/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz#fc91f6b9c7ba15c857f4cb2c5defeec39d4f410a" + integrity sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew== + dependencies: + spdx-correct "^3.0.0" + spdx-expression-parse "^3.0.0" + +value-equal@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/value-equal/-/value-equal-1.0.1.tgz#1e0b794c734c5c0cade179c437d356d931a34d6c" + integrity sha512-NOJ6JZCAWr0zlxZt+xqCHNTEKOsrks2HQd4MqhP1qy4z1SkbEP467eNx6TgDKXMvUOb+OENfJCZwM+16n7fRfw== + +vary@~1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/vary/-/vary-1.1.2.tgz#2299f02c6ded30d4a5961b0b9f74524a18f634fc" + integrity sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg== + +w3c-hr-time@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/w3c-hr-time/-/w3c-hr-time-1.0.2.tgz#0a89cdf5cc15822df9c360543676963e0cc308cd" + integrity sha512-z8P5DvDNjKDoFIHK7q8r8lackT6l+jo/Ye3HOle7l9nICP9lf1Ci25fy9vHd0JOWewkIFzXIEig3TdKT7JQ5fQ== + dependencies: + browser-process-hrtime "^1.0.0" + +w3c-xmlserializer@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/w3c-xmlserializer/-/w3c-xmlserializer-2.0.0.tgz#3e7104a05b75146cc60f564380b7f683acf1020a" + integrity sha512-4tzD0mF8iSiMiNs30BiLO3EpfGLZUT2MSX/G+o7ZywDzliWQ3OPtTZ0PTC3B3ca1UAf4cJMHB+2Bf56EriJuRA== + dependencies: + xml-name-validator "^3.0.0" + +walker@^1.0.7, walker@~1.0.5: + version "1.0.8" + resolved 
"https://registry.yarnpkg.com/walker/-/walker-1.0.8.tgz#bd498db477afe573dc04185f011d3ab8a8d7653f" + integrity sha512-ts/8E8l5b7kY0vlWLewOkDXMmPdLcVV4GmOQLyxuSswIJsweeFZtAsMF7k1Nszz+TYBQrlYRmzOnr398y1JemQ== + dependencies: + makeerror "1.0.12" + +warning@^4.0.0, warning@^4.0.3: + version "4.0.3" + resolved "https://registry.yarnpkg.com/warning/-/warning-4.0.3.tgz#16e9e077eb8a86d6af7d64aa1e05fd85b4678ca3" + integrity sha512-rpJyN222KWIvHJ/F53XSZv0Zl/accqHR8et1kpaMTD/fLCRxtV8iX8czMzY7sVZupTI3zcUTg8eycS2kNF9l6w== + dependencies: + loose-envify "^1.0.0" + +watchpack@^2.4.0: + version "2.4.0" + resolved "https://registry.yarnpkg.com/watchpack/-/watchpack-2.4.0.tgz#fa33032374962c78113f93c7f2fb4c54c9862a5d" + integrity sha512-Lcvm7MGST/4fup+ifyKi2hjyIAwcdI4HRgtvTpIUxBRhB+RFtUh8XtDOxUfctVCnhVi+QQj49i91OyvzkJl6cg== + dependencies: + glob-to-regexp "^0.4.1" + graceful-fs "^4.1.2" + +wbuf@^1.1.0, wbuf@^1.7.3: + version "1.7.3" + resolved "https://registry.yarnpkg.com/wbuf/-/wbuf-1.7.3.tgz#c1d8d149316d3ea852848895cb6a0bfe887b87df" + integrity sha512-O84QOnr0icsbFGLS0O3bI5FswxzRr8/gHwWkDlQFskhSPryQXvrTMxjxGP4+iWYoauLoBvfDpkrOauZ+0iZpDA== + dependencies: + minimalistic-assert "^1.0.0" + +webidl-conversions@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-5.0.0.tgz#ae59c8a00b121543a2acc65c0434f57b0fc11aff" + integrity sha512-VlZwKPCkYKxQgeSbH5EyngOmRp7Ww7I9rQLERETtf5ofd9pGeswWiOtogpEO850jziPRarreGxn5QIiTqpb2wA== + +webidl-conversions@^6.1.0: + version "6.1.0" + resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-6.1.0.tgz#9111b4d7ea80acd40f5270d666621afa78b69514" + integrity sha512-qBIvFLGiBpLjfwmYAaHPXsn+ho5xZnGvyGvsarywGNc8VyQJUMHJ8OBKGGrPER0okBeMDaan4mNBlgBROxuI8w== + +webpack-cli@^4.10.0: + version "4.10.0" + resolved "https://registry.yarnpkg.com/webpack-cli/-/webpack-cli-4.10.0.tgz#37c1d69c8d85214c5a65e589378f53aec64dab31" + integrity 
sha512-NLhDfH/h4O6UOy+0LSso42xvYypClINuMNBVVzX4vX98TmTaTUxwRbXdhucbFMd2qLaCTcLq/PdYrvi8onw90w== + dependencies: + "@discoveryjs/json-ext" "^0.5.0" + "@webpack-cli/configtest" "^1.2.0" + "@webpack-cli/info" "^1.5.0" + "@webpack-cli/serve" "^1.7.0" + colorette "^2.0.14" + commander "^7.0.0" + cross-spawn "^7.0.3" + fastest-levenshtein "^1.0.12" + import-local "^3.0.2" + interpret "^2.2.0" + rechoir "^0.7.0" + webpack-merge "^5.7.3" + +webpack-dev-middleware@^5.3.1: + version "5.3.3" + resolved "https://registry.yarnpkg.com/webpack-dev-middleware/-/webpack-dev-middleware-5.3.3.tgz#efae67c2793908e7311f1d9b06f2a08dcc97e51f" + integrity sha512-hj5CYrY0bZLB+eTO+x/j67Pkrquiy7kWepMHmUMoPsmcUaeEnQJqFzHJOyxgWlq746/wUuA64p9ta34Kyb01pA== + dependencies: + colorette "^2.0.10" + memfs "^3.4.3" + mime-types "^2.1.31" + range-parser "^1.2.1" + schema-utils "^4.0.0" + +webpack-dev-server@^4.9.3: + version "4.9.3" + resolved "https://registry.yarnpkg.com/webpack-dev-server/-/webpack-dev-server-4.9.3.tgz#2360a5d6d532acb5410a668417ad549ee3b8a3c9" + integrity sha512-3qp/eoboZG5/6QgiZ3llN8TUzkSpYg1Ko9khWX1h40MIEUNS2mDoIa8aXsPfskER+GbTvs/IJZ1QTBBhhuetSw== + dependencies: + "@types/bonjour" "^3.5.9" + "@types/connect-history-api-fallback" "^1.3.5" + "@types/express" "^4.17.13" + "@types/serve-index" "^1.9.1" + "@types/serve-static" "^1.13.10" + "@types/sockjs" "^0.3.33" + "@types/ws" "^8.5.1" + ansi-html-community "^0.0.8" + bonjour-service "^1.0.11" + chokidar "^3.5.3" + colorette "^2.0.10" + compression "^1.7.4" + connect-history-api-fallback "^2.0.0" + default-gateway "^6.0.3" + express "^4.17.3" + graceful-fs "^4.2.6" + html-entities "^2.3.2" + http-proxy-middleware "^2.0.3" + ipaddr.js "^2.0.1" + open "^8.0.9" + p-retry "^4.5.0" + rimraf "^3.0.2" + schema-utils "^4.0.0" + selfsigned "^2.0.1" + serve-index "^1.9.1" + sockjs "^0.3.24" + spdy "^4.0.2" + webpack-dev-middleware "^5.3.1" + ws "^8.4.2" + +webpack-merge@^5.7.3: + version "5.8.0" + resolved 
"https://registry.yarnpkg.com/webpack-merge/-/webpack-merge-5.8.0.tgz#2b39dbf22af87776ad744c390223731d30a68f61" + integrity sha512-/SaI7xY0831XwP6kzuwhKWVKDP9t1QY1h65lAFLbZqMPIuYcD9QAW4u9STIbU9kaJbPBB/geU/gLr1wDjOhQ+Q== + dependencies: + clone-deep "^4.0.1" + wildcard "^2.0.0" + +webpack-sources@^3.2.3: + version "3.2.3" + resolved "https://registry.yarnpkg.com/webpack-sources/-/webpack-sources-3.2.3.tgz#2d4daab8451fd4b240cc27055ff6a0c2ccea0cde" + integrity sha512-/DyMEOrDgLKKIG0fmvtz+4dUX/3Ghozwgm6iPp8KRhvn+eQf9+Q7GWxVNMk3+uCPWfdXYC4ExGBckIXdFEfH1w== + +webpack@^5.6.0: + version "5.74.0" + resolved "https://registry.yarnpkg.com/webpack/-/webpack-5.74.0.tgz#02a5dac19a17e0bb47093f2be67c695102a55980" + integrity sha512-A2InDwnhhGN4LYctJj6M1JEaGL7Luj6LOmyBHjcI8529cm5p6VXiTIW2sn6ffvEAKmveLzvu4jrihwXtPojlAA== + dependencies: + "@types/eslint-scope" "^3.7.3" + "@types/estree" "^0.0.51" + "@webassemblyjs/ast" "1.11.1" + "@webassemblyjs/wasm-edit" "1.11.1" + "@webassemblyjs/wasm-parser" "1.11.1" + acorn "^8.7.1" + acorn-import-assertions "^1.7.6" + browserslist "^4.14.5" + chrome-trace-event "^1.0.2" + enhanced-resolve "^5.10.0" + es-module-lexer "^0.9.0" + eslint-scope "5.1.1" + events "^3.2.0" + glob-to-regexp "^0.4.1" + graceful-fs "^4.2.9" + json-parse-even-better-errors "^2.3.1" + loader-runner "^4.2.0" + mime-types "^2.1.27" + neo-async "^2.6.2" + schema-utils "^3.1.0" + tapable "^2.1.1" + terser-webpack-plugin "^5.1.3" + watchpack "^2.4.0" + webpack-sources "^3.2.3" + +websocket-driver@>=0.5.1, websocket-driver@^0.7.4: + version "0.7.4" + resolved "https://registry.yarnpkg.com/websocket-driver/-/websocket-driver-0.7.4.tgz#89ad5295bbf64b480abcba31e4953aca706f5760" + integrity sha512-b17KeDIQVjvb0ssuSDF2cYXSg2iztliJ4B9WdsuB6J952qCPKmnVq4DyW5motImXHDC1cBT/1UezrJVsKw5zjg== + dependencies: + http-parser-js ">=0.5.1" + safe-buffer ">=5.1.0" + websocket-extensions ">=0.1.1" + +websocket-extensions@>=0.1.1: + version "0.1.4" + resolved 
"https://registry.yarnpkg.com/websocket-extensions/-/websocket-extensions-0.1.4.tgz#7f8473bc839dfd87608adb95d7eb075211578a42" + integrity sha512-OqedPIGOfsDlo31UNwYbCFMSaO9m9G/0faIHj5/dZFDMFqPTcx6UwqyOy3COEaEOg/9VsGIpdqn62W5KhoKSpg== + +whatwg-encoding@^1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/whatwg-encoding/-/whatwg-encoding-1.0.5.tgz#5abacf777c32166a51d085d6b4f3e7d27113ddb0" + integrity sha512-b5lim54JOPN9HtzvK9HFXvBma/rnfFeqsic0hSpjtDbVxR3dJKLc+KB4V6GgiGOvl7CY/KNh8rxSo9DKQrnUEw== + dependencies: + iconv-lite "0.4.24" + +whatwg-mimetype@^2.3.0: + version "2.3.0" + resolved "https://registry.yarnpkg.com/whatwg-mimetype/-/whatwg-mimetype-2.3.0.tgz#3d4b1e0312d2079879f826aff18dbeeca5960fbf" + integrity sha512-M4yMwr6mAnQz76TbJm914+gPpB/nCwvZbJU28cUD6dR004SAxDLOOSUaB1JDRqLtaOV/vi0IC5lEAGFgrjGv/g== + +whatwg-url@^8.0.0, whatwg-url@^8.5.0: + version "8.7.0" + resolved "https://registry.yarnpkg.com/whatwg-url/-/whatwg-url-8.7.0.tgz#656a78e510ff8f3937bc0bcbe9f5c0ac35941b77" + integrity sha512-gAojqb/m9Q8a5IV96E3fHJM70AzCkgt4uXYX2O7EmuyOnLrViCQlsEBmF9UQIu3/aeAIp2U17rtbpZWNntQqdg== + dependencies: + lodash "^4.7.0" + tr46 "^2.1.0" + webidl-conversions "^6.1.0" + +which-boxed-primitive@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/which-boxed-primitive/-/which-boxed-primitive-1.0.2.tgz#13757bc89b209b049fe5d86430e21cf40a89a8e6" + integrity sha512-bwZdv0AKLpplFY2KZRX6TvyuN7ojjr7lwkg6ml0roIy9YeuSr7JS372qlNW18UQYzgYK9ziGcerWqZOmEn9VNg== + dependencies: + is-bigint "^1.0.1" + is-boolean-object "^1.1.0" + is-number-object "^1.0.4" + is-string "^1.0.5" + is-symbol "^1.0.3" + +which-module@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/which-module/-/which-module-2.0.0.tgz#d9ef07dce77b9902b8a3a8fa4b31c3e3f7e6e87a" + integrity sha512-B+enWhmw6cjfVC7kS8Pj9pCrKSc5txArRyaYGe088shv/FGWH+0Rjx/xPgtsWfsUtS27FkP697E4DDhgrgoc0Q== + +which@^1.2.9: + version "1.3.1" + resolved 
"https://registry.yarnpkg.com/which/-/which-1.3.1.tgz#a45043d54f5805316da8d62f9f50918d3da70b0a" + integrity sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ== + dependencies: + isexe "^2.0.0" + +which@^2.0.1, which@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/which/-/which-2.0.2.tgz#7c6a8dd0a636a0327e10b59c9286eee93f3f51b1" + integrity sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA== + dependencies: + isexe "^2.0.0" + +wildcard@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/wildcard/-/wildcard-2.0.0.tgz#a77d20e5200c6faaac979e4b3aadc7b3dd7f8fec" + integrity sha512-JcKqAHLPxcdb9KM49dufGXn2x3ssnfjbcaQdLlfZsL9rH9wgDQjUtDxbo8NE0F6SFvydeu1VhZe7hZuHsB2/pw== + +word-wrap@^1.2.3, word-wrap@~1.2.3: + version "1.2.3" + resolved "https://registry.yarnpkg.com/word-wrap/-/word-wrap-1.2.3.tgz#610636f6b1f703891bd34771ccb17fb93b47079c" + integrity sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ== + +wrap-ansi@^6.2.0: + version "6.2.0" + resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-6.2.0.tgz#e9393ba07102e6c91a3b221478f0257cd2856e53" + integrity sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA== + dependencies: + ansi-styles "^4.0.0" + string-width "^4.1.0" + strip-ansi "^6.0.0" + +wrappy@1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f" + integrity sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ== + +write-file-atomic@^3.0.0: + version "3.0.3" + resolved "https://registry.yarnpkg.com/write-file-atomic/-/write-file-atomic-3.0.3.tgz#56bd5c5a5c70481cd19c571bd39ab965a5de56e8" + integrity sha512-AvHcyZ5JnSfq3ioSyjrBkH9yW4m7Ayk8/9My/DD9onKeu/94fwrMocemO2QAJFAlnnDN+ZDS+ZjAR5ua1/PV/Q== + dependencies: + imurmurhash "^0.1.4" + 
is-typedarray "^1.0.0" + signal-exit "^3.0.2" + typedarray-to-buffer "^3.1.5" + +ws@^7.4.6: + version "7.5.9" + resolved "https://registry.yarnpkg.com/ws/-/ws-7.5.9.tgz#54fa7db29f4c7cec68b1ddd3a89de099942bb591" + integrity sha512-F+P9Jil7UiSKSkppIiD94dN07AwvFixvLIj1Og1Rl9GGMuNipJnV9JzjD6XuqmAeiswGvUmNLjr5cFuXwNS77Q== + +ws@^8.4.2: + version "8.8.1" + resolved "https://registry.yarnpkg.com/ws/-/ws-8.8.1.tgz#5dbad0feb7ade8ecc99b830c1d77c913d4955ff0" + integrity sha512-bGy2JzvzkPowEJV++hF07hAD6niYSr0JzBNo/J29WsB57A2r7Wlc1UFcTR9IzrPvuNVO4B8LGqF8qcpsVOhJCA== + +xml-name-validator@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/xml-name-validator/-/xml-name-validator-3.0.0.tgz#6ae73e06de4d8c6e47f9fb181f78d648ad457c6a" + integrity sha512-A5CUptxDsvxKJEU3yO6DuWBSJz/qizqzJKOMIfUJHETbBw/sFaDxgd6fxm1ewUaM0jZ444Fc5vC5ROYurg/4Pw== + +xmlchars@^2.2.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/xmlchars/-/xmlchars-2.2.0.tgz#060fe1bcb7f9c76fe2a17db86a9bc3ab894210cb" + integrity sha512-JZnDKK8B0RCDw84FNdDAIpZK+JuJw+s7Lz8nksI7SIuU3UXJJslUthsi+uWBUYOwPFwW7W7PRLRfUKpxjtjFCw== + +y18n@^4.0.0: + version "4.0.3" + resolved "https://registry.yarnpkg.com/y18n/-/y18n-4.0.3.tgz#b5f259c82cd6e336921efd7bfd8bf560de9eeedf" + integrity sha512-JKhqTOwSrqNA1NY5lSztJ1GrBiUodLMmIZuLiDaMRJ+itFd+ABVE8XBjOvIWL+rSqNDC74LCSFmlb/U4UZ4hJQ== + +yallist@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/yallist/-/yallist-4.0.0.tgz#9bb92790d9c0effec63be73519e11a35019a3a72" + integrity sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A== + +yargs-parser@^18.1.2: + version "18.1.3" + resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-18.1.3.tgz#be68c4975c6b2abf469236b0c870362fab09a7b0" + integrity sha512-o50j0JeToy/4K6OZcaQmW6lyXXKhq7csREXcDwk2omFPJEwUNOVtJKvmDr9EI1fAJZUyZcRF7kxGBWmRXudrCQ== + dependencies: + camelcase "^5.0.0" + decamelize "^1.2.0" + +yargs@^15.4.1: + version "15.4.1" + resolved 
"https://registry.yarnpkg.com/yargs/-/yargs-15.4.1.tgz#0d87a16de01aee9d8bec2bfbf74f67851730f4f8" + integrity sha512-aePbxDmcYW++PaqBsJ+HYUFwCdv4LVvdnhBy78E57PIor8/OVvhMrADFFEDh8DHDFRv/O9i3lPhsENjO7QX0+A== + dependencies: + cliui "^6.0.0" + decamelize "^1.2.0" + find-up "^4.1.0" + get-caller-file "^2.0.1" + require-directory "^2.1.1" + require-main-filename "^2.0.0" + set-blocking "^2.0.0" + string-width "^4.2.0" + which-module "^2.0.0" + y18n "^4.0.0" + yargs-parser "^18.1.2" diff --git a/jupyterhub/__init__.py b/jupyterhub/__init__.py index 5e882401..4b758c5d 100644 --- a/jupyterhub/__init__.py +++ b/jupyterhub/__init__.py @@ -1,2 +1,3 @@ -from ._version import __version__ -from ._version import version_info +from ._version import __version__, version_info + +__all__ = ["__version__", "version_info"] diff --git a/jupyterhub/_data.py b/jupyterhub/_data.py index 04452d18..d665f193 100644 --- a/jupyterhub/_data.py +++ b/jupyterhub/_data.py @@ -4,7 +4,7 @@ def get_data_files(): """Walk up until we find share/jupyterhub""" import sys - from os.path import join, abspath, dirname, exists, split + from os.path import abspath, dirname, exists, join, split path = abspath(dirname(__file__)) starting_points = [path] diff --git a/jupyterhub/_memoize.py b/jupyterhub/_memoize.py new file mode 100644 index 00000000..21907b1d --- /dev/null +++ b/jupyterhub/_memoize.py @@ -0,0 +1,154 @@ +"""Utilities for memoization + +Note: a memoized function should always return an _immutable_ +result to avoid later modifications polluting cached results. +""" +from collections import OrderedDict +from functools import wraps + + +class DoNotCache: + """Wrapper to return a result without caching it. + + In a function decorated with `@lru_cache_key`: + + return DoNotCache(result) + + is equivalent to: + + return result # but don't cache it! 
+ """ + + def __init__(self, result): + self.result = result + + +class LRUCache: + """A simple Least-Recently-Used (LRU) cache with a max size""" + + def __init__(self, maxsize=1024): + self._cache = OrderedDict() + self.maxsize = maxsize + + def __contains__(self, key): + return key in self._cache + + def get(self, key, default=None): + """Get an item from the cache""" + if key in self._cache: + # cache hit, bump to front of the queue for LRU + result = self._cache[key] + self._cache.move_to_end(key) + return result + return default + + def set(self, key, value): + """Store an entry in the cache + + Purges oldest entry if cache is full + """ + self._cache[key] = value + # cache is full, purge oldest entry + if len(self._cache) > self.maxsize: + self._cache.popitem(last=False) + + __getitem__ = get + __setitem__ = set + + +def lru_cache_key(key_func, maxsize=1024): + """Like functools.lru_cache, but takes a custom key function, + as seen in sorted(key=func). + + Useful for non-hashable arguments which have a known hashable equivalent (e.g. sets, lists), + or mutable objects where only immutable fields might be used + (e.g. User, where only username affects output). + + For safety: Cached results should always be immutable, + such as using `frozenset` instead of mutable `set`. + + Example: + + @lru_cache_key(lambda user: user.name) + def func_user(user): + # output only varies by name + + Args: + key (callable): + Should have the same signature as the decorated function. + Returns a hashable key to use in the cache + maxsize (int): + The maximum size of the cache. 
+ """ + + def cache_func(func): + cache = LRUCache(maxsize=maxsize) + # the actual decorated function: + @wraps(func) + def cached(*args, **kwargs): + cache_key = key_func(*args, **kwargs) + if cache_key in cache: + # cache hit + return cache[cache_key] + else: + # cache miss, call function and cache result + result = func(*args, **kwargs) + if isinstance(result, DoNotCache): + # DoNotCache prevents caching + result = result.result + else: + cache[cache_key] = result + return result + + return cached + + return cache_func + + +class FrozenDict(dict): + """A frozen dictionary subclass + + Immutable and hashable, so it can be used as a cache key + + Values will be frozen with `.freeze(value)` + and must be hashable after freezing. + + Not rigorous, but enough for our purposes. + """ + + _hash = None + + def __init__(self, d): + dict_set = dict.__setitem__ + for key, value in d.items(): + dict.__setitem__(self, key, self._freeze(value)) + + def _freeze(self, item): + """Make values of a dict hashable + - list, set -> frozenset + - dict -> recursive _FrozenDict + - anything else: assumed hashable + """ + if isinstance(item, FrozenDict): + return item + elif isinstance(item, list): + return tuple(self._freeze(e) for e in item) + elif isinstance(item, set): + return frozenset(item) + elif isinstance(item, dict): + return FrozenDict(item) + else: + # any other type is assumed hashable + return item + + def __setitem__(self, key): + raise RuntimeError("Cannot modify frozen {type(self).__name__}") + + def update(self, other): + raise RuntimeError("Cannot modify frozen {type(self).__name__}") + + def __hash__(self): + """Cache hash because we are immutable""" + if self._hash is None: + self._hash = hash(tuple((key, value) for key, value in self.items())) + return self._hash diff --git a/jupyterhub/_version.py b/jupyterhub/_version.py index 6ca01de4..c9e03659 100644 --- a/jupyterhub/_version.py +++ b/jupyterhub/_version.py @@ -1,14 +1,8 @@ """JupyterHub version info""" # 
Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. - -version_info = ( - 1, - 0, - 1, - # "b2", # release (b1, rc1, or "" for final or dev) - "dev", # dev or nothing -) +# version_info updated by running `tbump` +version_info = (3, 1, 0, "", "dev") # pep 440 version: no dot before beta/rc, but before .dev # 0.1.0rc1 @@ -16,7 +10,18 @@ version_info = ( # 0.1.0b1.dev # 0.1.0.dev -__version__ = ".".join(map(str, version_info[:3])) + ".".join(version_info[3:]) +__version__ = ".".join(map(str, version_info[:3])) + ".".join(version_info[3:]).rstrip( + "." +) + +# Singleton flag to only log the major/minor mismatch warning once per mismatch combo. +_version_mismatch_warning_logged = {} + + +def reset_globals(): + """Used to reset globals between test cases.""" + global _version_mismatch_warning_logged + _version_mismatch_warning_logged = {} def _check_version(hub_version, singleuser_version, log): @@ -37,24 +42,34 @@ def _check_version(hub_version, singleuser_version, log): # compare minor X.Y versions if hub_version != singleuser_version: - from distutils.version import LooseVersion as V + from packaging.version import parse - hub_major_minor = V(hub_version).version[:2] - singleuser_major_minor = V(singleuser_version).version[:2] + hub = parse(hub_version) + hub_major_minor = (hub.major, hub.minor) + singleuser = parse(singleuser_version) + singleuser_major_minor = (singleuser.major, singleuser.minor) extra = "" + do_log = True if singleuser_major_minor == hub_major_minor: # patch-level mismatch or lower, log difference at debug-level # because this should be fine log_method = log.debug else: # log warning-level for more significant mismatch, such as 0.8 vs 0.9, etc. - log_method = log.warning - extra = " This could cause failure to authenticate and result in redirect loops!" - log_method( - "jupyterhub version %s != jupyterhub-singleuser version %s." 
+ extra, - hub_version, - singleuser_version, - ) + key = f'{hub_version}-{singleuser_version}' + global _version_mismatch_warning_logged + if _version_mismatch_warning_logged.get(key): + do_log = False # We already logged this warning so don't log it again. + else: + log_method = log.warning + extra = " This could cause failure to authenticate and result in redirect loops!" + _version_mismatch_warning_logged[key] = True + if do_log: + log_method( + "jupyterhub version %s != jupyterhub-singleuser version %s." + extra, + hub_version, + singleuser_version, + ) else: log.debug( "jupyterhub and jupyterhub-singleuser both on version %s" % hub_version diff --git a/jupyterhub/alembic/env.py b/jupyterhub/alembic/env.py index 4846f4c1..8b4cba1b 100644 --- a/jupyterhub/alembic/env.py +++ b/jupyterhub/alembic/env.py @@ -3,17 +3,16 @@ import sys from logging.config import fileConfig from alembic import context -from sqlalchemy import engine_from_config -from sqlalchemy import pool +from sqlalchemy import engine_from_config, pool # this is the Alembic Config object, which provides # access to the values within the .ini file in use. config = context.config - # Interpret the config file for Python logging. # This line sets up loggers basically. if 'jupyterhub' in sys.modules: from traitlets.config import MultipleInstanceError + from jupyterhub.app import JupyterHub app = None @@ -28,7 +27,7 @@ if 'jupyterhub' in sys.modules: alembic_logger.propagate = True alembic_logger.parent = app.log else: - fileConfig(config.config_file_name) + fileConfig(config.config_file_name, disable_existing_loggers=False) else: fileConfig(config.config_file_name) @@ -42,6 +41,16 @@ target_metadata = orm.Base.metadata # my_important_option = config.get_main_option("my_important_option") # ... etc. 
+# pass these to context.configure(**config_opts) +common_config_opts = dict( + # target_metadata for autogenerate + target_metadata=target_metadata, + # transaction per migration to ensure + # each migration is 'complete' before running the next one + # (e.g. dropped tables) + transaction_per_migration=True, +) + def run_migrations_offline(): """Run migrations in 'offline' mode. @@ -55,8 +64,16 @@ def run_migrations_offline(): script output. """ - url = config.get_main_option("sqlalchemy.url") - context.configure(url=url, target_metadata=target_metadata, literal_binds=True) + connectable = config.attributes.get('connection', None) + config_opts = {} + config_opts.update(common_config_opts) + config_opts["literal_binds"] = True + + if connectable is None: + config_opts["url"] = config.get_main_option("sqlalchemy.url") + else: + config_opts["connection"] = connectable + context.configure(**config_opts) with context.begin_transaction(): context.run_migrations() @@ -69,14 +86,22 @@ def run_migrations_online(): and associate a connection with the context. 
""" - connectable = engine_from_config( - config.get_section(config.config_ini_section), - prefix='sqlalchemy.', - poolclass=pool.NullPool, - ) + connectable = config.attributes.get('connection', None) + config_opts = {} + config_opts.update(common_config_opts) + + if connectable is None: + connectable = engine_from_config( + config.get_section(config.config_ini_section), + prefix='sqlalchemy.', + poolclass=pool.NullPool, + ) with connectable.connect() as connection: - context.configure(connection=connection, target_metadata=target_metadata) + context.configure( + connection=connection, + **common_config_opts, + ) with context.begin_transaction(): context.run_migrations() diff --git a/jupyterhub/alembic/versions/19c0846f6344_base_revision_for_0_5.py b/jupyterhub/alembic/versions/19c0846f6344_base_revision_for_0_5.py index 5aa2b46d..c8421da4 100644 --- a/jupyterhub/alembic/versions/19c0846f6344_base_revision_for_0_5.py +++ b/jupyterhub/alembic/versions/19c0846f6344_base_revision_for_0_5.py @@ -11,9 +11,6 @@ down_revision = None branch_labels = None depends_on = None -from alembic import op -import sqlalchemy as sa - def upgrade(): pass diff --git a/jupyterhub/alembic/versions/1cebaf56856c_session_id.py b/jupyterhub/alembic/versions/1cebaf56856c_session_id.py index abcad7ef..38f37b0b 100644 --- a/jupyterhub/alembic/versions/1cebaf56856c_session_id.py +++ b/jupyterhub/alembic/versions/1cebaf56856c_session_id.py @@ -15,15 +15,15 @@ import logging logger = logging.getLogger('alembic') -from alembic import op import sqlalchemy as sa +from alembic import op tables = ('oauth_access_tokens', 'oauth_codes') def add_column_if_table_exists(table, column): engine = op.get_bind().engine - if table not in engine.table_names(): + if table not in sa.inspect(engine).get_table_names(): # table doesn't exist, no need to upgrade # because jupyterhub will create it on launch logger.warning("Skipping upgrade of absent table: %s", table) diff --git 
a/jupyterhub/alembic/versions/3ec6993fe20c_encrypted_auth_state.py b/jupyterhub/alembic/versions/3ec6993fe20c_encrypted_auth_state.py index 8e234318..73d8bc44 100644 --- a/jupyterhub/alembic/versions/3ec6993fe20c_encrypted_auth_state.py +++ b/jupyterhub/alembic/versions/3ec6993fe20c_encrypted_auth_state.py @@ -22,8 +22,9 @@ import logging logger = logging.getLogger('alembic') -from alembic import op import sqlalchemy as sa +from alembic import op + from jupyterhub.orm import JSONDict diff --git a/jupyterhub/alembic/versions/4dc2d5a8c53c_user_options.py b/jupyterhub/alembic/versions/4dc2d5a8c53c_user_options.py index d74e46b7..617a2d6d 100644 --- a/jupyterhub/alembic/versions/4dc2d5a8c53c_user_options.py +++ b/jupyterhub/alembic/versions/4dc2d5a8c53c_user_options.py @@ -11,13 +11,15 @@ down_revision = '896818069c98' branch_labels = None depends_on = None -from alembic import op import sqlalchemy as sa +from alembic import op + from jupyterhub.orm import JSONDict def upgrade(): - tables = op.get_bind().engine.table_names() + engine = op.get_bind().engine + tables = sa.inspect(engine).get_table_names() if 'spawners' in tables: op.add_column('spawners', sa.Column('user_options', JSONDict())) diff --git a/jupyterhub/alembic/versions/56cc5a70207e_token_tracking.py b/jupyterhub/alembic/versions/56cc5a70207e_token_tracking.py index 7583e6f2..0c74c09d 100644 --- a/jupyterhub/alembic/versions/56cc5a70207e_token_tracking.py +++ b/jupyterhub/alembic/versions/56cc5a70207e_token_tracking.py @@ -11,16 +11,17 @@ down_revision = '1cebaf56856c' branch_labels = None depends_on = None -from alembic import op -import sqlalchemy as sa - import logging +import sqlalchemy as sa +from alembic import op + logger = logging.getLogger('alembic') def upgrade(): - tables = op.get_bind().engine.table_names() + engine = op.get_bind().engine + tables = sa.inspect(engine).get_table_names() op.add_column('api_tokens', sa.Column('created', sa.DateTime(), nullable=True)) op.add_column( 'api_tokens', 
sa.Column('last_activity', sa.DateTime(), nullable=True) diff --git a/jupyterhub/alembic/versions/651f5419b74d_api_token_scopes.py b/jupyterhub/alembic/versions/651f5419b74d_api_token_scopes.py new file mode 100644 index 00000000..483a91c5 --- /dev/null +++ b/jupyterhub/alembic/versions/651f5419b74d_api_token_scopes.py @@ -0,0 +1,115 @@ +"""api_token_scopes + +Revision ID: 651f5419b74d +Revises: 833da8570507 +Create Date: 2022-02-28 12:42:55.149046 + +""" +# revision identifiers, used by Alembic. +revision = '651f5419b74d' +down_revision = '833da8570507' +branch_labels = None +depends_on = None + +import sqlalchemy as sa +from alembic import op +from sqlalchemy import Column, ForeignKey, Table +from sqlalchemy.orm import relationship +from sqlalchemy.orm.session import Session + +from jupyterhub import orm, roles, scopes + + +def upgrade(): + c = op.get_bind() + + tables = sa.inspect(c.engine).get_table_names() + + # oauth codes are short lived, no need to upgrade them + if 'oauth_code_role_map' in tables: + op.drop_table('oauth_code_role_map') + + if 'oauth_codes' in tables: + op.add_column('oauth_codes', sa.Column('scopes', orm.JSONList(), nullable=True)) + + if 'api_tokens' in tables: + # may not be present, + # e.g. 
upgrade from 1.x, token table dropped + # in which case no migration to do + + # define new scopes column on API tokens + op.add_column('api_tokens', sa.Column('scopes', orm.JSONList(), nullable=True)) + + if 'api_token_role_map' in tables: + # redefine the to-be-removed api_token->role relationship + # so we can run a query on it for the migration + token_role_map = Table( + "api_token_role_map", + orm.Base.metadata, + Column( + 'api_token_id', + ForeignKey('api_tokens.id', ondelete='CASCADE'), + primary_key=True, + ), + Column( + 'role_id', + ForeignKey('roles.id', ondelete='CASCADE'), + primary_key=True, + ), + extend_existing=True, + ) + orm.APIToken.roles = relationship('Role', secondary='api_token_role_map') + + # tokens have roles, evaluate to scopes + db = Session(bind=c) + for token in db.query(orm.APIToken): + token.scopes = list(roles.roles_to_scopes(token.roles)) + db.commit() + # drop token-role relationship + op.drop_table('api_token_role_map') + + if 'oauth_clients' in tables: + # define new scopes column on API tokens + op.add_column( + 'oauth_clients', sa.Column('allowed_scopes', orm.JSONList(), nullable=True) + ) + + if 'oauth_client_role_map' in tables: + # redefine the to-be-removed api_token->role relationship + # so we can run a query on it for the migration + client_role_map = Table( + "oauth_client_role_map", + orm.Base.metadata, + Column( + 'oauth_client_id', + ForeignKey('oauth_clients.id', ondelete='CASCADE'), + primary_key=True, + ), + Column( + 'role_id', + ForeignKey('roles.id', ondelete='CASCADE'), + primary_key=True, + ), + extend_existing=True, + ) + orm.OAuthClient.allowed_roles = relationship( + 'Role', secondary='oauth_client_role_map' + ) + + # oauth clients have allowed_roles, evaluate to allowed_scopes + db = Session(bind=c) + for oauth_client in db.query(orm.OAuthClient): + allowed_scopes = set(roles.roles_to_scopes(oauth_client.allowed_roles)) + allowed_scopes.update(scopes.access_scopes(oauth_client)) + 
oauth_client.allowed_scopes = sorted(allowed_scopes) + db.commit() + # drop token-role relationship + op.drop_table('oauth_client_role_map') + + +def downgrade(): + # cannot map permissions from scopes back to roles + # drop whole api token table (revokes all tokens), which will be recreated on hub start + op.drop_table('api_tokens') + op.drop_table('oauth_clients') + op.drop_table('oauth_codes') diff --git a/jupyterhub/alembic/versions/833da8570507_rbac.py b/jupyterhub/alembic/versions/833da8570507_rbac.py new file mode 100644 index 00000000..ec3325ec --- /dev/null +++ b/jupyterhub/alembic/versions/833da8570507_rbac.py @@ -0,0 +1,103 @@ +""" +rbac changes for jupyterhub 2.0 + +Revision ID: 833da8570507 +Revises: 4dc2d5a8c53c +Create Date: 2021-02-17 15:03:04.360368 + +""" +# revision identifiers, used by Alembic. +revision = '833da8570507' +down_revision = '4dc2d5a8c53c' +branch_labels = None +depends_on = None + +import sqlalchemy as sa +from alembic import op + +from jupyterhub import orm + +naming_convention = orm.meta.naming_convention + + +def upgrade(): + # associate spawners and services with their oauth clients + # op.add_column( + # 'services', sa.Column('oauth_client_id', sa.Unicode(length=255), nullable=True) + # ) + for table_name in ('services', 'spawners'): + column_name = "oauth_client_id" + target_table = "oauth_clients" + target_column = "identifier" + with op.batch_alter_table( + table_name, + schema=None, + ) as batch_op: + batch_op.add_column( + sa.Column('oauth_client_id', sa.Unicode(length=255), nullable=True), + ) + batch_op.create_foreign_key( + naming_convention["fk"] + % dict( + table_name=table_name, + column_0_name=column_name, + referred_table_name=target_table, + ), + target_table, + [column_name], + [target_column], + ondelete='SET NULL', + ) + + # FIXME, maybe: currently drops all api tokens and forces recreation! + # this ensures a consistent database, but requires: + # 1. 
all servers to be stopped for upgrade (maybe unavoidable anyway) + # 2. any manually issued/stored tokens to be re-issued + + # tokens loaded via configuration will be recreated on launch and unaffected + op.drop_table('api_tokens') + op.drop_table('oauth_access_tokens') + return + # TODO: explore in-place migration. This seems hard! + # 1. add new columns in api tokens + # 2. fill default fields (client_id='jupyterhub') for all api tokens + # 3. copy oauth tokens into api tokens + # 4. give oauth tokens 'identify' scopes + + +def downgrade(): + for table_name in ('services', 'spawners'): + column_name = "oauth_client_id" + target_table = "oauth_clients" + target_column = "identifier" + + with op.batch_alter_table( + table_name, + schema=None, + naming_convention=orm.meta.naming_convention, + ) as batch_op: + batch_op.drop_constraint( + naming_convention["fk"] + % dict( + table_name=table_name, + column_0_name=column_name, + referred_table_name=target_table, + ), + type_='foreignkey', + ) + batch_op.drop_column(column_name) + + # delete OAuth tokens for non-jupyterhub clients + # drop new columns from api tokens + # op.drop_constraint(None, 'api_tokens', type_='foreignkey') + # op.drop_column('api_tokens', 'session_id') + # op.drop_column('api_tokens', 'client_id') + + # FIXME: only drop tokens whose client id is not 'jupyterhub' + # until then, drop all tokens + op.drop_table("api_tokens") + + op.drop_table('api_token_role_map') + op.drop_table('service_role_map') + op.drop_table('user_role_map') + op.drop_table('roles') diff --git a/jupyterhub/alembic/versions/896818069c98_token_expires.py b/jupyterhub/alembic/versions/896818069c98_token_expires.py index b3f62411..e8d1a84d 100644 --- a/jupyterhub/alembic/versions/896818069c98_token_expires.py +++ b/jupyterhub/alembic/versions/896818069c98_token_expires.py @@ -11,8 +11,8 @@ down_revision = 'd68c98b66cd4' branch_labels = None depends_on = None -from alembic import op import sqlalchemy as sa +from alembic import op 
def upgrade(): diff --git a/jupyterhub/alembic/versions/99a28a4418e1_user_created.py b/jupyterhub/alembic/versions/99a28a4418e1_user_created.py index e2746ebb..42ca4b0b 100644 --- a/jupyterhub/alembic/versions/99a28a4418e1_user_created.py +++ b/jupyterhub/alembic/versions/99a28a4418e1_user_created.py @@ -12,11 +12,11 @@ branch_labels = None depends_on = None -from alembic import op -import sqlalchemy as sa - from datetime import datetime +import sqlalchemy as sa +from alembic import op + def upgrade(): op.add_column('users', sa.Column('created', sa.DateTime, nullable=True)) @@ -31,7 +31,7 @@ def upgrade(): % (now,) ) - tables = c.engine.table_names() + tables = sa.inspect(c.engine).get_table_names() if 'spawners' in tables: op.add_column('spawners', sa.Column('started', sa.DateTime, nullable=True)) diff --git a/jupyterhub/alembic/versions/af4cbdb2d13c_services.py b/jupyterhub/alembic/versions/af4cbdb2d13c_services.py index a0f5e8e9..b33da18e 100644 --- a/jupyterhub/alembic/versions/af4cbdb2d13c_services.py +++ b/jupyterhub/alembic/versions/af4cbdb2d13c_services.py @@ -11,8 +11,8 @@ down_revision = 'eeb276e51423' branch_labels = None depends_on = None -from alembic import op import sqlalchemy as sa +from alembic import op def upgrade(): diff --git a/jupyterhub/alembic/versions/d68c98b66cd4_client_description.py b/jupyterhub/alembic/versions/d68c98b66cd4_client_description.py index 13c9cded..4eb0a81e 100644 --- a/jupyterhub/alembic/versions/d68c98b66cd4_client_description.py +++ b/jupyterhub/alembic/versions/d68c98b66cd4_client_description.py @@ -11,12 +11,13 @@ down_revision = '99a28a4418e1' branch_labels = None depends_on = None -from alembic import op import sqlalchemy as sa +from alembic import op def upgrade(): - tables = op.get_bind().engine.table_names() + engine = op.get_bind().engine + tables = sa.inspect(engine).get_table_names() if 'oauth_clients' in tables: op.add_column( 'oauth_clients', sa.Column('description', sa.Unicode(length=1023)) diff --git 
a/jupyterhub/alembic/versions/eeb276e51423_auth_state.py b/jupyterhub/alembic/versions/eeb276e51423_auth_state.py index 8ae54901..5b4df64b 100644 --- a/jupyterhub/alembic/versions/eeb276e51423_auth_state.py +++ b/jupyterhub/alembic/versions/eeb276e51423_auth_state.py @@ -12,8 +12,9 @@ down_revision = '19c0846f6344' branch_labels = None depends_on = None -from alembic import op import sqlalchemy as sa +from alembic import op + from jupyterhub.orm import JSONDict diff --git a/jupyterhub/apihandlers/__init__.py b/jupyterhub/apihandlers/__init__.py index 7900d8af..f72887eb 100644 --- a/jupyterhub/apihandlers/__init__.py +++ b/jupyterhub/apihandlers/__init__.py @@ -1,10 +1,5 @@ -from . import auth -from . import groups -from . import hub -from . import proxy -from . import services -from . import users -from .base import * +from . import auth, groups, hub, proxy, services, users +from .base import * # noqa default_handlers = [] for mod in (auth, hub, proxy, users, groups, services): diff --git a/jupyterhub/apihandlers/auth.py b/jupyterhub/apihandlers/auth.py index 07e4d18b..0102aa4f 100644 --- a/jupyterhub/apihandlers/auth.py +++ b/jupyterhub/apihandlers/auth.py @@ -3,32 +3,36 @@ # Distributed under the terms of the Modified BSD License. import json from datetime import datetime -from urllib.parse import parse_qsl -from urllib.parse import quote -from urllib.parse import urlencode -from urllib.parse import urlparse -from urllib.parse import urlunparse +from unittest import mock +from urllib.parse import parse_qsl, quote, urlencode, urlparse, urlunparse from oauthlib import oauth2 from tornado import web -from .. import orm -from ..user import User -from ..utils import compare_token -from ..utils import token_authenticated -from .base import APIHandler -from .base import BaseHandler +from .. 
import orm, roles, scopes +from ..utils import get_browser_protocol, token_authenticated +from .base import APIHandler, BaseHandler class TokenAPIHandler(APIHandler): @token_authenticated def get(self, token): + # FIXME: deprecate this API for oauth token resolution, in favor of using /api/user + # TODO: require specific scope for this deprecated API, applied to service tokens only? + self.log.warning( + "/authorizations/token/:token endpoint is deprecated in JupyterHub 2.0. Use /api/user" + ) orm_token = orm.APIToken.find(self.db, token) - if orm_token is None: - orm_token = orm.OAuthAccessToken.find(self.db, token) if orm_token is None: raise web.HTTPError(404) + owner = orm_token.user or orm_token.service + if owner: + # having a token means we should be able to read the owner's model + # (this is the only thing this handler is for) + self.expanded_scopes |= scopes.identify_scopes(owner) + self.parsed_scopes = scopes.parse_scopes(self.expanded_scopes) + # record activity whenever we see a token now = orm_token.last_activity = datetime.utcnow() if orm_token.user: @@ -45,53 +49,20 @@ class TokenAPIHandler(APIHandler): self.write(json.dumps(model)) async def post(self): - warn_msg = ( - "Using deprecated token creation endpoint %s." - " Use /hub/api/users/:user/tokens instead." 
- ) % self.request.uri - self.log.warning(warn_msg) - requester = user = self.current_user - if user is None: - # allow requesting a token with username and password - # for authenticators where that's possible - data = self.get_json_body() - try: - requester = user = await self.login_user(data) - except Exception as e: - self.log.error("Failure trying to authenticate with form data: %s" % e) - user = None - if user is None: - raise web.HTTPError(403) - else: - data = self.get_json_body() - # admin users can request tokens for other users - if data and data.get('username'): - user = self.find_user(data['username']) - if user is not requester and not requester.admin: - raise web.HTTPError( - 403, "Only admins can request tokens for other users." - ) - if requester.admin and user is None: - raise web.HTTPError(400, "No such user '%s'" % data['username']) - - note = (data or {}).get('note') - if not note: - note = "Requested via deprecated api" - if requester is not user: - kind = 'user' if isinstance(user, User) else 'service' - note += " by %s %s" % (kind, requester.name) - - api_token = user.new_api_token(note=note) - self.write( - json.dumps( - {'token': api_token, 'warning': warn_msg, 'user': self.user_model(user)} - ) + raise web.HTTPError( + 404, + "Deprecated endpoint /hub/api/authorizations/token is removed in JupyterHub 2.0." + " Use /hub/api/users/:user/tokens instead.", ) class CookieAPIHandler(APIHandler): @token_authenticated def get(self, cookie_name, cookie_value=None): + self.log.warning( + "/authorizations/cookie endpoint is deprecated in JupyterHub 2.0. Use /api/user with OAuth tokens." 
+ ) + cookie_name = quote(cookie_name, safe='') if cookie_value is None: self.log.warning( @@ -137,7 +108,10 @@ class OAuthHandler: # make absolute local redirects full URLs # to satisfy oauthlib's absolute URI requirement redirect_uri = ( - self.request.protocol + "://" + self.request.headers['Host'] + redirect_uri + get_browser_protocol(self.request) + + "://" + + self.request.host + + redirect_uri ) parsed_url = urlparse(uri) query_list = parse_qsl(parsed_url.query, keep_blank_values=True) @@ -198,37 +172,198 @@ class OAuthAuthorizeHandler(OAuthHandler, BaseHandler): raise self.send_oauth_response(headers, body, status) + def needs_oauth_confirm(self, user, oauth_client, requested_scopes): + """Return whether the given oauth client needs to prompt for access for the given user + + Checks list for oauth clients that don't need confirmation + + Sources: + + - the user's own servers + - Clients which already have authorization for the same roles + - Explicit oauth_no_confirm_list configuration (e.g. admin-operated services) + + .. 
versionadded: 1.1 + """ + # get the oauth client ids for the user's own server(s) + own_oauth_client_ids = { + spawner.oauth_client_id for spawner in user.spawners.values() + } + if ( + # it's the user's own server + oauth_client.identifier in own_oauth_client_ids + # or it's in the global no-confirm list + or oauth_client.identifier + in self.settings.get('oauth_no_confirm_list', set()) + ): + return False + + # Check existing authorization + existing_tokens = self.db.query(orm.APIToken).filter_by( + user_id=user.id, + client_id=oauth_client.identifier, + ) + authorized_scopes = set() + for token in existing_tokens: + authorized_scopes.update(token.scopes) + + if authorized_scopes: + if set(requested_scopes).issubset(authorized_scopes): + self.log.debug( + f"User {user.name} has already authorized {oauth_client.identifier} for scopes {requested_scopes}" + ) + return False + else: + self.log.debug( + f"User {user.name} has authorized {oauth_client.identifier}" + f" for scopes {authorized_scopes}, confirming additional scopes {requested_scopes}" + ) + # default: require confirmation + return True + + def get_login_url(self): + """ + Support automatically logging in when JupyterHub is used as auth provider + """ + if self.authenticator.auto_login_oauth2_authorize: + return self.authenticator.login_url(self.hub.base_url) + return super().get_login_url() + @web.authenticated - def get(self): + async def get(self): """GET /oauth/authorization Render oauth confirmation page: "Server at ... would like permission to ...". - Users accessing their own server will skip confirmation. + Users accessing their own server or a blessed service + will skip confirmation. 
""" uri, http_method, body, headers = self.extract_oauth_params() try: - scopes, credentials = self.oauth_provider.validate_authorization_request( - uri, http_method, body, headers - ) + with mock.patch.object( + self.oauth_provider.request_validator, + "_current_user", + self.current_user, + create=True, + ): + ( + requested_scopes, + credentials, + ) = self.oauth_provider.validate_authorization_request( + uri, http_method, body, headers + ) credentials = self.add_credentials(credentials) client = self.oauth_provider.fetch_by_client_id(credentials['client_id']) - if client.redirect_uri.startswith(self.current_user.url): + allowed = False + + # check for access to target resource + if client.spawner: + scope_filter = self.get_scope_filter("access:servers") + allowed = scope_filter(client.spawner, kind='server') + elif client.service: + scope_filter = self.get_scope_filter("access:services") + allowed = scope_filter(client.service, kind='service') + else: + # client is not associated with a service or spawner. + # This shouldn't happen, but it might if this is a stale or forged request + # from a service or spawner that's since been deleted + self.log.error( + f"OAuth client {client} has no service or spawner, cannot resolve scopes." 
+ ) + raise web.HTTPError(500, "OAuth configuration error") + + if not allowed: + self.log.error( + f"User {self.current_user} not allowed to access {client.description}" + ) + raise web.HTTPError( + 403, f"You do not have permission to access {client.description}" + ) + + # subset 'raw scopes' to those held by authenticating user + requested_scopes = set(requested_scopes) + user = self.current_user + # raw, _not_ expanded scopes + user_scopes = roles.roles_to_scopes(roles.get_roles_for(user.orm_user)) + # these are some scopes the user may not have + # in 'raw' form, but definitely have at this point + # make sure they are here, because we are computing the + # 'raw' scope intersection, + # rather than the expanded_scope intersection + + required_scopes = {*scopes.identify_scopes(), *scopes.access_scopes(client)} + user_scopes |= {"inherit", *required_scopes} + + allowed_scopes, disallowed_scopes = scopes._resolve_requested_scopes( + requested_scopes, + user_scopes, + user=user.orm_user, + client=client, + db=self.db, + ) + + if disallowed_scopes: + self.log.warning( + f"Service {client.description} requested scopes {','.join(requested_scopes)}" + f" for user {self.current_user.name}," + f" granting only {','.join(allowed_scopes) or '[]'}." + ) + + if not self.needs_oauth_confirm(self.current_user, client, allowed_scopes): self.log.debug( "Skipping oauth confirmation for %s accessing %s", self.current_user, client.description, ) - # access to my own server doesn't require oauth confirmation # this is the pre-1.0 behavior for all oauth - self._complete_login(uri, headers, scopes, credentials) + self._complete_login(uri, headers, allowed_scopes, credentials) return + # discard 'required' scopes from description + # no need to describe the ability to access itself + scopes_to_describe = allowed_scopes.difference(required_scopes) + + if not scopes_to_describe: + # TODO: describe all scopes? 
+ # Not right now, because the no-scope default 'identify' text + # is clearer than what we produce for those scopes individually + scope_descriptions = [ + { + "scope": None, + "description": scopes.scope_definitions['(no_scope)'][ + 'description' + ], + "filter": "", + } + ] + elif 'inherit' in scopes_to_describe: + allowed_scopes = scopes_to_describe = ['inherit'] + scope_descriptions = [ + { + "scope": "inherit", + "description": scopes.scope_definitions['inherit'][ + 'description' + ], + "filter": "", + } + ] + else: + scope_descriptions = scopes.describe_raw_scopes( + scopes_to_describe, + username=self.current_user.name, + ) # Render oauth 'Authorize application...' page + auth_state = await self.current_user.get_auth_state() self.write( - self.render_template("oauth.html", scopes=scopes, oauth_client=client) + await self.render_template( + "oauth.html", + auth_state=auth_state, + allowed_scopes=allowed_scopes, + scope_descriptions=scope_descriptions, + oauth_client=client, + ) ) # Errors that should be shown to the user on the provider website @@ -245,9 +380,26 @@ class OAuthAuthorizeHandler(OAuthHandler, BaseHandler): uri, http_method, body, headers = self.extract_oauth_params() referer = self.request.headers.get('Referer', 'no referer') full_url = self.request.full_url() - if referer != full_url: + # trim protocol, which cannot be trusted with multiple layers of proxies anyway + # Referer is set by browser, but full_url can be modified by proxy layers to appear as http + # when it is actually https + referer_proto, _, stripped_referer = referer.partition("://") + referer_proto = referer_proto.lower() + req_proto, _, stripped_full_url = full_url.partition("://") + req_proto = req_proto.lower() + if referer_proto != req_proto: + self.log.warning("Protocol mismatch: %s != %s", referer, full_url) + if req_proto == "https": + # insecure origin to secure target is not allowed + raise web.HTTPError( + 403, "Not allowing authorization form submitted from insecure 
page" + ) + if stripped_referer != stripped_full_url: # OAuth post must be made to the URL it came from - self.log.error("OAuth POST from %s != %s", referer, full_url) + self.log.error("Original OAuth POST from %s != %s", referer, full_url) + self.log.error( + "Stripped OAuth POST from %s != %s", stripped_referer, stripped_full_url + ) raise web.HTTPError( 403, "Authorization form must be sent from authorization page" ) @@ -255,6 +407,10 @@ class OAuthAuthorizeHandler(OAuthHandler, BaseHandler): # The scopes the user actually authorized, i.e. checkboxes # that were selected. scopes = self.get_arguments('scopes') + if scopes == []: + # avoid triggering default scopes (provider selects default scopes when scopes is falsy) + # when an explicit empty list is authorized + scopes = ["identify"] # credentials we need in the validator credentials = self.add_credentials() diff --git a/jupyterhub/apihandlers/base.py b/jupyterhub/apihandlers/base.py index 600876cd..88844cb5 100644 --- a/jupyterhub/apihandlers/base.py +++ b/jupyterhub/apihandlers/base.py @@ -2,16 +2,19 @@ # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. import json -from datetime import datetime +from functools import lru_cache from http.client import responses +from urllib.parse import parse_qs, urlencode, urlparse, urlunparse from sqlalchemy.exc import SQLAlchemyError from tornado import web from .. 
import orm from ..handlers import BaseHandler -from ..utils import isoformat -from ..utils import url_path_join +from ..scopes import get_scopes_for +from ..utils import get_browser_protocol, isoformat, url_escape_path, url_path_join + +PAGINATION_MEDIA_TYPE = "application/jupyterhub-pagination+json" class APIHandler(BaseHandler): @@ -25,6 +28,9 @@ class APIHandler(BaseHandler): - methods for REST API models """ + # accept token-based authentication for API requests + _accept_token_auth = True + @property def content_security_policy(self): return '; '.join([super().content_security_policy, "default-src 'none'"]) @@ -32,6 +38,16 @@ class APIHandler(BaseHandler): def get_content_type(self): return 'application/json' + @property + @lru_cache() + def accepts_pagination(self): + """Return whether the client accepts the pagination preview media type""" + accept_header = self.request.headers.get("Accept", "") + if not accept_header: + return False + accepts = {s.strip().lower() for s in accept_header.strip().split(",")} + return PAGINATION_MEDIA_TYPE in accepts + def check_referer(self): """Check Origin for cross-site API requests. @@ -39,7 +55,10 @@ class APIHandler(BaseHandler): - allow unspecified host/referer (e.g. scripts) """ - host = self.request.headers.get("Host") + host_header = self.app.forwarded_host_header or "Host" + host = self.request.headers.get(host_header) + if host and "," in host: + host = host.split(",", 1)[0].strip() referer = self.request.headers.get("Referer") # If no header is provided, assume it comes from a script/curl. 
@@ -51,25 +70,62 @@ class APIHandler(BaseHandler): self.log.warning("Blocking API request with no referer") return False - host_path = url_path_join(host, self.hub.base_url) - referer_path = referer.split('://', 1)[-1] - if not (referer_path + '/').startswith(host_path): + proto = get_browser_protocol(self.request) + + full_host = f"{proto}://{host}{self.hub.base_url}" + host_url = urlparse(full_host) + referer_url = urlparse(referer) + # resolve default ports for http[s] + referer_port = referer_url.port or ( + 443 if referer_url.scheme == 'https' else 80 + ) + host_port = host_url.port or (443 if host_url.scheme == 'https' else 80) + if ( + referer_url.scheme != host_url.scheme + or referer_url.hostname != host_url.hostname + or referer_port != host_port + or not (referer_url.path + "/").startswith(host_url.path) + ): self.log.warning( - "Blocking Cross Origin API request. Referer: %s, Host: %s", - referer, - host_path, + f"Blocking Cross Origin API request. Referer: {referer}," + f" {host_header}: {host}, Host URL: {full_host}", ) return False return True + def check_post_content_type(self): + """Check request content-type, e.g. for cross-site POST requests + + Cross-site POST via form will include content-type + """ + content_type = self.request.headers.get("Content-Type") + if not content_type: + # not specified, e.g. 
from a script + return True + + # parse content type for application/json + fields = content_type.lower().split(";") + if not any(f.lstrip().startswith("application/json") for f in fields): + self.log.warning(f"Not allowing POST with content-type: {content_type}") + return False + + return True + def get_current_user_cookie(self): - """Override get_user_cookie to check Referer header""" + """Extend get_user_cookie to add checks for CORS""" cookie_user = super().get_current_user_cookie() - # check referer only if there is a cookie user, + # CORS checks for cookie-authentication + # check these only if there is a cookie user, # avoiding misleading "Blocking Cross Origin" messages # when there's no cookie set anyway. - if cookie_user and not self.check_referer(): - return None + if cookie_user: + if not self.check_referer(): + return None + if ( + self.request.method.upper() == 'POST' + and not self.check_post_content_type() + ): + return None return cookie_user def get_json_body(self): @@ -131,35 +187,48 @@ class APIHandler(BaseHandler): json.dumps({'status': status_code, 'message': message or status_message}) ) - def server_model(self, spawner, include_state=False): - """Get the JSON model for a Spawner""" - return { - 'name': spawner.name, - 'last_activity': isoformat(spawner.orm_spawner.last_activity), - 'started': isoformat(spawner.orm_spawner.started), - 'pending': spawner.pending, - 'ready': spawner.ready, - 'state': spawner.get_state() if include_state else None, - 'url': url_path_join(spawner.user.url, spawner.name, '/'), - 'progress_url': spawner._progress_url, + def server_model(self, spawner, *, user=None): + """Get the JSON model for a Spawner + Assume server permission already granted + """ + if isinstance(spawner, orm.Spawner): + # if an orm.Spawner is passed, + # create a model for a stopped Spawner + # not all info is available without the higher-level Spawner wrapper + orm_spawner = spawner + pending = None + ready = False + stopped = True + user = 
user + if user is None: + raise RuntimeError("Must specify User with orm.Spawner") + state = orm_spawner.state + else: + orm_spawner = spawner.orm_spawner + pending = spawner.pending + ready = spawner.ready + user = spawner.user + stopped = not spawner.active + state = spawner.get_state() + + model = { + 'name': orm_spawner.name, + 'last_activity': isoformat(orm_spawner.last_activity), + 'started': isoformat(orm_spawner.started), + 'pending': pending, + 'ready': ready, + 'stopped': stopped, + 'url': url_path_join(user.url, url_escape_path(spawner.name), '/'), + 'user_options': spawner.user_options, + 'progress_url': user.progress_url(spawner.name), } + scope_filter = self.get_scope_filter('admin:server_state') + if scope_filter(spawner, kind='server'): + model['state'] = state + return model def token_model(self, token): """Get the JSON model for an APIToken""" - expires_at = None - if isinstance(token, orm.APIToken): - kind = 'api_token' - extra = {'note': token.note} - expires_at = token.expires_at - elif isinstance(token, orm.OAuthAccessToken): - kind = 'oauth' - extra = {'oauth_client': token.client.description or token.client.client_id} - if token.expires_at: - expires_at = datetime.fromtimestamp(token.expires_at) - else: - raise TypeError( - "token must be an APIToken or OAuthAccessToken, not %s" % type(token) - ) if token.user: owner_key = 'user' @@ -172,59 +241,172 @@ class APIHandler(BaseHandler): model = { owner_key: owner, 'id': token.api_id, - 'kind': kind, + 'kind': 'api_token', + # deprecated field, but leave it present. 
+ 'roles': [], + 'scopes': list(get_scopes_for(token)), 'created': isoformat(token.created), 'last_activity': isoformat(token.last_activity), - 'expires_at': isoformat(expires_at), + 'expires_at': isoformat(token.expires_at), + 'note': token.note, + 'session_id': token.session_id, + 'oauth_client': token.oauth_client.description + or token.oauth_client.identifier, } - model.update(extra) return model - def user_model(self, user, include_servers=False, include_state=False): + def _filter_model(self, model, access_map, entity, kind, keys=None): + """ + Filter the model based on the available scopes and the entity requested for. + If keys is a dictionary, update it with the allowed keys for the model. + """ + allowed_keys = set() + for scope in access_map: + scope_filter = self.get_scope_filter(scope) + if scope_filter(entity, kind=kind): + allowed_keys |= access_map[scope] + model = {key: model[key] for key in allowed_keys if key in model} + if isinstance(keys, set): + keys.update(allowed_keys) + return model + + _include_stopped_servers = None + + @property + def include_stopped_servers(self): + """Whether stopped servers should be included in user models""" + if self._include_stopped_servers is None: + self._include_stopped_servers = self.get_argument( + "include_stopped_servers", "0" + ).lower() not in {"0", "false"} + return self._include_stopped_servers + + def user_model(self, user): """Get the JSON model for a User object""" if isinstance(user, orm.User): user = self.users[user.id] - + include_stopped_servers = self.include_stopped_servers model = { 'kind': 'user', 'name': user.name, 'admin': user.admin, + 'roles': [r.name for r in user.roles], 'groups': [g.name for g in user.groups], 'server': user.url if user.running else None, 'pending': None, 'created': isoformat(user.created), 'last_activity': isoformat(user.last_activity), + 'auth_state': None, # placeholder, filled in later } - if '' in user.spawners: - model['pending'] = user.spawners[''].pending + 
access_map = { + 'read:users': { + 'kind', + 'name', + 'admin', + 'roles', + 'groups', + 'server', + 'pending', + 'created', + 'last_activity', + }, + 'read:users:name': {'kind', 'name', 'admin'}, + 'read:users:groups': {'kind', 'name', 'groups'}, + 'read:users:activity': {'kind', 'name', 'last_activity'}, + 'read:servers': {'kind', 'name', 'servers'}, + 'read:roles:users': {'kind', 'name', 'roles', 'admin'}, + 'admin:auth_state': {'kind', 'name', 'auth_state'}, + } + allowed_keys = set() + model = self._filter_model( + model, access_map, user, kind='user', keys=allowed_keys + ) + if model: + if '' in user.spawners and 'pending' in allowed_keys: + model['pending'] = user.spawners[''].pending - if not include_servers: - model['servers'] = None - return model + servers = {} + scope_filter = self.get_scope_filter('read:servers') + for name, spawner in user.spawners.items(): + # include 'active' servers, not just ready + # (this includes pending events) + if (spawner.active or include_stopped_servers) and scope_filter( + spawner, kind='server' + ): + servers[name] = self.server_model(spawner) + + if include_stopped_servers: + # add any stopped servers in the db + seen = set(servers.keys()) + for name, orm_spawner in user.orm_spawners.items(): + if name not in seen and scope_filter(orm_spawner, kind='server'): + servers[name] = self.server_model(orm_spawner, user=user) + + if "servers" in allowed_keys or servers: + # omit servers if no access + # leave present and empty + # if request has access to read servers in general + model["servers"] = servers - servers = model['servers'] = {} - for name, spawner in user.spawners.items(): - # include 'active' servers, not just ready - # (this includes pending events) - if spawner.active: - servers[name] = self.server_model(spawner, include_state=include_state) return model def group_model(self, group): """Get the JSON model for a Group object""" - return { + model = { 'kind': 'group', 'name': group.name, + 'roles': [r.name for r 
in group.roles], 'users': [u.name for u in group.users], } + access_map = { + 'read:groups': {'kind', 'name', 'users'}, + 'read:groups:name': {'kind', 'name'}, + 'read:roles:groups': {'kind', 'name', 'roles'}, + } + model = self._filter_model(model, access_map, group, 'group') + return model def service_model(self, service): """Get the JSON model for a Service object""" - return {'kind': 'service', 'name': service.name, 'admin': service.admin} + model = { + 'kind': 'service', + 'name': service.name, + 'roles': [r.name for r in service.roles], + 'admin': service.admin, + 'url': getattr(service, 'url', ''), + 'prefix': service.server.base_url if getattr(service, 'server', '') else '', + 'command': getattr(service, 'command', ''), + 'pid': service.proc.pid if getattr(service, 'proc', '') else 0, + 'info': getattr(service, 'info', ''), + 'display': getattr(service, 'display', ''), + } + access_map = { + 'read:services': { + 'kind', + 'name', + 'admin', + 'url', + 'prefix', + 'command', + 'pid', + 'info', + 'display', + }, + 'read:services:name': {'kind', 'name', 'admin'}, + 'read:roles:services': {'kind', 'name', 'roles', 'admin'}, + } + model = self._filter_model(model, access_map, service, 'service') + return model - _user_model_types = {'name': str, 'admin': bool, 'groups': list, 'auth_state': dict} + _user_model_types = { + 'name': str, + 'admin': bool, + 'groups': list, + 'roles': list, + 'auth_state': dict, + } - _group_model_types = {'name': str, 'users': list} + _group_model_types = {'name': str, 'users': list, 'roles': list} def _check_model(self, model, model_types, name): """Check a model provided by a REST API request @@ -264,6 +446,67 @@ class APIHandler(BaseHandler): 400, ("group names must be str, not %r", type(groupname)) ) + def get_api_pagination(self): + default_limit = self.settings["api_page_default_limit"] + max_limit = self.settings["api_page_max_limit"] + if not self.accepts_pagination: + # if new pagination Accept header is not used, + # 
default to the higher max page limit to reduce likelihood + # of missing users due to pagination in code that hasn't been updated + default_limit = max_limit + offset = self.get_argument("offset", None) + limit = self.get_argument("limit", default_limit) + try: + offset = abs(int(offset)) if offset is not None else 0 + limit = abs(int(limit)) + if limit > max_limit: + limit = max_limit + if limit < 1: + limit = 1 + except Exception as e: + raise web.HTTPError( + 400, "Invalid argument type, offset and limit must be integers" + ) + return offset, limit + + def paginated_model(self, items, offset, limit, total_count): + """Return the paginated form of a collection (list or dict) + + A dict with { items: [], _pagination: {}} + instead of a single list (or dict). + + pagination info includes the current offset and limit, + the total number of results for the query, + and information about how to build the next page request + if there is one. + """ + next_offset = offset + limit + data = { + "items": items, + "_pagination": { + "offset": offset, + "limit": limit, + "total": total_count, + "next": None, + }, + } + if next_offset < total_count: + # if there's a next page + next_url_parsed = urlparse(self.request.full_url()) + query = parse_qs(next_url_parsed.query) + query['offset'] = [next_offset] + query['limit'] = [limit] + next_url_parsed = next_url_parsed._replace( + query=urlencode(query, doseq=True) + ) + next_url = urlunparse(next_url_parsed) + data["_pagination"]["next"] = { + "offset": next_offset, + "limit": limit, + "url": next_url, + } + return data + def options(self, *args, **kwargs): self.finish() diff --git a/jupyterhub/apihandlers/groups.py b/jupyterhub/apihandlers/groups.py index 78e833f7..c5799f15 100644 --- a/jupyterhub/apihandlers/groups.py +++ b/jupyterhub/apihandlers/groups.py @@ -3,11 +3,10 @@ # Distributed under the terms of the Modified BSD License. import json -from tornado import gen from tornado import web from .. 
import orm -from ..utils import admin_only +from ..scopes import Scope, needs_scope from .base import APIHandler @@ -23,27 +22,59 @@ class _GroupAPIHandler(APIHandler): users.append(user.orm_user) return users - def find_group(self, name): + def find_group(self, group_name): """Find and return a group by name. Raise 404 if not found. """ - group = orm.Group.find(self.db, name=name) + group = orm.Group.find(self.db, name=group_name) if group is None: - raise web.HTTPError(404, "No such group: %s", name) + raise web.HTTPError(404, "No such group: %s", group_name) return group + def check_authenticator_managed_groups(self): + """Raise error on group-management APIs if Authenticator is managing groups""" + if self.authenticator.manage_groups: + raise web.HTTPError(400, "Group management via API is disabled") + class GroupListAPIHandler(_GroupAPIHandler): - @admin_only + @needs_scope('list:groups') def get(self): """List groups""" - data = [self.group_model(g) for g in self.db.query(orm.Group)] + query = full_query = self.db.query(orm.Group) + sub_scope = self.parsed_scopes['list:groups'] + if sub_scope != Scope.ALL: + if not set(sub_scope).issubset({'group'}): + # the only valid filter is group=... + # don't expand invalid !server=x to all groups! + self.log.warning( + f"Invalid filter on list:group for {self.current_user}: {sub_scope}" + ) + raise web.HTTPError(403) + query = query.filter(orm.Group.name.in_(sub_scope['group'])) + + offset, limit = self.get_api_pagination() + query = query.order_by(orm.Group.id.asc()).offset(offset).limit(limit) + group_list = [self.group_model(g) for g in query] + total_count = full_query.count() + if self.accepts_pagination: + data = self.paginated_model(group_list, offset, limit, total_count) + else: + query_count = query.count() + if offset == 0 and total_count > query_count: + self.log.warning( + f"Truncated group list in request that does not expect pagination. Replying with {query_count} of {total_count} total groups." 
+ ) + data = group_list self.write(json.dumps(data)) - @admin_only + @needs_scope('admin:groups') async def post(self): - """POST creates Multiple groups """ + """POST creates Multiple groups""" + + self.check_authenticator_managed_groups() + model = self.get_json_body() if not model or not isinstance(model, dict) or not model.get('groups'): raise web.HTTPError(400, "Must specify at least one group to create") @@ -74,42 +105,44 @@ class GroupListAPIHandler(_GroupAPIHandler): class GroupAPIHandler(_GroupAPIHandler): """View and modify groups by name""" - @admin_only - def get(self, name): - group = self.find_group(name) + @needs_scope('read:groups', 'read:groups:name', 'read:roles:groups') + def get(self, group_name): + group = self.find_group(group_name) self.write(json.dumps(self.group_model(group))) - @admin_only - async def post(self, name): + @needs_scope('admin:groups') + async def post(self, group_name): """POST creates a group by name""" + self.check_authenticator_managed_groups() model = self.get_json_body() if model is None: model = {} else: self._check_group_model(model) - existing = orm.Group.find(self.db, name=name) + existing = orm.Group.find(self.db, name=group_name) if existing is not None: - raise web.HTTPError(409, "Group %s already exists" % name) + raise web.HTTPError(409, "Group %s already exists" % group_name) usernames = model.get('users', []) # check that users exist users = self._usernames_to_users(usernames) # create the group - self.log.info("Creating new group %s with %i users", name, len(users)) + self.log.info("Creating new group %s with %i users", group_name, len(users)) self.log.debug("Users: %s", usernames) - group = orm.Group(name=name, users=users) + group = orm.Group(name=group_name, users=users) self.db.add(group) self.db.commit() self.write(json.dumps(self.group_model(group))) self.set_status(201) - @admin_only - def delete(self, name): + @needs_scope('delete:groups') + def delete(self, group_name): """Delete a group by name""" 
- group = self.find_group(name) - self.log.info("Deleting group %s", name) + self.check_authenticator_managed_groups() + group = self.find_group(group_name) + self.log.info("Deleting group %s", group_name) self.db.delete(group) self.db.commit() self.set_status(204) @@ -118,39 +151,43 @@ class GroupAPIHandler(_GroupAPIHandler): class GroupUsersAPIHandler(_GroupAPIHandler): """Modify a group's user list""" - @admin_only - def post(self, name): + @needs_scope('groups') + def post(self, group_name): """POST adds users to a group""" - group = self.find_group(name) + self.check_authenticator_managed_groups() + group = self.find_group(group_name) data = self.get_json_body() self._check_group_model(data) if 'users' not in data: raise web.HTTPError(400, "Must specify users to add") - self.log.info("Adding %i users to group %s", len(data['users']), name) + self.log.info("Adding %i users to group %s", len(data['users']), group_name) self.log.debug("Adding: %s", data['users']) for user in self._usernames_to_users(data['users']): if user not in group.users: group.users.append(user) else: - self.log.warning("User %s already in group %s", user.name, name) + self.log.warning("User %s already in group %s", user.name, group_name) self.db.commit() self.write(json.dumps(self.group_model(group))) - @admin_only - async def delete(self, name): + @needs_scope('groups') + async def delete(self, group_name): """DELETE removes users from a group""" - group = self.find_group(name) + self.check_authenticator_managed_groups() + group = self.find_group(group_name) data = self.get_json_body() self._check_group_model(data) if 'users' not in data: raise web.HTTPError(400, "Must specify users to delete") - self.log.info("Removing %i users from group %s", len(data['users']), name) + self.log.info("Removing %i users from group %s", len(data['users']), group_name) self.log.debug("Removing: %s", data['users']) for user in self._usernames_to_users(data['users']): if user in group.users: 
group.users.remove(user) else: - self.log.warning("User %s already not in group %s", user.name, name) + self.log.warning( + "User %s already not in group %s", user.name, group_name + ) self.db.commit() self.write(json.dumps(self.group_model(group))) diff --git a/jupyterhub/apihandlers/hub.py b/jupyterhub/apihandlers/hub.py index 155ddf92..4474c821 100644 --- a/jupyterhub/apihandlers/hub.py +++ b/jupyterhub/apihandlers/hub.py @@ -5,20 +5,19 @@ import json import sys from tornado import web -from tornado.ioloop import IOLoop from .._version import __version__ -from ..utils import admin_only +from ..scopes import needs_scope from .base import APIHandler class ShutdownAPIHandler(APIHandler): - @admin_only + @needs_scope('shutdown') def post(self): """POST /api/shutdown triggers a clean shutdown - + POST (JSON) parameters: - + - servers: specify whether single-user servers should be terminated - proxy: specify whether the proxy should be terminated """ @@ -47,17 +46,15 @@ class ShutdownAPIHandler(APIHandler): self.set_status(202) self.finish(json.dumps({"message": "Shutting down Hub"})) - # stop the eventloop, which will trigger cleanup - loop = IOLoop.current() - loop.add_callback(loop.stop) + # instruct the app to stop, which will trigger cleanup + app.stop() class RootAPIHandler(APIHandler): def get(self): """GET /api/ returns info about the Hub and its API. - It is not an authenticated endpoint. - + It is not an authenticated endpoint For now, it just returns the version of JupyterHub itself. """ data = {'version': __version__} @@ -65,20 +62,17 @@ class RootAPIHandler(APIHandler): class InfoAPIHandler(APIHandler): - @admin_only + @needs_scope('read:hub') def get(self): """GET /api/info returns detailed info about the Hub and its API. - It is not an authenticated endpoint. - - For now, it just returns the version of JupyterHub itself. + Currently, it returns information on the python version, spawner and authenticator. 
+ Since this information might be sensitive, it is an authenticated endpoint """ def _class_info(typ): """info about a class (Spawner or Authenticator)""" - info = { - 'class': '{mod}.{name}'.format(mod=typ.__module__, name=typ.__name__) - } + info = {'class': f'{typ.__module__}.{typ.__name__}'} pkg = typ.__module__.split('.')[0] try: version = sys.modules[pkg].__version__ diff --git a/jupyterhub/apihandlers/proxy.py b/jupyterhub/apihandlers/proxy.py index 83901832..c73326ab 100644 --- a/jupyterhub/apihandlers/proxy.py +++ b/jupyterhub/apihandlers/proxy.py @@ -2,28 +2,45 @@ # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. import json -from urllib.parse import urlparse -from tornado import gen from tornado import web -from .. import orm -from ..utils import admin_only +from ..scopes import needs_scope from .base import APIHandler class ProxyAPIHandler(APIHandler): - @admin_only + @needs_scope('proxy') async def get(self): """GET /api/proxy fetches the routing table This is the same as fetching the routing table directly from the proxy, but without clients needing to maintain separate """ - routes = await self.proxy.get_all_routes() - self.write(json.dumps(routes)) + offset, limit = self.get_api_pagination() - @admin_only + all_routes = await self.proxy.get_all_routes() + + if offset == 0 and len(all_routes) < limit: + routes = all_routes + else: + routes = {} + end = offset + limit + for i, key in enumerate(sorted(all_routes.keys())): + if i < offset: + continue + elif i >= end: + break + routes[key] = all_routes[key] + + if self.accepts_pagination: + data = self.paginated_model(routes, offset, limit, len(all_routes)) + else: + data = routes + + self.write(json.dumps(data)) + + @needs_scope('proxy') async def post(self): """POST checks the proxy to ensure that it's up to date. 
@@ -32,7 +49,7 @@ class ProxyAPIHandler(APIHandler): """ await self.proxy.check_routes(self.users, self.services) - @admin_only + @needs_scope('proxy') async def patch(self): """PATCH updates the location of the proxy diff --git a/jupyterhub/apihandlers/services.py b/jupyterhub/apihandlers/services.py index b8a98d51..43e69aa7 100644 --- a/jupyterhub/apihandlers/services.py +++ b/jupyterhub/apihandlers/services.py @@ -6,59 +6,27 @@ Currently GET-only, no actions can be taken to modify services. # Distributed under the terms of the Modified BSD License. import json -from tornado import web - -from .. import orm -from ..utils import admin_only +from ..scopes import Scope, needs_scope from .base import APIHandler -def service_model(service): - """Produce the model for a service""" - return { - 'name': service.name, - 'admin': service.admin, - 'url': service.url, - 'prefix': service.server.base_url if service.server else '', - 'command': service.command, - 'pid': service.proc.pid if service.proc else 0, - 'info': service.info, - } - - class ServiceListAPIHandler(APIHandler): - @admin_only + @needs_scope('list:services') def get(self): - data = {name: service_model(service) for name, service in self.services.items()} + data = {} + service_scope = self.parsed_scopes['list:services'] + for name, service in self.services.items(): + if service_scope == Scope.ALL or name in service_scope.get("service", {}): + model = self.service_model(service) + data[name] = model self.write(json.dumps(data)) -def admin_or_self(method): - """Decorator for restricting access to either the target service or admin""" - - def decorated_method(self, name): - current = self.current_user - if current is None: - raise web.HTTPError(403) - if not current.admin: - # not admin, maybe self - if not isinstance(current, orm.Service): - raise web.HTTPError(403) - if current.name != name: - raise web.HTTPError(403) - # raise 404 if not found - if name not in self.services: - raise web.HTTPError(404) - 
return method(self, name) - - return decorated_method - - class ServiceAPIHandler(APIHandler): - @admin_or_self - def get(self, name): - service = self.services[name] - self.write(json.dumps(service_model(service))) + @needs_scope('read:services', 'read:services:name', 'read:roles:services') + def get(self, service_name): + service = self.services[service_name] + self.write(json.dumps(self.service_model(service))) default_handlers = [ diff --git a/jupyterhub/apihandlers/users.py b/jupyterhub/apihandlers/users.py index 1c633723..ac38345c 100644 --- a/jupyterhub/apihandlers/users.py +++ b/jupyterhub/apihandlers/users.py @@ -3,22 +3,25 @@ # Distributed under the terms of the Modified BSD License. import asyncio import json -from datetime import datetime -from datetime import timedelta -from datetime import timezone +from datetime import datetime, timedelta, timezone from async_generator import aclosing from dateutil.parser import parse as parse_date +from sqlalchemy import func, or_ from tornado import web from tornado.iostream import StreamClosedError -from .. import orm +from .. 
import orm, scopes +from ..roles import assign_default_roles +from ..scopes import needs_scope from ..user import User -from ..utils import admin_only -from ..utils import isoformat -from ..utils import iterate_until -from ..utils import maybe_future -from ..utils import url_path_join +from ..utils import ( + isoformat, + iterate_until, + maybe_future, + url_escape_path, + url_path_join, +) from .base import APIHandler @@ -30,24 +33,148 @@ class SelfAPIHandler(APIHandler): async def get(self): user = self.current_user - if user is None: - # whoami can be accessed via oauth token - user = self.get_current_user_oauth_token() if user is None: raise web.HTTPError(403) - self.write(json.dumps(self.user_model(user))) + + _added_scopes = set() + if isinstance(user, orm.Service): + # ensure we have the minimal 'identify' scopes for the token owner + identify_scopes = scopes.identify_scopes(user) + get_model = self.service_model + else: + identify_scopes = scopes.identify_scopes(user.orm_user) + get_model = self.user_model + + # ensure we have permission to identify ourselves + # all tokens can do this on this endpoint + for scope in identify_scopes: + if scope not in self.expanded_scopes: + _added_scopes.add(scope) + self.expanded_scopes |= {scope} + if _added_scopes: + # re-parse with new scopes + self.parsed_scopes = scopes.parse_scopes(self.expanded_scopes) + + model = get_model(user) + + # add session_id associated with token + # added in 2.0 + token = self.get_token() + if token: + model["session_id"] = token.session_id + else: + model["session_id"] = None + + # add scopes to identify model, + # but not the scopes we added to ensure we could read our own model + model["scopes"] = sorted(self.expanded_scopes.difference(_added_scopes)) + self.write(json.dumps(model)) class UserListAPIHandler(APIHandler): - @admin_only + def _user_has_ready_spawner(self, orm_user): + """Return True if a user has *any* ready spawners + + Used for filtering from active -> ready + """ + 
user = self.users[orm_user] + return any(spawner.ready for spawner in user.spawners.values()) + + @needs_scope('list:users') def get(self): - data = [ - self.user_model(u, include_servers=True, include_state=True) - for u in self.db.query(orm.User) - ] + state_filter = self.get_argument("state", None) + name_filter = self.get_argument("name_filter", None) + offset, limit = self.get_api_pagination() + + # post_filter + post_filter = None + + if state_filter in {"active", "ready"}: + # only get users with active servers + # an 'active' Spawner has a server record in the database + # which means Spawner.server != None + # it may still be in a pending start/stop state. + # join filters out users with no Spawners + query = ( + self.db.query(orm.User) + # join filters out any Users with no Spawners + .join(orm.Spawner) + # this implicitly gets Users with *any* active server + .filter(orm.Spawner.server != None) + ) + if state_filter == "ready": + # have to post-process query results because active vs ready + # can only be distinguished with in-memory Spawner properties + post_filter = self._user_has_ready_spawner + + elif state_filter == "inactive": + # only get users with *no* active servers + # as opposed to users with *any inactive servers* + # this is the complement to the above query. + # how expensive is this with lots of servers? + query = ( + self.db.query(orm.User) + .outerjoin(orm.Spawner) + .outerjoin(orm.Server) + .group_by(orm.User.id) + .having(func.count(orm.Server.id) == 0) + ) + elif state_filter: + raise web.HTTPError(400, "Unrecognized state filter: %r" % state_filter) + else: + # no filter, return all users + query = self.db.query(orm.User) + + sub_scope = self.parsed_scopes['list:users'] + if sub_scope != scopes.Scope.ALL: + if not set(sub_scope).issubset({'group', 'user'}): + # don't expand invalid !server=x filter to all users! 
+ self.log.warning( + f"Invalid filter on list:user for {self.current_user}: {sub_scope}" + ) + raise web.HTTPError(403) + filters = [] + if 'user' in sub_scope: + filters.append(orm.User.name.in_(sub_scope['user'])) + if 'group' in sub_scope: + filters.append( + orm.User.groups.any( + orm.Group.name.in_(sub_scope['group']), + ) + ) + + if len(filters) == 1: + query = query.filter(filters[0]) + else: + query = query.filter(or_(*filters)) + + if name_filter: + query = query.filter(orm.User.name.ilike(f'%{name_filter}%')) + + full_query = query + query = query.order_by(orm.User.id.asc()).offset(offset).limit(limit) + + user_list = [] + for u in query: + if post_filter is None or post_filter(u): + user_model = self.user_model(u) + if user_model: + user_list.append(user_model) + + total_count = full_query.count() + if self.accepts_pagination: + data = self.paginated_model(user_list, offset, limit, total_count) + else: + query_count = query.count() + if offset == 0 and total_count > query_count: + self.log.warning( + f"Truncated user list in request that does not expect pagination. Processing {query_count} of {total_count} total users." 
+ ) + data = user_list + self.write(json.dumps(data)) - @admin_only + @needs_scope('admin:users') async def post(self): data = self.get_json_body() if not data or not isinstance(data, dict) or not data.get('usernames'): @@ -87,15 +214,14 @@ class UserListAPIHandler(APIHandler): user = self.user_from_username(name) if admin: user.admin = True - self.db.commit() + assign_default_roles(self.db, entity=user) + self.db.commit() try: await maybe_future(self.authenticator.add_user(user)) except Exception as e: self.log.error("Failed to create user: %s" % name, exc_info=True) self.users.delete(user) - raise web.HTTPError( - 400, "Failed to create user %s: %s" % (name, str(e)) - ) + raise web.HTTPError(400, f"Failed to create user {name}: {e}") else: created.append(user) @@ -103,98 +229,92 @@ class UserListAPIHandler(APIHandler): self.set_status(201) -def admin_or_self(method): - """Decorator for restricting access to either the target user or admin""" - - def m(self, name, *args, **kwargs): - current = self.current_user - if current is None: - raise web.HTTPError(403) - if not (current.name == name or current.admin): - raise web.HTTPError(403) - - # raise 404 if not found - if not self.find_user(name): - raise web.HTTPError(404) - return method(self, name, *args, **kwargs) - - return m - - class UserAPIHandler(APIHandler): - @admin_or_self - async def get(self, name): - user = self.find_user(name) - model = self.user_model( - user, include_servers=True, include_state=self.current_user.admin - ) + @needs_scope( + 'read:users', + 'read:users:name', + 'read:servers', + 'read:users:groups', + 'read:users:activity', + 'read:roles:users', + ) + async def get(self, user_name): + user = self.find_user(user_name) + if user is None: + raise web.HTTPError(404) + model = self.user_model(user) # auth state will only be shown if the requester is an admin # this means users can't see their own auth state unless they # are admins, Hub admins often are also marked as admins so they # will 
see their auth state but normal users won't - requester = self.current_user - if requester.admin: + if 'auth_state' in model: model['auth_state'] = await user.get_auth_state() self.write(json.dumps(model)) - @admin_only - async def post(self, name): + @needs_scope('admin:users') + async def post(self, user_name): data = self.get_json_body() - user = self.find_user(name) + user = self.find_user(user_name) if user is not None: - raise web.HTTPError(409, "User %s already exists" % name) + raise web.HTTPError(409, "User %s already exists" % user_name) - user = self.user_from_username(name) + user = self.user_from_username(user_name) if data: self._check_user_model(data) if 'admin' in data: user.admin = data['admin'] - self.db.commit() + assign_default_roles(self.db, entity=user) + self.db.commit() try: await maybe_future(self.authenticator.add_user(user)) except Exception: - self.log.error("Failed to create user: %s" % name, exc_info=True) + self.log.error("Failed to create user: %s" % user_name, exc_info=True) # remove from registry self.users.delete(user) - raise web.HTTPError(400, "Failed to create user: %s" % name) + raise web.HTTPError(400, "Failed to create user: %s" % user_name) self.write(json.dumps(self.user_model(user))) self.set_status(201) - @admin_only - async def delete(self, name): - user = self.find_user(name) + @needs_scope('delete:users') + async def delete(self, user_name): + user = self.find_user(user_name) if user is None: raise web.HTTPError(404) if user.name == self.current_user.name: raise web.HTTPError(400, "Cannot delete yourself!") if user.spawner._stop_pending: raise web.HTTPError( - 400, "%s's server is in the process of stopping, please wait." % name + 400, + "%s's server is in the process of stopping, please wait." % user_name, ) if user.running: await self.stop_single_user(user) if user.spawner._stop_pending: raise web.HTTPError( 400, - "%s's server is in the process of stopping, please wait." 
% name, + "%s's server is in the process of stopping, please wait." + % user_name, ) await maybe_future(self.authenticator.delete_user(user)) + + await user.delete_spawners() + # remove from registry self.users.delete(user) self.set_status(204) - @admin_only - async def patch(self, name): - user = self.find_user(name) + @needs_scope('admin:users') + async def patch(self, user_name): + user = self.find_user(user_name) if user is None: raise web.HTTPError(404) data = self.get_json_body() self._check_user_model(data) - if 'name' in data and data['name'] != name: + if 'name' in data and data['name'] != user_name: # check if the new name is already taken inside db if self.find_user(data['name']): raise web.HTTPError( @@ -206,6 +326,8 @@ class UserAPIHandler(APIHandler): await user.save_auth_state(value) else: setattr(user, key, value) + if key == 'admin': + assign_default_roles(self.db, entity=user) self.db.commit() user_ = self.user_model(user) user_['auth_state'] = await user.get_auth_state() @@ -215,15 +337,14 @@ class UserAPIHandler(APIHandler): class UserTokenListAPIHandler(APIHandler): """API endpoint for listing/creating tokens""" - @admin_or_self - def get(self, name): + @needs_scope('read:tokens') + def get(self, user_name): """Get tokens for a given user""" - user = self.find_user(name) + user = self.find_user(user_name) if not user: - raise web.HTTPError(404, "No such user: %s" % name) + raise web.HTTPError(404, "No such user: %s" % user_name) now = datetime.utcnow() - api_tokens = [] def sort_key(token): @@ -237,19 +358,9 @@ class UserTokenListAPIHandler(APIHandler): continue api_tokens.append(self.token_model(token)) - oauth_tokens = [] - # OAuth tokens use integer timestamps - now_timestamp = now.timestamp() - for token in sorted(user.oauth_tokens, key=sort_key): - if token.expires_at and token.expires_at < now_timestamp: - # exclude expired tokens - self.db.delete(token) - self.db.commit() - continue - oauth_tokens.append(self.token_model(token)) - 
self.write(json.dumps({'api_tokens': api_tokens, 'oauth_tokens': oauth_tokens})) + self.write(json.dumps({'api_tokens': api_tokens})) - async def post(self, name): + async def post(self, user_name): body = self.get_json_body() or {} if not isinstance(body, dict): raise web.HTTPError(400, "Body must be a JSON dict or empty") @@ -277,23 +388,35 @@ class UserTokenListAPIHandler(APIHandler): if requester is None: # couldn't identify requester raise web.HTTPError(403) - user = self.find_user(name) - if requester is not user and not requester.admin: - raise web.HTTPError(403, "Only admins can request tokens for other users") - if not user: - raise web.HTTPError(404, "No such user: %s" % name) - if requester is not user: - kind = 'user' if isinstance(requester, User) else 'service' + self._jupyterhub_user = requester + self._resolve_roles_and_scopes() + user = self.find_user(user_name) + kind = 'user' if isinstance(requester, User) else 'service' + scope_filter = self.get_scope_filter('tokens') + if user is None or not scope_filter(user, kind): + raise web.HTTPError( + 403, + f"{kind.title()} {user_name} not found or no permissions to generate tokens", + ) note = body.get('note') if not note: note = "Requested via api" if requester is not user: - note += " by %s %s" % (kind, requester.name) + note += f" by {kind} {requester.name}" - api_token = user.new_api_token( - note=note, expires_in=body.get('expires_in', None) - ) + token_roles = body.get("roles") + token_scopes = body.get("scopes") + + try: + api_token = user.new_api_token( + note=note, + expires_in=body.get('expires_in', None), + roles=token_roles, + scopes=token_scopes, + ) + except ValueError as e: + raise web.HTTPError(400, str(e)) if requester is not user: self.log.info( "%s %s requested API token for %s", @@ -308,6 +431,7 @@ class UserTokenListAPIHandler(APIHandler): token_model = self.token_model(orm.APIToken.find(self.db, api_token)) token_model['token'] = api_token self.write(json.dumps(token_model)) + 
self.set_status(201) class UserTokenAPIHandler(APIHandler): @@ -319,45 +443,41 @@ class UserTokenAPIHandler(APIHandler): Raises 404 if not found for any reason (e.g. wrong owner, invalid key format, etc.) """ - not_found = "No such token %s for user %s" % (token_id, user.name) - prefix, id = token_id[0], token_id[1:] - if prefix == 'a': - Token = orm.APIToken - elif prefix == 'o': - Token = orm.OAuthAccessToken - else: + not_found = f"No such token {token_id} for user {user.name}" + prefix, id_ = token_id[:1], token_id[1:] + if prefix != 'a': raise web.HTTPError(404, not_found) try: - id = int(id) + id_ = int(id_) except ValueError: raise web.HTTPError(404, not_found) - orm_token = self.db.query(Token).filter(Token.id == id).first() + orm_token = self.db.query(orm.APIToken).filter_by(id=id_).first() if orm_token is None or orm_token.user is not user.orm_user: raise web.HTTPError(404, "Token not found %s", orm_token) return orm_token - @admin_or_self - def get(self, name, token_id): + @needs_scope('read:tokens') + def get(self, user_name, token_id): """""" - user = self.find_user(name) + user = self.find_user(user_name) if not user: - raise web.HTTPError(404, "No such user: %s" % name) + raise web.HTTPError(404, "No such user: %s" % user_name) token = self.find_token_by_id(user, token_id) self.write(json.dumps(self.token_model(token))) - @admin_or_self - def delete(self, name, token_id): + @needs_scope('tokens') + def delete(self, user_name, token_id): """Delete a token""" - user = self.find_user(name) + user = self.find_user(user_name) if not user: - raise web.HTTPError(404, "No such user: %s" % name) + raise web.HTTPError(404, "No such user: %s" % user_name) token = self.find_token_by_id(user, token_id) # deleting an oauth token deletes *all* oauth tokens for that client - if isinstance(token, orm.OAuthAccessToken): - client_id = token.client_id + client_id = token.client_id + if token.client_id != "jupyterhub": tokens = [ - token for token in user.oauth_tokens if 
token.client_id == client_id + token for token in user.api_tokens if token.client_id == client_id ] else: tokens = [token] @@ -371,33 +491,40 @@ class UserTokenAPIHandler(APIHandler): class UserServerAPIHandler(APIHandler): """Start and stop single-user servers""" - @admin_or_self - async def post(self, name, server_name=''): - user = self.find_user(name) + @needs_scope('servers') + async def post(self, user_name, server_name=''): + user = self.find_user(user_name) + if user is None: + # this can be reached if a token has `servers` + # permission on *all* users + raise web.HTTPError(404) + if server_name: if not self.allow_named_servers: raise web.HTTPError(400, "Named servers are not enabled.") - if ( - self.named_server_limit_per_user > 0 - and server_name not in user.orm_spawners - ): + + named_server_limit_per_user = ( + await self.get_current_user_named_server_limit() + ) + + if named_server_limit_per_user > 0 and server_name not in user.orm_spawners: named_spawners = list(user.all_spawners(include_default=False)) - if self.named_server_limit_per_user <= len(named_spawners): + if named_server_limit_per_user <= len(named_spawners): raise web.HTTPError( 400, "User {} already has the maximum of {} named servers." 
" One must be deleted before a new server can be created".format( - name, self.named_server_limit_per_user + user_name, named_server_limit_per_user ), ) - spawner = user.spawners[server_name] + spawner = user.get_spawner(server_name, replace_failed=True) pending = spawner.pending if pending == 'spawn': self.set_header('Content-Type', 'text/plain') self.set_status(202) return elif pending: - raise web.HTTPError(400, "%s is pending %s" % (spawner._log_name, pending)) + raise web.HTTPError(400, f"{spawner._log_name} is pending {pending}") if spawner.ready: # include notify, so that a server that died is noticed immediately @@ -416,16 +543,24 @@ class UserServerAPIHandler(APIHandler): self.set_header('Content-Type', 'text/plain') self.set_status(status) - @admin_or_self - async def delete(self, name, server_name=''): - user = self.find_user(name) + @needs_scope('delete:servers') + async def delete(self, user_name, server_name=''): + user = self.find_user(user_name) options = self.get_json_body() remove = (options or {}).get('remove', False) - def _remove_spawner(f=None): - if f and f.exception(): - return + async def _remove_spawner(f=None): + """Remove the spawner object + + only called after it stops successfully + """ + if f: + # await f, stop on error, + # leaving resources in the db in case of failure to stop + await f self.log.info("Deleting spawner %s", spawner._log_name) + await maybe_future(user._delete_spawner(spawner)) + self.db.delete(spawner.orm_spawner) user.spawners.pop(server_name, None) self.db.commit() @@ -435,7 +570,7 @@ class UserServerAPIHandler(APIHandler): raise web.HTTPError(400, "Named servers are not enabled.") if server_name not in user.orm_spawners: raise web.HTTPError( - 404, "%s has no server named '%s'" % (name, server_name) + 404, f"{user_name} has no server named '{server_name}'" ) elif remove: raise web.HTTPError(400, "Cannot delete the default server") @@ -446,13 +581,14 @@ class UserServerAPIHandler(APIHandler): 
self.set_header('Content-Type', 'text/plain') self.set_status(202) if remove: - spawner._stop_future.add_done_callback(_remove_spawner) + # schedule remove when stop completes + asyncio.ensure_future(_remove_spawner(spawner._stop_future)) return if spawner.pending: raise web.HTTPError( 400, - "%s is pending %s, please wait" % (spawner._log_name, spawner.pending), + f"{spawner._log_name} is pending {spawner.pending}, please wait", ) stop_future = None @@ -464,9 +600,10 @@ class UserServerAPIHandler(APIHandler): if remove: if stop_future: - stop_future.add_done_callback(_remove_spawner) + # schedule remove when stop completes + asyncio.ensure_future(_remove_spawner(spawner._stop_future)) else: - _remove_spawner() + await _remove_spawner() status = 202 if spawner._stop_pending else 204 self.set_header('Content-Type', 'text/plain') @@ -479,19 +616,19 @@ class UserAdminAccessAPIHandler(APIHandler): This handler sets the necessary cookie for an admin to login to a single-user server. """ - @admin_only - def post(self, name): + @needs_scope('servers') + def post(self, user_name): self.log.warning( "Deprecated in JupyterHub 0.8." " Admin access API is not needed now that we use OAuth." 
) current = self.current_user self.log.warning( - "Admin user %s has requested access to %s's server", current.name, name + "Admin user %s has requested access to %s's server", current.name, user_name ) if not self.settings.get('admin_access', False): raise web.HTTPError(403, "admin access to user servers disabled") - user = self.find_user(name) + user = self.find_user(user_name) if user is None: raise web.HTTPError(404) @@ -506,7 +643,7 @@ class SpawnProgressAPIHandler(APIHandler): async def send_event(self, event): try: - self.write('data: {}\n\n'.format(json.dumps(event))) + self.write(f'data: {json.dumps(event)}\n\n') await self.flush() except StreamClosedError: self.log.warning("Stream closed while handling %s", self.request.uri) @@ -535,12 +672,12 @@ class SpawnProgressAPIHandler(APIHandler): await asyncio.wait([self._finish_future], timeout=self.keepalive_interval) - @admin_or_self - async def get(self, username, server_name=''): + @needs_scope('read:servers') + async def get(self, user_name, server_name=''): self.set_header('Cache-Control', 'no-cache') if server_name is None: server_name = '' - user = self.find_user(username) + user = self.find_user(user_name) if user is None: # no such user raise web.HTTPError(404) @@ -556,11 +693,11 @@ class SpawnProgressAPIHandler(APIHandler): # - spawner not running at all # - spawner failed # - spawner pending start (what we expect) - url = url_path_join(user.url, server_name, '/') + url = url_path_join(user.url, url_escape_path(server_name), '/') ready_event = { 'progress': 100, 'ready': True, - 'message': "Server ready at {}".format(url), + 'message': f"Server ready at {url}", 'html_message': 'Server ready at {0}'.format(url), 'url': url, } @@ -579,7 +716,12 @@ class SpawnProgressAPIHandler(APIHandler): # check if spawner has just failed f = spawn_future if f and f.done() and f.exception(): - failed_event['message'] = "Spawn failed: %s" % f.exception() + exc = f.exception() + message = getattr(exc, 
"jupyterhub_message", str(exc)) + failed_event['message'] = f"Spawn failed: {message}" + html_message = getattr(exc, "jupyterhub_html_message", "") + if html_message: + failed_event['html_message'] = html_message await self.send_event(failed_event) return else: @@ -589,11 +731,14 @@ class SpawnProgressAPIHandler(APIHandler): async with aclosing( iterate_until(spawn_future, spawner._generate_progress()) ) as events: - async for event in events: - # don't allow events to sneakily set the 'ready' flag - if 'ready' in event: - event.pop('ready', None) - await self.send_event(event) + try: + async for event in events: + # don't allow events to sneakily set the 'ready' flag + if 'ready' in event: + event.pop('ready', None) + await self.send_event(event) + except asyncio.CancelledError: + pass # progress finished, wait for spawn to actually resolve, # in case progress finished early @@ -609,7 +754,12 @@ class SpawnProgressAPIHandler(APIHandler): # what happened? Maybe spawn failed? f = spawn_future if f and f.done() and f.exception(): - failed_event['message'] = "Spawn failed: %s" % f.exception() + exc = f.exception() + message = getattr(exc, "jupyterhub_message", str(exc)) + failed_event['message'] = f"Spawn failed: {message}" + html_message = getattr(exc, "jupyterhub_html_message", "") + if html_message: + failed_event['html_message'] = html_message else: self.log.warning( "Server %s didn't start for unknown reason", spawner._log_name @@ -622,14 +772,14 @@ def _parse_timestamp(timestamp): - raise HTTPError(400) on parse error - handle and strip tz info for internal consistency - (we use naïve utc timestamps everywhere) + (we use naive utc timestamps everywhere) """ try: dt = parse_date(timestamp) except Exception: raise web.HTTPError(400, "Not a valid timestamp: %r", timestamp) if dt.tzinfo: - # strip timezone info to naïve UTC datetime + # strip timezone info to naive UTC datetime dt = dt.astimezone(timezone.utc).replace(tzinfo=None) now = datetime.utcnow() @@ -660,7 
+810,7 @@ class ActivityAPIHandler(APIHandler): if server_name not in spawners: raise web.HTTPError( 400, - "No such server '{}' for user {}".format(server_name, user.name), + f"No such server '{server_name}' for user {user.name}", ) # check that each per-server field is a dict if not isinstance(server_info, dict): @@ -675,12 +825,12 @@ class ActivityAPIHandler(APIHandler): ) return servers - @admin_or_self - def post(self, username): - user = self.find_user(username) + @needs_scope('users:activity') + def post(self, user_name): + user = self.find_user(user_name) if user is None: # no such user - raise web.HTTPError(404, "No such user: %r", username) + raise web.HTTPError(404, "No such user: %r", user_name) body = self.get_json_body() if not isinstance(body, dict): diff --git a/jupyterhub/app.py b/jupyterhub/app.py index 5deb1181..dda1f2d6 100644 --- a/jupyterhub/app.py +++ b/jupyterhub/app.py @@ -8,89 +8,89 @@ import binascii import logging import os import re +import secrets import signal import socket +import ssl import sys +import time from concurrent.futures import ThreadPoolExecutor -from datetime import datetime -from datetime import timezone +from datetime import datetime, timedelta, timezone from functools import partial from getpass import getuser from operator import itemgetter from textwrap import dedent -from urllib.parse import unquote -from urllib.parse import urlparse -from urllib.parse import urlunparse +from urllib.parse import unquote, urlparse, urlunparse if sys.version_info[:2] < (3, 3): raise ValueError("Python < 3.3 not supported: %s" % sys.version) - -from dateutil.parser import parse as parse_date -from jinja2 import Environment, FileSystemLoader, PrefixLoader, ChoiceLoader -from sqlalchemy.exc import OperationalError, SQLAlchemyError - -from tornado.httpclient import AsyncHTTPClient import tornado.httpserver -from tornado.ioloop import IOLoop, PeriodicCallback -from tornado.log import app_log, access_log, gen_log import tornado.options 
+from dateutil.parser import parse as parse_date +from jinja2 import ChoiceLoader, Environment, FileSystemLoader, PrefixLoader +from jupyter_telemetry.eventlog import EventLog +from sqlalchemy.exc import OperationalError, SQLAlchemyError from tornado import gen, web -from tornado.platform.asyncio import AsyncIOMainLoop - +from tornado.httpclient import AsyncHTTPClient +from tornado.ioloop import IOLoop, PeriodicCallback +from tornado.log import access_log, app_log, gen_log from traitlets import ( - Unicode, - Integer, - Dict, - TraitError, - List, - Bool, Any, - Tuple, - Type, - Set, - Instance, + Bool, Bytes, + Dict, Float, - observe, + Instance, + Integer, + List, + Set, + Tuple, + Unicode, + Union, default, + observe, + validate, ) from traitlets.config import Application, Configurable, catch_config_error here = os.path.dirname(__file__) import jupyterhub -from . import handlers, apihandlers -from .handlers.static import CacheControlStaticFilesHandler, LogoHandler -from .services.service import Service -from . import crypto -from . import dbutil, orm -from .user import UserDict -from .oauth.provider import make_provider +from . 
import apihandlers, crypto, dbutil, handlers, orm, roles, scopes from ._data import DATA_FILES_PATH -from .log import CoroutineLogFormatter, log_request -from .proxy import Proxy, ConfigurableHTTPProxy -from .traitlets import URLPrefix, Command, EntryPointType -from .utils import ( - maybe_future, - url_path_join, - print_stacks, - print_ps_info, - make_ssl_context, -) -from .metrics import RUNNING_SERVERS -from .metrics import TOTAL_USERS # classes for config from .auth import Authenticator, PAMAuthenticator from .crypto import CryptKeeper -from .spawner import Spawner, LocalProcessSpawner -from .objects import Hub, Server # For faking stats from .emptyclass import EmptyClass - +from .handlers.static import CacheControlStaticFilesHandler, LogoHandler +from .log import CoroutineLogFormatter, log_request +from .metrics import ( + HUB_STARTUP_DURATION_SECONDS, + INIT_SPAWNERS_DURATION_SECONDS, + RUNNING_SERVERS, + TOTAL_USERS, +) +from .oauth.provider import make_provider +from .objects import Hub, Server +from .proxy import ConfigurableHTTPProxy, Proxy +from .services.service import Service +from .spawner import LocalProcessSpawner, Spawner +from .traitlets import Callable, Command, EntryPointType, URLPrefix +from .user import UserDict +from .utils import ( + AnyTimeoutError, + catch_db_error, + make_ssl_context, + maybe_future, + print_ps_info, + print_stacks, + url_path_join, +) common_aliases = { 'log-level': 'Application.log_level', @@ -98,7 +98,8 @@ common_aliases = { 'config': 'JupyterHub.config_file', 'db': 'JupyterHub.db_url', } - +if isinstance(Application.aliases, dict): + common_aliases.update(Application.aliases) aliases = { 'base-url': 'JupyterHub.base_url', @@ -115,7 +116,10 @@ token_aliases = {} token_aliases.update(common_aliases) aliases.update(common_aliases) -flags = { +flags = {} +if isinstance(Application.flags, dict): + flags.update(Application.flags) +hub_flags = { 'debug': ( {'Application': {'log_level': logging.DEBUG}}, "set log level to 
logging.DEBUG (maximize logging output)", @@ -145,10 +149,11 @@ flags = { "[DEPRECATED in 0.7: does nothing]", ), } +flags.update(hub_flags) COOKIE_SECRET_BYTES = ( - 32 -) # the number of bytes to use when generating new cookie secrets + 32 # the number of bytes to use when generating new cookie secrets +) HEX_RE = re.compile('^([a-f0-9]{2})+$', re.IGNORECASE) @@ -195,11 +200,12 @@ class NewToken(Application): hub.load_config_file(hub.config_file) hub.init_db() - def init_users(): + def init_roles_and_users(): loop = asyncio.new_event_loop() + loop.run_until_complete(hub.init_role_creation()) loop.run_until_complete(hub.init_users()) - ThreadPoolExecutor(1).submit(init_users).result() + ThreadPoolExecutor(1).submit(init_roles_and_users).result() user = orm.User.find(hub.db, self.name) if user is None: print("No such user: %s" % self.name, file=sys.stderr) @@ -302,9 +308,70 @@ class JupyterHub(Application): """, ).tag(config=True) + load_roles = List( + Dict(), + help="""List of predefined role dictionaries to load at startup. + + For instance:: + + load_roles = [ + { + 'name': 'teacher', + 'description': 'Access to users' information and group membership', + 'scopes': ['users', 'groups'], + 'users': ['cyclops', 'gandalf'], + 'services': [], + 'groups': [] + } + ] + + All keys apart from 'name' are optional. + See all the available scopes in the JupyterHub REST API documentation. + + Default roles are defined in roles.py. + + """, + ).tag(config=True) + + custom_scopes = Dict( + key_trait=Unicode(), + value_trait=Dict( + key_trait=Unicode(), + ), + help="""Custom scopes to define. + + For use when defining custom roles, + to grant users granular permissions + + All custom scopes must have a description, + and must start with the prefix `custom:`. 
+ + For example:: + + custom_scopes = { + "custom:jupyter_server:read": { + "description": "read-only access to a single-user server", + }, + } + """, + ).tag(config=True) + config_file = Unicode('jupyterhub_config.py', help="The config file to load").tag( config=True ) + + @validate("config_file") + def _validate_config_file(self, proposal): + if not self.generate_config and not os.path.isfile(proposal.value): + print( + "ERROR: Failed to find specified config file: {}".format( + proposal.value + ), + file=sys.stderr, + ) + sys.exit(1) + return proposal.value + generate_config = Bool(False, help="Generate default config file").tag(config=True) generate_certs = Bool(False, help="Generate certs used for internal ssl").tag( config=True @@ -324,6 +391,42 @@ class JupyterHub(Application): Default is two weeks. """, ).tag(config=True) + + oauth_token_expires_in = Integer( + help="""Expiry (in seconds) of OAuth access tokens. + + The default is to expire when the cookie storing them expires, + according to `cookie_max_age_days` config. + + These are the tokens stored in cookies when you visit + a single-user server or service. + When they expire, you must re-authenticate with the Hub, + even if your Hub authentication is still valid. + If your Hub authentication is valid, + logging in may be a transparent redirect as you refresh the page. + + This does not affect JupyterHub API tokens in general, + which do not expire by default. + Only tokens issued during the oauth flow + accessing services and single-user servers are affected. + + .. versionadded:: 1.4 + OAuth token expires_in was not previously configurable. + .. versionchanged:: 1.4 + Default now uses cookie_max_age_days so that oauth tokens + which are generally stored in cookies, + expire when the cookies storing them expire. + Previously, it was one hour. 
+ """, + config=True, + ) + + @default("oauth_token_expires_in") + def _cookie_max_age_seconds(self): + """default to cookie max age, where these tokens are stored""" + # convert cookie max age days to seconds + return int(self.cookie_max_age_days * 24 * 3600) + redirect_to_server = Bool( True, help="Redirect user to server (if running), instead of control panel." ).tag(config=True) @@ -343,7 +446,8 @@ class JupyterHub(Application): 300, help="Interval (in seconds) at which to update last-activity timestamps." ).tag(config=True) proxy_check_interval = Integer( - 30, help="Interval (in seconds) at which to check if the proxy is running." + 5, + help="DEPRECATED since version 0.8: Use ConfigurableHTTPProxy.check_running_interval", ).tag(config=True) service_check_interval = Integer( 60, @@ -540,10 +644,23 @@ class JupyterHub(Application): def _url_part_changed(self, change): """propagate deprecated ip/port/base_url config to the bind_url""" urlinfo = urlparse(self.bind_url) - urlinfo = urlinfo._replace(netloc='%s:%i' % (self.ip, self.port)) + if ':' in self.ip: + fmt = '[%s]:%i' + else: + fmt = '%s:%i' + urlinfo = urlinfo._replace(netloc=fmt % (self.ip, self.port)) urlinfo = urlinfo._replace(path=self.base_url) bind_url = urlunparse(urlinfo) + + # Warn if both bind_url and ip/port/base_url are set if bind_url != self.bind_url: + if self.bind_url != self._bind_url_default(): + self.log.warning( + "Both bind_url and ip/port/base_url have been configured. " + "JupyterHub.ip, JupyterHub.port, JupyterHub.base_url are" + " deprecated in JupyterHub 0.9," + " please use JupyterHub.bind_url instead." 
+ ) self.bind_url = bind_url bind_url = Unicode( @@ -555,6 +672,22 @@ class JupyterHub(Application): """, ).tag(config=True) + @validate('bind_url') + def _validate_bind_url(self, proposal): + """ensure protocol field of bind_url matches ssl""" + v = proposal['value'] + proto, sep, rest = v.partition('://') + if self.ssl_cert and proto != 'https': + return 'https' + sep + rest + elif proto != 'http' and not self.ssl_cert: + return 'http' + sep + rest + return v + + @default('bind_url') + def _bind_url_default(self): + proto = 'https' if self.ssl_cert else 'http' + return proto + '://:8000' + subdomain_host = Unicode( '', help="""Run single-user servers on subdomains of this host. @@ -571,11 +704,14 @@ class JupyterHub(Application): """, ).tag(config=True) - def _subdomain_host_changed(self, name, old, new): + @validate("subdomain_host") + def _validate_subdomain_host(self, proposal): + new = proposal.value if new and '://' not in new: # host should include '://' # if not specified, assume https: You have to be really explicit about HTTP! - self.subdomain_host = 'https://' + new + new = 'https://' + new + return new domain = Unicode(help="domain name, e.g. 'example.com' (excludes protocol, port)") @@ -592,7 +728,9 @@ class JupyterHub(Application): @default('logo_file') def _logo_file_default(self): - return os.path.join(self.data_files_path, 'static', 'images', 'jupyter.png') + return os.path.join( + self.data_files_path, 'static', 'images', 'jupyterhub-80.png' + ) jinja_environment_options = Dict( help="Supply extra arguments that will be passed to Jinja environment." 
@@ -626,6 +764,7 @@ class JupyterHub(Application): ).tag(config=True) _proxy_config_map = { + 'proxy_check_interval': 'check_running_interval', 'proxy_cmd': 'command', 'debug_proxy': 'debug', 'proxy_auth_token': 'auth_token', @@ -658,6 +797,16 @@ class JupyterHub(Application): self.proxy_api_ip or '127.0.0.1', self.proxy_api_port or self.port + 1 ) + forwarded_host_header = Unicode( + '', + help="""Alternate header to use as the Host (e.g., X-Forwarded-Host) + when determining whether a request is cross-origin + + This may be useful when JupyterHub is running behind a proxy that rewrites + the Host header. + """, + ).tag(config=True) + hub_port = Integer( 8081, help="""The internal port for the Hub process. @@ -688,10 +837,10 @@ class JupyterHub(Application): help="""The ip or hostname for proxies and spawners to use for connecting to the Hub. - Use when the bind address (`hub_ip`) is 0.0.0.0 or otherwise different + Use when the bind address (`hub_ip`) is 0.0.0.0, :: or otherwise different from the connect address. - Default: when `hub_ip` is 0.0.0.0, use `socket.gethostname()`, otherwise use `hub_ip`. + Default: when `hub_ip` is 0.0.0.0 or ::, use `socket.gethostname()`, otherwise use `hub_ip`. Note: Some spawners or proxy implementations might not support hostnames. Check your spawner or proxy documentation to see if they have extra requirements. @@ -757,6 +906,66 @@ class JupyterHub(Application): def _hub_prefix_default(self): return url_path_join(self.base_url, '/hub/') + hub_routespec = Unicode( + "/", + help=""" + The routing prefix for the Hub itself. + + Override to send only a subset of traffic to the Hub. + Default is to use the Hub as the default route for all requests. + + This is necessary for normal jupyterhub operation, + as the Hub must receive requests for e.g. `/user/:name` + when the user's server is not running. 
+ + However, some deployments using only the JupyterHub API + may want to handle these events themselves, + in which case they can register their own default target with the proxy + and set e.g. `hub_routespec = /hub/` to serve only the hub's own pages, or even `/hub/api/` for api-only operation. + + Note: hub_routespec must include the base_url, if any. + + .. versionadded:: 1.4 + """, + ).tag(config=True) + + @default("hub_routespec") + def _default_hub_routespec(self): + # Default routespec for the Hub is the *app* base url + # not the hub URL, so the Hub receives requests for non-running servers + # use `/` with host-based routing so the Hub + # gets requests for all hosts + if self.subdomain_host: + routespec = '/' + else: + routespec = self.base_url + return routespec + + @validate("hub_routespec") + def _validate_hub_routespec(self, proposal): + """ensure leading/trailing / on custom routespec prefix + + - trailing '/' always required + - leading '/' required unless using subdomains + """ + routespec = proposal.value + if not routespec.endswith("/"): + routespec = routespec + "/" + if not self.subdomain_host and not routespec.startswith("/"): + routespec = "/" + routespec + return routespec + + @observe("hub_routespec") + def _hub_routespec_changed(self, change): + if change.new == change.old: + return + routespec = change.new + if routespec not in {'/', self.base_url}: + self.log.warning( + f"Using custom route for Hub: {routespec}." + " Requests for not-running servers may not be handled." + ) + @observe('base_url') def _update_hub_prefix(self, change): """add base URL to hub prefix""" @@ -784,15 +993,30 @@ class JupyterHub(Application): to reduce the cost of checking authentication tokens. """, ).tag(config=True) - cookie_secret = Bytes( + cookie_secret = Union( + [Bytes(), Unicode()], help="""The cookie secret to use to encrypt cookies. Loaded from the JPY_COOKIE_SECRET env variable by default. Should be exactly 256 bits (32 bytes). 
- """ + """, ).tag(config=True, env='JPY_COOKIE_SECRET') + @validate('cookie_secret') + def _validate_secret_key(self, proposal): + """Coerces strings with even number of hexadecimal characters to bytes.""" + r = proposal['value'] + if isinstance(r, str): + try: + return bytes.fromhex(r) + except ValueError: + raise ValueError( + "cookie_secret set as a string must contain an even amount of hexadecimal characters." + ) + else: + return r + @observe('cookie_secret') def _cookie_secret_check(self, change): secret = change.new @@ -809,17 +1033,26 @@ class JupyterHub(Application): api_tokens = Dict( Unicode(), - help="""PENDING DEPRECATION: consider using service_tokens + help="""PENDING DEPRECATION: consider using services Dict of token:username to be loaded into the database. Allows ahead-of-time generation of API tokens for use by externally managed services, which authenticate as JupyterHub users. - Consider using service_tokens for general services that talk to the JupyterHub API. + Consider using services for general services that talk to the JupyterHub API. """, ).tag(config=True) + api_page_default_limit = Integer( + 50, + help="The default amount of records returned by a paginated endpoint", + ).tag(config=True) + + api_page_max_limit = Integer( + 200, help="The maximum amount of records that can be returned at once" + ).tag(config=True) + authenticate_prometheus = Bool( True, help="Authentication for prometheus metrics" ).tag(config=True) @@ -892,23 +1125,71 @@ class JupyterHub(Application): @default('authenticator') def _authenticator_default(self): - return self.authenticator_class(parent=self, db=self.db) + return self.authenticator_class(parent=self, _deprecated_db_session=self.db) + + implicit_spawn_seconds = Float( + 0, + help="""Trigger implicit spawns after this many seconds. + + When a user visits a URL for a server that's not running, + they are shown a page indicating that the requested server + is not running with a button to spawn the server. 
+ + Setting this to a positive value will redirect the user + after this many seconds, effectively clicking this button + automatically for the users, + automatically beginning the spawn process. + + Warning: this can result in errors and surprising behavior + when sharing access URLs to actual servers, + since the wrong server is likely to be started. + """, + ).tag(config=True) allow_named_servers = Bool( False, help="Allow named single-user servers per user" ).tag(config=True) - named_server_limit_per_user = Integer( - 0, + named_server_limit_per_user = Union( + [Integer(), Callable()], + default_value=0, help=""" Maximum number of concurrent named servers that can be created by a user at a time. Setting this can limit the total resources a user can consume. If set to 0, no limit is enforced. + + Can be an integer or a callable/awaitable based on the handler object: + + :: + + def named_server_limit_per_user_fn(handler): + user = handler.current_user + if user and user.admin: + return 0 + return 5 + + c.JupyterHub.named_server_limit_per_user = named_server_limit_per_user_fn """, ).tag(config=True) + default_server_name = Unicode( + "", + help="If named servers are enabled, default name of server to spawn or open, e.g. by user-redirect.", + ).tag(config=True) + # Ensure that default_server_name doesn't do anything if named servers aren't allowed + _default_server_name = Unicode( + help="Non-configurable version exposed to JupyterHub." 
+ ) + + @default('_default_server_name') + def _set_default_server_name(self): + if self.allow_named_servers: + return self.default_server_name + else: + return "" + # class for spawning single-user servers spawner_class = EntryPointType( default_value=LocalProcessSpawner, @@ -982,6 +1263,28 @@ class JupyterHub(Application): """, ).tag(config=True) + init_spawners_timeout = Integer( + 10, + help=""" + Timeout (in seconds) to wait for spawners to initialize + + Checking if spawners are healthy can take a long time + if many spawners are active at hub start time. + + If it takes longer than this timeout to check, + init_spawner will be left to complete in the background + and the http server is allowed to start. + + A timeout of -1 means wait forever, + which can mean a slow startup of the Hub + but ensures that the Hub is fully consistent by the time it starts responding to requests. + This matches the behavior of jupyterhub 1.0. + + .. versionadded: 1.1.0 + + """, + ).tag(config=True) + db_url = Unicode( 'sqlite:///jupyterhub.sqlite', help="url for the database. e.g. `sqlite:///jupyterhub.sqlite`", @@ -1023,11 +1326,14 @@ class JupyterHub(Application): admin_access = Bool( False, - help="""Grant admin users permission to access single-user servers. + help="""DEPRECATED since version 2.0.0. - Users should be properly informed if this is enabled. + The default admin role has full permissions, use custom RBAC scopes instead to + create restricted administrator roles. 
+ https://jupyterhub.readthedocs.io/en/stable/rbac/index.html """, ).tag(config=True) + admin_users = Set( help="""DEPRECATED since version 0.7.2, use Authenticator.admin_users instead.""" ).tag(config=True) @@ -1177,14 +1483,16 @@ class JupyterHub(Application): max(self.log_level, logging.INFO) ) - # hook up tornado 3's loggers to our app handlers for log in (app_log, access_log, gen_log): # ensure all log statements identify the application they come from log.name = self.log.name - logger = logging.getLogger('tornado') - logger.propagate = True - logger.parent = self.log - logger.setLevel(self.log.level) + + # hook up tornado's and oauthlib's loggers to our own + for name in ("tornado", "oauthlib"): + logger = logging.getLogger(name) + logger.propagate = True + logger.parent = self.log + logger.setLevel(self.log.level) @staticmethod def add_url_prefix(prefix, handlers): @@ -1205,14 +1513,63 @@ class JupyterHub(Application): """ ).tag(config=True) - default_url = Unicode( + default_url = Union( + [Unicode(), Callable()], help=""" The default URL for users when they arrive (e.g. when user directs to "/") By default, redirects users to their own server. - """ + + Can be a Unicode string (e.g. '/hub/home') or a callable based on the handler object: + + :: + + def default_url_fn(handler): + user = handler.current_user + if user and user.admin: + return '/hub/admin' + return '/hub/home' + + c.JupyterHub.default_url = default_url_fn + """, ).tag(config=True) + user_redirect_hook = Callable( + None, + allow_none=True, + help=""" + Callable to affect behavior of /user-redirect/ + + Receives 4 parameters: + 1. path - URL path that was provided after /user-redirect/ + 2. request - A Tornado HTTPServerRequest representing the current request. + 3. user - The currently authenticated user. + 4. base_url - The base_url of the current hub, for relative redirects + + It should return the new URL to redirect to, or None to preserve + current behavior. 
+ """, + ).tag(config=True) + + use_legacy_stopped_server_status_code = Bool( + False, + help=""" + Return 503 rather than 424 when request comes in for a non-running server. + + Prior to JupyterHub 2.0, we returned a 503 when any request came in for + a user server that was currently not running. By default, JupyterHub 2.0 + will return a 424 - this makes operational metric dashboards more useful. + + JupyterLab < 3.2 expected the 503 to know if the user server is no longer + running, and prompted the user to start their server. Set this config to + true to retain the old behavior, so JupyterLab < 3.2 can continue to show + the appropriate UI when the user server is stopped. + + This option will be removed in a future release. + """, + config=True, + ) + def init_handlers(self): h = [] # load handlers from the authenticator @@ -1309,7 +1666,7 @@ class JupyterHub(Application): if not secret: secret_from = 'new' self.log.debug("Generating new %s", trait_name) - secret = os.urandom(COOKIE_SECRET_BYTES) + secret = secrets.token_bytes(COOKIE_SECRET_BYTES) if secret_file and secret_from == 'new': # if we generated a new secret, store it in the secret_file @@ -1354,7 +1711,9 @@ class JupyterHub(Application): for authority, files in self.internal_ssl_authorities.items(): if files: self.log.info("Adding CA for %s", authority) - certipy.store.add_record(authority, is_ca=True, files=files) + certipy.store.add_record( + authority, is_ca=True, files=files, overwrite=True + ) self.internal_trust_bundles = certipy.trust_from_graph( self.internal_ssl_components_trust @@ -1462,10 +1821,31 @@ class JupyterHub(Application): except orm.DatabaseSchemaMismatch as e: self.exit(e) + # ensure the default oauth client exists + if ( + not self.db.query(orm.OAuthClient) + .filter_by(identifier="jupyterhub") + .one_or_none() + ): + # create the oauth client for jupyterhub itself + # this allows us to distinguish between orphaned tokens + # (failed cascade deletion) and tokens issued by the 
hub + # it has no client_secret, which means it cannot be used + # to make requests + client = orm.OAuthClient( + identifier="jupyterhub", + secret="", + redirect_uri="", + description="JupyterHub", + ) + self.db.add(client) + self.db.commit() + def init_hub(self): """Load the Hub URL config""" hub_args = dict( base_url=self.hub_prefix, + routespec=self.hub_routespec, public_host=self.subdomain_host, certfile=self.internal_ssl_cert, keyfile=self.internal_ssl_key, @@ -1481,17 +1861,15 @@ class JupyterHub(Application): hub_args['ip'] = self.hub_ip hub_args['port'] = self.hub_port - # routespec for the Hub is the *app* base url - # not the hub URL, so it receives requests for non-running servers - # use `/` with host-based routing so the Hub - # gets requests for all hosts - host = '' - if self.subdomain_host: - routespec = '/' - else: - routespec = self.base_url + self.hub = Hub(**hub_args) - self.hub = Hub(routespec=routespec, **hub_args) + if not self.subdomain_host: + api_prefix = url_path_join(self.hub.base_url, "api/") + if not api_prefix.startswith(self.hub.routespec): + self.log.warning( + f"Hub API prefix {api_prefix} not on prefix {self.hub.routespec}. " + "The Hub may not receive any API requests from outside." + ) if self.hub_connect_ip: self.hub.connect_ip = self.hub_connect_ip @@ -1542,12 +1920,6 @@ class JupyterHub(Application): if not self.authenticator.validate_username(username): raise ValueError("username %r is not valid" % username) - if not admin_users: - self.log.warning("No admin users, admin interface will be unavailable.") - self.log.warning( - "Add any administrative users to `c.Authenticator.admin_users` in config." 
- ) - new_users = [] for name in admin_users: @@ -1555,30 +1927,30 @@ class JupyterHub(Application): user = orm.User.find(db, name) if user is None: user = orm.User(name=name, admin=True) + roles.assign_default_roles(self.db, entity=user) new_users.append(user) db.add(user) else: user.admin = True - # the admin_users config variable will never be used after this point. # only the database values will be referenced. - whitelist = [ + allowed_users = [ self.authenticator.normalize_username(name) - for name in self.authenticator.whitelist + for name in self.authenticator.allowed_users ] - self.authenticator.whitelist = set(whitelist) # force normalization - for username in whitelist: + self.authenticator.allowed_users = set(allowed_users) # force normalization + for username in allowed_users: if not self.authenticator.validate_username(username): raise ValueError("username %r is not valid" % username) - if not whitelist: + if not allowed_users: self.log.info( - "Not using whitelist. Any authenticated user will be allowed." + "Not using allowed_users. Any authenticated user will be allowed." ) - # add whitelisted users to the db - for name in whitelist: + # add allowed users to the db + for name in allowed_users: user = orm.User.find(db, name) if user is None: user = orm.User(name=name) @@ -1588,13 +1960,16 @@ class JupyterHub(Application): db.commit() # Notify authenticator of all users. - # This ensures Auth whitelist is up-to-date with the database. - # This lets whitelist be used to set up initial list, - # but changes to the whitelist can occur in the database, + # This ensures Authenticator.allowed_users is up-to-date with the database. + # This lets .allowed_users be used to set up initial list, + # but changes to the allowed_users set can occur in the database, # and persist across sessions. 
+ total_users = 0 for user in db.query(orm.User): try: - await maybe_future(self.authenticator.add_user(user)) + f = self.authenticator.add_user(user) + if f: + await maybe_future(f) except Exception: self.log.exception("Error adding user %s already in db", user.name) if self.authenticator.delete_invalid_users: @@ -1616,6 +1991,7 @@ class JupyterHub(Application): ) ) else: + total_users += 1 # handle database upgrades where user.created is undefined. # we don't want to allow user.created to be undefined, # so initialize it to last_activity (if defined) or now. @@ -1623,35 +1999,224 @@ class JupyterHub(Application): user.created = user.last_activity or datetime.utcnow() db.commit() - # The whitelist set and the users in the db are now the same. + # The allowed_users set and the users in the db are now the same. # From this point on, any user changes should be done simultaneously - # to the whitelist set and user db, unless the whitelist is empty (all users allowed). + # to the allowed_users set and user db, unless the allowed set is empty (all users allowed). 
+ + TOTAL_USERS.set(total_users) + + async def _get_or_create_user(self, username): + """Create user if username is found in config but user does not exist""" + if not (await maybe_future(self.authenticator.check_allowed(username, None))): + raise ValueError( + "Username %r is not in Authenticator.allowed_users" % username + ) + user = orm.User.find(self.db, name=username) + if user is None: + if not self.authenticator.validate_username(username): + raise ValueError("Username %r is not valid" % username) + self.log.info(f"Creating user {username}") + user = orm.User(name=username) + self.db.add(user) + roles.assign_default_roles(self.db, entity=user) + self.db.commit() + return user async def init_groups(self): """Load predefined groups into the database""" db = self.db + + if self.authenticator.manage_groups and self.load_groups: + raise ValueError("Group management has been offloaded to the authenticator") for name, usernames in self.load_groups.items(): group = orm.Group.find(db, name) if group is None: + self.log.info(f"Creating group {name}") group = orm.Group(name=name) db.add(group) for username in usernames: username = self.authenticator.normalize_username(username) - if not ( - await maybe_future( - self.authenticator.check_whitelist(username, None) - ) - ): - raise ValueError("Username %r is not in whitelist" % username) - user = orm.User.find(db, name=username) - if user is None: - if not self.authenticator.validate_username(username): - raise ValueError("Group username %r is not valid" % username) - user = orm.User(name=username) - db.add(user) + user = await self._get_or_create_user(username) + self.log.debug(f"Adding user {username} to group {name}") group.users.append(user) db.commit() + async def init_role_creation(self): + """Load default and user-defined roles and scopes into the database""" + if self.custom_scopes: + self.log.info(f"Defining {len(self.custom_scopes)} custom scopes.") + scopes.define_custom_scopes(self.custom_scopes) + 
self.log.debug('Loading roles into database') + default_roles = roles.get_default_roles() + config_role_names = [r['name'] for r in self.load_roles] + + default_roles_dict = {role["name"]: role for role in default_roles} + init_roles = [] + roles_with_new_permissions = [] + for role_spec in self.load_roles: + role_name = role_spec['name'] + if role_name in default_roles_dict: + self.log.debug(f"Overriding default role {role_name}") + # merge custom role spec with default role spec when overriding + # so the new role can be partially defined + default_role_spec = default_roles_dict.pop(role_name) + merged_role_spec = {} + merged_role_spec.update(default_role_spec) + merged_role_spec.update(role_spec) + role_spec = merged_role_spec + + # Check for duplicates + if config_role_names.count(role_name) > 1: + raise ValueError( + f"Role {role_name} multiply defined. Please check the `load_roles` configuration" + ) + init_roles.append(role_spec) + # Check if some roles have obtained new permissions (to avoid 'scope creep') + old_role = orm.Role.find(self.db, name=role_name) + if old_role: + if not set(role_spec.get('scopes', [])).issubset(old_role.scopes): + self.log.warning( + "Role %s has obtained extra permissions" % role_name + ) + roles_with_new_permissions.append(role_name) + + # make sure we load any default roles not overridden + init_roles = list(default_roles_dict.values()) + init_roles + + init_role_names = [r['name'] for r in init_roles] + if ( + self.db.query(orm.Role).first() is None + and self.db.query(orm.User).first() is not None + ): + # apply rbac-upgrade default role assignment if there are users in the db, + # but not any roles + self._rbac_upgrade = True + else: + self._rbac_upgrade = False + for role in self.db.query(orm.Role).filter( + orm.Role.name.notin_(init_role_names) + ): + self.log.warning(f"Deleting role {role.name}") + self.db.delete(role) + self.db.commit() + for role in init_roles: + roles.create_role(self.db, role) + + async def 
init_role_assignment(self): + # tokens are added separately + kinds = ['users', 'services', 'groups'] + admin_role_objects = ['users', 'services'] + config_admin_users = set(self.authenticator.admin_users) + db = self.db + # load predefined roles from config file + if config_admin_users: + for role_spec in self.load_roles: + if role_spec['name'] == 'admin': + self.log.warning( + "Configuration specifies both admin_users and users in the admin role specification. " + "If admin role is present in config, c.Authenticator.admin_users should not be used." + ) + self.log.info( + "Merging admin_users set with users list in admin role" + ) + role_spec['users'] = set(role_spec.get('users', [])) + role_spec['users'] |= config_admin_users + self.log.debug('Loading role assignments from config') + has_admin_role_spec = {role_bearer: False for role_bearer in admin_role_objects} + for role_spec in self.load_roles: + role = orm.Role.find(db, name=role_spec['name']) + role_name = role_spec["name"] + if role_name == 'admin': + for kind in admin_role_objects: + has_admin_role_spec[kind] = kind in role_spec + if has_admin_role_spec[kind]: + self.log.info(f"Admin role specifies static {kind} list") + else: + self.log.info( + f"Admin role does not specify {kind}, preserving admin membership in database" + ) + # add users, services, and/or groups, + # tokens need to be checked for permissions + for kind in kinds: + orm_role_bearers = [] + if kind in role_spec: + for name in role_spec[kind]: + if kind == 'users': + name = self.authenticator.normalize_username(name) + if not ( + await maybe_future( + self.authenticator.check_allowed(name, None) + ) + ): + raise ValueError( + f"Username {name} is not in Authenticator.allowed_users" + ) + Class = orm.get_class(kind) + orm_obj = Class.find(db, name) + if orm_obj is not None: + orm_role_bearers.append(orm_obj) + else: + self.log.info( + f"Found unexisting {kind} {name} in role definition {role_name}" + ) + if kind == 'users': + orm_obj = 
await self._get_or_create_user(name) + orm_role_bearers.append(orm_obj) + elif kind == 'groups': + group = orm.Group(name=name) + db.add(group) + db.commit() + orm_role_bearers.append(group) + else: + raise ValueError( + f"{kind} {name} defined in config role definition {role_name} but not present in database" + ) + # Ensure all with admin role have admin flag + if role_name == 'admin': + orm_obj.admin = True + # explicitly defined list + # ensure membership list is exact match (adds and revokes permissions) + setattr(role, kind, orm_role_bearers) + else: + # no defined members + # leaving 'users' undefined in overrides of the default 'user' role + # should not clear membership on startup + # since allowed users could be managed by the authenticator + if kind == "users" and role_name == "user": + # Default user lists can be managed by the Authenticator, + # if unspecified in role config + pass + else: + # otherwise, omitting a member category is equivalent to specifying an empty list + setattr(role, kind, []) + + db.commit() + if self.authenticator.allowed_users: + self.log.debug( + f"Assigning {len(self.authenticator.allowed_users)} allowed_users to the user role" + ) + allowed_users = db.query(orm.User).filter( + orm.User.name.in_(self.authenticator.allowed_users) + ) + for user in allowed_users: + roles.grant_role(db, user, 'user') + admin_role = orm.Role.find(db, 'admin') + for kind in admin_role_objects: + Class = orm.get_class(kind) + for admin_obj in db.query(Class).filter_by(admin=True): + if has_admin_role_spec[kind]: + admin_obj.admin = admin_role in admin_obj.roles + else: + roles.grant_role(db, admin_obj, 'admin') + db.commit() + # make sure that on hub upgrade, all users, services and tokens have at least one role (update with default) + if getattr(self, '_rbac_upgrade', False): + self.log.warning( + "No roles found; assuming hub upgrade. 
Initializing default roles for all entities" + ) + for kind in kinds: + roles.check_for_default_roles(db, kind) + async def _add_tokens(self, token_dict, kind): """Add tokens for users or services to the database""" if kind == 'user': @@ -1666,11 +2231,20 @@ class JupyterHub(Application): if kind == 'user': name = self.authenticator.normalize_username(name) if not ( - await maybe_future(self.authenticator.check_whitelist(name, None)) + await maybe_future(self.authenticator.check_allowed(name, None)) ): - raise ValueError("Token name %r is not in whitelist" % name) + raise ValueError( + "Token user name %r is not in Authenticator.allowed_users" + % name + ) if not self.authenticator.validate_username(name): - raise ValueError("Token name %r is not valid" % name) + raise ValueError("Token user name %r is not valid" % name) + if kind == 'service': + if not any(service["name"] == name for service in self.services): + self.log.warning( + "Warning: service '%s' not in services, creating implicitly. It is recommended to register services using services list." 
+ % name + ) orm_token = orm.APIToken.find(db, token) if orm_token is None: obj = Class.find(db, name) @@ -1695,7 +2269,7 @@ class JupyterHub(Application): # don't allow bad tokens to create users db.delete(obj) db.commit() - raise + raise else: self.log.debug("Not duplicating token %s", orm_token) db.commit() @@ -1703,17 +2277,30 @@ class JupyterHub(Application): # purge expired tokens hourly purge_expired_tokens_interval = 3600 + @catch_db_error + def purge_expired_tokens(self): + """purge all expiring token objects from the database + + run periodically + """ + # this should be all the subclasses of Expiring + for cls in (orm.APIToken, orm.OAuthCode): + self.log.debug(f"Purging expired {cls.__name__}s") + cls.purge_expired(self.db) + async def init_api_tokens(self): """Load predefined API tokens (for services) into database""" + await self._add_tokens(self.service_tokens, kind='service') await self._add_tokens(self.api_tokens, kind='user') - purge_expired_tokens = partial(orm.APIToken.purge_expired, self.db) - purge_expired_tokens() + + await self.purge_expired_tokens() # purge expired tokens hourly # we don't need to be prompt about this # because expired tokens cannot be used anyway + pc = PeriodicCallback( - purge_expired_tokens, 1e3 * self.purge_expired_tokens_interval + self.purge_expired_tokens, 1e3 * self.purge_expired_tokens_interval ) pc.start() @@ -1722,7 +2309,7 @@ class JupyterHub(Application): if self.domain: domain = 'services.' + self.domain parsed = urlparse(self.subdomain_host) - host = '%s://services.%s' % (parsed.scheme, parsed.netloc) + host = f'{parsed.scheme}://services.{parsed.netloc}' else: domain = host = '' @@ -1735,6 +2322,14 @@ class JupyterHub(Application): if orm_service is None: # not found, create a new one orm_service = orm.Service(name=name) + if spec.get('admin', False): + self.log.warning( + f"Service {name} sets `admin: True`, which is deprecated in JupyterHub 2.0." 
+ " You can assign now assign roles via `JupyterHub.load_roles` configuration." + " If you specify services in the admin role configuration, " + "the Service admin flag will be ignored." + ) + roles.update_roles(self.db, entity=orm_service, roles=['admin']) self.db.add(orm_service) orm_service.admin = spec.get('admin', False) self.db.commit() @@ -1744,6 +2339,7 @@ class JupyterHub(Application): base_url=self.base_url, db=self.db, orm=orm_service, + roles=orm_service.roles, domain=domain, host=host, hub=self.hub, @@ -1755,18 +2351,14 @@ class JupyterHub(Application): raise AttributeError("No such service field: %s" % key) setattr(service, key, value) - if service.managed: - if not service.api_token: - # generate new token - # TODO: revoke old tokens? - service.api_token = service.orm.new_api_token( - note="generated at startup" - ) - else: - # ensure provided token is registered - self.service_tokens[service.api_token] = service.name - else: + if service.api_token: self.service_tokens[service.api_token] = service.name + elif service.managed: + # generate new token + # TODO: revoke old tokens? + service.api_token = service.orm.new_api_token( + note="generated at startup" + ) if service.url: parsed = urlparse(service.url) @@ -1789,12 +2381,37 @@ class JupyterHub(Application): service.orm.server = None if service.oauth_available: - self.oauth_provider.add_client( + allowed_scopes = set() + if service.oauth_client_allowed_scopes: + allowed_scopes.update(service.oauth_client_allowed_scopes) + if service.oauth_roles: + if not allowed_scopes: + # DEPRECATED? It's still convenient and valid, + # e.g. 'admin' + allowed_roles = list( + self.db.query(orm.Role).filter( + orm.Role.name.in_(service.oauth_roles) + ) + ) + allowed_scopes.update(roles.roles_to_scopes(allowed_roles)) + else: + self.log.warning( + f"Ignoring oauth_roles for {service.name}: {service.oauth_roles}," + f" using oauth_client_allowed_scopes={allowed_scopes}." 
+ ) + oauth_client = self.oauth_provider.add_client( client_id=service.oauth_client_id, client_secret=service.api_token, redirect_uri=service.oauth_redirect_uri, description="JupyterHub service %s" % service.name, ) + service.orm.oauth_client = oauth_client + # add access-scopes, derived from OAuthClient itself + allowed_scopes.update(scopes.access_scopes(oauth_client)) + oauth_client.allowed_scopes = sorted(allowed_scopes) + else: + if service.oauth_client: + self.db.delete(service.oauth_client) self._service_map[name] = service @@ -1810,8 +2427,8 @@ class JupyterHub(Application): if not service.url: continue try: - await Server.from_orm(service.orm.server).wait_up(timeout=1) - except TimeoutError: + await Server.from_orm(service.orm.server).wait_up(timeout=1, http=True) + except AnyTimeoutError: self.log.warning( "Cannot connect to %s service %s at %s", service.kind, @@ -1827,18 +2444,17 @@ class JupyterHub(Application): ) async def init_spawners(self): + self.log.debug("Initializing spawners") db = self.db def _user_summary(user): """user is an orm.User, not a full user""" - parts = ['{0: >8}'.format(user.name)] + parts = [f'{user.name: >8}'] if user.admin: parts.append('admin') for name, spawner in sorted(user.orm_spawners.items(), key=itemgetter(0)): if spawner.server: - parts.append( - '%s:%s running at %s' % (user.name, name, spawner.server) - ) + parts.append(f'{user.name}:{name} running at {spawner.server}') return ' '.join(parts) async def user_stopped(user, server_name): @@ -1890,7 +2506,7 @@ class JupyterHub(Application): ) try: await user._wait_up(spawner) - except TimeoutError: + except AnyTimeoutError: self.log.error( "%s does not appear to be running at %s, shutting it down.", spawner._log_name, @@ -1917,21 +2533,43 @@ class JupyterHub(Application): else: self.log.debug("%s not running", spawner._log_name) + spawner._check_pending = False + # parallelize checks for running Spawners + # run query on extant Server objects + # so this is O(running 
servers) not O(total users) + # Server objects can be associated with either a Spawner or a Service, + # we are only interested in the ones associated with a Spawner check_futures = [] - for orm_user in db.query(orm.User): - user = self.users[orm_user] - self.log.debug("Loading state for %s from db", user.name) - for name, orm_spawner in user.orm_spawners.items(): - if orm_spawner.server is not None: - # spawner should be running - # instantiate Spawner wrapper and check if it's still alive - spawner = user.spawners[name] - f = asyncio.ensure_future(check_spawner(user, name, spawner)) - check_futures.append(f) + for orm_server in db.query(orm.Server): + orm_spawner = orm_server.spawner + if not orm_spawner: + # check for orphaned Server rows + # this shouldn't happen if we've got our sqlalchemy right + if not orm_server.service: + self.log.warning("deleting orphaned server %s", orm_server) + self.db.delete(orm_server) + self.db.commit() + continue + # instantiate Spawner wrapper and check if it's still alive + # spawner should be running + user = self.users[orm_spawner.user] + spawner = user.spawners[orm_spawner.name] + self.log.debug("Loading state for %s from db", spawner._log_name) + # signal that check is pending to avoid race conditions + spawner._check_pending = True + f = asyncio.ensure_future(check_spawner(user, spawner.name, spawner)) + check_futures.append(f) + + # it's important that we get here before the first await + # so that we know all spawners are instantiated and in the check-pending state # await checks after submitting them all - await gen.multi(check_futures) + if check_futures: + self.log.debug( + "Awaiting checks for %i possibly-running spawners", len(check_futures) + ) + await asyncio.gather(*check_futures) db.commit() # only perform this query if we are going to log it @@ -1941,7 +2579,7 @@ class JupyterHub(Application): active_counts = self.users.count_active_users() RUNNING_SERVERS.set(active_counts['active']) - 
TOTAL_USERS.set(len(self.users)) + return len(check_futures) def init_oauth(self): base_url = self.hub.base_url @@ -1949,6 +2587,7 @@ class JupyterHub(Application): lambda: self.db, url_prefix=url_path_join(base_url, 'api/oauth2'), login_url=url_path_join(base_url, 'login'), + token_expires_in=self.oauth_token_expires_in, ) def cleanup_oauth_clients(self): @@ -1956,17 +2595,13 @@ class JupyterHub(Application): This should mainly be services that have been removed from configuration or renamed. """ - oauth_client_ids = set() + oauth_client_ids = {"jupyterhub"} for service in self._service_map.values(): if service.oauth_available: oauth_client_ids.add(service.oauth_client_id) for user in self.users.values(): for spawner in user.spawners.values(): oauth_client_ids.add(spawner.oauth_client_id) - # avoid deleting clients created by 0.8 - # 0.9 uses `jupyterhub-user-...` for the client id, while - # 0.8 uses just `user-...` - oauth_client_ids.add(spawner.oauth_client_id.split('-', 1)[1]) for i, oauth_client in enumerate(self.db.query(orm.OAuthClient)): if oauth_client.identifier not in oauth_client_ids: @@ -1998,7 +2633,7 @@ class JupyterHub(Application): def init_tornado_settings(self): """Set up the tornado settings dict.""" base_url = self.hub.base_url - jinja_options = dict(autoescape=True) + jinja_options = dict(autoescape=True, enable_async=True) jinja_options.update(self.jinja_environment_options) base_path = self._template_paths_default()[0] if base_path not in self.template_paths: @@ -2010,6 +2645,14 @@ class JupyterHub(Application): ] ) jinja_env = Environment(loader=loader, **jinja_options) + # We need a sync jinja environment too, for the times we *must* use sync + # code - particularly in RequestHandler.write_error. Since *that* + # is called from inside the asyncio event loop, we can't actually just + # schedule it on the loop - without starting another thread with its + # own loop, which seems not worth the trouble. 
Instead, we create another + # environment, exactly like this one, but sync + del jinja_options['enable_async'] + jinja_env_sync = Environment(loader=loader, **jinja_options) login_url = url_path_join(base_url, 'login') logout_url = self.authenticator.logout_url(base_url) @@ -2022,6 +2665,15 @@ class JupyterHub(Application): else: version_hash = datetime.now().strftime("%Y%m%d%H%M%S") + oauth_no_confirm_list = set() + for service in self._service_map.values(): + if service.oauth_no_confirm: + self.log.warning( + "Allowing service %s to complete OAuth without confirmation on an authorization web page", + service.name, + ) + oauth_no_confirm_list.add(service.oauth_client_id) + settings = dict( log_function=log_request, config=self.config, @@ -2032,6 +2684,8 @@ class JupyterHub(Application): activity_resolution=self.activity_resolution, admin_users=self.authenticator.admin_users, admin_access=self.admin_access, + api_page_default_limit=self.api_page_default_limit, + api_page_max_limit=self.api_page_max_limit, authenticator=self.authenticator, spawner_class=self.spawner_class, base_url=self.base_url, @@ -2047,13 +2701,17 @@ class JupyterHub(Application): template_path=self.template_paths, template_vars=self.template_vars, jinja2_env=jinja_env, + jinja2_env_sync=jinja_env_sync, version_hash=version_hash, subdomain_host=self.subdomain_host, domain=self.domain, statsd=self.statsd, + implicit_spawn_seconds=self.implicit_spawn_seconds, allow_named_servers=self.allow_named_servers, + default_server_name=self._default_server_name, named_server_limit_per_user=self.named_server_limit_per_user, oauth_provider=self.oauth_provider, + oauth_no_confirm_list=oauth_no_confirm_list, concurrent_spawn_limit=self.concurrent_spawn_limit, spawn_throttle_retry_range=self.spawn_throttle_retry_range, active_server_limit=self.active_server_limit, @@ -2067,6 +2725,8 @@ class JupyterHub(Application): internal_ssl_ca=self.internal_ssl_ca, trusted_alt_names=self.trusted_alt_names, 
shutdown_on_logout=self.shutdown_on_logout, + eventlog=self.eventlog, + app=self, ) # allow configured settings to have priority settings.update(self.tornado_settings) @@ -2092,6 +2752,16 @@ class JupyterHub(Application): e, ) + def init_eventlog(self): + """Set up the event logging system.""" + self.eventlog = EventLog(parent=self) + + for dirname, _, files in os.walk(os.path.join(here, 'event-schemas')): + for file in files: + if not file.endswith('.yaml'): + continue + self.eventlog.register_schema_file(os.path.join(dirname, file)) + def write_pid_file(self): pid = os.getpid() if self.pid_file: @@ -2101,16 +2771,27 @@ class JupyterHub(Application): @catch_config_error async def initialize(self, *args, **kwargs): + hub_startup_start_time = time.perf_counter() super().initialize(*args, **kwargs) if self.generate_config or self.generate_certs or self.subapp: return + self._start_future = asyncio.Future() + + def record_start(f): + startup_time = time.perf_counter() - hub_startup_start_time + self.log.debug("It took %.3f seconds for the Hub to start", startup_time) + HUB_STARTUP_DURATION_SECONDS.observe(startup_time) + + self._start_future.add_done_callback(record_start) + self.load_config_file(self.config_file) self.init_logging() + self.log.info("Running JupyterHub version %s", jupyterhub.__version__) if 'JupyterHubApp' in self.config: self.log.warning( "Use JupyterHub in config, not JupyterHubApp. 
Outdated config:\n%s", '\n'.join( - 'JupyterHubApp.{key} = {value!r}'.format(key=key, value=value) + f'JupyterHubApp.{key} = {value!r}' for key, value in self.config.JupyterHubApp.items() ), ) @@ -2132,7 +2813,7 @@ class JupyterHub(Application): mod = sys.modules.get(cls.__module__.split('.')[0]) version = getattr(mod, '__version__', '') if version: - version = '-{}'.format(version) + version = f'-{version}' else: version = '' self.log.info( @@ -2141,7 +2822,9 @@ class JupyterHub(Application): _log_cls("Authenticator", self.authenticator_class) _log_cls("Spawner", self.spawner_class) + _log_cls("Proxy", self.proxy_class) + self.init_eventlog() self.init_pycurl() self.init_secrets() self.init_internal_ssl() @@ -2149,16 +2832,69 @@ class JupyterHub(Application): self.init_hub() self.init_proxy() self.init_oauth() + await self.init_role_creation() await self.init_users() await self.init_groups() self.init_services() await self.init_api_tokens() + await self.init_role_assignment() self.init_tornado_settings() - await self.init_spawners() - self.cleanup_oauth_clients() self.init_handlers() self.init_tornado_application() + # init_spawners can take a while + init_spawners_timeout = self.init_spawners_timeout + if init_spawners_timeout < 0: + # negative timeout means forever (previous, most stable behavior) + init_spawners_timeout = 86400 + + init_start_time = time.perf_counter() + init_spawners_future = asyncio.ensure_future(self.init_spawners()) + + def log_init_time(f): + n_spawners = f.result() + spawner_initialization_time = time.perf_counter() - init_start_time + INIT_SPAWNERS_DURATION_SECONDS.observe(spawner_initialization_time) + self.log.info( + "Initialized %i spawners in %.3f seconds", + n_spawners, + spawner_initialization_time, + ) + + init_spawners_future.add_done_callback(log_init_time) + + try: + + # don't allow a zero timeout because we still need to be sure + # that the Spawner objects are defined and pending + await gen.with_timeout( + 
timedelta(seconds=max(init_spawners_timeout, 1)), init_spawners_future + ) + except AnyTimeoutError: + self.log.warning( + "init_spawners did not complete within %i seconds. " + "Allowing to complete in the background.", + self.init_spawners_timeout, + ) + + if init_spawners_future.done(): + self.cleanup_oauth_clients() + else: + # schedule async operations after init_spawners finishes + async def finish_init_spawners(): + await init_spawners_future + # schedule cleanup after spawners are all set up + # because it relies on the state resolved by init_spawners + self.cleanup_oauth_clients() + # trigger a proxy check as soon as all spawners are ready + # because this may be *after* the check made as part of normal startup. + # To avoid races with partially-complete start, + # ensure that start is complete before running this check. + await self._start_future + await self.proxy.check_routes(self.users, self._service_map) + + asyncio.ensure_future(finish_init_spawners()) + async def cleanup(self): """Shutdown managed services and various subprocesses. Cleanup runtime files.""" @@ -2239,6 +2975,7 @@ class JupyterHub(Application): with open(self.config_file, mode='w') as f: f.write(config_text) + @catch_db_error async def update_last_activity(self): """Update User.last_activity timestamps from the proxy""" routes = await self.proxy.get_all_routes() @@ -2266,7 +3003,7 @@ class JupyterHub(Application): continue dt = parse_date(route_data['last_activity']) if dt.tzinfo: - # strip timezone info to naïve UTC datetime + # strip timezone info to naive UTC datetime dt = dt.astimezone(timezone.utc).replace(tzinfo=None) if user.last_activity: @@ -2308,7 +3045,7 @@ class JupyterHub(Application): if self.generate_certs: self.load_config_file(self.config_file) if not self.internal_ssl: - self.log.warn( + self.log.warning( "You'll need to enable `internal_ssl` " "in the `jupyterhub_config` file to use " "these certs." 
@@ -2323,11 +3060,25 @@ class JupyterHub(Application): loop.stop() return + # start the proxy + if self.proxy.should_start: + try: + await self.proxy.start() + except Exception as e: + self.log.critical("Failed to start proxy", exc_info=True) + self.exit(1) + else: + self.log.info("Not starting proxy") + + # verify that we can talk to the proxy before listening. + # avoids delayed failure if we can't talk to the proxy + await self.proxy.get_all_routes() + ssl_context = make_ssl_context( self.internal_ssl_key, self.internal_ssl_cert, cafile=self.internal_ssl_ca, - check_hostname=False, + purpose=ssl.Purpose.CLIENT_AUTH, ) # start the webserver @@ -2360,27 +3111,13 @@ class JupyterHub(Application): self.log.error("Failed to bind hub to %s", self.hub.bind_url) raise - # start the proxy - if self.proxy.should_start: - try: - await self.proxy.start() - except Exception as e: - self.log.critical("Failed to start proxy", exc_info=True) - self.exit(1) - else: - self.log.info("Not starting proxy") - # start the service(s) for service_name, service in self._service_map.items(): - msg = ( - '%s at %s' % (service_name, service.url) - if service.url - else service_name - ) + msg = f'{service_name} at {service.url}' if service.url else service_name if service.managed: self.log.info("Starting managed service %s", msg) try: - service.start() + await service.start() except Exception as e: self.log.critical( "Failed to start service %s", service_name, exc_info=True @@ -2393,15 +3130,10 @@ class JupyterHub(Application): tries = 10 if service.managed else 1 for i in range(tries): try: - ssl_context = make_ssl_context( - self.internal_ssl_key, - self.internal_ssl_cert, - cafile=self.internal_ssl_ca, - ) await Server.from_orm(service.orm.server).wait_up( http=True, timeout=1, ssl_context=ssl_context ) - except TimeoutError: + except AnyTimeoutError: if service.managed: status = await service.spawner.poll() if status is not None: @@ -2438,12 +3170,18 @@ class JupyterHub(Application): 
self.last_activity_callback = pc pc.start() - self.log.info("JupyterHub is now running at %s", self.proxy.public_url) + if self.proxy.should_start: + self.log.info("JupyterHub is now running at %s", self.proxy.public_url) + else: + self.log.info( + "JupyterHub is now running, internal Hub API at %s", self.hub.url + ) # Use atexit for Windows, it doesn't have signal handling support if _mswindows: atexit.register(self.atexit) # register cleanup on both TERM and INT self.init_signal() + self._start_future.set_result(None) def init_signal(self): loop = asyncio.get_event_loop() @@ -2474,6 +3212,40 @@ class JupyterHub(Application): self.log.critical("Received signalnum %s, , initiating shutdown...", signum) raise SystemExit(128 + signum) + def _init_asyncio_patch(self): + """Set default asyncio policy to be compatible with Tornado. + + Tornado 6 (at least) is not compatible with the default + asyncio implementation on Windows. + + Pick the older SelectorEventLoopPolicy on Windows + if the known-incompatible default policy is in use. + + Do this as early as possible to make it a low priority and overrideable. + + ref: https://github.com/tornadoweb/tornado/issues/2608 + + FIXME: If/when tornado supports the defaults in asyncio, + remove and bump tornado requirement for py38. + """ + if sys.platform.startswith("win") and sys.version_info >= (3, 8): + try: + from asyncio import ( + WindowsProactorEventLoopPolicy, + WindowsSelectorEventLoopPolicy, + ) + except ImportError: + pass + # not affected + else: + if ( + type(asyncio.get_event_loop_policy()) + is WindowsProactorEventLoopPolicy + ): + # WindowsProactorEventLoopPolicy is not compatible with Tornado 6. + # Fallback to the pre-3.8 default of WindowsSelectorEventLoopPolicy. 
+ asyncio.set_event_loop_policy(WindowsSelectorEventLoopPolicy()) + _atexit_ran = False def atexit(self): @@ -2481,19 +3253,20 @@ class JupyterHub(Application): if self._atexit_ran: return self._atexit_ran = True + self._init_asyncio_patch() # run the cleanup step (in a new loop, because the interrupted one is unclean) - asyncio.set_event_loop(asyncio.new_event_loop()) - IOLoop.clear_current() - loop = IOLoop() - loop.make_current() - loop.run_sync(self.cleanup) + asyncio.run(self.cleanup()) - async def shutdown_cancel_tasks(self, sig): + async def shutdown_cancel_tasks(self, sig=None): """Cancel all other tasks of the event loop and initiate cleanup""" - self.log.critical("Received signal %s, initiating shutdown...", sig.name) - tasks = [ - t for t in asyncio.Task.all_tasks() if t is not asyncio.Task.current_task() - ] + if sig is None: + self.log.critical("Initiating shutdown...") + else: + self.log.critical("Received signal %s, initiating shutdown...", sig.name) + + await self.cleanup() + + tasks = [t for t in asyncio.all_tasks() if t is not asyncio.current_task()] if tasks: self.log.debug("Cancelling pending tasks") @@ -2506,10 +3279,9 @@ class JupyterHub(Application): except StopAsyncIteration as e: self.log.error("Caught StopAsyncIteration Exception", exc_info=True) - tasks = [t for t in asyncio.Task.all_tasks()] + tasks = [t for t in asyncio.all_tasks()] for t in tasks: self.log.debug("Task status: %s", t) - await self.cleanup() asyncio.get_event_loop().stop() def stop(self): @@ -2517,7 +3289,19 @@ class JupyterHub(Application): return if self.http_server: self.http_server.stop() - self.io_loop.add_callback(self.io_loop.stop) + self.io_loop.add_callback(self.shutdown_cancel_tasks) + + async def start_show_config(self): + """Async wrapper around base start_show_config method""" + # We need this because of our custom launch_instance_async, + # where `start` isn't a blocking call, + # it only gets async things going + # and `--show-config` replaces `start` with 
a blocking function. + # so our version: + # 1. calls the original blocking method + # 2. stops the event loop when we are done, so the process exits + super().start_show_config() + self.exit(0) async def launch_instance_async(self, argv=None): try: @@ -2530,17 +3314,20 @@ class JupyterHub(Application): @classmethod def launch_instance(cls, argv=None): self = cls.instance() - AsyncIOMainLoop().install() - loop = IOLoop.current() - task = asyncio.ensure_future(self.launch_instance_async(argv)) + self._init_asyncio_patch() + loop = IOLoop(make_current=False) + + try: + loop.run_sync(partial(self.launch_instance_async, argv)) + except Exception: + loop.close() + raise + try: loop.start() except KeyboardInterrupt: print("\nInterrupted") finally: - if task.done(): - # re-raise exceptions in launch_instance_async - task.result() loop.stop() loop.close() diff --git a/jupyterhub/auth.py b/jupyterhub/auth.py index bc906a70..2795a781 100644 --- a/jupyterhub/auth.py +++ b/jupyterhub/auth.py @@ -7,10 +7,10 @@ import re import sys import warnings from concurrent.futures import ThreadPoolExecutor +from functools import partial from shutil import which -from subprocess import PIPE -from subprocess import Popen -from subprocess import STDOUT +from subprocess import PIPE, STDOUT, Popen +from textwrap import dedent try: import pamela @@ -19,13 +19,12 @@ except Exception as e: _pamela_error = e from tornado.concurrent import run_on_executor - +from traitlets import Any, Bool, Dict, Integer, Set, Unicode, default, observe from traitlets.config import LoggingConfigurable -from traitlets import Bool, Integer, Set, Unicode, Dict, Any, default, observe from .handlers.login import LoginHandler -from .utils import maybe_future, url_path_join from .traitlets import Command +from .utils import maybe_future, url_path_join class Authenticator(LoggingConfigurable): @@ -33,6 +32,23 @@ class Authenticator(LoggingConfigurable): db = Any() + @default("db") + def _deprecated_db(self): + 
self.log.warning( + dedent( + """ + The shared database session at Authenticator.db is deprecated, and will be removed. + Please manage your own database and connections. + + Contact JupyterHub at https://github.com/jupyterhub/jupyterhub/issues/3700 + if you have questions or ideas about direct database needs for your Authenticator. + """ + ), + ) + return self._deprecated_db_session + + _deprecated_db_session = Any() + enable_auth_state = Bool( False, config=True, @@ -87,6 +103,10 @@ class Authenticator(LoggingConfigurable): help=""" Set of users that will have admin rights on this JupyterHub. + Note: As of JupyterHub 2.0, + full admin rights should not be required, + and more precise permissions can be managed via roles. + Admin users have extra privileges: - Use the admin panel to see list of users logged in - Add / remove users in some authenticators @@ -101,40 +121,76 @@ class Authenticator(LoggingConfigurable): ).tag(config=True) whitelist = Set( + help="Deprecated, use `Authenticator.allowed_users`", + config=True, + ) + + allowed_users = Set( help=""" - Whitelist of usernames that are allowed to log in. + Set of usernames that are allowed to log in. Use this with supported authenticators to restrict which users can log in. This is an - additional whitelist that further restricts users, beyond whatever restrictions the - authenticator has in place. + additional list that further restricts users, beyond whatever restrictions the + authenticator has in place. Any user in this list is granted the 'user' role on hub startup. If empty, does not perform any additional restriction. + + .. versionchanged:: 1.2 + `Authenticator.whitelist` renamed to `allowed_users` """ ).tag(config=True) - blacklist = Set( + blocked_users = Set( help=""" - Blacklist of usernames that are not allowed to log in. + Set of usernames that are not allowed to log in. Use this with supported authenticators to restrict which users can not log in. 
This is an - additional blacklist that further restricts users, beyond whatever restrictions the + additional block list that further restricts users, beyond whatever restrictions the authenticator has in place. If empty, does not perform any additional restriction. .. versionadded: 0.9 + + .. versionchanged:: 1.2 + `Authenticator.blacklist` renamed to `blocked_users` """ ).tag(config=True) - @observe('whitelist') - def _check_whitelist(self, change): + _deprecated_aliases = { + "whitelist": ("allowed_users", "1.2"), + "blacklist": ("blocked_users", "1.2"), + } + + @observe(*list(_deprecated_aliases)) + def _deprecated_trait(self, change): + """observer for deprecated traits""" + old_attr = change.name + new_attr, version = self._deprecated_aliases.get(old_attr) + new_value = getattr(self, new_attr) + if new_value != change.new: + # only warn if different + # protects backward-compatible config from warnings + # if they set the same value under both names + self.log.warning( + "{cls}.{old} is deprecated in JupyterHub {version}, use {cls}.{new} instead".format( + cls=self.__class__.__name__, + old=old_attr, + new=new_attr, + version=version, + ) + ) + setattr(self, new_attr, change.new) + + @observe('allowed_users') + def _check_allowed_users(self, change): short_names = [name for name in change['new'] if len(name) <= 1] if short_names: sorted_names = sorted(short_names) single = ''.join(sorted_names) string_set_typo = "set('%s')" % single self.log.warning( - "whitelist contains single-character names: %s; did you mean set([%r]) instead of %s?", + "Allowed set contains single-character names: %s; did you mean set([%r]) instead of %s?", sorted_names[:8], single, string_set_typo, @@ -148,6 +204,13 @@ class Authenticator(LoggingConfigurable): """ ) + def get_custom_html(self, base_url): + """Get custom HTML for the authenticator. + + .. 
versionadded: 1.4 + """ + return self.custom_html + login_service = Unicode( help=""" Name of the login service that this authenticator is providing using to authenticate users. @@ -193,6 +256,9 @@ class Authenticator(LoggingConfigurable): if not username: # empty usernames are not allowed return False + if username != username.strip(): + # starting/ending with space is not allowed + return False if not self.username_regex: return True return bool(self.username_regex.match(username)) @@ -206,6 +272,7 @@ class Authenticator(LoggingConfigurable): delete_invalid_users = Bool( False, + config=True, help="""Delete any users from the database that do not pass validation When JupyterHub starts, `.add_user` will be called @@ -260,39 +327,74 @@ class Authenticator(LoggingConfigurable): def __init__(self, **kwargs): super().__init__(**kwargs) - for method_name in ( - 'check_whitelist', - 'check_blacklist', - 'check_group_whitelist', + self._init_deprecated_methods() + + def _init_deprecated_methods(self): + # handles deprecated signature *and* name + # with correct subclass override priority! 
+ for old_name, new_name in ( + ('check_whitelist', 'check_allowed'), + ('check_blacklist', 'check_blocked_users'), + ('check_group_whitelist', 'check_allowed_groups'), ): - original_method = getattr(self, method_name, None) - if original_method is None: + old_method = getattr(self, old_name, None) + if old_method is None: # no such method (check_group_whitelist is optional) continue - signature = inspect.signature(original_method) - if 'authentication' not in signature.parameters: + + # allow old name to have higher priority + # if and only if it's defined in a later subclass + # than the new name + for cls in self.__class__.mro(): + has_old_name = old_name in cls.__dict__ + has_new_name = new_name in cls.__dict__ + if has_new_name: + break + if has_old_name and not has_new_name: + warnings.warn( + "{0}.{1} should be renamed to {0}.{2} for JupyterHub >= 1.2".format( + cls.__name__, old_name, new_name + ), + DeprecationWarning, + ) + # use old name instead of new + # if old name is overridden in subclass + def _new_calls_old(old_name, *args, **kwargs): + return getattr(self, old_name)(*args, **kwargs) + + setattr(self, new_name, partial(_new_calls_old, old_name)) + break + + # deprecate pre-1.0 method signatures + signature = inspect.signature(old_method) + if 'authentication' not in signature.parameters and not any( + param.kind == inspect.Parameter.VAR_KEYWORD + for param in signature.parameters.values() + ): # adapt to pre-1.0 signature for compatibility warnings.warn( """ {0}.{1} does not support the authentication argument, - added in JupyterHub 1.0. + added in JupyterHub 1.0. and is renamed to {2} in JupyterHub 1.2. It should have the signature: - def {1}(self, username, authentication=None): + def {2}(self, username, authentication=None): ... Adapting for compatibility. 
""".format( - self.__class__.__name__, method_name + self.__class__.__name__, old_name, new_name ), DeprecationWarning, ) - def wrapped_method(username, authentication=None, **kwargs): + def wrapped_method( + original_method, username, authentication=None, **kwargs + ): return original_method(username, **kwargs) - setattr(self, method_name, wrapped_method) + setattr(self, old_name, partial(wrapped_method, old_method)) async def run_post_auth_hook(self, handler, authentication): """ @@ -326,39 +428,45 @@ class Authenticator(LoggingConfigurable): username = self.username_map.get(username, username) return username - def check_whitelist(self, username, authentication=None): - """Check if a username is allowed to authenticate based on whitelist configuration + def check_allowed(self, username, authentication=None): + """Check if a username is allowed to authenticate based on configuration Return True if username is allowed, False otherwise. - No whitelist means any username is allowed. + No allowed_users set means any username is allowed. - Names are normalized *before* being checked against the whitelist. + Names are normalized *before* being checked against the allowed set. .. versionchanged:: 1.0 Signature updated to accept authentication data and any future changes - """ - if not self.whitelist: - # No whitelist means any name is allowed - return True - return username in self.whitelist - def check_blacklist(self, username, authentication=None): - """Check if a username is blocked to authenticate based on blacklist configuration + .. versionchanged:: 1.2 + Renamed check_whitelist to check_allowed + """ + if not self.allowed_users: + # No allowed set means any name is allowed + return True + return username in self.allowed_users + + def check_blocked_users(self, username, authentication=None): + """Check if a username is blocked to authenticate based on Authenticator.blocked configuration Return True if username is allowed, False otherwise. 
- No blacklist means any username is allowed. + No block list means any username is allowed. - Names are normalized *before* being checked against the blacklist. + Names are normalized *before* being checked against the block list. .. versionadded: 0.9 .. versionchanged:: 1.0 Signature updated to accept authentication data as second argument + + .. versionchanged:: 1.2 + Renamed check_blacklist to check_blocked_users """ - if not self.blacklist: - # No blacklist means any name is allowed + if not self.blocked_users: + # No block list means any name is allowed return True - return username not in self.blacklist + return username not in self.blocked_users async def get_authenticated_user(self, handler, data): """Authenticate the user who is attempting to log in @@ -367,7 +475,7 @@ class Authenticator(LoggingConfigurable): This calls `authenticate`, which should be overridden in subclasses, normalizes the username if any normalization should be done, - and then validates the name in the whitelist. + and then validates the name in the allowed set. This is the outer API for authenticating a user. Subclasses should not override this method. @@ -375,7 +483,7 @@ class Authenticator(LoggingConfigurable): The various stages can be overridden separately: - `authenticate` turns formdata into a username - `normalize_username` normalizes the username - - `check_whitelist` checks against the user whitelist + - `check_allowed` checks against the allowed usernames .. 
versionchanged:: 0.8 return dict instead of username @@ -389,7 +497,7 @@ class Authenticator(LoggingConfigurable): else: authenticated = {'name': authenticated} authenticated.setdefault('auth_state', None) - # Leave the default as None, but reevaluate later post-whitelist + # Leave the default as None, but reevaluate later post-allowed-check authenticated.setdefault('admin', None) # normalize the username @@ -400,20 +508,18 @@ class Authenticator(LoggingConfigurable): self.log.warning("Disallowing invalid username %r.", username) return - blacklist_pass = await maybe_future( - self.check_blacklist(username, authenticated) - ) - whitelist_pass = await maybe_future( - self.check_whitelist(username, authenticated) + blocked_pass = await maybe_future( + self.check_blocked_users(username, authenticated) ) + allowed_pass = await maybe_future(self.check_allowed(username, authenticated)) - if blacklist_pass: + if blocked_pass: pass else: - self.log.warning("User %r in blacklist. Stop authentication", username) + self.log.warning("User %r blocked. Stop authentication", username) return - if whitelist_pass: + if allowed_pass: if authenticated['admin'] is None: authenticated['admin'] = await maybe_future( self.is_admin(handler, authenticated) @@ -423,7 +529,7 @@ class Authenticator(LoggingConfigurable): return authenticated else: - self.log.warning("User %r not in whitelist.", username) + self.log.warning("User %r not allowed.", username) return async def refresh_user(self, user, handler=None): @@ -479,7 +585,7 @@ class Authenticator(LoggingConfigurable): It must return the username on successful authentication, and return None on failed authentication. - Checking the whitelist is handled separately by the caller. + Checking allowed_users/blocked_users is handled separately by the caller. .. versionchanged:: 0.8 Allow `authenticate` to return a dict containing auth_state. @@ -494,9 +600,13 @@ class Authenticator(LoggingConfigurable): or None if Authentication failed. 
The Authenticator may return a dict instead, which MUST have a - key `name` holding the username, and MAY have two optional keys - set: `auth_state`, a dictionary of of auth state that will be - persisted; and `admin`, the admin setting value for the user. + key `name` holding the username, and MAY have additional keys: + + - `auth_state`, a dictionary of auth state that will be + persisted; + - `admin`, the admin setting value for the user + - `groups`, the list of group names the user should be a member of, + if Authenticator.manage_groups is True. """ def pre_spawn_start(self, user, spawner): @@ -520,10 +630,10 @@ class Authenticator(LoggingConfigurable): This method may be a coroutine. - By default, this just adds the user to the whitelist. + By default, this just adds the user to the allowed_users set. Subclasses may do more extensive things, such as adding actual unix users, - but they should call super to ensure the whitelist is updated. + but they should call super to ensure the allowed_users set is updated. Note that this should be idempotent, since it is called whenever the hub restarts for all users. @@ -533,19 +643,32 @@ class Authenticator(LoggingConfigurable): """ if not self.validate_username(user.name): raise ValueError("Invalid username: %s" % user.name) - if self.whitelist: - self.whitelist.add(user.name) + if self.allowed_users: + self.allowed_users.add(user.name) def delete_user(self, user): """Hook called when a user is deleted - Removes the user from the whitelist. - Subclasses should call super to ensure the whitelist is updated. + Removes the user from the allowed_users set. + Subclasses should call super to ensure the allowed_users set is updated. 
Args: user (User): The User wrapper object """ - self.whitelist.discard(user.name) + self.allowed_users.discard(user.name) + + manage_groups = Bool( + False, + config=True, + help="""Let authenticator manage user groups + + If True, Authenticator.authenticate and/or .refresh_user + may return a list of group names in the 'groups' field, + which will be assigned to the user. + + All group-assignment APIs are disabled if this is True. + """, + ) auto_login = Bool( False, @@ -562,6 +685,26 @@ class Authenticator(LoggingConfigurable): """, ) + auto_login_oauth2_authorize = Bool( + False, + config=True, + help=""" + Automatically begin login process for OAuth2 authorization requests + + When another application is using JupyterHub as OAuth2 provider, it + sends users to `/hub/api/oauth2/authorize`. If the user isn't logged + in already, and auto_login is not set, the user will be dumped on the + hub's home page, without any context on what to do next. + + Setting this to true will automatically redirect users to login if + they aren't logged in *only* on the `/hub/api/oauth2/authorize` + endpoint. + + .. versionadded:: 1.5 + + """, + ) + def login_url(self, base_url): """Override this when registering a custom login handler @@ -610,6 +753,41 @@ class Authenticator(LoggingConfigurable): return [('/login', LoginHandler)] +def _deprecated_method(old_name, new_name, version): + """Create a deprecated method wrapper for a deprecated method name""" + + def deprecated(self, *args, **kwargs): + warnings.warn( + ( + "{cls}.{old_name} is deprecated in JupyterHub {version}." + " Please use {cls}.{new_name} instead." 
+ ).format( + cls=self.__class__.__name__, + old_name=old_name, + new_name=new_name, + version=version, + ), + DeprecationWarning, + stacklevel=2, + ) + old_method = getattr(self, new_name) + return old_method(*args, **kwargs) + + return deprecated + + +# deprecate white/blacklist method names +for _old_name, _new_name, _version in [ + ("check_whitelist", "check_allowed", "1.2"), + ("check_blacklist", "check_blocked_users", "1.2"), +]: + setattr( + Authenticator, + _old_name, + _deprecated_method(_old_name, _new_name, _version), + ) + + class LocalAuthenticator(Authenticator): """Base class for Authenticators that work with local Linux/UNIX users @@ -655,42 +833,53 @@ class LocalAuthenticator(Authenticator): raise ValueError("I don't know how to create users on OS X") elif which('pw'): # Probably BSD - return ['pw', 'useradd', '-m'] + return ['pw', 'useradd', '-m', '-n'] else: # This appears to be the Linux non-interactive adduser command: return ['adduser', '-q', '--gecos', '""', '--disabled-password'] - group_whitelist = Set( + uids = Dict( help=""" - Whitelist all users from this UNIX group. - - This makes the username whitelist ineffective. + Dictionary of uids to use at user creation time. + This helps ensure that users created from the database + get the same uid each time they are created + in temporary deployments or containers. """ ).tag(config=True) - @observe('group_whitelist') - def _group_whitelist_changed(self, change): + group_whitelist = Set( + help="""DEPRECATED: use allowed_groups""", + ).tag(config=True) + + allowed_groups = Set( + help=""" + Allow login from all users in these UNIX groups. + + If set, allowed username set is ignored. """ - Log a warning if both group_whitelist and user whitelist are set. 
- """ - if self.whitelist: + ).tag(config=True) + + @observe('allowed_groups') + def _allowed_groups_changed(self, change): + """Log a warning if mutually exclusive user and group allowed sets are specified.""" + if self.allowed_users: self.log.warning( - "Ignoring username whitelist because group whitelist supplied!" + "Ignoring Authenticator.allowed_users set because Authenticator.allowed_groups supplied!" ) - def check_whitelist(self, username, authentication=None): - if self.group_whitelist: - return self.check_group_whitelist(username, authentication) + def check_allowed(self, username, authentication=None): + if self.allowed_groups: + return self.check_allowed_groups(username, authentication) else: - return super().check_whitelist(username, authentication) + return super().check_allowed(username, authentication) - def check_group_whitelist(self, username, authentication=None): + def check_allowed_groups(self, username, authentication=None): """ - If group_whitelist is configured, check if authenticating user is part of group. + If allowed_groups is configured, check if authenticating user is part of group. """ - if not self.group_whitelist: + if not self.allowed_groups: return False - for grnam in self.group_whitelist: + for grnam in self.allowed_groups: try: group = self._getgrnam(grnam) except KeyError: @@ -762,13 +951,19 @@ class LocalAuthenticator(Authenticator): Tested to work on FreeBSD and Linux, at least. 
""" name = user.name - cmd = [arg.replace('USERNAME', name) for arg in self.add_user_cmd] + [name] + cmd = [arg.replace('USERNAME', name) for arg in self.add_user_cmd] + try: + uid = self.uids[name] + cmd += ['--uid', '%d' % uid] + except KeyError: + self.log.debug("No UID for user %s" % name) + cmd += [name] self.log.info("Creating user: %s", ' '.join(map(pipes.quote, cmd))) p = Popen(cmd, stdout=PIPE, stderr=STDOUT) p.wait() if p.returncode: err = p.stdout.read().decode('utf8', 'replace') - raise RuntimeError("Failed to create system user %s: %s" % (name, err)) + raise RuntimeError(f"Failed to create system user {name}: {err}") class PAMAuthenticator(LocalAuthenticator): @@ -796,16 +991,24 @@ class PAMAuthenticator(LocalAuthenticator): ).tag(config=True) open_sessions = Bool( - True, + False, help=""" Whether to open a new PAM session when spawners are started. - This may trigger things like mounting shared filsystems, - loading credentials, etc. depending on system configuration, - but it does not always work. + This may trigger things like mounting shared filesystems, + loading credentials, etc. depending on system configuration. + + The lifecycle of PAM sessions is not correct, + so many PAM session configurations will not work. If any errors are encountered when opening/closing PAM sessions, this is automatically set to False. + + .. versionchanged:: 2.2 + + Due to longstanding problems in the session lifecycle, + this is now disabled by default. + You may opt-in to opening sessions by setting this to True. """, ).tag(config=True) @@ -814,8 +1017,8 @@ class PAMAuthenticator(LocalAuthenticator): help=""" Whether to check the user's account status via PAM during authentication. - The PAM account stack performs non-authentication based account - management. It is typically used to restrict/permit access to a + The PAM account stack performs non-authentication based account + management. 
It is typically used to restrict/permit access to a service and this step is needed to access the host's user access control. Disabling this can be dangerous as authenticated but unauthorized users may @@ -828,7 +1031,11 @@ class PAMAuthenticator(LocalAuthenticator): Authoritative list of user groups that determine admin access. Users not in these groups can still be granted admin status through admin_users. - White/blacklisting rules still apply. + allowed/blocked rules still apply. + + Note: As of JupyterHub 2.0, + full admin rights should not be required, + and more precise permissions can be managed via roles. """ ).tag(config=True) @@ -966,10 +1173,21 @@ class PAMAuthenticator(LocalAuthenticator): uid = pwd.getpwnam(username).pw_uid username = pwd.getpwuid(uid).pw_name username = self.username_map.get(username, username) + return username else: return super().normalize_username(username) +for _old_name, _new_name, _version in [ + ("check_group_whitelist", "check_group_allowed", "1.2"), +]: + setattr( + LocalAuthenticator, + _old_name, + _deprecated_method(_old_name, _new_name, _version), + ) + + class DummyAuthenticator(Authenticator): """Dummy Authenticator for testing @@ -996,3 +1214,22 @@ class DummyAuthenticator(Authenticator): return data['username'] return None return data['username'] + + +class NullAuthenticator(Authenticator): + """Null Authenticator for JupyterHub + + For cases where authentication should be disabled, + e.g. only allowing access via API tokens. + + .. versionadded:: 2.0 + """ + + # auto_login skips 'Login with...' page on Hub 0.8 + auto_login = True + + # for Hub 0.7, show 'login with...' 
+ login_service = 'null' + + def get_handlers(self, app): + return [] diff --git a/jupyterhub/crypto.py b/jupyterhub/crypto.py index 57bd00d3..854017f8 100644 --- a/jupyterhub/crypto.py +++ b/jupyterhub/crypto.py @@ -4,19 +4,12 @@ import os from binascii import a2b_hex from concurrent.futures import ThreadPoolExecutor -from traitlets import Any -from traitlets import default -from traitlets import Dict -from traitlets import Integer -from traitlets import List -from traitlets import observe -from traitlets import validate -from traitlets.config import Config -from traitlets.config import SingletonConfigurable +from traitlets import Any, Integer, List, default, observe, validate +from traitlets.config import Config, SingletonConfigurable try: import cryptography - from cryptography.fernet import Fernet, MultiFernet, InvalidToken + from cryptography.fernet import Fernet, InvalidToken, MultiFernet except ImportError: cryptography = None diff --git a/jupyterhub/dbutil.py b/jupyterhub/dbutil.py index 703de8f4..6466bdad 100644 --- a/jupyterhub/dbutil.py +++ b/jupyterhub/dbutil.py @@ -26,10 +26,9 @@ def write_alembic_ini(alembic_ini='alembic.ini', db_url='sqlite:///jupyterhub.sq Parameters ---------- - - alembic_ini: str + alembic_ini : str path to the alembic.ini file that should be written. - db_url: str + db_url : str The SQLAlchemy database url, e.g. `sqlite:///jupyterhub.sqlite`. """ with open(ALEMBIC_INI_TEMPLATE_PATH) as f: @@ -58,13 +57,11 @@ def _temp_alembic_ini(db_url): Parameters ---------- - - db_url: str + db_url : str The SQLAlchemy database url, e.g. `sqlite:///jupyterhub.sqlite`. Returns ------- - alembic_ini: str The path to the temporary alembic.ini that we have created. This file will be cleaned up on exit from the context manager. 
@@ -94,7 +91,7 @@ def backup_db_file(db_file, log=None): for i in range(1, 10): if not os.path.exists(backup_db_file): break - backup_db_file = '{}.{}.{}'.format(db_file, timestamp, i) + backup_db_file = f'{db_file}.{timestamp}.{i}' # if os.path.exists(backup_db_file): raise OSError("backup db file already exists: %s" % backup_db_file) @@ -139,7 +136,7 @@ def upgrade_if_needed(db_url, backup=True, log=None): def shell(args=None): - """Start an IPython shell hooked up to the jupyerhub database""" + """Start an IPython shell hooked up to the jupyterhub database""" from .app import JupyterHub hub = JupyterHub() diff --git a/jupyterhub/event-schemas/server-actions/v1.yaml b/jupyterhub/event-schemas/server-actions/v1.yaml new file mode 100644 index 00000000..9023b105 --- /dev/null +++ b/jupyterhub/event-schemas/server-actions/v1.yaml @@ -0,0 +1,59 @@ +"$id": hub.jupyter.org/server-action +version: 1 +title: JupyterHub server events +description: | + Record actions on user servers made via JupyterHub. + + JupyterHub can perform various actions on user servers via + direct interaction from users, or via the API. This event is + recorded whenever either of those happen. + + Limitations: + + 1. This does not record all server starts / stops, only those + explicitly performed by JupyterHub. For example, a user's server + can go down because the node it was running on dies. That will + not cause an event to be recorded, since it was not initiated + by JupyterHub. In practice this happens often, so this is not + a complete record. + 2. Events are only recorded when an action succeeds. +type: object +required: + - action + - username + - servername +properties: + action: + enum: + - start + - stop + description: | + Action performed by JupyterHub. + + This is a required field. + + Possible Values: + + 1. start + A user's server was successfully started + + 2. 
stop + A user's server was successfully stopped + username: + type: string + description: | + Name of the user whose server this action was performed on. + + This is the normalized name used by JupyterHub itself, + which is derived from the authentication provider used but + might not be the same as used in the authentication provider. + servername: + type: string + description: | + Name of the server this action was performed on. + + JupyterHub supports each user having multiple servers with + arbitrary names, and this field specifies the name of the + server. + + The 'default' server is denoted by the empty string diff --git a/jupyterhub/handlers/__init__.py b/jupyterhub/handlers/__init__.py index 6af7659f..79f3ea87 100644 --- a/jupyterhub/handlers/__init__.py +++ b/jupyterhub/handlers/__init__.py @@ -1,9 +1,6 @@ -from . import base -from . import login -from . import metrics -from . import pages -from .base import * -from .login import * +from . import base, login, metrics, pages +from .base import * # noqa +from .login import * # noqa default_handlers = [] for mod in (base, pages, login, metrics): diff --git a/jupyterhub/handlers/base.py b/jupyterhub/handlers/base.py index f9c8485b..33b5a089 100644 --- a/jupyterhub/handlers/base.py +++ b/jupyterhub/handlers/base.py @@ -2,51 +2,51 @@ # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. 
import asyncio -import copy +import functools import json import math import random import re import time import uuid -from datetime import datetime -from datetime import timedelta +from datetime import datetime, timedelta from http.client import responses -from urllib.parse import parse_qs -from urllib.parse import parse_qsl -from urllib.parse import urlencode -from urllib.parse import urlparse -from urllib.parse import urlunparse +from urllib.parse import parse_qs, parse_qsl, urlencode, urlparse, urlunparse from jinja2 import TemplateNotFound from sqlalchemy.exc import SQLAlchemyError -from tornado import gen -from tornado import web -from tornado.httputil import HTTPHeaders -from tornado.httputil import url_concat +from tornado import gen, web +from tornado.httputil import HTTPHeaders, url_concat from tornado.ioloop import IOLoop from tornado.log import app_log -from tornado.web import addslash -from tornado.web import MissingArgumentError -from tornado.web import RequestHandler +from tornado.web import RequestHandler, addslash -from .. import __version__ -from .. import orm -from ..metrics import PROXY_ADD_DURATION_SECONDS -from ..metrics import ProxyAddStatus -from ..metrics import RUNNING_SERVERS -from ..metrics import SERVER_POLL_DURATION_SECONDS -from ..metrics import SERVER_SPAWN_DURATION_SECONDS -from ..metrics import SERVER_STOP_DURATION_SECONDS -from ..metrics import ServerPollStatus -from ..metrics import ServerSpawnStatus -from ..metrics import ServerStopStatus +from .. 
import __version__, orm, roles, scopes +from ..metrics import ( + PROXY_ADD_DURATION_SECONDS, + PROXY_DELETE_DURATION_SECONDS, + RUNNING_SERVERS, + SERVER_POLL_DURATION_SECONDS, + SERVER_SPAWN_DURATION_SECONDS, + SERVER_STOP_DURATION_SECONDS, + TOTAL_USERS, + ProxyDeleteStatus, + ServerPollStatus, + ServerSpawnStatus, + ServerStopStatus, +) from ..objects import Server +from ..scopes import needs_scope from ..spawner import LocalProcessSpawner from ..user import User -from ..utils import get_accepted_mimetype -from ..utils import maybe_future -from ..utils import url_path_join +from ..utils import ( + AnyTimeoutError, + get_accepted_mimetype, + get_browser_protocol, + maybe_future, + url_escape_path, + url_path_join, +) # pattern for the authentication token header auth_header_pat = re.compile(r'^(?:token|bearer)\s+([^\s]+)$', flags=re.IGNORECASE) @@ -67,6 +67,12 @@ SESSION_COOKIE_NAME = 'jupyterhub-session-id' class BaseHandler(RequestHandler): """Base Handler class with access to common methods and properties.""" + # by default, only accept cookie-based authentication + # The APIHandler base class enables token auth + # versionadded: 2.0 + _accept_cookie_auth = True + _accept_token_auth = False + async def prepare(self): """Identify the user during the prepare stage of each request @@ -79,12 +85,18 @@ class BaseHandler(RequestHandler): The current user (None if not logged in) may be accessed via the `self.current_user` property during the handling of any request. 
""" + self.expanded_scopes = set() try: await self.get_current_user() - except Exception: - self.log.exception("Failed to get current user") + except Exception as e: + # ensure get_current_user is never called again for this handler, + # since it failed self._jupyterhub_user = None - + self.log.exception("Failed to get current user") + if isinstance(e, SQLAlchemyError): + self.log.error("Rolling back session due to database error") + self.db.rollback() + self._resolve_roles_and_scopes() return await maybe_future(super().prepare()) @property @@ -140,6 +152,10 @@ class BaseHandler(RequestHandler): def hub(self): return self.settings['hub'] + @property + def app(self): + return self.settings['app'] + @property def proxy(self): return self.settings['proxy'] @@ -156,6 +172,10 @@ class BaseHandler(RequestHandler): def oauth_provider(self): return self.settings['oauth_provider'] + @property + def eventlog(self): + return self.settings['eventlog'] + def finish(self, *args, **kwargs): """Roll back any uncommitted transactions from the handler.""" if self.db.dirty: @@ -228,6 +248,17 @@ class BaseHandler(RequestHandler): def authenticate_prometheus(self): return self.settings.get('authenticate_prometheus', True) + async def get_current_user_named_server_limit(self): + """ + Return named server limit for current user. 
+ """ + named_server_limit_per_user = self.named_server_limit_per_user + + if callable(named_server_limit_per_user): + return await maybe_future(named_server_limit_per_user(self)) + + return named_server_limit_per_user + def get_auth_token(self): """Get the authorization token from Authorization header""" auth_header = self.request.headers.get('Authorization', '') @@ -236,26 +267,6 @@ class BaseHandler(RequestHandler): return None return match.group(1) - def get_current_user_oauth_token(self): - """Get the current user identified by OAuth access token - - Separate from API token because OAuth access tokens - can only be used for identifying users, - not using the API. - """ - token = self.get_auth_token() - if token is None: - return None - orm_token = orm.OAuthAccessToken.find(self.db, token) - if orm_token is None: - return None - - now = datetime.utcnow() - recorded = self._record_activity(orm_token, now) - if self._record_activity(orm_token.user, now) or recorded: - self.db.commit() - return self._user_from_orm(orm_token.user) - def _record_activity(self, obj, timestamp=None): """record activity on an ORM object @@ -342,23 +353,28 @@ class BaseHandler(RequestHandler): auth_info['auth_state'] = await user.get_auth_state() return await self.auth_to_user(auth_info, user) - def get_current_user_token(self): - """get_current_user from Authorization header token""" + @functools.lru_cache() + def get_token(self): + """get token from authorization header""" token = self.get_auth_token() if token is None: return None orm_token = orm.APIToken.find(self.db, token) + return orm_token + + def get_current_user_token(self): + """get_current_user from Authorization header token""" + # record token activity + orm_token = self.get_token() if orm_token is None: return None - - # record token activity now = datetime.utcnow() recorded = self._record_activity(orm_token, now) if orm_token.user: # FIXME: scopes should give us better control than this # don't consider API requests 
originating from a server # to be activity from the user - if not orm_token.note.startswith("Server at "): + if not orm_token.note or not orm_token.note.startswith("Server at "): recorded = self._record_activity(orm_token.user, now) or recorded if recorded: self.db.commit() @@ -408,9 +424,11 @@ class BaseHandler(RequestHandler): async def get_current_user(self): """get current username""" if not hasattr(self, '_jupyterhub_user'): + user = None try: - user = self.get_current_user_token() - if user is None: + if self._accept_token_auth: + user = self.get_current_user_token() + if user is None and self._accept_cookie_auth: user = self.get_current_user_cookie() if user and isinstance(user, User): user = await self.refresh_auth(user) @@ -418,9 +436,38 @@ class BaseHandler(RequestHandler): except Exception: # don't let errors here raise more than once self._jupyterhub_user = None - self.log.exception("Error getting current user") + # but still raise, which will get handled in .prepare() + raise return self._jupyterhub_user + def _resolve_roles_and_scopes(self): + self.expanded_scopes = set() + if self.current_user: + orm_token = self.get_token() + if orm_token: + self.expanded_scopes = scopes.get_scopes_for(orm_token) + else: + self.expanded_scopes = scopes.get_scopes_for(self.current_user) + self.parsed_scopes = scopes.parse_scopes(self.expanded_scopes) + + @functools.lru_cache() + def get_scope_filter(self, req_scope): + """Produce a filter function for req_scope on resources + + Returns `has_access_to(orm_resource, kind)` which returns True or False + for whether the current request has access to req_scope on the given resource. 
+ """ + + def no_access(orm_resource, kind): + return False + + if req_scope not in self.parsed_scopes: + return no_access + + sub_scope = self.parsed_scopes[req_scope] + + return functools.partial(scopes.check_scope_filter, sub_scope) + @property def current_user(self): """Override .current_user accessor from tornado @@ -446,6 +493,8 @@ class BaseHandler(RequestHandler): # not found, create and register user u = orm.User(name=username) self.db.add(u) + roles.assign_default_roles(self.db, entity=u) + TOTAL_USERS.inc() self.db.commit() user = self._user_from_orm(u) return user @@ -458,17 +507,15 @@ class BaseHandler(RequestHandler): session_id = self.get_session_cookie() if session_id: # clear session id - self.clear_cookie(SESSION_COOKIE_NAME, **kwargs) + self.clear_cookie(SESSION_COOKIE_NAME, path=self.base_url, **kwargs) if user: # user is logged in, clear any tokens associated with the current session # don't clear session tokens if not logged in, # because that could be a malicious logout request! 
count = 0 - for access_token in ( - self.db.query(orm.OAuthAccessToken) - .filter(orm.OAuthAccessToken.user_id == user.id) - .filter(orm.OAuthAccessToken.session_id == session_id) + for access_token in self.db.query(orm.APIToken).filter_by( + user_id=user.id, session_id=session_id ): self.db.delete(access_token) count += 1 @@ -482,8 +529,21 @@ class BaseHandler(RequestHandler): self.clear_cookie( 'jupyterhub-services', path=url_path_join(self.base_url, 'services'), - **kwargs + **kwargs, ) + # clear_cookie only accepts a subset of set_cookie's kwargs + clear_xsrf_cookie_kwargs = { + key: value + for key, value in self.settings.get('xsrf_cookie_kwargs', {}).items() + if key in {"path", "domain"} + } + + self.clear_cookie( + '_xsrf', + **clear_xsrf_cookie_kwargs, + ) + # Reset _jupyterhub_user + self._jupyterhub_user = None def _set_cookie(self, key, value, encrypted=True, **overrides): """Setting any cookie should go through here @@ -532,7 +592,9 @@ class BaseHandler(RequestHandler): so other services on this domain can read it. 
""" session_id = uuid.uuid4().hex - self._set_cookie(SESSION_COOKIE_NAME, session_id, encrypted=False) + self._set_cookie( + SESSION_COOKIE_NAME, session_id, encrypted=False, path=self.base_url + ) return session_id def set_service_cookie(self, user): @@ -583,33 +645,34 @@ class BaseHandler(RequestHandler): next_url = self.get_argument('next', default='') # protect against some browsers' buggy handling of backslash as slash next_url = next_url.replace('\\', '%5C') - if (next_url + '/').startswith( - ( - '%s://%s/' % (self.request.protocol, self.request.host), - '//%s/' % self.request.host, - ) - ) or ( + proto = get_browser_protocol(self.request) + host = self.request.host + if next_url.startswith("///"): + # strip more than 2 leading // down to 2 + # because urlparse treats that as empty netloc, + # whereas browsers treat more than two leading // the same as //, + # so netloc is the first non-/ bit + next_url = "//" + next_url.lstrip("/") + parsed_next_url = urlparse(next_url) + + if (next_url + '/').startswith((f'{proto}://{host}/', f'//{host}/',)) or ( self.subdomain_host - and urlparse(next_url).netloc - and ("." + urlparse(next_url).netloc).endswith( + and parsed_next_url.netloc + and ("." + parsed_next_url.netloc).endswith( "." + urlparse(self.subdomain_host).netloc ) ): # treat absolute URLs for our host as absolute paths: - # below, redirects that aren't strictly paths - parsed = urlparse(next_url) - next_url = parsed.path - if parsed.query: - next_url = next_url + '?' + parsed.query - if parsed.fragment: - next_url = next_url + '#' + parsed.fragment + # below, redirects that aren't strictly paths are rejected + next_url = parsed_next_url.path + if parsed_next_url.query: + next_url = next_url + '?' 
+ parsed_next_url.query + if parsed_next_url.fragment: + next_url = next_url + '#' + parsed_next_url.fragment + parsed_next_url = urlparse(next_url) # if it still has host info, it didn't match our above check for *this* host - if next_url and ( - '://' in next_url - or next_url.startswith('//') - or not next_url.startswith('/') - ): + if next_url and (parsed_next_url.netloc or not next_url.startswith('/')): self.log.warning("Disallowing redirect outside JupyterHub: %r", next_url) next_url = '' @@ -625,9 +688,22 @@ class BaseHandler(RequestHandler): next_url, ) + # this is where we know if next_url is coming from ?next= param or we are using a default url + if next_url: + next_url_from_param = True + else: + next_url_from_param = False + if not next_url: - # custom default URL - next_url = default or self.default_url + # custom default URL, usually passed because user landed on that page but was not logged in + if default: + next_url = default + else: + # As set in jupyterhub_config.py + if callable(self.default_url): + next_url = self.default_url(self) + else: + next_url = self.default_url if not next_url: # default URL after login @@ -642,8 +718,45 @@ class BaseHandler(RequestHandler): next_url = url_path_join(self.hub.base_url, 'spawn') else: next_url = url_path_join(self.hub.base_url, 'home') + + if not next_url_from_param: + # when a request made with ?next=... assume all the params have already been encoded + # otherwise, preserve params from the current request across the redirect + next_url = self.append_query_parameters(next_url, exclude=['next']) return next_url + def append_query_parameters(self, url, exclude=None): + """Append the current request's query parameters to the given URL. + + Supports an extra optional parameter ``exclude`` that when provided must + contain a list of parameters to be ignored, i.e. these parameters will + not be added to the URL. 
+ + This is important to avoid infinite loops with the next parameter being + added over and over, for instance. + + The default value for ``exclude`` is an array with "next". This is useful + as most use cases in JupyterHub (all?) won't want to include the next + parameter twice (the next parameter is added elsewhere to the query + parameters). + + :param str url: a URL + :param list exclude: optional list of parameters to be ignored, defaults to + a list with "next" (to avoid redirect-loops) + :rtype (str) + """ + if exclude is None: + exclude = ['next'] + if self.request.query: + query_string = [ + param + for param in parse_qsl(self.request.query) + if param[0] not in exclude + ] + if query_string: + url = url_concat(url, query_string) + return url + async def auth_to_user(self, authenticated, user=None): """Persist data from .authenticate() or .refresh_user() to the User database @@ -661,24 +774,36 @@ class BaseHandler(RequestHandler): refreshing = user is not None if user and username != user.name: - raise ValueError("Username doesn't match! %s != %s" % (username, user.name)) + raise ValueError(f"Username doesn't match! {username} != {user.name}") if user is None: - new_user = username not in self.users - user = self.user_from_username(username) + user = self.find_user(username) + new_user = user is None if new_user: + user = self.user_from_username(username) await maybe_future(self.authenticator.add_user(user)) # Only set `admin` if the authenticator returned an explicit value. 
if admin is not None and admin != user.admin: user.admin = admin - self.db.commit() + # always ensure default roles ('user', 'admin' if admin) are assigned + # after a successful login + roles.assign_default_roles(self.db, entity=user) + + # apply authenticator-managed groups + if self.authenticator.manage_groups: + group_names = authenticated.get("groups") + if group_names is not None: + user.sync_groups(group_names) + # always set auth_state and commit, # because there could be key-rotation or clearing of previous values # going on. if not self.authenticator.enable_auth_state: # auth_state is not enabled. Force None. auth_state = None + await user.save_auth_state(auth_state) + return user async def login_user(self, data=None): @@ -692,6 +817,7 @@ class BaseHandler(RequestHandler): self.set_login_cookie(user) self.statsd.incr('login.success') self.statsd.timing('login.authenticate.success', auth_timer.ms) + self.log.info("User logged in: %s", user.name) user._auth_refreshed = time.monotonic() return user @@ -741,14 +867,20 @@ class BaseHandler(RequestHandler): user_server_name = user.name if server_name: - user_server_name = '%s:%s' % (user.name, server_name) + if '/' in server_name: + error_message = ( + f"Invalid server_name (may not contain '/'): {server_name}" + ) + self.log.error(error_message) + raise web.HTTPError(400, error_message) + user_server_name = f'{user.name}:{server_name}' if server_name in user.spawners and user.spawners[server_name].pending: pending = user.spawners[server_name].pending SERVER_SPAWN_DURATION_SECONDS.labels( status=ServerSpawnStatus.already_pending ).observe(time.perf_counter() - spawn_start_time) - raise RuntimeError("%s pending %s" % (user_server_name, pending)) + raise RuntimeError(f"{user_server_name} pending {pending}") # count active servers and pending spawns # we could do careful bookkeeping to avoid @@ -846,6 +978,11 @@ class BaseHandler(RequestHandler): SERVER_SPAWN_DURATION_SECONDS.labels( 
status=ServerSpawnStatus.success ).observe(time.perf_counter() - spawn_start_time) + self.eventlog.record_event( + 'hub.jupyter.org/server-action', + 1, + {'action': 'start', 'username': user.name, 'servername': server_name}, + ) proxy_add_start_time = time.perf_counter() spawner._proxy_pending = True try: @@ -860,7 +997,7 @@ class BaseHandler(RequestHandler): self.log.error( "Stopping %s to avoid inconsistent state", user_server_name ) - await user.stop() + await user.stop(server_name) PROXY_ADD_DURATION_SECONDS.labels(status='failure').observe( time.perf_counter() - proxy_add_start_time ) @@ -877,7 +1014,7 @@ class BaseHandler(RequestHandler): # clear spawner._spawn_future when it's done # keep an exception around, though, to prevent repeated implicit spawns # if spawn is failing - if f.exception() is None: + if f.cancelled() or f.exception() is None: spawner._spawn_future = None # Now we're all done. clear _spawn_pending flag spawner._spawn_pending = False @@ -888,11 +1025,14 @@ class BaseHandler(RequestHandler): # update failure count and abort if consecutive failure limit # is reached def _track_failure_count(f): - if f.exception() is None: + if f.cancelled() or f.exception() is None: # spawn succeeded, reset failure count self.settings['failure_count'] = 0 return # spawn failed, increment count and abort if limit reached + SERVER_SPAWN_DURATION_SECONDS.labels( + status=ServerSpawnStatus.failure + ).observe(time.perf_counter() - spawn_start_time) self.settings.setdefault('failure_count', 0) self.settings['failure_count'] += 1 failure_count = self.settings['failure_count'] @@ -908,6 +1048,7 @@ class BaseHandler(RequestHandler): self.log.critical( "Aborting due to %i consecutive spawn failures", failure_count ) + # abort in 2 seconds to allow pending handlers to resolve # mostly propagating errors for the current failures def abort(): @@ -921,17 +1062,20 @@ class BaseHandler(RequestHandler): await gen.with_timeout( timedelta(seconds=self.slow_spawn_timeout), 
finish_spawn_future ) - except gen.TimeoutError: + except AnyTimeoutError: # waiting_for_response indicates server process has started, # but is yet to become responsive. if spawner._spawn_pending and not spawner._waiting_for_response: - # still in Spawner.start, which is taking a long time - # we shouldn't poll while spawn is incomplete. - self.log.warning( - "User %s is slow to start (timeout=%s)", - user_server_name, - self.slow_spawn_timeout, - ) + # If slow_spawn_timeout is intentionally disabled then we + # don't need to log a warning, just return. + if self.slow_spawn_timeout > 0: + # still in Spawner.start, which is taking a long time + # we shouldn't poll while spawn is incomplete. + self.log.warning( + "User %s is slow to start (timeout=%s)", + user_server_name, + self.slow_spawn_timeout, + ) return # start has finished, but the server hasn't come up @@ -994,7 +1138,18 @@ class BaseHandler(RequestHandler): self.log.warning( "User %s server stopped, with exit code: %s", user.name, status ) - await self.proxy.delete_user(user, server_name) + proxy_deletion_start_time = time.perf_counter() + try: + await self.proxy.delete_user(user, server_name) + PROXY_DELETE_DURATION_SECONDS.labels( + status=ProxyDeleteStatus.success + ).observe(time.perf_counter() - proxy_deletion_start_time) + except Exception: + PROXY_DELETE_DURATION_SECONDS.labels( + status=ProxyDeleteStatus.failure + ).observe(time.perf_counter() - proxy_deletion_start_time) + raise + await user.stop(server_name) async def stop_single_user(self, user, server_name=''): @@ -1002,7 +1157,7 @@ class BaseHandler(RequestHandler): raise KeyError("User %s has no such spawner %r", user.name, server_name) spawner = user.spawners[server_name] if spawner.pending: - raise RuntimeError("%s pending %s" % (spawner._log_name, spawner.pending)) + raise RuntimeError(f"{spawner._log_name} pending {spawner.pending}") # set user._stop_pending before doing anything async # to avoid races spawner._stop_pending = True @@ 
-1017,6 +1172,10 @@ class BaseHandler(RequestHandler): tic = time.perf_counter() try: await self.proxy.delete_user(user, server_name) + PROXY_DELETE_DURATION_SECONDS.labels( + status=ProxyDeleteStatus.success + ).observe(time.perf_counter() - tic) + await user.stop(server_name) toc = time.perf_counter() self.log.info( @@ -1026,7 +1185,19 @@ class BaseHandler(RequestHandler): SERVER_STOP_DURATION_SECONDS.labels( status=ServerStopStatus.success ).observe(toc - tic) + self.eventlog.record_event( + 'hub.jupyter.org/server-action', + 1, + { + 'action': 'stop', + 'username': user.name, + 'servername': server_name, + }, + ) except: + PROXY_DELETE_DURATION_SECONDS.labels( + status=ProxyDeleteStatus.failure + ).observe(time.perf_counter() - tic) SERVER_STOP_DURATION_SECONDS.labels( status=ServerStopStatus.failure ).observe(time.perf_counter() - tic) @@ -1038,10 +1209,13 @@ class BaseHandler(RequestHandler): try: await gen.with_timeout(timedelta(seconds=self.slow_stop_timeout), future) - except gen.TimeoutError: + except AnyTimeoutError: # hit timeout, but stop is still pending self.log.warning( - "User %s:%s server is slow to stop", user.name, server_name + "User %s:%s server is slow to stop (timeout=%s)", + user.name, + server_name, + self.slow_stop_timeout, ) # return handle on the future for hooking up callbacks @@ -1064,16 +1238,36 @@ class BaseHandler(RequestHandler): "home page.".format(home=home) ) - def get_template(self, name): - """Return the jinja template object for a given name""" - return self.settings['jinja2_env'].get_template(name) + def get_template(self, name, sync=False): + """ + Return the jinja template object for a given name - def render_template(self, name, **ns): + If sync is True, we return a Template that is compiled without async support. + Only those can be used in synchronous code. 
+ + If sync is False, we return a Template that is compiled with async support + """ + if sync: + key = 'jinja2_env_sync' + else: + key = 'jinja2_env' + return self.settings[key].get_template(name) + + def render_template(self, name, sync=False, **ns): + """ + Render jinja2 template + + If sync is set to True, we render the template & return a string + If sync is set to False, we return an awaitable + """ template_ns = {} template_ns.update(self.template_namespace) template_ns.update(ns) - template = self.get_template(name) - return template.render(**template_ns) + template = self.get_template(name, sync) + if sync: + return template.render(**template_ns) + else: + return template.render_async(**template_ns) @property def template_namespace(self): @@ -1087,11 +1281,26 @@ class BaseHandler(RequestHandler): logout_url=self.settings['logout_url'], static_url=self.static_url, version_hash=self.version_hash, + services=self.get_accessible_services(user), + parsed_scopes=self.parsed_scopes, + expanded_scopes=self.expanded_scopes, ) if self.settings['template_vars']: ns.update(self.settings['template_vars']) return ns + def get_accessible_services(self, user): + accessible_services = [] + if user is None: + return accessible_services + for service in self.services.values(): + if not service.url: + continue + if not service.display: + continue + accessible_services.append(service) + return accessible_services + def write_error(self, status_code, **kwargs): """render custom error pages""" exc_info = kwargs.get('exc_info') @@ -1135,17 +1344,19 @@ class BaseHandler(RequestHandler): # Content-Length must be recalculated. self.clear_header('Content-Length') - # render the template + # render_template is async, but write_error can't be! 
+ # so we run it sync here, instead of making a sync version of render_template + try: - html = self.render_template('%s.html' % status_code, **ns) + html = self.render_template('%s.html' % status_code, sync=True, **ns) except TemplateNotFound: self.log.debug("No template for %d", status_code) try: - html = self.render_template('error.html', **ns) + html = self.render_template('error.html', sync=True, **ns) except: # In this case, any side effect must be avoided. ns['no_spawner_check'] = True - html = self.render_template('error.html', **ns) + html = self.render_template('error.html', sync=True, **ns) self.write(html) @@ -1187,7 +1398,7 @@ class UserUrlHandler(BaseHandler): **Changed Behavior as of 1.0** This handler no longer triggers a spawn. Instead, it checks if: - 1. server is not active, serve page prompting for spawn (status: 503) + 1. server is not active, serve page prompting for spawn (status: 424) 2. server is ready (This shouldn't happen! Proxy isn't updated yet. Wait a bit and redirect.) 3. server is active, redirect to /hub/spawn-pending to monitor launch progress (will redirect back when finished) @@ -1201,12 +1412,22 @@ class UserUrlHandler(BaseHandler): Note that this only occurs if bob's server is not already running. """ + # accept token auth for API requests that are probably to non-running servers + _accept_token_auth = True + def _fail_api_request(self, user_name='', server_name=''): """Fail an API request to a not-running server""" self.log.warning( "Failing suspected API request to not-running server: %s", self.request.path ) - self.set_status(503) + + # If we got here, the server is not running. To differentiate + # that the *server* itself is not running, rather than just the particular + # resource *in* the server is not found, we return a 424 instead of a 404. 
+ # We allow retaining the old behavior to support older JupyterLab versions + self.set_status( + 424 if not self.app.use_legacy_stopped_server_status_code else 503 + ) self.set_header("Content-Type", "application/json") spawn_url = urlparse(self.request.full_url())._replace(query="") @@ -1258,54 +1479,24 @@ class UserUrlHandler(BaseHandler): delete = non_get @web.authenticated + @needs_scope("access:servers") async def get(self, user_name, user_path): if not user_path: user_path = '/' current_user = self.current_user - - if ( - current_user - and current_user.name != user_name - and current_user.admin - and self.settings.get('admin_access', False) - ): - # allow admins to spawn on behalf of users + if user_name != current_user.name: user = self.find_user(user_name) if user is None: # no such user - raise web.HTTPError(404, "No such user %s" % user_name) + raise web.HTTPError(404, f"No such user {user_name}") self.log.info( - "Admin %s requesting spawn on behalf of %s", - current_user.name, - user.name, + f"User {current_user.name} requesting spawn on behalf of {user.name}" ) admin_spawn = True should_spawn = True redirect_to_self = False else: user = current_user - admin_spawn = False - # For non-admins, spawn if the user requested is the current user - # otherwise redirect users to their own server - should_spawn = current_user and current_user.name == user_name - redirect_to_self = not should_spawn - - if redirect_to_self: - # logged in as a different non-admin user, redirect to user's own server - # this is only a stop-gap for a common mistake, - # because the same request will be a 403 - # if the requested server is running - self.statsd.incr('redirects.user_to_user', 1) - self.log.warning( - "User %s requested server for %s, which they don't own", - current_user.name, - user_name, - ) - target = url_path_join(current_user.url, user_path or '') - if self.request.query: - target = url_concat(target, parse_qsl(self.request.query)) - self.redirect(target) - 
return # If people visit /user/:user_name directly on the Hub, # the redirects will just loop, because the proxy is bypassed. @@ -1340,6 +1531,7 @@ class UserUrlHandler(BaseHandler): server_name = '' else: server_name = '' + escaped_server_name = url_escape_path(server_name) spawner = user.spawners[server_name] if spawner.ready: @@ -1357,7 +1549,12 @@ class UserUrlHandler(BaseHandler): return pending_url = url_concat( - url_path_join(self.hub.base_url, 'spawn-pending', user.name, server_name), + url_path_join( + self.hub.base_url, + 'spawn-pending', + user.escaped_name, + escaped_server_name, + ), {'next': self.request.uri}, ) if spawner.pending or spawner._failed: @@ -1365,17 +1562,28 @@ class UserUrlHandler(BaseHandler): self.redirect(pending_url, status=303) return - # if we got here, the server is not running - # serve a page prompting for spawn and 503 error - # visiting /user/:name no longer triggers implicit spawn - # without explicit user action - self.set_status(503) + # If we got here, the server is not running. To differentiate + # that the *server* itself is not running, rather than just the particular + # page *in* the server is not found, we return a 424 instead of a 404. 
+ # We allow retaining the old behavior to support older JupyterLab versions spawn_url = url_concat( - url_path_join(self.hub.base_url, "spawn", user.name, server_name), + url_path_join( + self.hub.base_url, "spawn", user.escaped_name, escaped_server_name + ), {"next": self.request.uri}, ) - html = self.render_template( - "not_running.html", user=user, server_name=server_name, spawn_url=spawn_url + self.set_status( + 424 if not self.app.use_legacy_stopped_server_status_code else 503 + ) + + auth_state = await user.get_auth_state() + html = await self.render_template( + "not_running.html", + user=user, + server_name=server_name, + spawn_url=spawn_url, + auth_state=auth_state, + implicit_spawn_seconds=self.settings.get("implicit_spawn_seconds", 0), ) self.finish(html) @@ -1419,20 +1627,23 @@ class UserUrlHandler(BaseHandler): if self.subdomain_host: target = user.host + target - referer = self.request.headers.get('Referer', '') # record redirect count in query parameter if redirects: self.log.warning("Redirect loop detected on %s", self.request.uri) # add capped exponential backoff where cap is 10s - await gen.sleep(min(1 * (2 ** redirects), 10)) + await asyncio.sleep(min(1 * (2**redirects), 10)) # rewrite target url with new `redirects` query value url_parts = urlparse(target) query_parts = parse_qs(url_parts.query) query_parts['redirects'] = redirects + 1 url_parts = url_parts._replace(query=urlencode(query_parts, doseq=True)) target = urlunparse(url_parts) - elif '/user/{}'.format(user.name) in referer or not referer: - # add first counter only if it's a redirect from /user/:name -> /hub/user/:name + else: + # Start redirect counter. + # This should only occur for redirects from /user/:name -> /hub/user/:name + # when the corresponding server is already ready. + # We don't check this explicitly (direct visits to /hub/user are technically possible), + # but that's now the only normal way to get here. 
target = url_concat(target, {'redirects': 1}) self.redirect(target) @@ -1448,19 +1659,51 @@ class UserRedirectHandler(BaseHandler): If the user is not logged in, send to login URL, redirecting back here. + If c.JupyterHub.user_redirect_hook is set, the return value of that + callable is used to generate the redirect URL. + .. versionadded:: 0.7 """ @web.authenticated - def get(self, path): - user = self.current_user - user_url = url_path_join(user.url, path) - if self.request.query: - user_url = url_concat(user_url, parse_qsl(self.request.query)) + async def get(self, path): + # If hook is present to generate URL to redirect to, use that instead + # of the default. The configurer is responsible for making sure this + # URL is right. If None is returned by the hook, we do our normal + # processing + url = None + if self.app.user_redirect_hook: + url = await maybe_future( + self.app.user_redirect_hook( + path, self.request, self.current_user, self.base_url + ) + ) + if url is None: + user = self.current_user + user_url = user.url - url = url_concat( - url_path_join(self.hub.base_url, "spawn", user.name), {"next": user_url} - ) + if self.app.default_server_name: + user_url = url_path_join(user_url, self.app.default_server_name) + + user_url = url_path_join(user_url, path) + if self.request.query: + user_url = url_concat(user_url, parse_qsl(self.request.query)) + + if self.app.default_server_name: + url = url_concat( + url_path_join( + self.hub.base_url, + "spawn", + user.escaped_name, + self.app.default_server_name, + ), + {"next": user_url}, + ) + else: + url = url_concat( + url_path_join(self.hub.base_url, "spawn", user.escaped_name), + {"next": user_url}, + ) self.redirect(url) diff --git a/jupyterhub/handlers/login.py b/jupyterhub/handlers/login.py index f1bd7c45..f913146a 100644 --- a/jupyterhub/handlers/login.py +++ b/jupyterhub/handlers/login.py @@ -3,6 +3,7 @@ # Distributed under the terms of the Modified BSD License. 
import asyncio +from jinja2 import Template from tornado import web from tornado.escape import url_escape from tornado.httputil import url_concat @@ -72,14 +73,14 @@ class LogoutHandler(BaseHandler): Override this function to set a custom logout page. """ if self.authenticator.auto_login: - html = self.render_template('logout.html') + html = await self.render_template('logout.html') self.finish(html) else: self.redirect(self.settings['login_url'], permanent=False) async def get(self): """Log the user out, call the custom action, forward the user - to the logout page + to the logout page """ await self.default_handle_logout() await self.handle_logout() @@ -90,17 +91,23 @@ class LoginHandler(BaseHandler): """Render the login page.""" def _render(self, login_error=None, username=None): - return self.render_template( - 'login.html', - next=url_escape(self.get_argument('next', default='')), - username=username, - login_error=login_error, - custom_html=self.authenticator.custom_html, - login_url=self.settings['login_url'], - authenticator_login_url=url_concat( + context = { + "next": url_escape(self.get_argument('next', default='')), + "username": username, + "login_error": login_error, + "login_url": self.settings['login_url'], + "authenticator_login_url": url_concat( self.authenticator.login_url(self.hub.base_url), {'next': self.get_argument('next', '')}, ), + } + custom_html = Template( + self.authenticator.get_custom_html(self.hub.base_url) + ).render(**context) + return self.render_template( + 'login.html', + **context, + custom_html=custom_html, ) async def get(self): @@ -132,13 +139,15 @@ class LoginHandler(BaseHandler): self.redirect(auto_login_url) return username = self.get_argument('username', default='') - self.finish(self._render(username=username)) + self.finish(await self._render(username=username)) async def post(self): # parse the arguments dict data = {} for arg in self.request.arguments: - data[arg] = self.get_argument(arg, strip=False) + # strip 
username, but not other fields like passwords, + # which should be allowed to start or end with space + data[arg] = self.get_argument(arg, strip=arg == "username") auth_timer = self.statsd.timer('login.authenticate').start() user = await self.login_user(data) @@ -149,7 +158,7 @@ class LoginHandler(BaseHandler): self._jupyterhub_user = user self.redirect(self.get_next_url(user)) else: - html = self._render( + html = await self._render( login_error='Invalid username or password', username=data['username'] ) self.finish(html) diff --git a/jupyterhub/handlers/metrics.py b/jupyterhub/handlers/metrics.py index f7a95b62..674aeddd 100644 --- a/jupyterhub/handlers/metrics.py +++ b/jupyterhub/handlers/metrics.py @@ -1,7 +1,5 @@ -from prometheus_client import CONTENT_TYPE_LATEST -from prometheus_client import generate_latest -from prometheus_client import REGISTRY -from tornado import gen +"""Handlers for serving prometheus metrics""" +from prometheus_client import CONTENT_TYPE_LATEST, REGISTRY, generate_latest from ..utils import metrics_authentication from .base import BaseHandler @@ -12,10 +10,15 @@ class MetricsHandler(BaseHandler): Handler to serve Prometheus metrics """ + _accept_token_auth = True + @metrics_authentication async def get(self): self.set_header('Content-Type', CONTENT_TYPE_LATEST) self.write(generate_latest(REGISTRY)) -default_handlers = [(r'/metrics$', MetricsHandler)] +default_handlers = [ + (r'/metrics$', MetricsHandler), + (r'/api/metrics$', MetricsHandler), +] diff --git a/jupyterhub/handlers/pages.py b/jupyterhub/handlers/pages.py index d93f2638..4086f7b2 100644 --- a/jupyterhub/handlers/pages.py +++ b/jupyterhub/handlers/pages.py @@ -2,23 +2,19 @@ # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. 
import asyncio -import copy import time from collections import defaultdict from datetime import datetime from http.client import responses from jinja2 import TemplateNotFound -from tornado import gen from tornado import web from tornado.httputil import url_concat -from .. import orm -from ..metrics import SERVER_POLL_DURATION_SECONDS -from ..metrics import ServerPollStatus -from ..utils import admin_only -from ..utils import maybe_future -from ..utils import url_path_join +from .. import __version__ +from ..metrics import SERVER_POLL_DURATION_SECONDS, ServerPollStatus +from ..scopes import needs_scope +from ..utils import maybe_future, url_escape_path, url_path_join from .base import BaseHandler @@ -39,11 +35,15 @@ class RootHandler(BaseHandler): def get(self): user = self.current_user if self.default_url: - url = self.default_url + # As set in jupyterhub_config.py + if callable(self.default_url): + url = self.default_url(self) + else: + url = self.default_url elif user: url = self.get_next_url(user) else: - url = self.settings['login_url'] + url = url_concat(self.settings["login_url"], dict(next=self.request.uri)) self.redirect(url) @@ -61,16 +61,18 @@ class HomeHandler(BaseHandler): # to establish that this is an explicit spawn request rather # than an implicit one, which can be caused by any link to `/user/:name(/:server_name)` if user.active: - url = url_path_join(self.base_url, 'user', user.name) + url = url_path_join(self.base_url, 'user', user.escaped_name) else: - url = url_path_join(self.hub.base_url, 'spawn', user.name) + url = url_path_join(self.hub.base_url, 'spawn', user.escaped_name) - html = self.render_template( + auth_state = await user.get_auth_state() + html = await self.render_template( 'home.html', + auth_state=auth_state, user=user, url=url, allow_named_servers=self.allow_named_servers, - named_server_limit_per_user=self.named_server_limit_per_user, + named_server_limit_per_user=await self.get_current_user_named_server_limit(), 
url_path_join=url_path_join, # can't use user.spawners because the stop method of User pops named servers from user.spawners when they're stopped spawners=user.orm_user._orm_spawners, @@ -89,10 +91,12 @@ class SpawnHandler(BaseHandler): default_url = None - def _render_form(self, for_user, spawner_options_form, message=''): - return self.render_template( + async def _render_form(self, for_user, spawner_options_form, message=''): + auth_state = await for_user.get_auth_state() + return await self.render_template( 'spawn.html', for_user=for_user, + auth_state=auth_state, spawner_options_form=spawner_options_form, error_message=message, url=self.request.uri, @@ -100,44 +104,56 @@ class SpawnHandler(BaseHandler): ) @web.authenticated - async def get(self, for_user=None, server_name=''): + def get(self, user_name=None, server_name=''): """GET renders form for spawning with user-specified options or triggers spawn via redirect if there is no form. """ + # two-stage to get the right signature for @require_scopes filter on user_name + if user_name is None: + user_name = self.current_user.name + if server_name is None: + server_name = "" + return self._get(user_name=user_name, server_name=server_name) + + @needs_scope("servers") + async def _get(self, user_name, server_name): + for_user = user_name user = current_user = self.current_user - if for_user is not None and for_user != user.name: - if not user.admin: - raise web.HTTPError( - 403, "Only admins can spawn on behalf of other users" - ) - + if for_user != user.name: user = self.find_user(for_user) if user is None: - raise web.HTTPError(404, "No such user: %s" % for_user) + raise web.HTTPError(404, f"No such user: {for_user}") + + if server_name: + if not self.allow_named_servers: + raise web.HTTPError(400, "Named servers are not enabled.") + + named_server_limit_per_user = ( + await self.get_current_user_named_server_limit() + ) + + if named_server_limit_per_user > 0 and server_name not in user.orm_spawners: + 
named_spawners = list(user.all_spawners(include_default=False)) + if named_server_limit_per_user <= len(named_spawners): + raise web.HTTPError( + 400, + "User {} already has the maximum of {} named servers." + " One must be deleted before a new server can be created".format( + user.name, named_server_limit_per_user + ), + ) if not self.allow_named_servers and user.running: - url = self.get_next_url(user, default=user.server_url(server_name)) + url = self.get_next_url(user, default=user.server_url("")) self.log.info("User is running: %s", user.name) self.redirect(url) return - if server_name is None: - server_name = '' + spawner = user.get_spawner(server_name, replace_failed=True) - spawner = user.spawners[server_name] - # resolve `?next=...`, falling back on the spawn-pending url - # must not be /user/server for named servers, - # which may get handled by the default server if they aren't ready yet - - pending_url = url_path_join( - self.hub.base_url, "spawn-pending", user.name, server_name - ) - - if self.get_argument('next', None): - # preserve `?next=...` through spawn-pending - pending_url = url_concat(pending_url, {'next': self.get_argument('next')}) + pending_url = self._get_pending_url(user, server_name) # spawner is active, redirect back to get progress, etc. if spawner.ready: @@ -153,47 +169,81 @@ class SpawnHandler(BaseHandler): # Add handler to spawner here so you can access query params in form rendering. spawner.handler = self + + # auth_state may be an input to options form, + # so resolve the auth state hook here + auth_state = await user.get_auth_state() + await spawner.run_auth_state_hook(auth_state) + + # Try to start server directly when query arguments are passed. 
+ error_message = '' + query_options = {} + for key, byte_list in self.request.query_arguments.items(): + query_options[key] = [bs.decode('utf8') for bs in byte_list] + + # 'next' is reserved argument for redirect after spawn + query_options.pop('next', None) + + if len(query_options) > 0: + try: + self.log.debug( + "Triggering spawn with supplied query arguments for %s", + spawner._log_name, + ) + options = await maybe_future(spawner.options_from_query(query_options)) + return await self._wrap_spawn_single_user( + user, server_name, spawner, pending_url, options + ) + except Exception as e: + self.log.error( + "Failed to spawn single-user server with query arguments", + exc_info=True, + ) + error_message = str(e) + # fallback to behavior without failing query arguments + spawner_options_form = await spawner.get_options_form() if spawner_options_form: self.log.debug("Serving options form for %s", spawner._log_name) - form = self._render_form( - for_user=user, spawner_options_form=spawner_options_form + form = await self._render_form( + for_user=user, + spawner_options_form=spawner_options_form, + message=error_message, ) self.finish(form) else: self.log.debug( "Triggering spawn with default options for %s", spawner._log_name ) - # Explicit spawn request: clear _spawn_future - # which may have been saved to prevent implicit spawns - # after a failure. - if spawner._spawn_future and spawner._spawn_future.done(): - spawner._spawn_future = None - # not running, no form. 
Trigger spawn and redirect back to /user/:name - f = asyncio.ensure_future(self.spawn_single_user(user, server_name)) - await asyncio.wait([f], timeout=1) - self.redirect(pending_url) + return await self._wrap_spawn_single_user( + user, server_name, spawner, pending_url + ) @web.authenticated - async def post(self, for_user=None, server_name=''): + def post(self, user_name=None, server_name=''): """POST spawns with user-specified options""" + if user_name is None: + user_name = self.current_user.name + if server_name is None: + server_name = "" + return self._post(user_name=user_name, server_name=server_name) + + @needs_scope("servers") + async def _post(self, user_name, server_name): + for_user = user_name user = current_user = self.current_user - if for_user is not None and for_user != user.name: - if not user.admin: - raise web.HTTPError( - 403, "Only admins can spawn on behalf of other users" - ) + if for_user != user.name: user = self.find_user(for_user) if user is None: raise web.HTTPError(404, "No such user: %s" % for_user) - spawner = user.spawners[server_name] + spawner = user.get_spawner(server_name, replace_failed=True) if spawner.ready: raise web.HTTPError(400, "%s is already running" % (spawner._log_name)) elif spawner.pending: raise web.HTTPError( - 400, "%s is pending %s" % (spawner._log_name, spawner.pending) + 400, f"{spawner._log_name} is pending {spawner.pending}" ) form_options = {} @@ -202,27 +252,71 @@ class SpawnHandler(BaseHandler): for key, byte_list in self.request.files.items(): form_options["%s_file" % key] = byte_list try: - options = await maybe_future(spawner.options_from_form(form_options)) - await self.spawn_single_user(user, server_name=server_name, options=options) + self.log.debug( + "Triggering spawn with supplied form options for %s", spawner._log_name + ) + options = await maybe_future(spawner.run_options_from_form(form_options)) + pending_url = self._get_pending_url(user, server_name) + return await 
self._wrap_spawn_single_user( + user, server_name, spawner, pending_url, options + ) except Exception as e: self.log.error( "Failed to spawn single-user server with form", exc_info=True ) spawner_options_form = await user.spawner.get_options_form() - form = self._render_form( + form = await self._render_form( for_user=user, spawner_options_form=spawner_options_form, message=str(e) ) self.finish(form) return - if current_user is user: - self.set_login_cookie(user) - next_url = self.get_next_url( - user, - default=url_path_join( - self.hub.base_url, "spawn-pending", user.name, server_name - ), + + def _get_pending_url(self, user, server_name): + # resolve `?next=...`, falling back on the spawn-pending url + # must not be /user/server for named servers, + # which may get handled by the default server if they aren't ready yet + + pending_url = url_path_join( + self.hub.base_url, + "spawn-pending", + user.escaped_name, + url_escape_path(server_name), ) - self.redirect(next_url) + + pending_url = self.append_query_parameters(pending_url, exclude=['next']) + + if self.get_argument('next', None): + # preserve `?next=...` through spawn-pending + pending_url = url_concat(pending_url, {'next': self.get_argument('next')}) + + return pending_url + + async def _wrap_spawn_single_user( + self, user, server_name, spawner, pending_url, options=None + ): + # Explicit spawn request: clear _spawn_future + # which may have been saved to prevent implicit spawns + # after a failure. + if spawner._spawn_future and spawner._spawn_future.done(): + spawner._spawn_future = None + # not running, no form. 
Trigger spawn and redirect back to /user/:name + f = asyncio.ensure_future( + self.spawn_single_user(user, server_name, options=options) + ) + done, pending = await asyncio.wait([f], timeout=1) + # If spawn_single_user throws an exception, raise a 500 error + # otherwise it may cause a redirect loop + if f.done() and f.exception(): + exc = f.exception() + self.log.exception(f"Error starting server {spawner._log_name}: {exc}") + if isinstance(exc, web.HTTPError): + # allow custom HTTPErrors to pass through + raise exc + raise web.HTTPError( + 500, + f"Unhandled error starting server {spawner._log_name}", + ) + return self.redirect(pending_url) class SpawnPendingHandler(BaseHandler): @@ -243,22 +337,19 @@ class SpawnPendingHandler(BaseHandler): """ @web.authenticated - async def get(self, for_user, server_name=''): + @needs_scope("servers") + async def get(self, user_name, server_name=''): + for_user = user_name user = current_user = self.current_user - if for_user is not None and for_user != current_user.name: - if not current_user.admin: - raise web.HTTPError( - 403, "Only admins can spawn on behalf of other users" - ) + if for_user != current_user.name: user = self.find_user(for_user) if user is None: raise web.HTTPError(404, "No such user: %s" % for_user) if server_name and server_name not in user.spawners: - raise web.HTTPError( - 404, "%s has no such server %s" % (user.name, server_name) - ) + raise web.HTTPError(404, f"{user.name} has no such server {server_name}") + escaped_server_name = url_escape_path(server_name) spawner = user.spawners[server_name] if spawner.ready: @@ -270,26 +361,28 @@ class SpawnPendingHandler(BaseHandler): # if spawning fails for any reason, point users to /hub/home to retry self.extra_error_html = self.spawn_home_error + auth_state = await user.get_auth_state() + # First, check for previous failure. 
- if ( - not spawner.active - and spawner._spawn_future - and spawner._spawn_future.done() - and spawner._spawn_future.exception() - ): - # Condition: spawner not active and _spawn_future exists and contains an Exception + if not spawner.active and spawner._failed: + # Condition: spawner not active and last spawn failed + # (failure is available as spawner._spawn_future.exception()). # Implicit spawn on /user/:name is not allowed if the user's last spawn failed. # We should point the user to Home if the most recent spawn failed. exc = spawner._spawn_future.exception() self.log.error("Previous spawn for %s failed: %s", spawner._log_name, exc) - spawn_url = url_path_join(self.hub.base_url, "spawn", user.escaped_name) + spawn_url = url_path_join( + self.hub.base_url, "spawn", user.escaped_name, escaped_server_name + ) self.set_status(500) - html = self.render_template( + html = await self.render_template( "not_running.html", user=user, + auth_state=auth_state, server_name=server_name, spawn_url=spawn_url, failed=True, + failed_html_message=getattr(exc, 'jupyterhub_html_message', ''), failed_message=getattr(exc, 'jupyterhub_message', ''), exception=exc, ) @@ -307,8 +400,12 @@ class SpawnPendingHandler(BaseHandler): page = "stop_pending.html" else: page = "spawn_pending.html" - html = self.render_template( - page, user=user, spawner=spawner, progress_url=spawner._progress_url + html = await self.render_template( + page, + user=user, + spawner=spawner, + progress_url=spawner._progress_url, + auth_state=auth_state, ) self.finish(html) return @@ -327,10 +424,13 @@ class SpawnPendingHandler(BaseHandler): # further, set status to 404 because this is not # serving the expected page if status is not None: - spawn_url = url_path_join(self.hub.base_url, "spawn", user.escaped_name) - html = self.render_template( + spawn_url = url_path_join( + self.hub.base_url, "spawn", user.escaped_name, escaped_server_name + ) + html = await self.render_template( "not_running.html", user=user, 
+ auth_state=auth_state, server_name=server_name, spawn_url=spawn_url, ) @@ -348,65 +448,22 @@ class SpawnPendingHandler(BaseHandler): class AdminHandler(BaseHandler): """Render the admin page.""" - @admin_only - def get(self): - available = {'name', 'admin', 'running', 'last_activity'} - default_sort = ['admin', 'name'] - mapping = {'running': orm.Spawner.server_id} - for name in available: - if name not in mapping: - mapping[name] = getattr(orm.User, name) - - default_order = { - 'name': 'asc', - 'last_activity': 'desc', - 'admin': 'desc', - 'running': 'desc', - } - - sorts = self.get_arguments('sort') or default_sort - orders = self.get_arguments('order') - - for bad in set(sorts).difference(available): - self.log.warning("ignoring invalid sort: %r", bad) - sorts.remove(bad) - for bad in set(orders).difference({'asc', 'desc'}): - self.log.warning("ignoring invalid order: %r", bad) - orders.remove(bad) - - # add default sort as secondary - for s in default_sort: - if s not in sorts: - sorts.append(s) - if len(orders) < len(sorts): - for col in sorts[len(orders) :]: - orders.append(default_order[col]) - else: - orders = orders[: len(sorts)] - - # this could be one incomprehensible nested list comprehension - # get User columns - cols = [mapping[c] for c in sorts] - # get User.col.desc() order objects - ordered = [getattr(c, o)() for c, o in zip(cols, orders)] - - users = self.db.query(orm.User).outerjoin(orm.Spawner).order_by(*ordered) - users = [self._user_from_orm(u) for u in users] - from itertools import chain - - running = [] - for u in users: - running.extend(s for s in u.spawners.values() if s.active) - - html = self.render_template( + @web.authenticated + # stacked decorators: all scopes must be present + # note: keep in sync with admin link condition in page.html + @needs_scope('admin-ui') + async def get(self): + auth_state = await self.current_user.get_auth_state() + html = await self.render_template( 'admin.html', current_user=self.current_user, - 
admin_access=self.settings.get('admin_access', False), - users=users, - running=running, - sort={s: o for s, o in zip(sorts, orders)}, + auth_state=auth_state, + admin_access=True, allow_named_servers=self.allow_named_servers, - named_server_limit_per_user=self.named_server_limit_per_user, + named_server_limit_per_user=await self.get_current_user_named_server_limit(), + server_version=f'{__version__} {self.version_hash}', + api_page_limit=self.settings["api_page_default_limit"], + base_url=self.settings["base_url"], ) self.finish(html) @@ -415,7 +472,7 @@ class TokenPageHandler(BaseHandler): """Handler for page requesting new API tokens""" @web.authenticated - def get(self): + async def get(self): never = datetime(1900, 1, 1) user = self.current_user @@ -424,36 +481,32 @@ class TokenPageHandler(BaseHandler): return (token.last_activity or never, token.created or never) now = datetime.utcnow() - api_tokens = [] - for token in sorted(user.api_tokens, key=sort_key, reverse=True): - if token.expires_at and token.expires_at < now: - self.db.delete(token) - self.db.commit() - continue - api_tokens.append(token) # group oauth client tokens by client id - # AccessTokens have expires_at as an integer timestamp - now_timestamp = now.timestamp() - oauth_tokens = defaultdict(list) - for token in user.oauth_tokens: - if token.expires_at and token.expires_at < now_timestamp: - self.log.warning("Deleting expired token") + all_tokens = defaultdict(list) + for token in sorted(user.api_tokens, key=sort_key, reverse=True): + if token.expires_at and token.expires_at < now: + self.log.warning(f"Deleting expired token {token}") self.db.delete(token) self.db.commit() continue if not token.client_id: # token should have been deleted when client was deleted - self.log.warning("Deleting stale oauth token for %s", user.name) + self.log.warning(f"Deleting stale oauth token {token}") self.db.delete(token) self.db.commit() continue - oauth_tokens[token.client_id].append(token) + 
all_tokens[token.client_id].append(token) + # individually list tokens issued by jupyterhub itself + api_tokens = all_tokens.pop("jupyterhub", []) + + # group all other tokens issued under their owners # get the earliest created and latest last_activity # timestamp for a given oauth client oauth_clients = [] - for client_id, tokens in oauth_tokens.items(): + + for client_id, tokens in all_tokens.items(): created = tokens[0].created last_activity = tokens[0].last_activity for token in tokens[1:]: @@ -466,8 +519,9 @@ class TokenPageHandler(BaseHandler): token = tokens[0] oauth_clients.append( { - 'client': token.client, - 'description': token.client.description or token.client.identifier, + 'client': token.oauth_client, + 'description': token.oauth_client.description + or token.oauth_client.identifier, 'created': created, 'last_activity': last_activity, 'tokens': tokens, @@ -484,8 +538,12 @@ class TokenPageHandler(BaseHandler): oauth_clients = sorted(oauth_clients, key=sort_key, reverse=True) - html = self.render_template( - 'token.html', api_tokens=api_tokens, oauth_clients=oauth_clients + auth_state = await self.current_user.get_auth_state() + html = await self.render_template( + 'token.html', + api_tokens=api_tokens, + oauth_clients=oauth_clients, + auth_state=auth_state, ) self.finish(html) @@ -493,7 +551,7 @@ class TokenPageHandler(BaseHandler): class ProxyErrorHandler(BaseHandler): """Handler for rendering proxy error pages""" - def get(self, status_code_s): + async def get(self, status_code_s): status_code = int(status_code_s) status_message = responses.get(status_code, 'Unknown HTTP Error') # build template namespace @@ -517,19 +575,23 @@ class ProxyErrorHandler(BaseHandler): self.set_header('Content-Type', 'text/html') # render the template try: - html = self.render_template('%s.html' % status_code, **ns) + html = await self.render_template('%s.html' % status_code, **ns) except TemplateNotFound: self.log.debug("No template for %d", status_code) - html = 
self.render_template('error.html', **ns) + html = await self.render_template('error.html', **ns) self.write(html) class HealthCheckHandler(BaseHandler): - """Answer to health check""" + """Serve health check probes as quickly as possible""" - def get(self, *args): - self.finish() + # There is nothing for us to do other than return a positive + # HTTP status code as quickly as possible for GET or HEAD requests + def get(self): + pass + + head = get default_handlers = [ @@ -544,4 +606,5 @@ default_handlers = [ (r'/token', TokenPageHandler), (r'/error/(\d+)', ProxyErrorHandler), (r'/health$', HealthCheckHandler), + (r'/api/health$', HealthCheckHandler), ] diff --git a/jupyterhub/handlers/static.py b/jupyterhub/handlers/static.py index 662af997..e9c54199 100644 --- a/jupyterhub/handlers/static.py +++ b/jupyterhub/handlers/static.py @@ -7,7 +7,7 @@ from tornado.web import StaticFileHandler class CacheControlStaticFilesHandler(StaticFileHandler): """StaticFileHandler subclass that sets Cache-Control: no-cache without `?v=` - + rather than relying on default browser cache behavior. """ diff --git a/jupyterhub/log.py b/jupyterhub/log.py index a9992acf..4a9638f0 100644 --- a/jupyterhub/log.py +++ b/jupyterhub/log.py @@ -2,16 +2,16 @@ # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. 
import json +import logging import traceback +from functools import partial from http.cookies import SimpleCookie -from urllib.parse import urlparse -from urllib.parse import urlunparse +from urllib.parse import urlparse, urlunparse -from tornado.log import access_log -from tornado.log import LogFormatter -from tornado.web import HTTPError -from tornado.web import StaticFileHandler +from tornado.log import LogFormatter, access_log +from tornado.web import HTTPError, StaticFileHandler +from .handlers.pages import HealthCheckHandler from .metrics import prometheus_log_method @@ -98,13 +98,17 @@ def _scrub_headers(headers): headers = dict(headers) if 'Authorization' in headers: auth = headers['Authorization'] - if auth.startswith('token '): - headers['Authorization'] = 'token [secret]' + if ' ' in auth: + auth_type = auth.split(' ', 1)[0] + else: + # no space, hide the whole thing in case there was a mistake + auth_type = '' + headers['Authorization'] = f'{auth_type} [secret]' if 'Cookie' in headers: c = SimpleCookie(headers['Cookie']) redacted = [] for name in c.keys(): - redacted.append("{}=[secret]".format(name)) + redacted.append(f"{name}=[secret]") headers['Cookie'] = '; '.join(redacted) return headers @@ -123,21 +127,29 @@ def log_request(handler): """ status = handler.get_status() request = handler.request - if status == 304 or (status < 300 and isinstance(handler, StaticFileHandler)): + if status == 304 or ( + status < 300 and isinstance(handler, (StaticFileHandler, HealthCheckHandler)) + ): # static-file success and 304 Found are debug-level - log_method = access_log.debug + log_level = logging.DEBUG elif status < 400: - log_method = access_log.info + log_level = logging.INFO elif status < 500: - log_method = access_log.warning + log_level = logging.WARNING else: - log_method = access_log.error + log_level = logging.ERROR uri = _scrub_uri(request.uri) headers = _scrub_headers(request.headers) request_time = 1000.0 * handler.request.request_time() + # always 
log slow responses (longer than 1s) at least info-level + if request_time >= 1000 and log_level < logging.INFO: + log_level = logging.INFO + + log_method = partial(access_log.log, log_level) + try: user = handler.current_user except (HTTPError, RuntimeError): @@ -162,7 +174,7 @@ def log_request(handler): location='', ) msg = "{status} {method} {uri}{location} ({user}@{ip}) {request_time:.2f}ms" - if status >= 500 and status != 502: + if status >= 500 and status not in {502, 503}: log_method(json.dumps(headers, indent=2)) elif status in {301, 302}: # log redirect targets @@ -170,6 +182,6 @@ def log_request(handler): # to get headers from tornado location = handler._headers.get('Location') if location: - ns['location'] = ' -> {}'.format(_scrub_uri(location)) + ns['location'] = f' -> {_scrub_uri(location)}' log_method(msg.format(**ns)) prometheus_log_method(handler) diff --git a/jupyterhub/metrics.py b/jupyterhub/metrics.py index ef7370ce..051aeaaa 100644 --- a/jupyterhub/metrics.py +++ b/jupyterhub/metrics.py @@ -3,9 +3,9 @@ Prometheus metrics exported by JupyterHub Read https://prometheus.io/docs/practices/naming/ for naming conventions for metrics & labels. We generally prefer naming them -`__`. So a histogram that's tracking +`jupyterhub___`. So a histogram that's tracking the duration (in seconds) of servers spawning would be called -SERVER_SPAWN_DURATION_SECONDS. +jupyterhub_server_spawn_duration_seconds. We also create an Enum for each 'status' type label in every metric we collect. This is to make sure that the metrics exist regardless @@ -14,20 +14,23 @@ create them, the metric spawn_duration_seconds{status="failure"} will not actually exist until the first failure. This makes dashboarding and alerting difficult, so we explicitly list statuses and create them manually here. + +.. versionchanged:: 1.3 + + added ``jupyterhub_`` prefix to metric names. 
""" from enum import Enum -from prometheus_client import Gauge -from prometheus_client import Histogram +from prometheus_client import Gauge, Histogram REQUEST_DURATION_SECONDS = Histogram( - 'request_duration_seconds', + 'jupyterhub_request_duration_seconds', 'request duration for all HTTP requests', ['method', 'handler', 'code'], ) SERVER_SPAWN_DURATION_SECONDS = Histogram( - 'server_spawn_duration_seconds', + 'jupyterhub_server_spawn_duration_seconds', 'time taken for server spawning operation', ['status'], # Use custom bucket sizes, since the default bucket ranges @@ -36,13 +39,27 @@ SERVER_SPAWN_DURATION_SECONDS = Histogram( ) RUNNING_SERVERS = Gauge( - 'running_servers', 'the number of user servers currently running' + 'jupyterhub_running_servers', 'the number of user servers currently running' ) -TOTAL_USERS = Gauge('total_users', 'total number of users') +TOTAL_USERS = Gauge('jupyterhub_total_users', 'total number of users') CHECK_ROUTES_DURATION_SECONDS = Histogram( - 'check_routes_duration_seconds', 'Time taken to validate all routes in proxy' + 'jupyterhub_check_routes_duration_seconds', + 'Time taken to validate all routes in proxy', +) + +HUB_STARTUP_DURATION_SECONDS = Histogram( + 'jupyterhub_hub_startup_duration_seconds', 'Time taken for Hub to start' +) + +INIT_SPAWNERS_DURATION_SECONDS = Histogram( + 'jupyterhub_init_spawners_duration_seconds', 'Time taken for spawners to initialize' +) + +PROXY_POLL_DURATION_SECONDS = Histogram( + 'jupyterhub_proxy_poll_duration_seconds', + 'duration for polling all routes from proxy', ) @@ -67,7 +84,9 @@ for s in ServerSpawnStatus: PROXY_ADD_DURATION_SECONDS = Histogram( - 'proxy_add_duration_seconds', 'duration for adding user routes to proxy', ['status'] + 'jupyterhub_proxy_add_duration_seconds', + 'duration for adding user routes to proxy', + ['status'], ) @@ -88,7 +107,7 @@ for s in ProxyAddStatus: SERVER_POLL_DURATION_SECONDS = Histogram( - 'server_poll_duration_seconds', + 
'jupyterhub_server_poll_duration_seconds', 'time taken to poll if server is running', ['status'], ) @@ -115,7 +134,9 @@ for s in ServerPollStatus: SERVER_STOP_DURATION_SECONDS = Histogram( - 'server_stop_seconds', 'time taken for server stopping operation', ['status'] + 'jupyterhub_server_stop_seconds', + 'time taken for server stopping operation', + ['status'], ) @@ -135,14 +156,37 @@ for s in ServerStopStatus: SERVER_STOP_DURATION_SECONDS.labels(status=s) +PROXY_DELETE_DURATION_SECONDS = Histogram( + 'jupyterhub_proxy_delete_duration_seconds', + 'duration for deleting user routes from proxy', + ['status'], +) + + +class ProxyDeleteStatus(Enum): + """ + Possible values for 'status' label of PROXY_DELETE_DURATION_SECONDS + """ + + success = 'success' + failure = 'failure' + + def __str__(self): + return self.value + + +for s in ProxyDeleteStatus: + PROXY_DELETE_DURATION_SECONDS.labels(status=s) + + def prometheus_log_method(handler): """ Tornado log handler for recording RED metrics. We record the following metrics: - Rate – the number of requests, per second, your services are serving. - Errors – the number of failed requests per second. - Duration – The amount of time each request takes expressed as a time interval. + Rate: the number of requests, per second, your services are serving. + Errors: the number of failed requests per second. + Duration: the amount of time each request takes expressed as a time interval. We use a fully qualified name of the handler as a label, rather than every url path to reduce cardinality. 
@@ -153,6 +197,6 @@ def prometheus_log_method(handler): """ REQUEST_DURATION_SECONDS.labels( method=handler.request.method, - handler='{}.{}'.format(handler.__class__.__module__, type(handler).__name__), + handler=f'{handler.__class__.__module__}.{type(handler).__name__}', code=handler.get_status(), ).observe(handler.request.request_time()) diff --git a/jupyterhub/oauth/provider.py b/jupyterhub/oauth/provider.py index 6157223f..bb72e338 100644 --- a/jupyterhub/oauth/provider.py +++ b/jupyterhub/oauth/provider.py @@ -2,23 +2,20 @@ implements https://oauthlib.readthedocs.io/en/latest/oauth2/server.html """ -from datetime import datetime -from urllib.parse import urlparse - from oauthlib import uri_validate -from oauthlib.oauth2 import RequestValidator -from oauthlib.oauth2 import WebApplicationServer -from oauthlib.oauth2.rfc6749.grant_types import authorization_code -from oauthlib.oauth2.rfc6749.grant_types import base -from sqlalchemy.orm import scoped_session -from tornado import web -from tornado.escape import url_escape +from oauthlib.oauth2 import RequestValidator, WebApplicationServer +from oauthlib.oauth2.rfc6749.grant_types import authorization_code, base from tornado.log import app_log from .. 
import orm -from ..utils import compare_token -from ..utils import hash_token -from ..utils import url_path_join +from ..roles import roles_to_scopes +from ..scopes import ( + _check_scopes_exist, + _resolve_requested_scopes, + access_scopes, + identify_scopes, +) +from ..utils import compare_token, hash_token # patch absolute-uri check # because we want to allow relative uri oauth @@ -65,6 +62,9 @@ class JupyterHubRequestValidator(RequestValidator): ) if oauth_client is None: return False + if not client_secret or not oauth_client.secret: + # disallow authentication with no secret + return False if not compare_token(oauth_client.secret, client_secret): app_log.warning("Client secret mismatch for %s", client_id) return False @@ -151,7 +151,17 @@ class JupyterHubRequestValidator(RequestValidator): - Resource Owner Password Credentials Grant - Client Credentials grant """ - return ['identify'] + orm_client = ( + self.db.query(orm.OAuthClient).filter_by(identifier=client_id).first() + ) + if orm_client is None: + raise ValueError("No such client: %s" % client_id) + scopes = set(orm_client.allowed_scopes) + if 'inherit' not in scopes: + # add identify-user scope + # and access-service scope + scopes |= identify_scopes() | access_scopes(orm_client) + return scopes def get_original_scopes(self, refresh_token, request, *args, **kwargs): """Get the list of scopes associated with the refresh token. 
@@ -250,9 +260,8 @@ class JupyterHubRequestValidator(RequestValidator): client=orm_client, code=code['code'], # oauth has 5 minutes to complete - expires_at=int(datetime.utcnow().timestamp() + 300), - # TODO: persist oauth scopes - # scopes=request.scopes, + expires_at=int(orm.OAuthCode.now() + 300), + scopes=list(request.scopes), user=request.user.orm_user, redirect_uri=orm_client.redirect_uri, session_id=request.session_id, @@ -261,7 +270,7 @@ class JupyterHubRequestValidator(RequestValidator): self.db.commit() def get_authorization_code_scopes(self, client_id, code, redirect_uri, request): - """ Extracts scopes from saved authorization code. + """Extracts scopes from saved authorization code. The scopes returned by this method is used to route token requests based on scopes passed to Authorization Code requests. With that the token endpoint knows when to include OpenIDConnect @@ -326,10 +335,6 @@ class JupyterHubRequestValidator(RequestValidator): """ log_token = {} log_token.update(token) - scopes = token['scope'].split(' ') - # TODO: - if scopes != ['identify']: - raise ValueError("Only 'identify' scope is supported") # redact sensitive keys in log for key in ('access_token', 'refresh_token', 'state'): if key in token: @@ -337,6 +342,7 @@ class JupyterHubRequestValidator(RequestValidator): if isinstance(value, str): log_token[key] = 'REDACTED' app_log.debug("Saving bearer token %s", log_token) + if request.user is None: raise ValueError("No user for access token: %s" % request.user) client = ( @@ -344,19 +350,19 @@ class JupyterHubRequestValidator(RequestValidator): .filter_by(identifier=request.client.client_id) .first() ) - orm_access_token = orm.OAuthAccessToken( - client=client, - grant_type=orm.GrantType.authorization_code, - expires_at=datetime.utcnow().timestamp() + token['expires_in'], - refresh_token=token['refresh_token'], - # TODO: save scopes, - # scopes=scopes, + # FIXME: support refresh tokens + # These should be in a new table + 
token.pop("refresh_token", None) + + # APIToken.new commits the token to the db + orm.APIToken.new( + oauth_client=client, + expires_in=token['expires_in'], + scopes=request.scopes, token=token['access_token'], session_id=request.session_id, user=request.user, ) - self.db.add(orm_access_token) - self.db.commit() return client.redirect_uri def validate_bearer_token(self, token, scopes, request): @@ -417,6 +423,8 @@ class JupyterHubRequestValidator(RequestValidator): ) if orm_client is None: return False + if not orm_client.secret: + return False request.client = orm_client return True @@ -441,7 +449,7 @@ class JupyterHubRequestValidator(RequestValidator): Method is used by: - Authorization Code Grant """ - orm_code = self.db.query(orm.OAuthCode).filter_by(code=code).first() + orm_code = orm.OAuthCode.find(self.db, code=code) if orm_code is None: app_log.debug("No such code: %s", code) return False @@ -452,9 +460,7 @@ class JupyterHubRequestValidator(RequestValidator): return False request.user = orm_code.user request.session_id = orm_code.session_id - # TODO: record state on oauth codes - # TODO: specify scopes - request.scopes = ['identify'] + request.scopes = orm_code.scopes return True def validate_grant_type( @@ -540,7 +546,7 @@ class JupyterHubRequestValidator(RequestValidator): def validate_scopes(self, client_id, scopes, client, request, *args, **kwargs): """Ensure the client is authorized access to requested scopes. :param client_id: Unicode client identifier - :param scopes: List of scopes (defined by you) + :param scopes: List of 'raw' scopes (defined by you) :param client: Client object set by you, see authenticate_client. 
:param request: The HTTP Request (oauthlib.common.Request) :rtype: True or False @@ -550,6 +556,73 @@ class JupyterHubRequestValidator(RequestValidator): - Resource Owner Password Credentials Grant - Client Credentials Grant """ + orm_client = ( + self.db.query(orm.OAuthClient).filter_by(identifier=client_id).one_or_none() + ) + if orm_client is None: + app_log.warning("No such oauth client %s", client_id) + return False + + requested_scopes = set(scopes) + # explicitly allow 'identify', which was the only allowed scope previously + # requesting 'identify' gets no actual permissions other than self-identification + if "identify" in requested_scopes: + app_log.warning( + f"Ignoring deprecated 'identify' scope, requested by {client_id}" + ) + requested_scopes.discard("identify") + + # TODO: handle roles->scopes transition + # In 2.x, `?scopes=` only accepted _role_ names, + # but in 3.0 we accept and prefer scopes. + # For backward-compatibility, we still accept both. + # Should roles be deprecated here, or kept as a convenience? 
+ try: + _check_scopes_exist(requested_scopes) + except KeyError as e: + # scopes don't exist, maybe they are role names + requested_roles = list( + self.db.query(orm.Role).filter(orm.Role.name.in_(requested_scopes)) + ) + if len(requested_roles) != len(requested_scopes): + # did not find roles + app_log.warning(f"No such scopes: {requested_scopes}") + return False + app_log.info( + f"OAuth client {client_id} requesting roles: {requested_scopes}" + ) + requested_scopes = roles_to_scopes(requested_roles) + + client_allowed_scopes = set(orm_client.allowed_scopes) + + # scope resolution only works if we have a user defined + user = request.user or getattr(self, "_current_user") + + # always grant reading the token-owner's name + # and accessing the service itself + required_scopes = {*identify_scopes(), *access_scopes(orm_client)} + requested_scopes.update(required_scopes) + client_allowed_scopes.update(required_scopes) + + allowed_scopes, disallowed_scopes = _resolve_requested_scopes( + requested_scopes, + client_allowed_scopes, + user=user.orm_user, + client=orm_client, + db=self.db, + ) + + if disallowed_scopes: + app_log.error( + f"Scope(s) not allowed for {client_id}: {', '.join(disallowed_scopes)}" + ) + return False + + # store resolved scopes on request + app_log.debug( + f"Allowing request for scope(s) for {client_id}: {','.join(requested_scopes) or '[]'}" + ) + request.scopes = requested_scopes return True @@ -558,35 +631,53 @@ class JupyterHubOAuthServer(WebApplicationServer): self.db = db super().__init__(validator, *args, **kwargs) - def add_client(self, client_id, client_secret, redirect_uri, description=''): + def add_client( + self, + client_id, + client_secret, + redirect_uri, + allowed_scopes=None, + description='', + ): """Add a client hash its client_secret before putting it in the database. 
""" - # clear existing clients with same ID - for orm_client in self.db.query(orm.OAuthClient).filter_by( - identifier=client_id - ): - self.db.delete(orm_client) - self.db.commit() - - orm_client = orm.OAuthClient( - identifier=client_id, - secret=hash_token(client_secret), - redirect_uri=redirect_uri, - description=description, + # Update client if it already exists, else create it + # Sqlalchemy doesn't have a good db agnostic UPSERT, + # so we do this manually. It's protected inside a + # transaction, so should fail if there are multiple + # rows with the same identifier. + orm_client = ( + self.db.query(orm.OAuthClient).filter_by(identifier=client_id).one_or_none() ) - self.db.add(orm_client) + if orm_client is None: + orm_client = orm.OAuthClient( + identifier=client_id, + ) + self.db.add(orm_client) + app_log.info(f'Creating oauth client {client_id}') + else: + app_log.info(f'Updating oauth client {client_id}') + if allowed_scopes == None: + allowed_scopes = [] + orm_client.secret = hash_token(client_secret) if client_secret else "" + orm_client.redirect_uri = redirect_uri + orm_client.description = description or client_id + orm_client.allowed_scopes = list(allowed_scopes) self.db.commit() + return orm_client def fetch_by_client_id(self, client_id): """Find a client by its id""" - return self.db.query(orm.OAuthClient).filter_by(identifier=client_id).first() + client = self.db.query(orm.OAuthClient).filter_by(identifier=client_id).first() + if client and client.secret: + return client -def make_provider(session_factory, url_prefix, login_url): +def make_provider(session_factory, url_prefix, login_url, **oauth_server_kwargs): """Make an OAuth provider""" db = session_factory() validator = JupyterHubRequestValidator(db) - server = JupyterHubOAuthServer(db, validator) + server = JupyterHubOAuthServer(db, validator, **oauth_server_kwargs) return server diff --git a/jupyterhub/objects.py b/jupyterhub/objects.py index d2e32639..4fd215e8 100644 --- 
a/jupyterhub/objects.py +++ b/jupyterhub/objects.py @@ -3,25 +3,20 @@ # Distributed under the terms of the Modified BSD License. import socket import warnings -from urllib.parse import urlparse -from urllib.parse import urlunparse +from urllib.parse import urlparse, urlunparse -from traitlets import default -from traitlets import HasTraits -from traitlets import Instance -from traitlets import Integer -from traitlets import observe -from traitlets import Unicode -from traitlets import validate +from traitlets import HasTraits, Instance, Integer, Unicode, default, observe, validate from . import orm from .traitlets import URLPrefix -from .utils import can_connect -from .utils import make_ssl_context -from .utils import random_port -from .utils import url_path_join -from .utils import wait_for_http_server -from .utils import wait_for_server +from .utils import ( + can_connect, + make_ssl_context, + random_port, + url_path_join, + wait_for_http_server, + wait_for_server, +) class Server(HasTraits): @@ -53,7 +48,7 @@ class Server(HasTraits): Never used in APIs, only logging, since it can be non-connectable value, such as '', meaning all interfaces. """ - if self.ip in {'', '0.0.0.0'}: + if self.ip in {'', '0.0.0.0', '::'}: return self.url.replace(self._connect_ip, self.ip or '*', 1) return self.url @@ -87,13 +82,13 @@ class Server(HasTraits): """The address to use when connecting to this server When `ip` is set to a real ip address, the same value is used. - When `ip` refers to 'all interfaces' (e.g. '0.0.0.0'), + When `ip` refers to 'all interfaces' (e.g. '0.0.0.0' or '::'), clients connect via hostname by default. Setting `connect_ip` explicitly overrides any default behavior. 
""" if self.connect_ip: return self.connect_ip - elif self.ip in {'', '0.0.0.0'}: + elif self.ip in {'', '0.0.0.0', '::'}: # if listening on all interfaces, default to hostname for connect return socket.gethostname() else: @@ -148,8 +143,15 @@ class Server(HasTraits): def host(self): if self.connect_url: parsed = urlparse(self.connect_url) - return "{proto}://{host}".format(proto=parsed.scheme, host=parsed.netloc) - return "{proto}://{ip}:{port}".format( + proto = parsed.scheme + host = parsed.netloc + return f"{proto}://{host}" + + if ':' in self._connect_ip: + fmt = "{proto}://[{ip}]:{port}" + else: + fmt = "{proto}://{ip}:{port}" + return fmt.format( proto=self.proto, ip=self._connect_ip, port=self._connect_port ) @@ -157,7 +159,7 @@ class Server(HasTraits): def url(self): if self.connect_url: return self.connect_url - return "{host}{uri}".format(host=self.host, uri=self.base_url) + return f"{self.host}{self.base_url}" def __repr__(self): return "{name}(url={url}, bind_url={bind})".format( @@ -213,8 +215,4 @@ class Hub(Server): return url_path_join(self.url, 'api') def __repr__(self): - return "<%s %s:%s>" % ( - self.__class__.__name__, - self.server.ip, - self.server.port, - ) + return f"<{self.__class__.__name__} {self.ip}:{self.port}>" diff --git a/jupyterhub/orm.py b/jupyterhub/orm.py index 471dd4e2..0aa1e1ca 100644 --- a/jupyterhub/orm.py +++ b/jupyterhub/orm.py @@ -3,45 +3,42 @@ # Distributed under the terms of the Modified BSD License. 
import enum import json -from base64 import decodebytes -from base64 import encodebytes -from datetime import datetime -from datetime import timedelta +from base64 import decodebytes, encodebytes +from datetime import datetime, timedelta import alembic.command import alembic.config from alembic.script import ScriptDirectory -from sqlalchemy import Boolean -from sqlalchemy import Column -from sqlalchemy import create_engine -from sqlalchemy import DateTime -from sqlalchemy import Enum -from sqlalchemy import event -from sqlalchemy import exc -from sqlalchemy import ForeignKey -from sqlalchemy import inspect -from sqlalchemy import Integer -from sqlalchemy import or_ -from sqlalchemy import select -from sqlalchemy import Table -from sqlalchemy import Unicode +from sqlalchemy import ( + Boolean, + Column, + DateTime, + ForeignKey, + Integer, + MetaData, + Table, + Unicode, + create_engine, + event, + exc, + inspect, + or_, + select, +) from sqlalchemy.ext.declarative import declarative_base -from sqlalchemy.orm import interfaces -from sqlalchemy.orm import object_session -from sqlalchemy.orm import relationship -from sqlalchemy.orm import Session -from sqlalchemy.orm import sessionmaker +from sqlalchemy.orm import ( + Session, + backref, + interfaces, + object_session, + relationship, + sessionmaker, +) from sqlalchemy.pool import StaticPool -from sqlalchemy.sql.expression import bindparam -from sqlalchemy.types import LargeBinary -from sqlalchemy.types import Text -from sqlalchemy.types import TypeDecorator +from sqlalchemy.types import LargeBinary, Text, TypeDecorator from tornado.log import app_log -from .utils import compare_token -from .utils import hash_token -from .utils import new_token -from .utils import random_port +from .utils import compare_token, hash_token, new_token, random_port # top-level variable for easier mocking in tests utcnow = datetime.utcnow @@ -89,7 +86,39 @@ class JSONDict(TypeDecorator): return value -Base = declarative_base() +class 
JSONList(JSONDict): + """Represents an immutable structure as a json-encoded string (to be used for list type columns). + + Usage:: + + JSONList(JSONDict) + + """ + + def process_bind_param(self, value, dialect): + if isinstance(value, list) and value is not None: + value = json.dumps(value) + return value + + def process_result_value(self, value, dialect): + if value is None: + return [] + else: + value = json.loads(value) + return value + + +meta = MetaData( + naming_convention={ + "ix": "ix_%(column_0_label)s", + "uq": "uq_%(table_name)s_%(column_0_name)s", + "ck": "ck_%(table_name)s_%(constraint_name)s", + "fk": "fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s", + "pk": "pk_%(table_name)s", + } +) + +Base = declarative_base(metadata=meta) Base.log = app_log @@ -109,7 +138,62 @@ class Server(Base): cookie_name = Column(Unicode(255), default='cookie') def __repr__(self): - return "" % (self.ip, self.port) + return f"" + + +# lots of things have roles +# mapping tables are the same for all of them + +_role_map_tables = [] + +for has_role in ( + 'user', + 'group', + 'service', +): + role_map = Table( + f'{has_role}_role_map', + Base.metadata, + Column( + f'{has_role}_id', + ForeignKey(f'{has_role}s.id', ondelete='CASCADE'), + primary_key=True, + ), + Column( + 'role_id', + ForeignKey('roles.id', ondelete='CASCADE'), + primary_key=True, + ), + ) + _role_map_tables.append(role_map) + + +class Role(Base): + """User Roles""" + + __tablename__ = 'roles' + id = Column(Integer, primary_key=True, autoincrement=True) + name = Column(Unicode(255), unique=True) + description = Column(Unicode(1023)) + scopes = Column(JSONList, default=[]) + users = relationship('User', secondary='user_role_map', backref='roles') + services = relationship('Service', secondary='service_role_map', backref='roles') + groups = relationship('Group', secondary='group_role_map', backref='roles') + + def __repr__(self): + return "<{} {} ({}) - scopes: {}>".format( + self.__class__.__name__, 
+ self.name, + self.description, + self.scopes, + ) + + @classmethod + def find(cls, db, name): + """Find a role by name. + Returns None if not found. + """ + return db.query(cls).filter(cls.name == name).first() # user:group many:many mapping table @@ -179,14 +263,11 @@ class User(Base): def orm_spawners(self): return {s.name: s for s in self._orm_spawners} - admin = Column(Boolean, default=False) + admin = Column(Boolean(create_constraint=False), default=False) created = Column(DateTime, default=datetime.utcnow) last_activity = Column(DateTime, nullable=True) api_tokens = relationship("APIToken", backref="user", cascade="all, delete-orphan") - oauth_tokens = relationship( - "OAuthAccessToken", backref="user", cascade="all, delete-orphan" - ) oauth_codes = relationship( "OAuthCode", backref="user", cascade="all, delete-orphan" ) @@ -222,7 +303,7 @@ class User(Base): class Spawner(Base): - """"State about a Spawner""" + """ "State about a Spawner""" __tablename__ = 'spawners' @@ -230,7 +311,12 @@ class Spawner(Base): user_id = Column(Integer, ForeignKey('users.id', ondelete='CASCADE')) server_id = Column(Integer, ForeignKey('servers.id', ondelete='SET NULL')) - server = relationship(Server, cascade="all") + server = relationship( + Server, + backref=backref('spawner', uselist=False), + single_parent=True, + cascade="all, delete-orphan", + ) state = Column(JSONDict) name = Column(Unicode(255)) @@ -239,6 +325,21 @@ class Spawner(Base): last_activity = Column(DateTime, nullable=True) user_options = Column(JSONDict) + # added in 2.0 + oauth_client_id = Column( + Unicode(255), + ForeignKey( + 'oauth_clients.identifier', + ondelete='SET NULL', + ), + ) + oauth_client = relationship( + 'OAuthClient', + backref=backref("spawner", uselist=False), + cascade="all, delete-orphan", + single_parent=True, + ) + # properties on the spawner wrapper # some APIs get these low-level objects # when the spawner isn't running, @@ -274,7 +375,7 @@ class Service(Base): # common user 
interface: name = Column(Unicode(255), unique=True) - admin = Column(Boolean, default=False) + admin = Column(Boolean(create_constraint=False), default=False) api_tokens = relationship( "APIToken", backref="service", cascade="all, delete-orphan" @@ -282,9 +383,29 @@ class Service(Base): # service-specific interface _server_id = Column(Integer, ForeignKey('servers.id', ondelete='SET NULL')) - server = relationship(Server, cascade='all') + server = relationship( + Server, + backref=backref('service', uselist=False), + single_parent=True, + cascade="all, delete-orphan", + ) pid = Column(Integer) + # added in 2.0 + oauth_client_id = Column( + Unicode(255), + ForeignKey( + 'oauth_clients.identifier', + ondelete='SET NULL', + ), + ) + oauth_client = relationship( + 'OAuthClient', + backref=backref("service", uselist=False), + cascade="all, delete-orphan", + single_parent=True, + ) + def new_api_token(self, token=None, **kwargs): """Create a new API token If `token` is given, load that token. @@ -300,7 +421,46 @@ class Service(Base): return db.query(cls).filter(cls.name == name).first() -class Hashed(object): +class Expiring: + """Mixin for expiring entries + + Subclass must define at least expires_at property, + which should be unix timestamp or datetime object + """ + + now = utcnow # function, must return float timestamp or datetime + expires_at = None # must be defined + + @property + def expires_in(self): + """Property returning expiration in seconds from now + + or None + """ + if self.expires_at: + delta = self.expires_at - self.now() + if isinstance(delta, timedelta): + delta = delta.total_seconds() + return delta + else: + return None + + @classmethod + def purge_expired(cls, db): + """Purge expired API Tokens from the database""" + now = cls.now() + deleted = False + for obj in ( + db.query(cls).filter(cls.expires_at != None).filter(cls.expires_at < now) + ): + app_log.debug("Purging expired %s", obj) + deleted = True + db.delete(obj) + if deleted: + db.commit() 
+ + +class Hashed(Expiring): """Mixin for tables with hashed tokens""" prefix_length = 4 @@ -357,11 +517,19 @@ class Hashed(object): """Start the query for matching token. Returns an SQLAlchemy query already filtered by prefix-matches. + + .. versionchanged:: 1.2 + + Excludes expired matches. """ prefix = token[: cls.prefix_length] # since we can't filter on hashed values, filter on prefix # so we aren't comparing with all tokens - return db.query(cls).filter(bindparam('prefix', prefix).startswith(cls.prefix)) + prefix_match = db.query(cls).filter_by(prefix=prefix) + prefix_match = prefix_match.filter( + or_(cls.expires_at == None, cls.expires_at >= cls.now()) + ) + return prefix_match @classmethod def find(cls, db, token): @@ -378,120 +546,6 @@ class Hashed(object): return orm_token -class APIToken(Hashed, Base): - """An API token""" - - __tablename__ = 'api_tokens' - - user_id = Column(Integer, ForeignKey('users.id', ondelete="CASCADE"), nullable=True) - service_id = Column( - Integer, ForeignKey('services.id', ondelete="CASCADE"), nullable=True - ) - - id = Column(Integer, primary_key=True) - hashed = Column(Unicode(255), unique=True) - prefix = Column(Unicode(16), index=True) - - @property - def api_id(self): - return 'a%i' % self.id - - # token metadata for bookkeeping - created = Column(DateTime, default=datetime.utcnow) - expires_at = Column(DateTime, default=None, nullable=True) - last_activity = Column(DateTime) - note = Column(Unicode(1023)) - - def __repr__(self): - if self.user is not None: - kind = 'user' - name = self.user.name - elif self.service is not None: - kind = 'service' - name = self.service.name - else: - # this shouldn't happen - kind = 'owner' - name = 'unknown' - return "<{cls}('{pre}...', {kind}='{name}')>".format( - cls=self.__class__.__name__, pre=self.prefix, kind=kind, name=name - ) - - @classmethod - def purge_expired(cls, db): - """Purge expired API Tokens from the database""" - now = utcnow() - deleted = False - for token in ( - 
db.query(cls).filter(cls.expires_at != None).filter(cls.expires_at < now) - ): - app_log.debug("Purging expired %s", token) - deleted = True - db.delete(token) - if deleted: - db.commit() - - @classmethod - def find(cls, db, token, *, kind=None): - """Find a token object by value. - - Returns None if not found. - - `kind='user'` only returns API tokens for users - `kind='service'` only returns API tokens for services - """ - prefix_match = cls.find_prefix(db, token) - prefix_match = prefix_match.filter( - or_(cls.expires_at == None, cls.expires_at >= utcnow()) - ) - if kind == 'user': - prefix_match = prefix_match.filter(cls.user_id != None) - elif kind == 'service': - prefix_match = prefix_match.filter(cls.service_id != None) - elif kind is not None: - raise ValueError("kind must be 'user', 'service', or None, not %r" % kind) - for orm_token in prefix_match: - if orm_token.match(token): - return orm_token - - @classmethod - def new( - cls, - token=None, - user=None, - service=None, - note='', - generated=True, - expires_in=None, - ): - """Generate a new API token for a user or service""" - assert user or service - assert not (user and service) - db = inspect(user or service).session - if token is None: - token = new_token() - # Don't need hash + salt rounds on generated tokens, - # which already have good entropy - generated = True - else: - cls.check_token(db, token) - # two stages to ensure orm_token.generated has been set - # before token setter is called - orm_token = cls(generated=generated, note=note or '') - orm_token.token = token - if user: - assert user.id is not None - orm_token.user = user - else: - assert service.id is not None - orm_token.service = service - if expires_in is not None: - orm_token.expires_at = utcnow() + timedelta(seconds=expires_in) - db.add(orm_token) - db.commit() - return token - - # ------------------------------------ # OAuth tables # ------------------------------------ @@ -506,59 +560,223 @@ class GrantType(enum.Enum): 
refresh_token = 'refresh_token' -class OAuthAccessToken(Hashed, Base): - __tablename__ = 'oauth_access_tokens' - id = Column(Integer, primary_key=True, autoincrement=True) +class APIToken(Hashed, Base): + """An API token""" - @property - def api_id(self): - return 'o%i' % self.id + __tablename__ = 'api_tokens' - client_id = Column( - Unicode(255), ForeignKey('oauth_clients.identifier', ondelete='CASCADE') + user_id = Column( + Integer, + ForeignKey('users.id', ondelete="CASCADE"), + nullable=True, + ) + service_id = Column( + Integer, + ForeignKey('services.id', ondelete="CASCADE"), + nullable=True, ) - grant_type = Column(Enum(GrantType), nullable=False) - expires_at = Column(Integer) - refresh_token = Column(Unicode(255)) - # TODO: drop refresh_expires_at. Refresh tokens shouldn't expire - refresh_expires_at = Column(Integer) - user_id = Column(Integer, ForeignKey('users.id', ondelete='CASCADE')) - service = None # for API-equivalence with APIToken - # the browser session id associated with a given token - session_id = Column(Unicode(255)) - - # from Hashed + id = Column(Integer, primary_key=True) hashed = Column(Unicode(255), unique=True) prefix = Column(Unicode(16), index=True) + @property + def api_id(self): + return 'a%i' % self.id + + @property + def owner(self): + return self.user or self.service + + # added in 2.0 + client_id = Column( + Unicode(255), + ForeignKey( + 'oauth_clients.identifier', + ondelete='CASCADE', + ), + ) + + # FIXME: refresh_tokens not implemented + # should be a relation to another token table + # refresh_token = Column( + # Integer, + # ForeignKey('refresh_tokens.id', ondelete="CASCADE"), + # nullable=True, + # ) + + # the browser session id associated with a given token, + # if issued during oauth to be stored in a cookie + session_id = Column(Unicode(255), nullable=True) + + # token metadata for bookkeeping + now = datetime.utcnow # for expiry created = Column(DateTime, default=datetime.utcnow) - last_activity = Column(DateTime, 
nullable=True) + expires_at = Column(DateTime, default=None, nullable=True) + last_activity = Column(DateTime) + note = Column(Unicode(1023)) + scopes = Column(JSONList, default=[]) def __repr__(self): - return "<{cls}('{prefix}...', client_id={client_id!r}, user={user!r}>".format( + if self.user is not None: + kind = 'user' + name = self.user.name + elif self.service is not None: + kind = 'service' + name = self.service.name + else: + # this shouldn't happen + kind = 'owner' + name = 'unknown' + return "<{cls}('{pre}...', {kind}='{name}', client_id={client_id!r})>".format( cls=self.__class__.__name__, + pre=self.prefix, + kind=kind, + name=name, client_id=self.client_id, - user=self.user and self.user.name, - prefix=self.prefix, ) @classmethod - def find(cls, db, token): - orm_token = super().find(db, token) - if orm_token and not orm_token.client_id: - app_log.warning( - "Deleting stale oauth token for %s with no client", - orm_token.user and orm_token.user.name, + def find(cls, db, token, *, kind=None): + """Find a token object by value. + + Returns None if not found. 
+ + `kind='user'` only returns API tokens for users + `kind='service'` only returns API tokens for services + """ + prefix_match = cls.find_prefix(db, token) + if kind == 'user': + prefix_match = prefix_match.filter(cls.user_id != None) + elif kind == 'service': + prefix_match = prefix_match.filter(cls.service_id != None) + elif kind is not None: + raise ValueError("kind must be 'user', 'service', or None, not %r" % kind) + for orm_token in prefix_match: + if orm_token.match(token): + if not orm_token.client_id: + app_log.warning( + "Deleting stale oauth token for %s with no client", + orm_token.user and orm_token.user.name, + ) + db.delete(orm_token) + db.commit() + return + return orm_token + + @classmethod + def new( + cls, + token=None, + *, + user=None, + service=None, + roles=None, + scopes=None, + note='', + generated=True, + session_id=None, + expires_in=None, + client_id=None, + oauth_client=None, + return_orm=False, + ): + """Generate a new API token for a user or service""" + assert user or service + assert not (user and service) + db = inspect(user or service).session + if token is None: + token = new_token() + # Don't need hash + salt rounds on generated tokens, + # which already have good entropy + generated = True + else: + cls.check_token(db, token) + + # avoid circular import + from .roles import roles_to_scopes + + if scopes is not None and roles is not None: + raise ValueError( + "Can only assign one of scopes or roles when creating tokens." ) - db.delete(orm_token) - db.commit() - return - return orm_token + + elif scopes is None and roles is None: + # this is the default branch + # use the default 'token' role to specify default permissions for API tokens + default_token_role = Role.find(db, 'token') + if not default_token_role: + scopes = ["inherit"] + else: + scopes = roles_to_scopes([default_token_role]) + elif roles is not None: + # evaluate roles to scopes immediately + # TODO: should this be deprecated, or not? 
+ # warnings.warn( + # "Setting roles on tokens is deprecated in JupyterHub 3.0. Use scopes.", + # DeprecationWarning, + # stacklevel=3, + # ) + orm_roles = [] + for rolename in roles: + role = Role.find(db, name=rolename) + if role is None: + raise ValueError(f"No such role: {rolename}") + orm_roles.append(role) + scopes = roles_to_scopes(orm_roles) + + if oauth_client is None: + # lookup oauth client by identifier + if client_id is None: + # default: global 'jupyterhub' client + client_id = "jupyterhub" + oauth_client = db.query(OAuthClient).filter_by(identifier=client_id).one() + if client_id is None: + client_id = oauth_client.identifier + + # avoid circular import + from .scopes import _check_scopes_exist, _check_token_scopes + + _check_scopes_exist(scopes, who_for="token") + _check_token_scopes(scopes, owner=user or service, oauth_client=oauth_client) + + # two stages to ensure orm_token.generated has been set + # before token setter is called + orm_token = cls( + generated=generated, + note=note or '', + client_id=client_id, + session_id=session_id, + scopes=list(scopes), + ) + orm_token.token = token + if user: + assert user.id is not None + orm_token.user = user + else: + assert service.id is not None + orm_token.service = service + if expires_in is not None: + orm_token.expires_at = cls.now() + timedelta(seconds=expires_in) + + db.add(orm_token) + db.commit() + return token + + def update_scopes(self, new_scopes): + """Set new scopes, checking that they are allowed""" + from .scopes import _check_scopes_exist, _check_token_scopes + + _check_scopes_exist(new_scopes, who_for="token") + _check_token_scopes( + new_scopes, owner=self.owner, oauth_client=self.oauth_client + ) + self.scopes = new_scopes -class OAuthCode(Base): +class OAuthCode(Expiring, Base): __tablename__ = 'oauth_codes' + id = Column(Integer, primary_key=True, autoincrement=True) client_id = Column( Unicode(255), ForeignKey('oauth_clients.identifier', ondelete='CASCADE') @@ -570,6 +788,26 @@ 
class OAuthCode(Base): # state = Column(Unicode(1023)) user_id = Column(Integer, ForeignKey('users.id', ondelete='CASCADE')) + scopes = Column(JSONList, default=[]) + + @staticmethod + def now(): + return datetime.utcnow().timestamp() + + @classmethod + def find(cls, db, code): + return ( + db.query(cls) + .filter(cls.code == code) + .filter(or_(cls.expires_at == None, cls.expires_at >= cls.now())) + .first() + ) + + def __repr__(self): + return ( + f"<{self.__class__.__name__}(id={self.id}, client_id={self.client_id!r})>" + ) + class OAuthClient(Base): __tablename__ = 'oauth_clients' @@ -584,10 +822,17 @@ class OAuthClient(Base): return self.identifier access_tokens = relationship( - OAuthAccessToken, backref='client', cascade='all, delete-orphan' + APIToken, backref='oauth_client', cascade='all, delete-orphan' ) codes = relationship(OAuthCode, backref='client', cascade='all, delete-orphan') + # these are the scopes an oauth client is allowed to request + # *not* the scopes of the client itself + allowed_scopes = Column(JSONList, default=[]) + + def __repr__(self): + return f"<{self.__class__.__name__}(identifier={self.identifier!r})>" + # General database utilities @@ -623,7 +868,10 @@ def _expire_relationship(target, relationship_prop): return # many-to-many and one-to-many have a list of peers # many-to-one has only one - if relationship_prop.direction is interfaces.MANYTOONE: + if ( + relationship_prop.direction is interfaces.MANYTOONE + or not relationship_prop.uselist + ): peers = [peers] for obj in peers: if inspect(obj).persistent: @@ -702,7 +950,7 @@ def check_db_revision(engine): - Empty databases are tagged with the current revision """ # Check database schema version - current_table_names = set(engine.table_names()) + current_table_names = set(inspect(engine).get_table_names()) my_table_names = set(Base.metadata.tables.keys()) from .dbutil import _temp_alembic_ini @@ -747,7 +995,6 @@ def check_db_revision(engine): ).first()[0] if alembic_revision == 
head: app_log.debug("database schema version found: %s", alembic_revision) - pass else: raise DatabaseSchemaMismatch( "Found database schema version {found} != {head}. " @@ -770,8 +1017,8 @@ def mysql_large_prefix_check(engine): ).fetchall() ) if ( - variables['innodb_file_format'] == 'Barracuda' - and variables['innodb_large_prefix'] == 'ON' + variables.get('innodb_file_format', 'Barracuda') == 'Barracuda' + and variables.get('innodb_large_prefix', 'ON') == 'ON' ): return True else: @@ -821,3 +1068,18 @@ def new_session_factory( # this off gives us a major performance boost session_factory = sessionmaker(bind=engine, expire_on_commit=expire_on_commit) return session_factory + + +def get_class(resource_name): + """Translates resource string names to ORM classes""" + class_dict = { + 'users': User, + 'services': Service, + 'tokens': APIToken, + 'groups': Group, + } + if resource_name not in class_dict: + raise ValueError( + f'Kind must be one of {", ".join(class_dict)}, not {resource_name}' + ) + return class_dict[resource_name] diff --git a/jupyterhub/proxy.py b/jupyterhub/proxy.py index 1b1a9f43..87b47a6c 100644 --- a/jupyterhub/proxy.py +++ b/jupyterhub/proxy.py @@ -23,30 +23,33 @@ import signal import time from functools import wraps from subprocess import Popen -from urllib.parse import quote -from urllib.parse import urlparse +from urllib.parse import quote, urlparse +from weakref import WeakKeyDictionary -from tornado import gen -from tornado.httpclient import AsyncHTTPClient -from tornado.httpclient import HTTPError -from tornado.httpclient import HTTPRequest +from tornado.httpclient import AsyncHTTPClient, HTTPError, HTTPRequest from tornado.ioloop import PeriodicCallback -from traitlets import Any -from traitlets import Bool -from traitlets import default -from traitlets import Instance -from traitlets import Integer -from traitlets import observe -from traitlets import Unicode +from traitlets import ( + Any, + Bool, + CaselessStrEnum, + Dict, + Instance, 
+ Integer, + TraitError, + Unicode, + default, + observe, + validate, +) from traitlets.config import LoggingConfigurable -from . import utils -from .metrics import CHECK_ROUTES_DURATION_SECONDS -from .objects import Server -from .utils import make_ssl_context -from .utils import url_path_join from jupyterhub.traitlets import Command +from . import utils +from .metrics import CHECK_ROUTES_DURATION_SECONDS, PROXY_POLL_DURATION_SECONDS +from .objects import Server +from .utils import AnyTimeoutError, exponential_backoff, url_escape_path, url_path_join + def _one_at_a_time(method): """decorator to limit an async method to be called only once @@ -54,11 +57,18 @@ def _one_at_a_time(method): If multiple concurrent calls to this method are made, queue them instead of allowing them to be concurrently outstanding. """ - method._lock = asyncio.Lock() + # use weak dict for locks + # so that the lock is always acquired within the current asyncio loop + # should only be relevant in testing, where eventloops are created and destroyed often + method._locks = WeakKeyDictionary() @wraps(method) async def locked_method(*args, **kwargs): - async with method._lock: + loop = asyncio.get_event_loop() + lock = method._locks.get(loop, None) + if lock is None: + lock = method._locks[loop] = asyncio.Lock() + async with lock: return await method(*args, **kwargs) return locked_method @@ -112,6 +122,72 @@ class Proxy(LoggingConfigurable): """, ) + extra_routes = Dict( + key_trait=Unicode(), + value_trait=Unicode(), + config=True, + help=""" + Additional routes to be maintained in the proxy. + + A dictionary with a route specification as key, and + a URL as target. The hub will ensure this route is present + in the proxy. + + If the hub is running in host based mode (with + JupyterHub.subdomain_host set), the routespec *must* + have a domain component (example.com/my-url/). If the + hub is not running in host based mode, the routespec + *must not* have a domain component (/my-url/). 
+ + Helpful when the hub is running in API-only mode. + """, + ) + + @validate("extra_routes") + def _validate_extra_routes(self, proposal): + extra_routes = {} + # check routespecs for leading/trailing slashes + for routespec, target in proposal.value.items(): + if not isinstance(routespec, str): + raise TraitError( + f"Proxy.extra_routes keys must be str, got {routespec!r}" + ) + if not isinstance(target, str): + raise TraitError( + f"Proxy.extra_routes values must be str, got {target!r}" + ) + if not routespec.endswith("/"): + # trailing / is unambiguous, so we can add it + self.log.warning( + f"Adding missing trailing '/' to c.Proxy.extra_routes {routespec} -> {routespec}/" + ) + routespec += "/" + + if self.app.subdomain_host: + # subdomain routing must _not_ start with / + if routespec.startswith("/"): + raise ValueError( + f"Proxy.extra_routes missing host component in {routespec} (must not have leading '/') when using `JupyterHub.subdomain_host = {self.app.subdomain_host!r}`" + ) + + else: + # no subdomains, must start with / + # this is ambiguous with host routing, so raise instead of warn + if not routespec.startswith("/"): + raise ValueError( + f"Proxy.extra_routes routespec {routespec} missing leading '/'." + ) + + # validate target URL? + target_url = urlparse(target.lower()) + if target_url.scheme not in {"http", "https"} or not target_url.netloc: + raise ValueError( + f"Proxy.extra_routes target {routespec}={target!r} doesn't look like a URL (should have http[s]://...)" + ) + extra_routes[routespec] = target + + return extra_routes + def start(self): """Start the proxy. @@ -174,14 +250,12 @@ class Proxy(LoggingConfigurable): The proxy implementation should also have a way to associate the fact that a route came from JupyterHub. """ - pass async def delete_route(self, routespec): """Delete a route with a given routespec if it exists. 
**Subclasses must define this method** """ - pass async def get_all_routes(self): """Fetch and return all the routes associated by JupyterHub from the @@ -198,7 +272,6 @@ class Proxy(LoggingConfigurable): 'data': the attached data dict for this route (as specified in add_route) } """ - pass async def get_route(self, routespec): """Return the route info for a given routespec. @@ -277,7 +350,9 @@ class Proxy(LoggingConfigurable): """Remove a user's server from the proxy table.""" routespec = user.proxy_spec if server_name: - routespec = url_path_join(user.proxy_spec, server_name, '/') + routespec = url_path_join( + user.proxy_spec, url_escape_path(server_name), '/' + ) self.log.info("Removing user %s from proxy (%s)", user.name, routespec) await self.delete_route(routespec) @@ -291,7 +366,7 @@ class Proxy(LoggingConfigurable): if service.server: futures.append(self.add_service(service)) # wait after submitting them all - await gen.multi(futures) + await asyncio.gather(*futures) async def add_all_users(self, user_dict): """Update the proxy table from the database. 
@@ -304,7 +379,7 @@ class Proxy(LoggingConfigurable): if spawner.ready: futures.append(self.add_user(user, name)) # wait after submitting them all - await gen.multi(futures) + await asyncio.gather(*futures) @_one_at_a_time async def check_routes(self, user_dict, service_dict, routes=None): @@ -313,10 +388,8 @@ class Proxy(LoggingConfigurable): if not routes: self.log.debug("Fetching routes to check") routes = await self.get_all_routes() - # log info-level that we are starting the route-checking - # this may help diagnose performance issues, - # as we are about - self.log.info("Checking routes") + + self.log.debug("Checking routes") user_routes = {path for path, r in routes.items() if 'user' in r['data']} futures = [] @@ -330,7 +403,7 @@ class Proxy(LoggingConfigurable): route = routes[self.app.hub.routespec] if route['target'] != hub.host: self.log.warning( - "Updating default route %s → %s", route['target'], hub.host + "Updating Hub route %s → %s", route['target'], hub.host ) futures.append(self.add_hub_route(hub)) @@ -384,19 +457,24 @@ class Proxy(LoggingConfigurable): ) futures.append(self.add_service(service)) + # Add extra routes we've been configured for + for routespec, url in self.extra_routes.items(): + good_routes.add(routespec) + futures.append(self.add_route(routespec, url, {'extra': True})) + # Now delete the routes that shouldn't be there for routespec in routes: if routespec not in good_routes: self.log.warning("Deleting stale route %s", routespec) futures.append(self.delete_route(routespec)) - await gen.multi(futures) + await asyncio.gather(*futures) stop = time.perf_counter() # timer stops here when user is deleted CHECK_ROUTES_DURATION_SECONDS.observe(stop - start) # histogram metric def add_hub_route(self, hub): """Add the default route for the Hub""" - self.log.info("Adding default route for Hub: %s => %s", hub.routespec, hub.host) + self.log.info("Adding route for Hub: %s => %s", hub.routespec, hub.host) return self.add_route(hub.routespec, 
self.hub.host, {'hub': True}) async def restore_routes(self): @@ -443,14 +521,32 @@ class ConfigurableHTTPProxy(Proxy): def _concurrency_changed(self, change): self.semaphore = asyncio.BoundedSemaphore(change.new) + # https://github.com/jupyterhub/configurable-http-proxy/blob/4.5.1/bin/configurable-http-proxy#L92 + log_level = CaselessStrEnum( + ["debug", "info", "warn", "error"], + "info", + help="Proxy log level", + config=True, + ) + debug = Bool(False, help="Add debug-level logging to the Proxy.", config=True) + + @observe('debug') + def _debug_changed(self, change): + if change.new: + self.log_level = "debug" + auth_token = Unicode( help="""The Proxy auth token Loaded from the CONFIGPROXY_AUTH_TOKEN env variable by default. """ ).tag(config=True) - check_running_interval = Integer(5, config=True) + check_running_interval = Integer( + 5, + help="Interval (in seconds) at which to check if the proxy is running.", + config=True, + ) @default('auth_token') def _auth_token_default(self): @@ -472,7 +568,7 @@ class ConfigurableHTTPProxy(Proxy): if self.app.internal_ssl: proto = 'https' - return "{proto}://{url}".format(proto=proto, url=url) + return f"{proto}://{url}" command = Command( 'configurable-http-proxy', @@ -496,6 +592,19 @@ class ConfigurableHTTPProxy(Proxy): if not psutil.pid_exists(pid): raise ProcessLookupError + + try: + process = psutil.Process(pid) + if self.command and self.command[0]: + process_cmd = process.cmdline() + if process_cmd and not any( + self.command[0] in clause for clause in process_cmd + ): + raise ProcessLookupError + except (psutil.AccessDenied, psutil.NoSuchProcess): + # If there is a process at the proxy's PID but we don't have permissions to see it, + # then it is unlikely to actually be the proxy. 
+ raise ProcessLookupError else: os.kill(pid, 0) @@ -515,7 +624,7 @@ class ConfigurableHTTPProxy(Proxy): pid_file = os.path.abspath(self.pid_file) self.log.warning("Found proxy pid file: %s", pid_file) try: - with open(pid_file, "r") as f: + with open(pid_file) as f: pid = int(f.read().strip()) except ValueError: self.log.warning("%s did not appear to contain a pid", pid_file) @@ -571,7 +680,34 @@ class ConfigurableHTTPProxy(Proxy): os.remove(self.pid_file) except FileNotFoundError: self.log.debug("PID file %s already removed", self.pid_file) - pass + + def _get_ssl_options(self): + """List of cmd proxy options to use internal SSL""" + cmd = [] + proxy_api = 'proxy-api' + proxy_client = 'proxy-client' + api_key = self.app.internal_proxy_certs[proxy_api][ + 'keyfile' + ] # Check content in next test and just patch manulaly or in the config of the file + api_cert = self.app.internal_proxy_certs[proxy_api]['certfile'] + api_ca = self.app.internal_trust_bundles[proxy_api + '-ca'] + + client_key = self.app.internal_proxy_certs[proxy_client]['keyfile'] + client_cert = self.app.internal_proxy_certs[proxy_client]['certfile'] + client_ca = self.app.internal_trust_bundles[proxy_client + '-ca'] + + cmd.extend(['--api-ssl-key', api_key]) + cmd.extend(['--api-ssl-cert', api_cert]) + cmd.extend(['--api-ssl-ca', api_ca]) + cmd.extend(['--api-ssl-request-cert']) + cmd.extend(['--api-ssl-reject-unauthorized']) + + cmd.extend(['--client-ssl-key', client_key]) + cmd.extend(['--client-ssl-cert', client_cert]) + cmd.extend(['--client-ssl-ca', client_ca]) + cmd.extend(['--client-ssl-request-cert']) + cmd.extend(['--client-ssl-reject-unauthorized']) + return cmd async def start(self): """Start the proxy process""" @@ -594,48 +730,17 @@ class ConfigurableHTTPProxy(Proxy): str(api_server.port), '--error-target', url_path_join(self.hub.url, 'error'), + '--log-level', + self.log_level, ] if self.app.subdomain_host: cmd.append('--host-routing') - if self.debug: - cmd.extend(['--log-level', 
'debug']) if self.ssl_key: cmd.extend(['--ssl-key', self.ssl_key]) if self.ssl_cert: cmd.extend(['--ssl-cert', self.ssl_cert]) if self.app.internal_ssl: - proxy_api = 'proxy-api' - proxy_client = 'proxy-client' - api_key = self.app.internal_proxy_certs[proxy_api]['keyfile'] - api_cert = self.app.internal_proxy_certs[proxy_api]['certfile'] - api_ca = self.app.internal_trust_bundles[proxy_api + '-ca'] - - client_key = self.app.internal_proxy_certs[proxy_client]['keyfile'] - client_cert = self.app.internal_proxy_certs[proxy_client]['certfile'] - client_ca = self.app.internal_trust_bundles[proxy_client + '-ca'] - - cmd.extend(['--api-ssl-key', api_key]) - cmd.extend(['--api-ssl-cert', api_cert]) - cmd.extend(['--api-ssl-ca', api_ca]) - cmd.extend(['--api-ssl-request-cert']) - cmd.extend(['--api-ssl-reject-unauthorized']) - - cmd.extend(['--client-ssl-key', client_key]) - cmd.extend(['--client-ssl-cert', client_cert]) - cmd.extend(['--client-ssl-ca', client_ca]) - cmd.extend(['--client-ssl-request-cert']) - cmd.extend(['--client-ssl-reject-unauthorized']) - if self.app.statsd_host: - cmd.extend( - [ - '--statsd-host', - self.app.statsd_host, - '--statsd-port', - str(self.app.statsd_port), - '--statsd-prefix', - self.app.statsd_prefix + '.chp', - ] - ) + cmd.extend(self._get_ssl_options()) # Warn if SSL is not used if ' --ssl' not in ' '.join(cmd): self.log.warning( @@ -664,15 +769,16 @@ class ConfigurableHTTPProxy(Proxy): def _check_process(): status = self.proxy_process.poll() if status is not None: - e = RuntimeError("Proxy failed to start with exit code %i" % status) - raise e from None + with self.proxy_process: + e = RuntimeError("Proxy failed to start with exit code %i" % status) + raise e from None for server in (public_server, api_server): for i in range(10): _check_process() try: await server.wait_up(1) - except TimeoutError: + except AnyTimeoutError: continue else: break @@ -691,8 +797,17 @@ class ConfigurableHTTPProxy(Proxy): parent = psutil.Process(pid) 
children = parent.children(recursive=True) for child in children: - child.kill() - psutil.wait_procs(children, timeout=5) + child.terminate() + gone, alive = psutil.wait_procs(children, timeout=5) + for p in alive: + p.kill() + # Clear the shell, too, if it still exists. + try: + parent.terminate() + parent.wait(timeout=5) + parent.kill() + except psutil.NoSuchProcess: + pass def _terminate(self): """Terminate our process""" @@ -766,12 +881,38 @@ class ConfigurableHTTPProxy(Proxy): req = HTTPRequest( url, method=method, - headers={'Authorization': 'token {}'.format(self.auth_token)}, + headers={'Authorization': f'token {self.auth_token}'}, body=body, + connect_timeout=3, # default: 20s + request_timeout=10, # default: 20s ) - async with self.semaphore: - result = await client.fetch(req) - return result + + async def _wait_for_api_request(): + try: + async with self.semaphore: + return await client.fetch(req) + except HTTPError as e: + # Retry on potentially transient errors in CHP, typically + # numbered 500 and up. Note that CHP isn't able to emit 429 + # errors. 
+ if e.code >= 500: + self.log.warning( + "api_request to the proxy failed with status code {}, retrying...".format( + e.code + ) + ) + return False # a falsy return value make exponential_backoff retry + else: + self.log.error(f"api_request to proxy failed: {e}") + # An unhandled error here will help the hub invoke cleanup logic + raise + + result = await exponential_backoff( + _wait_for_api_request, + f'Repeated api_request to proxy path "{path}" failed.', + timeout=30, + ) + return result async def add_route(self, routespec, target, data): body = data or {} @@ -801,6 +942,7 @@ class ConfigurableHTTPProxy(Proxy): async def get_all_routes(self, client=None): """Fetch the proxy's routes.""" + proxy_poll_start_time = time.perf_counter() resp = await self.api_request('', client=client) chp_routes = json.loads(resp.body.decode('utf8', 'replace')) all_routes = {} @@ -811,4 +953,5 @@ class ConfigurableHTTPProxy(Proxy): self.log.debug("Omitting non-jupyterhub route %r", routespec) continue all_routes[routespec] = self._reformat_routespec(routespec, chp_data) + PROXY_POLL_DURATION_SECONDS.observe(time.perf_counter() - proxy_poll_start_time) return all_routes diff --git a/jupyterhub/roles.py b/jupyterhub/roles.py new file mode 100644 index 00000000..e6b864c1 --- /dev/null +++ b/jupyterhub/roles.py @@ -0,0 +1,327 @@ +"""Roles utils""" +# Copyright (c) Jupyter Development Team. +# Distributed under the terms of the Modified BSD License. +import re +from functools import wraps + +from sqlalchemy import func +from tornado.log import app_log + +from . 
import orm, scopes + + +def get_default_roles(): + """Returns: + default roles (list): default role definitions as dictionaries: + { + 'name': role name, + 'description': role description, + 'scopes': list of scopes, + } + """ + default_roles = [ + { + 'name': 'user', + 'description': 'Standard user privileges', + 'scopes': [ + 'self', + ], + }, + { + 'name': 'admin', + 'description': 'Elevated privileges (can do anything)', + 'scopes': [ + 'admin-ui', + 'admin:users', + 'admin:servers', + 'tokens', + 'admin:groups', + 'list:services', + 'read:services', + 'read:hub', + 'proxy', + 'shutdown', + 'access:services', + 'access:servers', + 'read:roles', + 'read:metrics', + ], + }, + { + 'name': 'server', + 'description': 'Post activity only', + 'scopes': [ + 'users:activity!user', + 'access:servers!user', + ], + }, + { + 'name': 'token', + 'description': 'Token with same permissions as its owner', + 'scopes': ['inherit'], + }, + ] + return default_roles + + +def get_roles_for(orm_object): + """Get roles for a given User/Group/etc. + + If User, take into account the user's groups roles as well + + Arguments: + orm_object: orm.User, orm.Service, orm.Group + Any role-having entity + + Returns: + roles (list): list of orm.Role objects assigned to the object. 
+ """ + if not isinstance(orm_object, orm.Base): + raise TypeError(f"Only orm objects allowed, got {orm_object}") + + roles = [] + roles.extend(orm_object.roles) + + if isinstance(orm_object, orm.User): + for group in orm_object.groups: + roles.extend(group.roles) + return roles + + +def roles_to_scopes(roles): + """Returns set of raw (not expanded) scopes for a collection of roles""" + raw_scopes = set() + + for role in roles: + raw_scopes.update(role.scopes) + return raw_scopes + + +def roles_to_expanded_scopes(roles, owner): + """Returns a set of fully expanded scopes for a specified role or list of roles + + Arguments: + roles (list(orm.Role): orm.Role objects to expand + owner (obj): orm.User or orm.Service which holds the role(s) + Used for expanding filters and metascopes such as !user. + + Returns: + expanded scopes (set): set of all expanded scopes for the role(s) + """ + return scopes.expand_scopes(roles_to_scopes(roles), owner=owner) + + +_role_name_pattern = re.compile(r'^[a-z][a-z0-9\-_~\.]{1,253}[a-z0-9]$') + + +def _validate_role_name(name): + """Ensure a role has a valid name + + Raises ValueError if role name is invalid + """ + if not _role_name_pattern.match(name): + raise ValueError( + f"Invalid role name: {name!r}." 
+ " Role names must:\n" + " - be 3-255 characters\n" + " - contain only lowercase ascii letters, numbers, and URL unreserved special characters '-.~_'\n" + " - start with a letter\n" + " - end with letter or number\n" + ) + return True + + +def create_role(db, role_dict): + """Adds a new role to database or modifies an existing one""" + default_roles = get_default_roles() + + if 'name' not in role_dict.keys(): + raise KeyError('Role definition must have a name') + else: + name = role_dict['name'] + _validate_role_name(name) + role = orm.Role.find(db, name) + + description = role_dict.get('description') + scopes = role_dict.get('scopes') + + if name == "admin": + for _role in get_default_roles(): + if _role["name"] == "admin": + admin_spec = _role + break + for key in ["description", "scopes"]: + if key in role_dict and role_dict[key] != admin_spec[key]: + raise ValueError( + f"Cannot override admin role admin.{key} = {role_dict[key]}" + ) + + # check if the provided scopes exist + if scopes: + # avoid circular import + from .scopes import _check_scopes_exist + + _check_scopes_exist(scopes, who_for=f"role {role_dict['name']}") + else: + app_log.warning('Role %s will have no scopes', name) + + if role is None: + role = orm.Role(name=name, description=description, scopes=scopes) + db.add(role) + if role_dict not in default_roles: + app_log.info('Role %s added to database', name) + else: + for attr in ["description", "scopes"]: + default_value = getattr(orm.Role, attr).default + if default_value: + default_value = default_value.arg + + new_value = role_dict.get(attr, default_value) + old_value = getattr(role, attr) + if new_value != old_value: + setattr(role, attr, new_value) + app_log.info( + f'Role attribute {role.name}.{attr} has been changed', + ) + app_log.debug( + f'Role attribute {role.name}.{attr} changed from %r to %r', + old_value, + new_value, + ) + db.commit() + + +def delete_role(db, rolename): + """Removes a role from database""" + # default roles are not 
removable + default_roles = get_default_roles() + if any(role['name'] == rolename for role in default_roles): + raise ValueError('Default role %r cannot be removed', rolename) + + role = orm.Role.find(db, rolename) + if role: + db.delete(role) + db.commit() + app_log.info('Role %s has been deleted', rolename) + else: + raise KeyError('Cannot remove role %r that does not exist', rolename) + + +def _existing_only(func): + """Decorator for checking if roles exist""" + + @wraps(func) + def _check_existence(db, entity, role=None, *, rolename=None): + if isinstance(role, str): + rolename = role + if rolename is not None: + # if given as a str, lookup role by name + role = orm.Role.find(db, rolename) + if role is None: + raise ValueError(f"Role {rolename} does not exist") + + return func(db, entity, role) + + return _check_existence + + +@_existing_only +def grant_role(db, entity, role): + """Adds a role for users, services, groups or tokens""" + if isinstance(entity, orm.APIToken): + entity_repr = entity + else: + entity_repr = entity.name + + if role not in entity.roles: + entity.roles.append(role) + app_log.info( + 'Adding role %s for %s: %s', + role.name, + type(entity).__name__, + entity_repr, + ) + db.commit() + + +@_existing_only +def strip_role(db, entity, role): + """Removes a role for users, services, groups or tokens""" + if isinstance(entity, orm.APIToken): + entity_repr = entity + else: + entity_repr = entity.name + if role in entity.roles: + entity.roles.remove(role) + db.commit() + app_log.info( + 'Removing role %s for %s: %s', + role.name, + type(entity).__name__, + entity_repr, + ) + + +def assign_default_roles(db, entity): + """Assigns default role(s) to an entity: + + tokens get 'token' role + + users and services get 'admin' role if they are admin (removed if they are not) + + users always get 'user' role + """ + if isinstance(entity, orm.Group): + return + + # users and services all have 'user' role by default + # and optionally 'admin' as well + + 
kind = type(entity).__name__ + app_log.debug(f'Assigning default role to {kind} {entity.name}') + if entity.admin: + grant_role(db, entity=entity, rolename="admin") + else: + admin_role = orm.Role.find(db, 'admin') + if admin_role in entity.roles: + strip_role(db, entity=entity, rolename="admin") + if kind == "User": + grant_role(db, entity=entity, rolename="user") + + +def update_roles(db, entity, roles): + """Add roles to an entity (token, user, etc.) + + Calls `grant_role` for each role. + """ + for rolename in roles: + grant_role(db, entity=entity, rolename=rolename) + + +def check_for_default_roles(db, bearer): + """Checks that role bearers have at least one role (default if none). + Groups can be without a role + """ + Class = orm.get_class(bearer) + if Class in {orm.Group, orm.Service}: + pass + else: + for obj in ( + db.query(Class) + .outerjoin(orm.Role, Class.roles) + .group_by(Class.id) + .having(func.count(orm.Role.id) == 0) + ): + assign_default_roles(db, obj) + db.commit() + + +def mock_roles(app, name, kind): + """Loads and assigns default roles for mocked objects""" + Class = orm.get_class(kind) + obj = Class.find(app.db, name=name) + default_roles = get_default_roles() + for role in default_roles: + create_role(app.db, role) + app_log.info('Assigning default roles to mocked %s: %s', kind[:-1], name) + assign_default_roles(db=app.db, entity=obj) diff --git a/jupyterhub/scopes.py b/jupyterhub/scopes.py new file mode 100644 index 00000000..fb345f3d --- /dev/null +++ b/jupyterhub/scopes.py @@ -0,0 +1,1168 @@ +""" +General scope definitions and utilities + +Scope functions generally return _immutable_ collections, +such as `frozenset` to avoid mutating cached values. +If needed, mutable copies can be made, e.g. 
`set(frozen_scopes)` + +Scope variable nomenclature +--------------------------- +scopes or 'raw' scopes: collection of scopes that may contain abbreviations (e.g., in role definition) +expanded scopes: set of expanded scopes without abbreviations (i.e., resolved metascopes, filters, and subscopes) +parsed scopes: dictionary format of expanded scopes (`read:users!user=name` -> `{'read:users': {user: [name]}`) +intersection : set of expanded scopes as intersection of 2 expanded scope sets +identify scopes: set of expanded scopes needed for identify (whoami) endpoints +reduced scopes: expanded scopes that have been reduced +""" +import functools +import inspect +import re +import warnings +from enum import Enum +from functools import lru_cache +from itertools import chain +from textwrap import indent + +import sqlalchemy as sa +from tornado import web +from tornado.log import app_log + +from . import orm, roles +from ._memoize import DoNotCache, FrozenDict, lru_cache_key + +"""when modifying the scope definitions, make sure that `docs/source/rbac/generate-scope-table.py` is run + so that changes are reflected in the documentation and REST API description.""" +scope_definitions = { + '(no_scope)': {'description': 'Identify the owner of the requesting entity.'}, + 'self': { + 'description': 'Your own resources', + 'doc_description': 'The user’s own resources _(metascope for users, resolves to (no_scope) for services)_', + }, + 'inherit': { + 'description': 'Anything you have access to', + 'doc_description': 'Everything that the token-owning entity can access _(metascope for tokens)_', + }, + 'admin-ui': { + 'description': 'Access the admin page.', + 'doc_description': 'Access the admin page. 
Permission to take actions via the admin page granted separately.', + }, + 'admin:users': { + 'description': 'Read, write, create and delete users and their authentication state, not including their servers or tokens.', + 'subscopes': ['admin:auth_state', 'users', 'read:roles:users', 'delete:users'], + }, + 'admin:auth_state': {'description': 'Read a user’s authentication state.'}, + 'users': { + 'description': 'Read and write permissions to user models (excluding servers, tokens and authentication state).', + 'subscopes': ['read:users', 'list:users', 'users:activity'], + }, + 'delete:users': { + 'description': "Delete users.", + }, + 'list:users': { + 'description': 'List users, including at least their names.', + 'subscopes': ['read:users:name'], + }, + 'read:users': { + 'description': 'Read user models (excluding including servers, tokens and authentication state).', + 'subscopes': [ + 'read:users:name', + 'read:users:groups', + 'read:users:activity', + ], + }, + 'read:users:name': {'description': 'Read names of users.'}, + 'read:users:groups': {'description': 'Read users’ group membership.'}, + 'read:users:activity': {'description': 'Read time of last user activity.'}, + 'read:roles': { + 'description': 'Read role assignments.', + 'subscopes': ['read:roles:users', 'read:roles:services', 'read:roles:groups'], + }, + 'read:roles:users': {'description': 'Read user role assignments.'}, + 'read:roles:services': {'description': 'Read service role assignments.'}, + 'read:roles:groups': {'description': 'Read group role assignments.'}, + 'users:activity': { + 'description': 'Update time of last user activity.', + 'subscopes': ['read:users:activity'], + }, + 'admin:servers': { + 'description': 'Read, start, stop, create and delete user servers and their state.', + 'subscopes': ['admin:server_state', 'servers'], + }, + 'admin:server_state': {'description': 'Read and write users’ server state.'}, + 'servers': { + 'description': 'Start and stop user servers.', + 
'subscopes': ['read:servers', 'delete:servers'], + }, + 'read:servers': { + 'description': 'Read users’ names and their server models (excluding the server state).', + 'subscopes': ['read:users:name'], + }, + 'delete:servers': {'description': "Stop and delete users' servers."}, + 'tokens': { + 'description': 'Read, write, create and delete user tokens.', + 'subscopes': ['read:tokens'], + }, + 'read:tokens': {'description': 'Read user tokens.'}, + 'admin:groups': { + 'description': 'Read and write group information, create and delete groups.', + 'subscopes': ['groups', 'read:roles:groups', 'delete:groups'], + }, + 'groups': { + 'description': 'Read and write group information, including adding/removing users to/from groups.', + 'subscopes': ['read:groups', 'list:groups'], + }, + 'list:groups': { + 'description': 'List groups, including at least their names.', + 'subscopes': ['read:groups:name'], + }, + 'read:groups': { + 'description': 'Read group models.', + 'subscopes': ['read:groups:name'], + }, + 'read:groups:name': {'description': 'Read group names.'}, + 'delete:groups': { + 'description': "Delete groups.", + }, + 'list:services': { + 'description': 'List services, including at least their names.', + 'subscopes': ['read:services:name'], + }, + 'read:services': { + 'description': 'Read service models.', + 'subscopes': ['read:services:name'], + }, + 'read:services:name': {'description': 'Read service names.'}, + 'read:hub': {'description': 'Read detailed information about the Hub.'}, + 'access:servers': { + 'description': 'Access user servers via API or browser.', + }, + 'access:services': { + 'description': 'Access services via API or browser.', + }, + 'proxy': { + 'description': 'Read information about the proxy’s routing table, sync the Hub with the proxy and notify the Hub about a new proxy.' 
+ }, + 'shutdown': {'description': 'Shutdown the hub.'}, + 'read:metrics': { + 'description': "Read prometheus metrics.", + }, +} + + +class Scope(Enum): + ALL = True + + +def _intersection_cache_key(scopes_a, scopes_b, db=None): + """Cache key function for scope intersections""" + return (frozenset(scopes_a), frozenset(scopes_b)) + + +@lru_cache_key(_intersection_cache_key) +def _intersect_expanded_scopes(scopes_a, scopes_b, db=None): + """Intersect two sets of scopes by comparing their permissions + + Arguments: + scopes_a, scopes_b: sets of expanded scopes + db (optional): db connection for resolving group membership + + Returns: + intersection: set of expanded scopes as intersection of the arguments + + If db is given, group membership will be accounted for in intersections, + Otherwise, it can result in lower than intended permissions, + (i.e. users!group=x & users!user=y will be empty, even if user y is in group x.) + """ + empty_set = frozenset() + scopes_a = frozenset(scopes_a) + scopes_b = frozenset(scopes_b) + + # cached lookups for group membership of users and servers + @lru_cache() + def groups_for_user(username): + """Get set of group names for a given username""" + # if we need a group lookup, the result is not cacheable + nonlocal needs_db + needs_db = True + user = db.query(orm.User).filter_by(name=username).first() + if user is None: + return empty_set + else: + return {group.name for group in user.groups} + + @lru_cache() + def groups_for_server(server): + """Get set of group names for a given server""" + username, _, servername = server.partition("/") + return groups_for_user(username) + + parsed_scopes_a = parse_scopes(scopes_a) + parsed_scopes_b = parse_scopes(scopes_b) + + # track whether we need a db lookup (for groups) + # because we can't cache the intersection if we do + # if there are no group filters, this is cacheable + needs_db = False + + common_bases = parsed_scopes_a.keys() & parsed_scopes_b.keys() + + common_filters = {} + warned 
= False + for base in common_bases: + filters_a = parsed_scopes_a[base] + filters_b = parsed_scopes_b[base] + if filters_a == Scope.ALL: + common_filters[base] = filters_b + elif filters_b == Scope.ALL: + common_filters[base] = filters_a + else: + common_entities = filters_a.keys() & filters_b.keys() + all_entities = filters_a.keys() | filters_b.keys() + + # if we don't have a db session, we can't check group membership + # warn *if* there are non-overlapping user= and group= filters that we can't check + if ( + db is None + and not warned + and 'group' in all_entities + and ('user' in all_entities or 'server' in all_entities) + ): + # this could resolve wrong if there's a user or server only on one side and a group only on the other + # check both directions: A has group X not in B group list AND B has user Y not in A user list + for a, b in [(filters_a, filters_b), (filters_b, filters_a)]: + for b_key in ('user', 'server'): + if ( + not warned + and "group" in a + and b_key in b + and a["group"].difference(b.get("group", [])) + and b[b_key].difference(a.get(b_key, [])) + ): + warnings.warn( + f"{base}[!{b_key}={b[b_key]}, !group={a['group']}] combinations of filters present," + " without db access. Intersection between not considered." 
+ " May result in lower than intended permissions.", + UserWarning, + ) + warned = True + needs_db = True + + common_filters[base] = { + entity: filters_a[entity] & filters_b[entity] + for entity in common_entities + } + + # resolve hierarchies (group/user/server) in both directions + common_servers = common_filters[base].get("server", set()) + common_users = common_filters[base].get("user", set()) + + for a, b in [(filters_a, filters_b), (filters_b, filters_a)]: + if 'server' in a and b.get('server') != a['server']: + # skip already-added servers (includes overlapping servers) + servers = a['server'].difference(common_servers) + + # resolve user/server hierarchy + if servers and 'user' in b: + for server in servers: + username, _, servername = server.partition("/") + if username in b['user']: + common_servers.add(server) + + # resolve group/server hierarchy if db available + servers = servers.difference(common_servers) + if db is not None and servers and 'group' in b: + needs_db = True + for server in servers: + server_groups = groups_for_server(server) + if server_groups & b['group']: + common_servers.add(server) + + # resolve group/user hierarchy if db available and user sets aren't identical + if ( + db is not None + and 'user' in a + and 'group' in b + and b.get('user') != a['user'] + ): + # skip already-added users (includes overlapping users) + users = a['user'].difference(common_users) + for username in users: + groups = groups_for_user(username) + if groups & b["group"]: + common_users.add(username) + + # add server filter if there wasn't one before + if common_servers and "server" not in common_filters[base]: + common_filters[base]["server"] = common_servers + + # add user filter if it's non-empty and there wasn't one before + if common_users and "user" not in common_filters[base]: + common_filters[base]["user"] = common_users + + intersection = unparse_scopes(common_filters) + if needs_db: + # return intersection, but don't cache it if it needed db 
lookups + return DoNotCache(intersection) + + return intersection + + +def get_scopes_for(orm_object): + """Find scopes for a given user or token from their roles and resolve permissions + + Arguments: + orm_object: orm object or User wrapper + + Returns: + expanded scopes (set) for the orm object + or + intersection (set) if orm_object == orm.APIToken + """ + expanded_scopes = set() + if orm_object is None: + return expanded_scopes + + if not isinstance(orm_object, orm.Base): + from .user import User + + if isinstance(orm_object, User): + orm_object = orm_object.orm_user + else: + raise TypeError( + f"Only allow orm objects or User wrappers, got {orm_object}" + ) + + owner = None + if isinstance(orm_object, orm.APIToken): + owner = orm_object.user or orm_object.service + owner_roles = roles.get_roles_for(owner) + owner_scopes = roles.roles_to_expanded_scopes(owner_roles, owner) + + token_scopes = set(orm_object.scopes) + if 'inherit' in token_scopes: + # token_scopes includes 'inherit', + # so we know the intersection is exactly the owner's scopes + # only thing we miss by short-circuiting here: warning about excluded extra scopes + return owner_scopes + + token_scopes = set( + expand_scopes( + token_scopes, + owner=owner, + oauth_client=orm_object.oauth_client, + ) + ) + + if orm_object.client_id != "jupyterhub": + # oauth tokens can be used to access the service issuing the token, + # assuming the owner itself still has permission to do so + token_scopes.update(access_scopes(orm_object.oauth_client)) + + # reduce to collapse multiple filters on the same scope + # to avoid spurious logs about discarded scopes + token_scopes.update(identify_scopes(owner)) + token_scopes = reduce_scopes(token_scopes) + + intersection = _intersect_expanded_scopes( + token_scopes, + owner_scopes, + db=sa.inspect(orm_object).session, + ) + discarded_token_scopes = token_scopes - intersection + + # Not taking symmetric difference here because token owner can naturally have more scopes 
than token + if discarded_token_scopes: + app_log.warning( + f"discarding scopes [{discarded_token_scopes}]," + f" not present in roles of owner {owner}" + ) + app_log.debug( + "Owner %s has scopes: %s\nToken has scopes: %s", + owner, + owner_scopes, + token_scopes, + ) + expanded_scopes = intersection + # always include identify scopes + expanded_scopes + else: + expanded_scopes = roles.roles_to_expanded_scopes( + roles.get_roles_for(orm_object), + owner=orm_object, + ) + if isinstance(orm_object, (orm.User, orm.Service)): + owner = orm_object + + return expanded_scopes + + +@lru_cache() +def _expand_self_scope(username): + """ + Users have a metascope 'self' that should be expanded to standard user privileges. + At the moment that is a user-filtered version (optional read) access to + users + users:name + users:groups + users:activity + tokens + servers + access:servers + + + Arguments: + username (str): user name + + Returns: + expanded scopes (set): set of expanded scopes covering standard user privileges + """ + scope_list = [ + 'read:users', + 'read:users:name', + 'read:users:groups', + 'users:activity', + 'read:users:activity', + 'servers', + 'delete:servers', + 'read:servers', + 'tokens', + 'read:tokens', + 'access:servers', + ] + # return immutable frozenset because the result is cached + return frozenset(f"{scope}!user={username}" for scope in scope_list) + + +@lru_cache(maxsize=65535) +def _expand_scope(scope): + """Returns a scope and all all subscopes + + Arguments: + scope (str): the scope to expand + + Returns: + expanded scope (set): set of all scope's subscopes including the scope itself + """ + + # remove filter, save for later + scope_name, sep, filter_ = scope.partition('!') + + # expand scope and subscopes + expanded_scope_names = set() + + def _add_subscopes(scope_name): + expanded_scope_names.add(scope_name) + if scope_definitions[scope_name].get('subscopes'): + for subscope in scope_definitions[scope_name].get('subscopes'): + 
_add_subscopes(subscope) + + _add_subscopes(scope_name) + + # reapply !filter + if filter_: + expanded_scopes = { + f"{scope_name}!{filter_}" + for scope_name in expanded_scope_names + # server scopes have some cross-resource subscopes + # where the !server filter doesn't make sense, + # e.g. read:servers -> read:users:name + if not (filter_.startswith("server") and scope_name.startswith("read:user")) + } + else: + expanded_scopes = expanded_scope_names + + # return immutable frozenset because the result is cached + return frozenset(expanded_scopes) + + +def _expand_scopes_key(scopes, owner=None, oauth_client=None): + """Cache key function for expand_scopes + + scopes is usually a mutable list or set, + which can be hashed as a frozenset + + For the owner, we only care about what kind they are, + and their name. + """ + # freeze scopes for hash + frozen_scopes = frozenset(scopes) + if owner is None: + owner_key = None + else: + # owner key is the type and name + owner_key = (type(owner).__name__, owner.name) + if oauth_client is None: + oauth_client_key = None + else: + oauth_client_key = oauth_client.identifier + return (frozen_scopes, owner_key, oauth_client_key) + + +@lru_cache_key(_expand_scopes_key) +def expand_scopes(scopes, owner=None, oauth_client=None): + """Returns a set of fully expanded scopes for a collection of raw scopes + + Arguments: + scopes (collection(str)): collection of raw scopes + owner (obj, optional): orm.User or orm.Service as owner of orm.APIToken + Used for expansion of metascopes such as `self` + and owner-based filters such as `!user` + oauth_client (obj, optional): orm.OAuthClient + The issuing OAuth client of an API token. 
+ + Returns: + expanded scopes (set): set of all expanded scopes, with filters applied for the owner + """ + expanded_scopes = set(chain.from_iterable(map(_expand_scope, scopes))) + + filter_replacements = { + "user": None, + "service": None, + "server": None, + } + user_name = None + if isinstance(owner, orm.User): + user_name = owner.name + filter_replacements["user"] = f"user={user_name}" + elif isinstance(owner, orm.Service): + filter_replacements["service"] = f"service={owner.name}" + + if oauth_client is not None: + if oauth_client.service is not None: + filter_replacements["service"] = f"service={oauth_client.service.name}" + elif oauth_client.spawner is not None: + spawner = oauth_client.spawner + filter_replacements["server"] = f"server={spawner.user.name}/{spawner.name}" + + for scope in expanded_scopes.copy(): + base_scope, _, filter = scope.partition('!') + if filter in filter_replacements: + # translate !user into !user={username} + # and !service into !service={servicename} + # and !server into !server={username}/{servername} + expanded_scopes.remove(scope) + expanded_filter = filter_replacements[filter] + if expanded_filter: + # translate + expanded_scopes.add(f'{base_scope}!{expanded_filter}') + else: + warnings.warn( + f"Not expanding !{filter} filter without target {filter} in {scope}", + stacklevel=3, + ) + + if 'self' in expanded_scopes: + expanded_scopes.remove('self') + if user_name: + expanded_scopes |= _expand_self_scope(user_name) + else: + warnings.warn( + f"Not expanding 'self' scope for owner {owner} which is not a User", + stacklevel=3, + ) + + # reduce to discard overlapping scopes + # return immutable frozenset because the result is cached + return frozenset(reduce_scopes(expanded_scopes)) + + +def _resolve_requested_scopes(requested_scopes, have_scopes, user, client, db): + """Resolve requested scopes for an OAuth token + + Intersects requested scopes with user scopes. 
+ + First, at the raw scope level, + then if some scopes remain, intersect expanded scopes. + + Args: + requested_scopes (set): + raw scopes being requested. + have_scopes (set): + raw scopes currently held, against which requested_scopes will be checked. + user (orm.User): + user for whom the scopes will be issued + client (orm.OAuthClient): + oauth client which will own the token + db: + database session, required to resolve user|group intersections + + Returns: + (allowed_scopes, disallowed_scopes): + sets of allowed and disallowed scopes from the request + """ + + allowed_scopes = requested_scopes.intersection(have_scopes) + disallowed_scopes = requested_scopes.difference(have_scopes) + + if not disallowed_scopes: + # simple intersection worked, all scopes granted + return (allowed_scopes, disallowed_scopes) + + # if we got here, some scopes were disallowed. + # resolve fully expanded scopes to make sure scope intersections are properly allowed. + expanded_allowed = expand_scopes(allowed_scopes, user, client) + expanded_have = expand_scopes(have_scopes, user, client) + # compute one at a time so we can keep the abbreviated scopes + # if they are a subset of user scopes (e.g. 
requested !server, have !user) + for scope in list(disallowed_scopes): + expanded_disallowed = expand_scopes({scope}, user, client) + # don't check already-allowed scopes + expanded_disallowed -= expanded_allowed + if expanded_disallowed: + allowed_intersection = _intersect_expanded_scopes( + expanded_disallowed, expanded_have, db=db + ) + else: + allowed_intersection = set() + + if allowed_intersection == expanded_disallowed: + # full scope allowed (requested scope is subset of user scopes) + allowed_scopes.add(scope) + disallowed_scopes.remove(scope) + expanded_allowed = expand_scopes(allowed_scopes, user, client) + + elif allowed_intersection: + # some scopes get through, but not all, + # allow the subset + allowed_scopes |= allowed_intersection + expanded_allowed = expand_scopes(allowed_scopes, user, client) + # choice: report that the requested scope wasn't _fully_ granted (current behavior) + # or report the exact (likely too detailed) set of not granted scopes (below) + # disallowed_scopes.remove(scope) + # disallowed_scopes |= expanded_disallowed.difference(allowed_intersection) + else: + # no new scopes granted, original check was right + pass + return (allowed_scopes, disallowed_scopes) + + +def _needs_scope_expansion(filter_, filter_value, sub_scope): + """ + Check if there is a requirements to expand the `group` scope to individual `user` scopes. 
+ Assumptions: + filter_ != Scope.ALL + """ + if not (filter_ == 'user' and 'group' in sub_scope): + return False + if 'user' in sub_scope: + return filter_value not in sub_scope['user'] + else: + return True + + +def _check_user_in_expanded_scope(handler, user_name, scope_group_names): + """Check if username is present in set of allowed groups""" + user = handler.find_user(user_name) + if user is None: + raise web.HTTPError(404, "No access to resources or resources not found") + group_names = {group.name for group in user.groups} + return bool(set(scope_group_names) & group_names) + + +def _check_scope_access(api_handler, req_scope, **kwargs): + """Check if scopes satisfy requirements + Returns True for (potentially restricted) access, False for refused access + """ + # Parse user name and server name together + try: + api_name = api_handler.request.path + except AttributeError: + api_name = type(api_handler).__name__ + if 'user' in kwargs and 'server' in kwargs: + kwargs['server'] = "{}/{}".format(kwargs['user'], kwargs['server']) + if req_scope not in api_handler.parsed_scopes: + app_log.debug("No access to %s via %s", api_name, req_scope) + return False + if api_handler.parsed_scopes[req_scope] == Scope.ALL: + app_log.debug("Unrestricted access to %s via %s", api_name, req_scope) + return True + # Apply filters + sub_scope = api_handler.parsed_scopes[req_scope] + if not kwargs: + app_log.debug( + "Client has restricted access to %s via %s. 
Internal filtering may apply", + api_name, + req_scope, + ) + return True + for (filter_, filter_value) in kwargs.items(): + if filter_ in sub_scope and filter_value in sub_scope[filter_]: + app_log.debug("Argument-based access to %s via %s", api_name, req_scope) + return True + if _needs_scope_expansion(filter_, filter_value, sub_scope): + group_names = sub_scope['group'] + if _check_user_in_expanded_scope(api_handler, filter_value, group_names): + app_log.debug("Restricted client access supported with group expansion") + return True + app_log.debug( + "Client access refused; filters do not match API endpoint %s request" % api_name + ) + raise web.HTTPError(404, "No access to resources or resources not found") + + +def _check_scopes_exist(scopes, who_for=None): + """Check if provided scopes exist + + Arguments: + scopes (list): list of scopes to check + + Raises KeyError if scope does not exist + """ + + allowed_scopes = set(scope_definitions.keys()) + filter_prefixes = ('!user=', '!service=', '!group=', '!server=') + exact_filters = {"!user", "!service", "!server"} + + if who_for: + log_for = f"for {who_for}" + else: + log_for = "" + + for scope in scopes: + scopename, _, filter_ = scope.partition('!') + if scopename not in allowed_scopes: + if scopename == "all": + raise KeyError("Draft scope 'all' is now called 'inherit'") + raise KeyError(f"Scope '{scope}' {log_for} does not exist") + if filter_: + full_filter = f"!{filter_}" + if full_filter not in exact_filters and not full_filter.startswith( + filter_prefixes + ): + raise KeyError( + f"Scope filter {filter_} '{full_filter}' in scope '{scope}' {log_for} does not exist" + ) + + +def _check_token_scopes(scopes, owner, oauth_client): + """Check that scopes to be assigned to a token + are in fact + + Arguments: + scopes: raw or expanded scopes + owner: orm.User or orm.Service + + raises: + ValueError: if requested scopes exceed owner's assigned scopes + """ + scopes = set(scopes) + if 
scopes.issubset({"inherit"}): + # nothing to check for simple 'inherit' scopes + return + scopes.discard("inherit") + # common short circuit + token_scopes = expand_scopes(scopes, owner=owner, oauth_client=oauth_client) + + if not token_scopes: + return + + owner_scopes = get_scopes_for(owner) + intersection = _intersect_expanded_scopes( + token_scopes, + owner_scopes, + db=sa.inspect(owner).session, + ) + excess_scopes = token_scopes - intersection + + if excess_scopes: + raise ValueError( + f"Not assigning requested scopes {','.join(excess_scopes)} not held by {owner.__class__.__name__} {owner.name}" + ) + + +@lru_cache_key(frozenset) +def parse_scopes(scope_list): + """ + Parses scopes and filters in something akin to JSON style + + For instance, scope list ["users", "groups!group=foo", "servers!server=user/bar", "servers!server=user/baz"] + would lead to scope model + { + "users":scope.ALL, + "admin:users":{ + "user":[ + "alice" + ] + }, + "servers":{ + "server":[ + "user/bar", + "user/baz" + ] + } + } + """ + parsed_scopes = {} + for scope in scope_list: + base_scope, _, filter_ = scope.partition('!') + if not filter_: + parsed_scopes[base_scope] = Scope.ALL + elif base_scope not in parsed_scopes: + parsed_scopes[base_scope] = {} + + if parsed_scopes[base_scope] != Scope.ALL: + key, _, value = filter_.partition('=') + if key not in parsed_scopes[base_scope]: + parsed_scopes[base_scope][key] = {value} + else: + parsed_scopes[base_scope][key].add(value) + # return immutable FrozenDict because the result is cached + return FrozenDict(parsed_scopes) + + +@lru_cache_key(FrozenDict) +def unparse_scopes(parsed_scopes): + """Turn a parsed_scopes dictionary back into a expanded scopes set""" + expanded_scopes = set() + for base, filters in parsed_scopes.items(): + if filters == Scope.ALL: + expanded_scopes.add(base) + else: + for entity, names_list in filters.items(): + for name in names_list: + expanded_scopes.add(f'{base}!{entity}={name}') + # return immutable 
frozenset because the result is cached + return frozenset(expanded_scopes) + + +@lru_cache_key(frozenset) +def reduce_scopes(expanded_scopes): + """Reduce expanded scopes to minimal set + + Eliminates overlapping scopes, such as access:services and access:services!service=x + """ + # unparse_scopes already returns a frozenset + return unparse_scopes(parse_scopes(expanded_scopes)) + + +def needs_scope(*scopes): + """Decorator to restrict access to users or services with the required scope""" + + for scope in scopes: + if scope not in scope_definitions: + raise ValueError(f"Scope {scope} is not a valid scope") + + def scope_decorator(func): + @functools.wraps(func) + def _auth_func(self, *args, **kwargs): + sig = inspect.signature(func) + bound_sig = sig.bind(self, *args, **kwargs) + bound_sig.apply_defaults() + # Load scopes in case they haven't been loaded yet + if not hasattr(self, 'expanded_scopes'): + self.expanded_scopes = {} + self.parsed_scopes = {} + + s_kwargs = {} + for resource in {'user', 'server', 'group', 'service'}: + resource_name = resource + '_name' + if resource_name in bound_sig.arguments: + resource_value = bound_sig.arguments[resource_name] + s_kwargs[resource] = resource_value + for scope in scopes: + app_log.debug("Checking access via scope %s", scope) + has_access = _check_scope_access(self, scope, **s_kwargs) + if has_access: + return func(self, *args, **kwargs) + try: + end_point = self.request.path + except AttributeError: + end_point = self.__name__ + app_log.warning( + "Not authorizing access to {}. 
Requires any of [{}], not derived from scopes [{}]".format( + end_point, ", ".join(scopes), ", ".join(self.expanded_scopes) + ) + ) + raise web.HTTPError( + 403, + "Action is not authorized with current scopes; requires any of [{}]".format( + ", ".join(scopes) + ), + ) + + return _auth_func + + return scope_decorator + + +def _identify_key(obj=None): + if obj is None: + return None + else: + return (type(obj).__name__, obj.name) + + +@lru_cache_key(_identify_key) +def identify_scopes(obj=None): + """Return 'identify' scopes for an orm object + + Arguments: + obj (optional): orm.User or orm.Service + If not specified, 'raw' scopes for identifying the current user are returned, + which may need to be expanded, later. + + Returns: + identify scopes (set): set of scopes needed for 'identify' endpoints + """ + if obj is None: + return frozenset(f"read:users:{field}!user" for field in {"name", "groups"}) + elif isinstance(obj, orm.User): + return frozenset( + f"read:users:{field}!user={obj.name}" for field in {"name", "groups"} + ) + elif isinstance(obj, orm.Service): + return frozenset( + f"read:services:{field}!service={obj.name}" for field in {"name"} + ) + else: + raise TypeError(f"Expected orm.User or orm.Service, got {obj!r}") + + +@lru_cache_key(lambda oauth_client: oauth_client.identifier) +def access_scopes(oauth_client): + """Return scope(s) required to access an oauth client""" + scopes = set() + if oauth_client.identifier == "jupyterhub": + return frozenset() + spawner = oauth_client.spawner + if spawner: + scopes.add(f"access:servers!server={spawner.user.name}/{spawner.name}") + else: + service = oauth_client.service + if service: + scopes.add(f"access:services!service={service.name}") + else: + app_log.warning( + f"OAuth client {oauth_client} has no associated service or spawner!" 
+ ) + return frozenset(scopes) + + +def _check_scope_key(sub_scope, orm_resource, kind): + """Cache key function for check_scope_filter""" + if kind == 'server': + resource_key = (orm_resource.user.name, orm_resource.name) + else: + resource_key = orm_resource.name + return (sub_scope, resource_key, kind) + + +@lru_cache_key(_check_scope_key) +def check_scope_filter(sub_scope, orm_resource, kind): + """Return whether a sub_scope filter applies to a given resource. + + param sub_scope: parsed_scopes filter (i.e. dict or Scope.ALL) + param orm_resource: User or Service or Group or Spawner + param kind: 'user' or 'service' or 'group' or 'server'. + + Returns True or False + """ + if sub_scope is Scope.ALL: + return True + elif kind in sub_scope and orm_resource.name in sub_scope[kind]: + return True + + if kind == 'server': + server_format = f"{orm_resource.user.name}/{orm_resource.name}" + if server_format in sub_scope.get(kind, []): + return True + # Fall back on checking if we have user access + if 'user' in sub_scope and orm_resource.user.name in sub_scope['user']: + return True + # Fall back on checking if we have group access for this user + orm_resource = orm_resource.user + kind = 'user' + + if kind == 'user' and 'group' in sub_scope: + group_names = {group.name for group in orm_resource.groups} + user_in_group = bool(group_names & set(sub_scope['group'])) + # cannot cache if we needed to lookup groups in db + return DoNotCache(user_in_group) + return False + + +def describe_parsed_scopes(parsed_scopes, username=None): + """Return list of descriptions of parsed scopes + + Highly detailed, often redundant descriptions + """ + descriptions = [] + for scope, filters in parsed_scopes.items(): + base_text = scope_definitions[scope]["description"] + if filters == Scope.ALL: + # no filter + filter_text = "" + else: + filter_chunks = [] + for kind, names in filters.items(): + if kind == 'user' and names == {username}: + filter_chunks.append("only you") + else: + 
kind_text = kind + if kind == 'group': + kind_text = "users in group" + if len(names) == 1: + filter_chunks.append(f"{kind}: {list(names)[0]}") + else: + filter_chunks.append(f"{kind}s: {', '.join(names)}") + filter_text = "; or ".join(filter_chunks) + descriptions.append( + { + "scope": scope, + "description": scope_definitions[scope]["description"], + "filter": filter_text, + } + ) + return descriptions + + +@lru_cache_key(lambda raw_scopes, username=None: (frozenset(raw_scopes), username)) +def describe_raw_scopes(raw_scopes, username=None): + """Return list of descriptions of raw scopes + + A much shorter list than describe_parsed_scopes + """ + descriptions = [] + for raw_scope in raw_scopes: + scope, _, filter_ = raw_scope.partition("!") + base_text = scope_definitions[scope]["description"] + if not filter_: + # no filter + filter_text = "" + elif filter_ == "user": + filter_text = "only you" + else: + kind, _, name = filter_.partition("=") + if kind == "user" and name == username: + filter_text = "only you" + else: + kind_text = kind + if kind == 'group': + kind_text = "users in group" + filter_text = f"{kind_text} {name}" + descriptions.append( + { + "scope": scope, + "description": scope_definitions[scope]["description"], + "filter": filter_text, + } + ) + # make sure we return immutable from a cached function + return tuple(descriptions) + + +# regex for custom scope +# for-humans description below +# note: scope description duplicated in docs/source/rbac/scopes.md +# update docs when making changes here +_custom_scope_pattern = re.compile(r"^custom:[a-z0-9][a-z0-9_\-\*:]+[a-z0-9_\*]$") + +# custom scope pattern description +# used in docstring below and error message when scopes don't match _custom_scope_pattern +_custom_scope_description = """ +Custom scopes must start with `custom:` +and contain only lowercase ascii letters, numbers, hyphen, underscore, colon, and asterisk (-_:*). +The part after `custom:` must start with a letter or number. 
+Scopes may not end with a hyphen or colon. +""" + + +def define_custom_scopes(scopes): + """Define custom scopes + + Adds custom scopes to the scope_definitions dict. + + Scopes must start with `custom:`. + It is recommended to name custom scopes with a pattern like:: + + custom:$your-project:$action:$resource + + e.g.:: + + custom:jupyter_server:read:contents + + That makes them easy to parse and avoids collisions across projects. + + `scopes` must have at least one scope definition, + and each scope definition must have a `description`, + which will be displayed on the oauth authorization page, + and _may_ have a `subscopes` list of other scopes if having one scope + should imply having other, more specific scopes. + + Args: + + scopes: dict + A dictionary of scope definitions. + The keys are the scopes, + while the values are dictionaries with at least a `description` field, + and optional `subscopes` field. + %s + Examples:: + + define_custom_scopes( + { + "custom:jupyter_server:read:contents": { + "description": "read-only access to files in a Jupyter server", + }, + "custom:jupyter_server:read": { + "description": "read-only access to a Jupyter server", + "subscopes": [ + "custom:jupyter_server:read:contents", + "custom:jupyter_server:read:kernels", + "...", + }, + } + ) + """ % indent( + _custom_scope_description, " " * 8 + ) + for scope, scope_definition in scopes.items(): + if scope in scope_definitions and scope_definitions[scope] != scope_definition: + raise ValueError( + f"Cannot redefine scope {scope}={scope_definition}. Already have {scope}={scope_definitions[scope]}" + ) + if not _custom_scope_pattern.match(scope): + # note: keep this description in sync with docstring above + raise ValueError( + f"Invalid scope name: {scope!r}.\n{_custom_scope_description}" + " and contain only lowercase ascii letters, numbers, hyphen, underscore, colon, and asterisk." + " The part after `custom:` must start with a letter or number." 
+ " Scopes may not end with a hyphen or colon." + ) + if "description" not in scope_definition: + raise ValueError( + f"scope {scope}={scope_definition} missing key 'description'" + ) + if "subscopes" in scope_definition: + subscopes = scope_definition["subscopes"] + if not isinstance(subscopes, list) or not all( + isinstance(s, str) for s in subscopes + ): + raise ValueError( + f"subscopes must be a list of scope strings, got {subscopes!r}" + ) + for subscope in subscopes: + if subscope not in scopes: + if subscope in scope_definitions: + raise ValueError( + f"non-custom subscope {subscope} in {scope}={scope_definition} is not allowed." + f" Custom scopes may only have custom subscopes." + f" Roles should be used to assign multiple scopes together." + ) + raise ValueError( + f"subscope {subscope} in {scope}={scope_definition} not found. All scopes must be defined." + ) + + extra_keys = set(scope_definition.keys()).difference( + ["description", "subscopes"] + ) + if extra_keys: + warnings.warn( + f"Ignoring unrecognized key(s) {', '.join(extra_keys)!r} in {scope}={scope_definition}", + UserWarning, + stacklevel=2, + ) + app_log.info(f"Defining custom scope {scope}") + # deferred evaluation for debug-logging + app_log.debug("Defining custom scope %s=%s", scope, scope_definition) + scope_definitions[scope] = scope_definition diff --git a/jupyterhub/services/auth.py b/jupyterhub/services/auth.py index 9c9028a6..24106ee2 100644 --- a/jupyterhub/services/auth.py +++ b/jupyterhub/services/auth.py @@ -1,15 +1,31 @@ """Authenticating services with JupyterHub. -Cookies are sent to the Hub for verification. The Hub replies with a JSON -model describing the authenticated user. +Tokens are sent to the Hub for verification. +The Hub replies with a JSON model describing the authenticated user. -``HubAuth`` can be used in any application, even outside tornado. 
+This contains two levels of authentication: -``HubAuthenticated`` is a mixin class for tornado handlers that should -authenticate with the Hub. +- :class:`HubOAuth` - Use OAuth 2 to authenticate browsers with the Hub. + This should be used for any service that should respond to browser requests + (i.e. most services). + +- :class:`HubAuth` - token-only authentication, for a service that only need to handle token-authenticated API requests + +The ``Auth`` classes (:class:`HubAuth`, :class:`HubOAuth`) +can be used in any application, even outside tornado. +They contain reference implementations of talking to the Hub API +to resolve a token to a user. + +The ``Authenticated`` classes (:class:`HubAuthenticated`, :class:`HubOAuthenticated`) +are mixins for tornado handlers that should authenticate with the Hub. + +If you are using OAuth, you will also need to register an oauth callback handler to complete the oauth process. +A tornado implementation is provided in :class:`HubOAuthCallbackHandler`. 
""" +import asyncio import base64 +import hashlib import json import os import random @@ -19,25 +35,64 @@ import string import time import uuid import warnings -from urllib.parse import quote +from http import HTTPStatus +from unittest import mock from urllib.parse import urlencode -import requests -from tornado.gen import coroutine +from tornado.httpclient import AsyncHTTPClient, HTTPRequest from tornado.httputil import url_concat from tornado.log import app_log -from tornado.web import HTTPError -from tornado.web import RequestHandler -from traitlets import default -from traitlets import Dict -from traitlets import Instance -from traitlets import Integer -from traitlets import observe -from traitlets import Unicode -from traitlets import validate +from tornado.web import HTTPError, RequestHandler +from traitlets import ( + Any, + Dict, + Instance, + Integer, + Set, + Unicode, + default, + observe, + validate, +) from traitlets.config import SingletonConfigurable -from ..utils import url_path_join +from ..scopes import _intersect_expanded_scopes +from ..utils import get_browser_protocol, url_path_join + + +def check_scopes(required_scopes, scopes): + """Check that required_scope(s) are in scopes + + Returns the subset of scopes matching required_scopes, + which is truthy if any scopes match any required scopes. + + Correctly resolves scope filters *except* for groups -> user, + e.g. require: access:server!user=x, have: access:server!group=y + will not grant access to user x even if user x is in group y. + + Parameters + ---------- + + required_scopes: set + The set of scopes required. + scopes: set + The set (or list) of scopes to check against required_scopes + + Returns + ------- + relevant_scopes: set + The set of scopes in required_scopes that are present in scopes, + which is truthy if any required scopes are present, + and falsy otherwise. 
+ """ + if isinstance(required_scopes, str): + required_scopes = {required_scopes} + + intersection = _intersect_expanded_scopes(required_scopes, scopes) + # re-intersect with required_scopes in case the intersection + # applies stricter filters than required_scopes declares + # e.g. required_scopes = {'read:users'} and intersection has only {'read:users!user=x'} + return set(required_scopes) & intersection class _ExpiringDict(dict): @@ -113,9 +168,15 @@ class HubAuth(SingletonConfigurable): This can be used by any application. + Use this base class only for direct, token-authenticated applications + (web APIs). + For applications that support direct visits from browsers, + use HubOAuth to enable OAuth redirect-based authentication. + + If using tornado, use via :class:`HubAuthenticated` mixin. - If using manually, use the ``.user_for_cookie(cookie_value)`` method - to identify the user corresponding to a given cookie value. + If using manually, use the ``.user_for_token(token_value)`` method + to identify the user owning a given token. The following config must be set: @@ -129,15 +190,12 @@ class HubAuth(SingletonConfigurable): - cookie_cache_max_age: the number of seconds responses from the Hub should be cached. - login_url (the *public* ``/hub/login`` URL of the Hub). - - cookie_name: the name of the cookie I should be using, - if different from the default (unlikely). - """ hub_host = Unicode( '', help="""The public host of JupyterHub - + Only used if JupyterHub is spreading servers across subdomains. """, ).tag(config=True) @@ -172,6 +230,7 @@ class HubAuth(SingletonConfigurable): help="""The base API URL of the Hub. Typically `http://hub-ip:hub-port/hub/api` + Default: $JUPYTERHUB_API_URL """, ).tag(config=True) @@ -187,7 +246,10 @@ class HubAuth(SingletonConfigurable): os.getenv('JUPYTERHUB_API_TOKEN', ''), help="""API key for accessing Hub API. - Generate with `jupyterhub token [username]` or add to JupyterHub.services config. 
+ Default: $JUPYTERHUB_API_TOKEN + + Loaded from services configuration in jupyterhub_config. + Will be auto-generated for hub-managed services. """, ).tag(config=True) @@ -196,6 +258,7 @@ class HubAuth(SingletonConfigurable): help="""The URL prefix for the Hub itself. Typically /hub/ + Default: $JUPYTERHUB_BASE_URL """, ).tag(config=True) @@ -239,10 +302,6 @@ class HubAuth(SingletonConfigurable): """, ).tag(config=True) - cookie_name = Unicode( - 'jupyterhub-services', help="""The name of the cookie I should be looking for""" - ).tag(config=True) - cookie_options = Dict( help="""Additional options to pass when setting cookies. @@ -286,12 +345,83 @@ class HubAuth(SingletonConfigurable): def _default_cache(self): return _ExpiringDict(self.cache_max_age) - def _check_hub_authorization(self, url, cache_key=None, use_cache=True): + @property + def oauth_scopes(self): + warnings.warn( + "HubAuth.oauth_scopes is deprecated in JupyterHub 3.0. Use .access_scopes", + DeprecationWarning, + stacklevel=2, + ) + return self.access_scopes + + access_scopes = Set( + Unicode(), + help="""OAuth scopes to use for allowing access. + + Get from $JUPYTERHUB_OAUTH_ACCESS_SCOPES by default. 
+ """, + ).tag(config=True) + + @default('access_scopes') + def _default_scopes(self): + env_scopes = os.getenv('JUPYTERHUB_OAUTH_ACCESS_SCOPES') + if not env_scopes: + # deprecated name (since 3.0) + env_scopes = os.getenv('JUPYTERHUB_OAUTH_SCOPES') + if env_scopes: + return set(json.loads(env_scopes)) + # scopes not specified, use service name if defined + service_name = os.getenv("JUPYTERHUB_SERVICE_NAME") + if service_name: + return {f'access:services!service={service_name}'} + return set() + + _pool = Any(help="Thread pool for running async methods in the background") + + @default("_pool") + def _new_pool(self): + # start a single ThreadPool in the background + from concurrent.futures import ThreadPoolExecutor + + pool = ThreadPoolExecutor(1) + # create an event loop in the thread + pool.submit(self._setup_asyncio_thread).result() + return pool + + def _setup_asyncio_thread(self): + """Create asyncio loop + + To be called from the background thread, + so that any thread-local state is setup correctly + """ + self._thread_loop = asyncio.new_event_loop() + + def _synchronize(self, async_f, *args, **kwargs): + """Call an async method in our background thread""" + future = self._pool.submit( + lambda: self._thread_loop.run_until_complete(async_f(*args, **kwargs)) + ) + return future.result() + + def _call_coroutine(self, sync, async_f, *args, **kwargs): + """Call an async coroutine function, either blocking or returning an awaitable + + if not sync: calls function directly, returning awaitable + else: Block on a call in our background thread, return actual result + """ + if not sync: + return async_f(*args, **kwargs) + else: + return self._synchronize(async_f, *args, **kwargs) + + async def _check_hub_authorization( + self, url, api_token, cache_key=None, use_cache=True + ): """Identify a user with the Hub - + Args: url (str): The API URL to check the Hub for authorization - (e.g. http://127.0.0.1:8081/hub/api/authorizations/token/abc-def) + (e.g. 
http://127.0.0.1:8081/hub/api/user) cache_key (str): The key for checking the cache use_cache (bool): Specify use_cache=False to skip cached cookie values (default: True) @@ -309,7 +439,12 @@ class HubAuth(SingletonConfigurable): except KeyError: app_log.debug("HubAuth cache miss: %s", cache_key) - data = self._api_request('GET', url, allow_404=True) + data = await self._api_request( + 'GET', + url, + headers={"Authorization": "token " + api_token}, + allow_403=True, + ) if data is None: app_log.warning("No Hub user identified for request") else: @@ -319,18 +454,26 @@ class HubAuth(SingletonConfigurable): self.cache[cache_key] = data return data - def _api_request(self, method, url, **kwargs): + async def _api_request(self, method, url, **kwargs): """Make an API request""" - allow_404 = kwargs.pop('allow_404', False) + allow_403 = kwargs.pop('allow_403', False) headers = kwargs.setdefault('headers', {}) - headers.setdefault('Authorization', 'token %s' % self.api_token) - if "cert" not in kwargs and self.certfile and self.keyfile: - kwargs["cert"] = (self.certfile, self.keyfile) - if self.client_ca: - kwargs["verify"] = self.client_ca + headers.setdefault('Authorization', f'token {self.api_token}') + # translate requests args to tornado's + if self.certfile: + kwargs["client_cert"] = self.certfile + if self.keyfile: + kwargs["client_key"] = self.keyfile + if self.client_ca: + kwargs["ca_certs"] = self.client_ca + req = HTTPRequest( + url, + method=method, + **kwargs, + ) try: - r = requests.request(method, url, **kwargs) - except requests.ConnectionError as e: + r = await AsyncHTTPClient().fetch(req, raise_error=False) + except Exception as e: app_log.error("Error connecting to %s: %s", self.api_url, e) msg = "Failed to connect to Hub API at %r." 
% self.api_url msg += ( @@ -345,96 +488,111 @@ class HubAuth(SingletonConfigurable): raise HTTPError(500, msg) data = None - if r.status_code == 404 and allow_404: + try: + status = HTTPStatus(r.code) + except ValueError: + app_log.error( + f"Unknown error checking authorization with JupyterHub: {r.code}" + ) + app_log.error(r.body.decode("utf8", "replace")) + + response_text = r.body.decode("utf8", "replace") + if status.value == 403 and allow_403: pass - elif r.status_code == 403: + elif status.value == 403: app_log.error( "I don't have permission to check authorization with JupyterHub, my auth token may have expired: [%i] %s", - r.status_code, - r.reason, + status.value, + status.description, ) - app_log.error(r.text) + app_log.error(response_text) raise HTTPError( 500, "Permission failure checking authorization, I may need a new token" ) - elif r.status_code >= 500: + elif status.value >= 500: app_log.error( "Upstream failure verifying auth token: [%i] %s", - r.status_code, - r.reason, + status.value, + status.description, ) - app_log.error(r.text) + app_log.error(response_text) raise HTTPError(502, "Failed to check authorization (upstream problem)") - elif r.status_code >= 400: + elif status.value >= 400: app_log.warning( - "Failed to check authorization: [%i] %s", r.status_code, r.reason + "Failed to check authorization: [%i] %s", + status.value, + status.description, ) - app_log.warning(r.text) + app_log.warning(response_text) msg = "Failed to check authorization" - # pass on error_description from oauth failure + # pass on error from oauth failure try: - description = r.json().get("error_description") + response = json.loads(response_text) + # prefer more specific 'error_description', fallback to 'error' + description = response.get( + "error_description", response.get("error", "Unknown error") + ) except Exception: pass else: msg += ": " + description raise HTTPError(500, msg) else: - data = r.json() + data = json.loads(response_text) return data def 
user_for_cookie(self, encrypted_cookie, use_cache=True, session_id=''): - """Ask the Hub to identify the user for a given cookie. - - Args: - encrypted_cookie (str): the cookie value (not decrypted, the Hub will do that) - use_cache (bool): Specify use_cache=False to skip cached cookie values (default: True) - - Returns: - user_model (dict): The user model, if a user is identified, None if authentication fails. - - The 'name' field contains the user's name. - """ - return self._check_hub_authorization( - url=url_path_join( - self.api_url, - "authorizations/cookie", - self.cookie_name, - quote(encrypted_cookie, safe=''), - ), - cache_key='cookie:{}:{}'.format(session_id, encrypted_cookie), - use_cache=use_cache, + """Deprecated and removed. Use HubOAuth to authenticate browsers.""" + raise RuntimeError( + "Identifying users by shared cookie is removed in JupyterHub 2.0. Use OAuth tokens." ) - def user_for_token(self, token, use_cache=True, session_id=''): + def user_for_token(self, token, use_cache=True, session_id='', *, sync=True): """Ask the Hub to identify the user for a given token. + .. versionadded:: 2.4 + async support via `sync` argument. + Args: token (str): the token use_cache (bool): Specify use_cache=False to skip cached cookie values (default: True) + sync (bool): whether to block for the result or return an awaitable Returns: user_model (dict): The user model, if a user is identified, None if authentication fails. The 'name' field contains the user's name. 
""" - return self._check_hub_authorization( + return self._call_coroutine( + sync, + self._check_hub_authorization, url=url_path_join( - self.api_url, "authorizations/token", quote(token, safe='') + self.api_url, + "user", + ), + api_token=token, + cache_key='token:{}:{}'.format( + session_id, + hashlib.sha256(token.encode("utf8", "replace")).hexdigest(), ), - cache_key='token:{}:{}'.format(session_id, token), use_cache=use_cache, ) auth_header_name = 'Authorization' - auth_header_pat = re.compile('token\s+(.+)', re.IGNORECASE) + auth_header_pat = re.compile(r'(?:token|bearer)\s+(.+)', re.IGNORECASE) - def get_token(self, handler): - """Get the user token from a request + def get_token(self, handler, in_cookie=True): + """Get the token authenticating a request + + .. versionchanged:: 2.2 + in_cookie added. + Previously, only URL params and header were considered. + Pass `in_cookie=False` to preserve that behavior. - in URL parameters: ?token= - in header: Authorization: token + - in cookie (stored after oauth), if in_cookie is True """ user_token = handler.get_argument('token', '') @@ -445,14 +603,18 @@ class HubAuth(SingletonConfigurable): ) if m: user_token = m.group(1) + if not user_token and in_cookie: + user_token = self._get_token_cookie(handler) return user_token - def _get_user_cookie(self, handler): + def _get_token_cookie(self, handler): + """Base class doesn't store tokens in cookies""" + return None + + async def _get_user_cookie(self, handler): """Get the user model from a cookie""" - encrypted_cookie = handler.get_cookie(self.cookie_name) - session_id = self.get_session_id(handler) - if encrypted_cookie: - return self.user_for_cookie(encrypted_cookie, session_id=session_id) + # overridden in HubOAuth to store the access token after oauth + return None def get_session_id(self, handler): """Get the jupyterhub session id @@ -461,20 +623,26 @@ class HubAuth(SingletonConfigurable): """ return handler.get_cookie('jupyterhub-session-id', '') - def 
get_user(self, handler): + def get_user(self, handler, *, sync=True): """Get the Hub user for a given tornado handler. Checks cookie with the Hub to identify the current user. + .. versionadded:: 2.4 + async support via `sync` argument. + Args: handler (tornado.web.RequestHandler): the current request handler + sync (bool): whether to block for the result or return an awaitable Returns: user_model (dict): The user model, if a user is identified, None if authentication fails. The 'name' field contains the user's name. """ + return self._call_coroutine(sync, self._get_user, handler) + async def _get_user(self, handler): # only allow this to be called once per handler # avoids issues if an error is raised, # since this may be called again when trying to render the error page @@ -484,16 +652,20 @@ class HubAuth(SingletonConfigurable): handler._cached_hub_user = user_model = None session_id = self.get_session_id(handler) - # check token first - token = self.get_token(handler) + # check token first, ignoring cookies + # because some checks are different when a request + # is token-authenticated (CORS-related) + token = self.get_token(handler, in_cookie=False) if token: - user_model = self.user_for_token(token, session_id=session_id) + user_model = await self.user_for_token( + token, session_id=session_id, sync=False + ) if user_model: handler._token_authenticated = True # no token, check cookie if user_model is None: - user_model = self._get_user_cookie(handler) + user_model = await self._get_user_cookie(handler) # cache result handler._cached_hub_user = user_model @@ -501,10 +673,17 @@ class HubAuth(SingletonConfigurable): app_log.debug("No user identified") return user_model + def check_scopes(self, required_scopes, user): + """Check whether the user has required scope(s)""" + return check_scopes(required_scopes, set(user["scopes"])) + class HubOAuth(HubAuth): """HubAuth using OAuth for login instead of cookies set by the Hub. 
+ Use this class if you want users to be able to visit your service with a browser. + They will be authenticated via OAuth with the Hub. + .. versionadded: 0.8 """ @@ -538,12 +717,21 @@ class HubOAuth(HubAuth): """ return self.cookie_name + '-oauth-state' - def _get_user_cookie(self, handler): + def _get_token_cookie(self, handler): + """Base class doesn't store tokens in cookies""" token = handler.get_secure_cookie(self.cookie_name) + if token: + # decode cookie bytes + token = token.decode('ascii', 'replace') + return token + + async def _get_user_cookie(self, handler): + token = self._get_token_cookie(handler) session_id = self.get_session_id(handler) if token: - token = token.decode('ascii', 'replace') - user_model = self.user_for_token(token, session_id=session_id) + user_model = await self.user_for_token( + token, session_id=session_id, sync=False + ) if user_model is None: app_log.warning("Token stored in cookie may have expired") handler.clear_cookie(self.cookie_name) @@ -553,7 +741,7 @@ class HubOAuth(HubAuth): oauth_client_id = Unicode( help="""The OAuth client ID for this application. - + Use JUPYTERHUB_CLIENT_ID by default. """ ).tag(config=True) @@ -570,7 +758,7 @@ class HubOAuth(HubAuth): oauth_redirect_uri = Unicode( help="""OAuth redirect URI - + Should generally be /base_url/oauth_callback """ ).tag(config=True) @@ -598,17 +786,20 @@ class HubOAuth(HubAuth): def _token_url(self): return url_path_join(self.api_url, 'oauth2/token') - def token_for_code(self, code): + def token_for_code(self, code, *, sync=True): """Get token for OAuth temporary code - + This is the last step of OAuth login. Should be called in OAuth Callback handler. 
- + Args: code (str): oauth code for finishing OAuth login Returns: token (str): JupyterHub API Token """ + return self._call_coroutine(sync, self._token_for_code, code) + + async def _token_for_code(self, code): # GitHub specifies a POST request yet requires URL parameters params = dict( client_id=self.oauth_client_id, @@ -618,10 +809,10 @@ class HubOAuth(HubAuth): redirect_uri=self.oauth_redirect_uri, ) - token_reply = self._api_request( + token_reply = await self._api_request( 'POST', self.oauth_token_url, - data=urlencode(params).encode('utf8'), + body=urlencode(params).encode('utf8'), headers={'Content-Type': 'application/x-www-form-urlencoded'}, ) @@ -664,12 +855,15 @@ class HubOAuth(HubAuth): Parameters ---------- - handler (RequestHandler): A tornado RequestHandler - next_url (str): The page to redirect to on successful login + handler : RequestHandler + A tornado RequestHandler + next_url : str + The page to redirect to on successful login Returns ------- - state (str): The OAuth state that has been stored in the cookie (url safe, base64-encoded) + state : str + The OAuth state that has been stored in the cookie (url safe, base64-encoded) """ extra_state = {} if handler.get_cookie(self.state_cookie_name): @@ -680,7 +874,7 @@ class HubOAuth(HubAuth): cookie_suffix = ''.join( random.choice(string.ascii_letters) for i in range(8) ) - cookie_name = '{}-{}'.format(self.state_cookie_name, cookie_suffix) + cookie_name = f'{self.state_cookie_name}-{cookie_suffix}' extra_state['cookie_name'] = cookie_name else: cookie_name = self.state_cookie_name @@ -694,7 +888,7 @@ class HubOAuth(HubAuth): # OAuth that doesn't complete shouldn't linger too long. 
'max_age': 600, } - if handler.request.protocol == 'https': + if get_browser_protocol(handler.request) == 'https': kwargs['secure'] = True # load user cookie overrides kwargs.update(self.cookie_options) @@ -706,7 +900,8 @@ class HubOAuth(HubAuth): Parameters ---------- - next_url (str): The URL of the page to redirect to on successful login. + next_url : str + The URL of the page to redirect to on successful login. Returns ------- @@ -733,7 +928,7 @@ class HubOAuth(HubAuth): def set_cookie(self, handler, access_token): """Set a cookie recording OAuth result""" kwargs = {'path': self.base_url, 'httponly': True} - if handler.request.protocol == 'https': + if get_browser_protocol(handler.request) == 'https': kwargs['secure'] = True # load user cookie overrides kwargs.update(self.cookie_options) @@ -764,22 +959,36 @@ class UserNotAllowed(Exception): ) -class HubAuthenticated(object): +class HubAuthenticated: """Mixin for tornado handlers that are authenticated with JupyterHub A handler that mixes this in must have the following attributes/properties: - .hub_auth: A HubAuth instance + - .hub_scopes: A set of JupyterHub 2.0 OAuth scopes to allow. + Default comes from .hub_auth.oauth_access_scopes, + which in turn is set by $JUPYTERHUB_OAUTH_ACCESS_SCOPES + Default values include: + - 'access:services', 'access:services!service={service_name}' for services + - 'access:servers', 'access:servers!user={user}', + 'access:servers!server={user}/{server_name}' + for single-user servers + + If hub_scopes is not used (e.g. JupyterHub 1.x), + these additional properties can be used: + + - .allow_admin: If True, allow any admin user. + Default: False. - .hub_users: A set of usernames to allow. If left unspecified or None, username will not be checked. - .hub_groups: A set of group names to allow. If left unspecified or None, groups will not be checked. + - .allow_admin: Is admin user access allowed or not + If left unspecified or False, admin user won't have an access. 
Examples:: class MyHandler(HubAuthenticated, web.RequestHandler): - hub_users = {'inara', 'mal'} - def initialize(self, hub_auth): self.hub_auth = hub_auth @@ -789,20 +998,28 @@ class HubAuthenticated(object): """ + # deprecated, pre-2.0 allow sets hub_services = None # set of allowed services hub_users = None # set of allowed users hub_groups = None # set of allowed groups allow_admin = False # allow any admin user access + @property + def hub_scopes(self): + """Set of allowed scopes (use hub_auth.access_scopes by default)""" + return self.hub_auth.access_scopes or None + @property def allow_all(self): """Property indicating that all successfully identified user or service should be allowed. """ return ( - self.hub_services is None + self.hub_scopes is None + and self.hub_services is None and self.hub_users is None and self.hub_groups is None + and not self.allow_admin ) # self.hub_auth must be a HubAuth instance. @@ -829,30 +1046,59 @@ class HubAuthenticated(object): # add state argument to OAuth url state = self.hub_auth.set_state_cookie(self, next_url=self.request.uri) login_url = url_concat(login_url, {'state': state}) - app_log.debug("Redirecting to login url: %s", login_url) - return login_url + # temporary override at setting level, + # to allow any subclass overrides of get_login_url to preserve their effect + # for example, APIHandler raises 403 to prevent redirects + with mock.patch.dict(self.application.settings, {"login_url": login_url}): + app_log.debug("Redirecting to login url: %s", login_url) + return super().get_login_url() def check_hub_user(self, model): """Check whether Hub-authenticated user or service should be allowed. Returns the input if the user should be allowed, None otherwise. - Override if you want to check anything other than the username's presence in hub_users list. + Override for custom logic in authenticating users. 
Args: - model (dict): the user or service model returned from :class:`HubAuth` + user_model (dict): the user or service model returned from :class:`HubAuth` Returns: user_model (dict): The user model if the user should be allowed, None otherwise. """ name = model['name'] kind = model.setdefault('kind', 'user') + if self.allow_all: app_log.debug( "Allowing Hub %s %s (all Hub users and services allowed)", kind, name ) return model + if self.hub_scopes: + scopes = self.hub_auth.check_scopes(self.hub_scopes, model) + if scopes: + app_log.debug( + f"Allowing Hub {kind} {name} based on oauth scopes {scopes}" + ) + return model + else: + app_log.warning( + f"Not allowing Hub {kind} {name}: missing required scopes" + ) + app_log.debug( + f"Hub {kind} {name} needs scope(s) {self.hub_scopes}, has scope(s) {model['scopes']}" + ) + # if hub_scopes are used, *only* hub_scopes are used + # note: this means successful authentication, but insufficient permission + raise UserNotAllowed(model) + + # proceed with the pre-2.0 way if hub_scopes is not set + warnings.warn( + "hub_scopes ($JUPYTERHUB not set, proceeding with pre-2.0 authentication", + DeprecationWarning, + ) + if self.allow_admin and model.get('admin', False): app_log.debug("Allowing Hub admin %s", name) return model @@ -860,15 +1106,15 @@ class HubAuthenticated(object): if kind == 'service': # it's a service, check hub_services if self.hub_services and name in self.hub_services: - app_log.debug("Allowing whitelisted Hub service %s", name) + app_log.debug("Allowing Hub service %s", name) return model else: app_log.warning("Not allowing Hub service %s", name) raise UserNotAllowed(model) if self.hub_users and name in self.hub_users: - # user in whitelist - app_log.debug("Allowing whitelisted Hub user %s", name) + # user in allowed list + app_log.debug("Allowing Hub user %s", name) return model elif self.hub_groups and set(model['groups']).intersection(self.hub_groups): allowed_groups = 
set(model['groups']).intersection(self.hub_groups) @@ -877,7 +1123,7 @@ class HubAuthenticated(object): name, ','.join(sorted(allowed_groups)), ) - # group in whitelist + # group in allowed list return model else: app_log.warning("Not allowing Hub user %s", name) @@ -915,8 +1161,8 @@ class HubAuthenticated(object): self._hub_auth_user_cache = None raise - # store tokens passed via url or header in a cookie for future requests - url_token = self.hub_auth.get_token(self) + # store ?token=... tokens passed via url in a cookie for future requests + url_token = self.get_argument('token', '') if ( user_model and url_token @@ -946,8 +1192,7 @@ class HubOAuthCallbackHandler(HubOAuthenticated, RequestHandler): .. versionadded: 0.8 """ - @coroutine - def get(self): + async def get(self): error = self.get_argument("error", False) if error: msg = self.get_argument("error_description", error) @@ -972,10 +1217,12 @@ class HubOAuthCallbackHandler(HubOAuthenticated, RequestHandler): app_log.warning("oauth state %r != %r", arg_state, cookie_state) raise HTTPError(403, "oauth state does not match. Try logging in again.") next_url = self.hub_auth.get_next_url(cookie_state) - # TODO: make async (in a Thread?) 
- token = self.hub_auth.token_for_code(code) + + token = await self.hub_auth.token_for_code(code, sync=False) session_id = self.hub_auth.get_session_id(self) - user_model = self.hub_auth.user_for_token(token, session_id=session_id) + user_model = await self.hub_auth.user_for_token( + token, session_id=session_id, sync=False + ) if user_model is None: raise HTTPError(500, "oauth callback failed to identify a user") app_log.info("Logged-in user %s", user_model) diff --git a/jupyterhub/services/service.py b/jupyterhub/services/service.py index 9da030b7..c750e9dc 100644 --- a/jupyterhub/services/service.py +++ b/jupyterhub/services/service.py @@ -38,25 +38,29 @@ A hub-managed service with no URL:: } """ +import asyncio import copy import os import pipes import shutil from subprocess import Popen -from traitlets import Any -from traitlets import Bool -from traitlets import default -from traitlets import Dict -from traitlets import HasTraits -from traitlets import Instance -from traitlets import Unicode +from traitlets import ( + Any, + Bool, + Dict, + HasTraits, + Instance, + List, + Unicode, + default, + validate, +) from traitlets.config import LoggingConfigurable from .. 
import orm from ..objects import Server -from ..spawner import LocalProcessSpawner -from ..spawner import set_user_setuid +from ..spawner import LocalProcessSpawner, set_user_setuid from ..traitlets import Command from ..utils import url_path_join @@ -96,6 +100,14 @@ class _ServiceSpawner(LocalProcessSpawner): cwd = Unicode() cmd = Command(minlen=0) + _service_name = Unicode() + + @default("oauth_access_scopes") + def _default_oauth_access_scopes(self): + return [ + "access:services", + f"access:services!service={self._service_name}", + ] def make_preexec_fn(self, name): if not name: @@ -147,11 +159,14 @@ class Service(LoggingConfigurable): - name: str the name of the service - - admin: bool(false) + - admin: bool(False) whether the service should have administrative privileges - url: str (None) The URL where the service is/should be. If specified, the service will be added to the proxy at /services/:name + - oauth_no_confirm: bool(False) + Whether this service should be allowed to complete oauth + with logged-in users without prompting for confirmation. If a service is to be managed by the Hub, it has a few extra options: @@ -184,6 +199,27 @@ class Service(LoggingConfigurable): If managed, will be passed as JUPYTERHUB_SERVICE_URL env. """ ).tag(input=True) + + oauth_roles = List( + help="""OAuth allowed roles. + + DEPRECATED in 3.0: use oauth_client_allowed_scopes + """ + ).tag(input=True) + + oauth_client_allowed_scopes = List( + help="""OAuth allowed scopes. + + This sets the maximum and default scopes + assigned to oauth tokens issued for this service + (i.e. tokens stored in browsers after authenticating with the server), + defining what actions the service can take on behalf of logged-in users. + + Default is an empty list, meaning minimal permissions to identify users, + no actions can be taken on their behalf. + """ + ).tag(input=True) + api_token = Unicode( help="""The API token to use for the service. 
@@ -197,6 +233,25 @@ class Service(LoggingConfigurable): """ ).tag(input=True) + display = Bool( + True, help="""Whether to list the service on the JupyterHub UI""" + ).tag(input=True) + + oauth_no_confirm = Bool( + False, + help="""Skip OAuth confirmation when users access this service. + + By default, when users authenticate with a service using JupyterHub, + they are prompted to confirm that they want to grant that service + access to their credentials. + Setting oauth_no_confirm=True skips the confirmation web page for this service. + Skipping the confirmation page is useful for admin-managed services that are considered part of the Hub + and shouldn't need extra prompts for login. + + .. versionadded: 1.1 + """, + ).tag(input=True) + # Managed service API: spawner = Any() @@ -244,6 +299,7 @@ class Service(LoggingConfigurable): base_url = Unicode() db = Any() orm = Any() + roles = Any() cookie_options = Dict() oauth_provider = Any() @@ -260,6 +316,15 @@ class Service(LoggingConfigurable): def _default_client_id(self): return 'service-%s' % self.name + @validate("oauth_client_id") + def _validate_client_id(self, proposal): + if not proposal.value.startswith("service-"): + raise ValueError( + f"service {self.name} has oauth_client_id='{proposal.value}'." + " Service oauth client ids must start with 'service-'" + ) + return proposal.value + oauth_redirect_uri = Unicode( help="""OAuth redirect URI for this service. 
@@ -282,6 +347,10 @@ class Service(LoggingConfigurable): """ return bool(self.server is not None or self.oauth_redirect_uri) + @property + def oauth_client(self): + return self.orm.oauth_client + @property def server(self): if self.orm.server: @@ -309,7 +378,7 @@ class Service(LoggingConfigurable): managed=' managed' if self.managed else '', ) - def start(self): + async def start(self): """Start a managed service""" if not self.managed: raise RuntimeError("Cannot start unmanaged service %s" % self) @@ -323,7 +392,7 @@ class Service(LoggingConfigurable): env['JUPYTERHUB_SERVICE_PREFIX'] = self.server.base_url hub = self.hub - if self.hub.ip in ('0.0.0.0', ''): + if self.hub.ip in ('', '0.0.0.0', '::'): # if the Hub is listening on all interfaces, # tell services to connect via localhost # since they are always local subprocesses @@ -336,6 +405,7 @@ class Service(LoggingConfigurable): environment=env, api_token=self.api_token, oauth_client_id=self.oauth_client_id, + _service_name=self.name, cookie_options=self.cookie_options, cwd=self.cwd, hub=self.hub, @@ -346,6 +416,8 @@ class Service(LoggingConfigurable): internal_certs_location=self.app.internal_certs_location, internal_trust_bundles=self.app.internal_trust_bundles, ) + if self.spawner.internal_ssl: + self.spawner.cert_paths = await self.spawner.create_certs() self.spawner.start() self.proc = self.spawner.proc self.spawner.add_poll_callback(self._proc_stopped) @@ -356,7 +428,8 @@ class Service(LoggingConfigurable): self.log.error( "Service %s exited with status %i", self.name, self.proc.returncode ) - self.start() + # schedule start + asyncio.ensure_future(self.start()) async def stop(self): """Stop a managed service""" diff --git a/jupyterhub/singleuser/__init__.py b/jupyterhub/singleuser/__init__.py new file mode 100644 index 00000000..f3188bc7 --- /dev/null +++ b/jupyterhub/singleuser/__init__.py @@ -0,0 +1,18 @@ +"""JupyterHub single-user server entrypoints + +Contains default notebook-app subclass and mixins 
+""" +from .app import SingleUserNotebookApp, main +from .mixins import HubAuthenticatedHandler, make_singleuser_app + +__all__ = [ + "SingleUserNotebookApp", + "main", + "HubAuthenticatedHandler", + "make_singleuser_app", +] + +# backward-compatibility +JupyterHubLoginHandler = SingleUserNotebookApp.login_handler_class +JupyterHubLogoutHandler = SingleUserNotebookApp.logout_handler_class +OAuthCallbackHandler = SingleUserNotebookApp.oauth_callback_handler_class diff --git a/jupyterhub/singleuser/__main__.py b/jupyterhub/singleuser/__main__.py new file mode 100644 index 00000000..18d6d1b4 --- /dev/null +++ b/jupyterhub/singleuser/__main__.py @@ -0,0 +1,4 @@ +from .app import main + +if __name__ == '__main__': + main() diff --git a/jupyterhub/singleuser/app.py b/jupyterhub/singleuser/app.py new file mode 100644 index 00000000..572c6776 --- /dev/null +++ b/jupyterhub/singleuser/app.py @@ -0,0 +1,68 @@ +"""Make a single-user app based on the environment: + +- $JUPYTERHUB_SINGLEUSER_APP, the base Application class, to be wrapped in JupyterHub authentication. + default: jupyter_server.serverapp.ServerApp + +.. versionchanged:: 2.0 + + Default app changed to launch `jupyter labhub`. + Use JUPYTERHUB_SINGLEUSER_APP=notebook.notebookapp.NotebookApp for the legacy 'classic' notebook server. 
+""" +import os + +from traitlets import import_item + +from .mixins import make_singleuser_app + +JUPYTERHUB_SINGLEUSER_APP = os.environ.get("JUPYTERHUB_SINGLEUSER_APP") + + +if JUPYTERHUB_SINGLEUSER_APP: + App = import_item(JUPYTERHUB_SINGLEUSER_APP) +else: + App = None + _import_error = None + for JUPYTERHUB_SINGLEUSER_APP in ( + "jupyter_server.serverapp.ServerApp", + "notebook.notebookapp.NotebookApp", + ): + try: + App = import_item(JUPYTERHUB_SINGLEUSER_APP) + except ImportError as e: + if _import_error is None: + _import_error = e + continue + else: + break + if App is None: + raise _import_error + + +SingleUserNotebookApp = make_singleuser_app(App) + + +def main(): + """Launch a jupyterhub single-user server""" + if not os.environ.get("JUPYTERHUB_SINGLEUSER_APP"): + # app not specified, launch jupyter-labhub by default, + # if jupyterlab is recent enough (3.1). + # This is a minimally extended ServerApp that does: + # 1. ensure lab extension is enabled, and + # 2. set default URL to `/lab` + import re + + _version_pat = re.compile(r"(\d+)\.(\d+)") + try: + import jupyterlab + from jupyterlab.labhubapp import SingleUserLabApp + + m = _version_pat.match(jupyterlab.__version__) + except Exception: + m = None + + if m is not None: + version_tuple = tuple(int(v) for v in m.groups()) + if version_tuple >= (3, 1): + return SingleUserLabApp.launch_instance() + + return SingleUserNotebookApp.launch_instance() diff --git a/jupyterhub/singleuser.py b/jupyterhub/singleuser/mixins.py similarity index 53% rename from jupyterhub/singleuser.py rename to jupyterhub/singleuser/mixins.py index 6fa4f350..dfad82ca 100755 --- a/jupyterhub/singleuser.py +++ b/jupyterhub/singleuser/mixins.py @@ -1,56 +1,60 @@ #!/usr/bin/env python -"""Extend regular notebook server to be aware of multiuser things.""" +"""Mixins to regular notebook server to add JupyterHub auth. 
+ +Meant to be compatible with jupyter_server and classic notebook + +Use make_singleuser_app to create a compatible Application class +with JupyterHub authentication mixins enabled. +""" # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. import asyncio import json +import logging import os import random -from datetime import datetime +import secrets +import sys +import warnings from datetime import timezone +from importlib import import_module from textwrap import dedent from urllib.parse import urlparse -from jinja2 import ChoiceLoader -from jinja2 import FunctionLoader -from tornado import gen +from jinja2 import ChoiceLoader, FunctionLoader from tornado import ioloop -from tornado.httpclient import AsyncHTTPClient -from tornado.httpclient import HTTPRequest -from tornado.web import HTTPError +from tornado.httpclient import AsyncHTTPClient, HTTPRequest from tornado.web import RequestHandler - -try: - import notebook -except ImportError: - raise ImportError("JupyterHub single-user server requires notebook >= 4.0") - from traitlets import ( Any, Bool, Bytes, - Integer, - Unicode, CUnicode, + Integer, + TraitError, + Unicode, default, + import_item, observe, validate, - TraitError, ) +from traitlets.config import Configurable -from notebook.notebookapp import ( - NotebookApp, - aliases as notebook_aliases, - flags as notebook_flags, -) -from notebook.auth.login import LoginHandler -from notebook.auth.logout import LogoutHandler -from notebook.base.handlers import IPythonHandler +from .._version import __version__, _check_version +from ..log import log_request +from ..services.auth import HubOAuth, HubOAuthCallbackHandler, HubOAuthenticated +from ..utils import exponential_backoff, isoformat, make_ssl_context, url_path_join -from ._version import __version__, _check_version -from .log import log_request -from .services.auth import HubOAuth, HubOAuthenticated, HubOAuthCallbackHandler -from .utils import isoformat, 
url_path_join, make_ssl_context, exponential_backoff + +def _bool_env(key): + """Cast an environment variable to bool + + 0, empty, or unset is False; All other values are True. + """ + if os.environ.get(key, "") in {"", "0"}: + return False + else: + return True # Authenticate requests with the Hub @@ -80,7 +84,7 @@ class HubAuthenticatedHandler(HubOAuthenticated): return set() -class JupyterHubLoginHandler(LoginHandler): +class JupyterHubLoginHandlerMixin: """LoginHandler that hooks up Hub authentication""" @staticmethod @@ -97,14 +101,31 @@ class JupyterHubLoginHandler(LoginHandler): @staticmethod def get_user(handler): - """alternative get_current_user to query the Hub""" - # patch in HubAuthenticated class for querying the Hub for cookie authentication - if HubAuthenticatedHandler not in handler.__class__.__bases__: + """alternative get_current_user to query the Hub + + Thus shouldn't be called anymore because HubAuthenticatedHandler + should have already overridden get_current_user(). + + Keep here to protect uncommon circumstance of multiple BaseHandlers + from missing auth. + + e.g. when multiple BaseHandler classes are used. + """ + if HubAuthenticatedHandler not in handler.__class__.mro(): + warnings.warn( + f"Expected to see HubAuthenticatedHandler in {handler.__class__}.mro()," + " patching in at call time. 
Hub authentication is still applied.", + RuntimeWarning, + stacklevel=2, + ) + # patch HubAuthenticated into the instance handler.__class__ = type( handler.__class__.__name__, (HubAuthenticatedHandler, handler.__class__), {}, ) + # patch into the class itself so this doesn't happen again for the same class + patch_base_handler(handler.__class__) return handler.get_current_user() @classmethod @@ -113,7 +134,7 @@ class JupyterHubLoginHandler(LoginHandler): return -class JupyterHubLogoutHandler(LogoutHandler): +class JupyterHubLogoutHandlerMixin: def get(self): self.settings['hub_auth'].clear_cookie(self) self.redirect( @@ -122,7 +143,7 @@ class JupyterHubLogoutHandler(LogoutHandler): ) -class OAuthCallbackHandler(HubOAuthCallbackHandler, IPythonHandler): +class OAuthCallbackHandlerMixin(HubOAuthCallbackHandler): """Mixin IPythonHandler to get the right error pages, etc.""" @property @@ -131,27 +152,21 @@ class OAuthCallbackHandler(HubOAuthCallbackHandler, IPythonHandler): # register new hub related command-line aliases -aliases = dict(notebook_aliases) -aliases.update( - { - 'user': 'SingleUserNotebookApp.user', - 'group': 'SingleUserNotebookApp.group', - 'cookie-name': 'HubAuth.cookie_name', - 'hub-prefix': 'SingleUserNotebookApp.hub_prefix', - 'hub-host': 'SingleUserNotebookApp.hub_host', - 'hub-api-url': 'SingleUserNotebookApp.hub_api_url', - 'base-url': 'SingleUserNotebookApp.base_url', - } -) -flags = dict(notebook_flags) -flags.update( - { - 'disable-user-config': ( - {'SingleUserNotebookApp': {'disable_user_config': True}}, - "Disable user-controlled configuration of the notebook server.", - ) - } -) +aliases = { + 'user': 'SingleUserNotebookApp.user', + 'group': 'SingleUserNotebookApp.group', + 'hub-prefix': 'SingleUserNotebookApp.hub_prefix', + 'hub-host': 'SingleUserNotebookApp.hub_host', + 'hub-api-url': 'SingleUserNotebookApp.hub_api_url', + 'base-url': 'SingleUserNotebookApp.base_url', +} +flags = { + 'disable-user-config': ( + {'SingleUserNotebookApp': 
{'disable_user_config': True}}, + "Disable user-controlled configuration of the notebook server.", + ) +} + page_template = """ {% extends "templates/page.html" %} @@ -161,6 +176,7 @@ page_template = """ Control Panel @@ -216,21 +232,29 @@ def _exclude_home(path_list): yield p -class SingleUserNotebookApp(NotebookApp): +class SingleUserNotebookAppMixin(Configurable): """A Subclass of the regular NotebookApp that is aware of the parent multiuser context.""" description = dedent( """ Single-user server for JupyterHub. Extends the Jupyter Notebook server. - Meant to be invoked by JupyterHub Spawners, and not directly. + Meant to be invoked by JupyterHub Spawners, not directly. """ ) examples = "" subcommands = {} version = __version__ - classes = NotebookApp.classes + [HubOAuth] + + # must be set in mixin subclass + # make_singleuser_app sets these + # aliases = aliases + # flags = flags + # login_handler_class = JupyterHubLoginHandler + # logout_handler_class = JupyterHubLogoutHandler + # oauth_callback_handler_class = OAuthCallbackHandler + # classes = NotebookApp.classes + [HubOAuth] # disable single-user app's localhost checking allow_remote_access = True @@ -243,7 +267,7 @@ class SingleUserNotebookApp(NotebookApp): cookie_secret = Bytes() def _cookie_secret_default(self): - return os.urandom(32) + return secrets.token_bytes(32) user = CUnicode().tag(config=True) group = CUnicode().tag(config=True) @@ -260,6 +284,10 @@ class SingleUserNotebookApp(NotebookApp): def _user_changed(self, change): self.log.name = change.new + @default("default_url") + def _default_url(self): + return os.environ.get("JUPYTERHUB_DEFAULT_URL", "/tree/") + hub_host = Unicode().tag(config=True) hub_prefix = Unicode('/hub/').tag(config=True) @@ -323,19 +351,15 @@ class SingleUserNotebookApp(NotebookApp): return url.hostname return '127.0.0.1' - aliases = aliases - flags = flags - - # disble some single-user configurables + # disable some single-user configurables token = '' open_browser = 
False quit_button = False trust_xheaders = True - login_handler_class = JupyterHubLoginHandler - logout_handler_class = JupyterHubLogoutHandler + port_retries = ( - 0 - ) # disable port-retries, since the Spawner will tell us what port to use + 0 # disable port-retries, since the Spawner will tell us what port to use + ) disable_user_config = Bool( False, @@ -346,7 +370,26 @@ class SingleUserNotebookApp(NotebookApp): """, ).tag(config=True) - @validate('notebook_dir') + @default("disable_user_config") + def _default_disable_user_config(self): + return _bool_env("JUPYTERHUB_DISABLE_USER_CONFIG") + + @default("root_dir") + def _default_root_dir(self): + if os.environ.get("JUPYTERHUB_ROOT_DIR"): + proposal = {"value": os.environ["JUPYTERHUB_ROOT_DIR"]} + # explicitly call validator, not called on default values + return self._notebook_dir_validate(proposal) + else: + return os.getcwd() + + # notebook_dir is used by the classic notebook server + # root_dir is the future in jupyter server + @default("notebook_dir") + def _default_notebook_dir(self): + return self._default_root_dir() + + @validate("notebook_dir", "root_dir") def _notebook_dir_validate(self, proposal): value = os.path.expanduser(proposal['value']) # Strip any trailing slashes @@ -362,6 +405,13 @@ class SingleUserNotebookApp(NotebookApp): raise TraitError("No such notebook dir: %r" % value) return value + @default('log_level') + def _log_level_default(self): + if _bool_env("JUPYTERHUB_DEBUG"): + return logging.DEBUG + else: + return logging.INFO + @default('log_datefmt') def _log_datefmt_default(self): """Exclude date from default date format""" @@ -381,11 +431,11 @@ class SingleUserNotebookApp(NotebookApp): # disable config-migration when user config is disabled return else: - super(SingleUserNotebookApp, self).migrate_config() + super().migrate_config() @property def config_file_paths(self): - path = super(SingleUserNotebookApp, self).config_file_paths + path = super().config_file_paths if 
self.disable_user_config: # filter out user-writable config dirs if user config is disabled @@ -394,7 +444,7 @@ class SingleUserNotebookApp(NotebookApp): @property def nbextensions_path(self): - path = super(SingleUserNotebookApp, self).nbextensions_path + path = super().nbextensions_path if self.disable_user_config: path = list(_exclude_home(path)) @@ -437,7 +487,7 @@ class SingleUserNotebookApp(NotebookApp): i, RETRIES, ) - await gen.sleep(min(2 ** i, 16)) + await asyncio.sleep(min(2**i, 16)) else: break else: @@ -490,7 +540,7 @@ class SingleUserNotebookApp(NotebookApp): # protect against mixed timezone comparisons if not last_activity.tzinfo: # assume naive timestamps are utc - self.log.warning("last activity is using naïve timestamps") + self.log.warning("last activity is using naive timestamps") last_activity = last_activity.replace(tzinfo=timezone.utc) if self._last_activity_sent and last_activity < self._last_activity_sent: @@ -505,7 +555,7 @@ class SingleUserNotebookApp(NotebookApp): url=self.hub_activity_url, method='POST', headers={ - "Authorization": "token {}".format(self.hub_auth.api_token), + "Authorization": f"token {self.hub_auth.api_token}", "Content-Type": "application/json", }, body=json.dumps( @@ -551,18 +601,77 @@ class SingleUserNotebookApp(NotebookApp): t = self.hub_activity_interval * (1 + 0.2 * (random.random() - 0.5)) await asyncio.sleep(t) + def _log_app_versions(self): + """Log application versions at startup + + Logs versions of jupyterhub and singleuser-server base versions (jupyterlab, jupyter_server, notebook) + """ + self.log.info(f"Starting jupyterhub single-user server version {__version__}") + + # don't log these package versions + seen = {"jupyterhub", "traitlets", "jupyter_core", "builtins"} + + for cls in self.__class__.mro(): + module_name = cls.__module__.partition(".")[0] + if module_name not in seen: + seen.add(module_name) + try: + mod = import_module(module_name) + mod_version = getattr(mod, "__version__") + except 
Exception: + mod_version = "" + self.log.info( + f"Extending {cls.__module__}.{cls.__name__} from {module_name} {mod_version}" + ) + def initialize(self, argv=None): # disable trash by default # this can be re-enabled by config self.config.FileContentsManager.delete_to_trash = False - return super().initialize(argv) + # load default-url env at higher priority than `@default`, + # which may have their own _defaults_ which should not override explicit default_url config + # via e.g. c.Spawner.default_url. Seen in jupyterlab's SingleUserLabApp. + default_url = os.environ.get("JUPYTERHUB_DEFAULT_URL") + if default_url: + self.config[self.__class__.__name__].default_url = default_url + self._log_app_versions() + # call our init_ioloop very early + # jupyter-server calls it too late, notebook doesn't define it yet + # only called in jupyter-server >= 1.9 + self.init_ioloop() + super().initialize(argv) + self.patch_templates() + + def init_ioloop(self): + """init_ioloop added in jupyter-server 1.9""" + # avoid deprecated access to current event loop + if getattr(self, "io_loop", None) is None: + try: + asyncio.get_running_loop() + except RuntimeError: + # not running, make our own loop + self.io_loop = ioloop.IOLoop(make_current=False) + else: + # running, use IOLoop.current + self.io_loop = ioloop.IOLoop.current() + + # Make our event loop the 'current' event loop. + # FIXME: this shouldn't be necessary, but it is. 
+ # notebookapp (<=6.4, at least), and + # jupyter-server (<=1.17.0, at least) still need the 'current' event loop to be defined + with warnings.catch_warnings(): + warnings.simplefilter("ignore") + self.io_loop.make_current() + + def init_httpserver(self): + self.io_loop.run_sync(super().init_httpserver) def start(self): self.log.info("Starting jupyterhub-singleuser server version %s", __version__) # start by hitting Hub to check version - ioloop.IOLoop.current().run_sync(self.check_hub_version) - ioloop.IOLoop.current().add_callback(self.keep_activity_updated) - super(SingleUserNotebookApp, self).start() + self.io_loop.run_sync(self.check_hub_version) + self.io_loop.add_callback(self.keep_activity_updated) + super().start() def init_hub_auth(self): api_token = None @@ -600,6 +709,7 @@ class SingleUserNotebookApp(NotebookApp): s['hub_prefix'] = self.hub_prefix s['hub_host'] = self.hub_host s['hub_auth'] = self.hub_auth + s['page_config_hook'] = self.page_config_hook csp_report_uri = s['csp_report_uri'] = self.hub_host + url_path_join( self.hub_prefix, 'security/csp-report' ) @@ -610,17 +720,33 @@ class SingleUserNotebookApp(NotebookApp): 'Content-Security-Policy', ';'.join(["frame-ancestors 'self'", "report-uri " + csp_report_uri]), ) - super(SingleUserNotebookApp, self).init_webapp() + super().init_webapp() # add OAuth callback self.web_app.add_handlers( r".*$", - [(urlparse(self.hub_auth.oauth_redirect_uri).path, OAuthCallbackHandler)], + [ + ( + urlparse(self.hub_auth.oauth_redirect_uri).path, + self.oauth_callback_handler_class, + ) + ], ) # apply X-JupyterHub-Version to *all* request handlers (even redirects) self.patch_default_headers() - self.patch_templates() + + def page_config_hook(self, handler, page_config): + """JupyterLab page config hook + + Adds JupyterHub info to page config. + + Places the JupyterHub API token in PageConfig.token. 
+ + Only has effect on jupyterlab_server >=2.9 + """ + page_config["token"] = self.hub_auth.get_token(handler) or "" + return page_config def patch_default_headers(self): if hasattr(RequestHandler, '_orig_set_default_headers'): @@ -641,24 +767,211 @@ class SingleUserNotebookApp(NotebookApp): ) self.jinja_template_vars['hub_host'] = self.hub_host self.jinja_template_vars['hub_prefix'] = self.hub_prefix - env = self.web_app.settings['jinja2_env'] + self.jinja_template_vars[ + 'hub_control_panel_url' + ] = self.hub_host + url_path_join(self.hub_prefix, 'home') - env.globals['hub_control_panel_url'] = self.hub_host + url_path_join( - self.hub_prefix, 'home' - ) + settings = self.web_app.settings + # patch classic notebook jinja env + jinja_envs = [] + if 'jinja2_env' in settings: + # default jinja env (should we do this on jupyter-server, or only notebook?) + jinja_envs.append(settings['jinja2_env']) + for ext_name in ("notebook", "nbclassic"): + env_name = f"{ext_name}_jinja2_env" + if env_name in settings: + # when running with jupyter-server, classic notebook (nbclassic server extension or notebook v7) + # gets its own jinja env, which needs the same patch + jinja_envs.append(settings[env_name]) - # patch jinja env loading to modify page template + # patch jinja env loading to get modified template, only for base page.html def get_page(name): if name == 'page.html': return page_template - orig_loader = env.loader - env.loader = ChoiceLoader([FunctionLoader(get_page), orig_loader]) + for jinja_env in jinja_envs: + jinja_env.loader = ChoiceLoader( + [FunctionLoader(get_page), jinja_env.loader] + ) + + def load_server_extensions(self): + # Loading LabApp sets $JUPYTERHUB_API_TOKEN on load, which is incorrect + r = super().load_server_extensions() + # clear the token in PageConfig at this step + # so that cookie auth is used + # FIXME: in the future, + # it would probably make sense to set page_config.token to the token + # from the current request. 
+ if 'page_config_data' in self.web_app.settings: + self.web_app.settings['page_config_data']['token'] = '' + return r -def main(argv=None): - return SingleUserNotebookApp.launch_instance(argv) +def detect_base_package(App): + """Detect the base package for an App class + + Will return 'notebook' or 'jupyter_server' + based on which package App subclasses from. + + Will return None if neither is identified (e.g. fork package, or duck-typing). + """ + # guess notebook or jupyter_server based on App class inheritance + for cls in App.mro(): + pkg = cls.__module__.split(".", 1)[0] + if pkg in {"notebook", "jupyter_server"}: + return pkg + return None -if __name__ == "__main__": - main() +def _nice_cls_repr(cls): + """Nice repr of classes, e.g. 'module.submod.Class' + + Also accepts tuples of classes + """ + return f"{cls.__module__}.{cls.__name__}" + + +def patch_base_handler(BaseHandler, log=None): + """Patch HubAuthenticated into a base handler class + + so anything inheriting from BaseHandler uses Hub authentication. + This works *even after* subclasses have imported and inherited from BaseHandler. + + .. versionadded: 1.5 + Made available as an importable utility + """ + if log is None: + log = logging.getLogger() + + if HubAuthenticatedHandler not in BaseHandler.__bases__: + new_bases = (HubAuthenticatedHandler,) + BaseHandler.__bases__ + log.info( + "Patching auth into {mod}.{name}({old_bases}) -> {name}({new_bases})".format( + mod=BaseHandler.__module__, + name=BaseHandler.__name__, + old_bases=', '.join( + _nice_cls_repr(cls) for cls in BaseHandler.__bases__ + ), + new_bases=', '.join(_nice_cls_repr(cls) for cls in new_bases), + ) + ) + BaseHandler.__bases__ = new_bases + # We've now inserted our class as a parent of BaseHandler, + # but we also need to ensure BaseHandler *itself* doesn't + # override the public tornado API methods we have inserted. + # If they are defined in BaseHandler, explicitly replace them with our methods. 
+ for name in ("get_current_user", "get_login_url"): + if name in BaseHandler.__dict__: + log.debug( + f"Overriding {BaseHandler}.{name} with HubAuthenticatedHandler.{name}" + ) + method = getattr(HubAuthenticatedHandler, name) + setattr(BaseHandler, name, method) + return BaseHandler + + +def _patch_app_base_handlers(app): + """Patch Hub Authentication into the base handlers of an app + + Patches HubAuthenticatedHandler into: + + - App.base_handler_class (if defined) + - jupyter_server's JupyterHandler (if already imported) + - notebook's IPythonHandler (if already imported) + """ + BaseHandler = app_base_handler = getattr(app, "base_handler_class", None) + + base_handlers = [] + if BaseHandler is not None: + base_handlers.append(BaseHandler) + + # patch juptyer_server and notebook handlers if they have been imported + for base_handler_name in [ + "jupyter_server.base.handlers.JupyterHandler", + "notebook.base.handlers.IPythonHandler", + ]: + modname, _ = base_handler_name.rsplit(".", 1) + if modname in sys.modules: + base_handlers.append(import_item(base_handler_name)) + + if not base_handlers: + pkg = detect_base_package(app.__class__) + if pkg == "jupyter_server": + BaseHandler = import_item("jupyter_server.base.handlers.JupyterHandler") + elif pkg == "notebook": + BaseHandler = import_item("notebook.base.handlers.IPythonHandler") + else: + raise ValueError( + f"{app.__class__.__name__}.base_handler_class must be defined" + ) + base_handlers.append(BaseHandler) + + # patch-in HubAuthenticatedHandler to base handler classes + for BaseHandler in base_handlers: + patch_base_handler(BaseHandler) + + # return the first entry + return base_handlers[0] + + +def make_singleuser_app(App): + """Make and return a singleuser notebook app + + given existing notebook or jupyter_server Application classes, + mix-in jupyterhub auth. 
+ + Instances of App must have the following attributes defining classes: + + - .login_handler_class + - .logout_handler_class + - .base_handler_class (only required if not a subclass of the default app + in jupyter_server or notebook) + + App should be a subclass of `notebook.notebookapp.NotebookApp` + or `jupyter_server.serverapp.ServerApp`. + """ + + empty_parent_app = App() + log = empty_parent_app.log + + # detect base classes + LoginHandler = empty_parent_app.login_handler_class + LogoutHandler = empty_parent_app.logout_handler_class + BaseHandler = _patch_app_base_handlers(empty_parent_app) + + # create Handler classes from mixins + bases + class JupyterHubLoginHandler(JupyterHubLoginHandlerMixin, LoginHandler): + pass + + class JupyterHubLogoutHandler(JupyterHubLogoutHandlerMixin, LogoutHandler): + pass + + class OAuthCallbackHandler(OAuthCallbackHandlerMixin, BaseHandler): + pass + + # create merged aliases & flags + merged_aliases = {} + merged_aliases.update(empty_parent_app.aliases or {}) + merged_aliases.update(aliases) + + merged_flags = {} + merged_flags.update(empty_parent_app.flags or {}) + merged_flags.update(flags) + # create mixed-in App class, bringing it all together + class SingleUserNotebookApp(SingleUserNotebookAppMixin, App): + aliases = merged_aliases + flags = merged_flags + classes = empty_parent_app.classes + [HubOAuth] + + login_handler_class = JupyterHubLoginHandler + logout_handler_class = JupyterHubLogoutHandler + oauth_callback_handler_class = OAuthCallbackHandler + + def initialize(self, *args, **kwargs): + result = super().initialize(*args, **kwargs) + # run patch again after initialize, so extensions have already been loaded + # probably a no-op most of the time + _patch_app_base_handlers(self) + return result + + return SingleUserNotebookApp diff --git a/jupyterhub/spawner.py b/jupyterhub/spawner.py index af84c122..756fefa7 100644 --- a/jupyterhub/spawner.py +++ b/jupyterhub/spawner.py @@ -4,8 +4,6 @@ Contains base Spawner 
class & default implementation # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. import ast -import asyncio -import errno import json import os import pipes @@ -13,38 +11,46 @@ import shutil import signal import sys import warnings +from inspect import signature from subprocess import Popen from tempfile import mkdtemp +from textwrap import dedent +from urllib.parse import urlparse -from async_generator import async_generator -from async_generator import yield_ +from async_generator import aclosing from sqlalchemy import inspect from tornado.ioloop import PeriodicCallback -from traitlets import Any -from traitlets import Bool -from traitlets import default -from traitlets import Dict -from traitlets import Float -from traitlets import Instance -from traitlets import Integer -from traitlets import List -from traitlets import observe -from traitlets import Unicode -from traitlets import Union -from traitlets import validate +from traitlets import ( + Any, + Bool, + Dict, + Float, + Instance, + Integer, + List, + Unicode, + Union, + default, + observe, + validate, +) from traitlets.config import LoggingConfigurable +from . 
import orm from .objects import Server -from .traitlets import ByteSpecification -from .traitlets import Callable -from .traitlets import Command -from .utils import exponential_backoff -from .utils import iterate_until -from .utils import maybe_future -from .utils import random_port -from .utils import url_path_join +from .roles import roles_to_scopes +from .traitlets import ByteSpecification, Callable, Command +from .utils import ( + AnyTimeoutError, + exponential_backoff, + maybe_future, + random_port, + url_escape_path, + url_path_join, +) -# FIXME: remove when we drop Python 3.5 support +if os.name == 'nt': + import psutil def _quote_safe(s): @@ -86,6 +92,7 @@ class Spawner(LoggingConfigurable): _start_pending = False _stop_pending = False _proxy_pending = False + _check_pending = False _waiting_for_response = False _jupyterhub_version = None _spawn_future = None @@ -96,10 +103,15 @@ class Spawner(LoggingConfigurable): Used in logging for consistency with named servers. """ - if self.name: - return '%s:%s' % (self.user.name, self.name) + if self.user: + user_name = self.user.name else: - return self.user.name + # no user, only happens in mock tests + user_name = "(no user)" + if self.name: + return f"{user_name}:{self.name}" + else: + return user_name @property def _failed(self): @@ -121,6 +133,8 @@ class Spawner(LoggingConfigurable): return 'spawn' elif self._stop_pending: return 'stop' + elif self._check_pending: + return 'check' return None @property @@ -147,9 +161,27 @@ class Spawner(LoggingConfigurable): authenticator = Any() hub = Any() orm_spawner = Any() - db = Any() cookie_options = Dict() + db = Any() + + @default("db") + def _deprecated_db(self): + self.log.warning( + dedent( + """ + The shared database session at Spawner.db is deprecated, and will be removed. + Please manage your own database and connections. 
+ + Contact JupyterHub at https://github.com/jupyterhub/jupyterhub/issues/3700 + if you have questions or ideas about direct database needs for your Spawner. + """ + ), + ) + return self._deprecated_db_session + + _deprecated_db_session = Any() + @observe('orm_spawner') def _orm_spawner_changed(self, change): if change.new and change.new.server: @@ -181,17 +213,38 @@ class Spawner(LoggingConfigurable): def last_activity(self): return self.orm_spawner.last_activity + # Spawner.server is a wrapper of the ORM orm_spawner.server + # make sure it's always in sync with the underlying state + # this is harder to do with traitlets, + # which do not run on every access, only on set and first-get + _server = None + @property def server(self): - if hasattr(self, '_server'): + # always check that we're in sync with orm_spawner + if not self.orm_spawner: + # no ORM spawner, nothing to check return self._server - if self.orm_spawner and self.orm_spawner.server: - return Server(orm_server=self.orm_spawner.server) + + orm_server = self.orm_spawner.server + + if orm_server is not None and ( + self._server is None or orm_server is not self._server.orm_server + ): + # self._server is not connected to orm_spawner + self._server = Server(orm_server=self.orm_spawner.server) + elif orm_server is None: + # no ORM server, clear it + self._server = None + return self._server @server.setter def server(self, server): self._server = server - if self.orm_spawner: + if self.orm_spawner is not None: + if server is not None and server.orm_server == self.orm_spawner.server: + # no change + return if self.orm_spawner.server is not None: # delete the old value db = inspect(self.orm_spawner.server).session @@ -199,7 +252,13 @@ class Spawner(LoggingConfigurable): if server is None: self.orm_spawner.server = None else: + if server.orm_server is None: + self.log.warning(f"No ORM server for {self._log_name}") self.orm_spawner.server = server.orm_server + elif server is not None: + self.log.warning( + 
f"Setting Spawner.server for {self._log_name} with no underlying orm_spawner" + ) @property def name(self): @@ -207,8 +266,6 @@ class Spawner(LoggingConfigurable): return self.orm_spawner.name return '' - hub = Any() - authenticator = Any() internal_ssl = Bool(False) internal_trust_bundles = Dict() internal_certs_location = Unicode('') @@ -216,8 +273,115 @@ class Spawner(LoggingConfigurable): admin_access = Bool(False) api_token = Unicode() oauth_client_id = Unicode() + + oauth_scopes = List(Unicode()) + + @property + def oauth_scopes(self): + warnings.warn( + """Spawner.oauth_scopes is deprecated in JupyterHub 2.3. + + Use Spawner.oauth_access_scopes + """, + DeprecationWarning, + stacklevel=2, + ) + return self.oauth_access_scopes + + oauth_access_scopes = List( + Unicode(), + help="""The scope(s) needed to access this server""", + ) + + @default("oauth_access_scopes") + def _default_access_scopes(self): + return [ + f"access:servers!server={self.user.name}/{self.name}", + f"access:servers!user={self.user.name}", + ] + handler = Any() + oauth_roles = Union( + [Callable(), List()], + help="""Allowed roles for oauth tokens. + + Deprecated in 3.0: use oauth_client_allowed_scopes + + This sets the maximum and default roles + assigned to oauth tokens issued by a single-user server's + oauth client (i.e. tokens stored in browsers after authenticating with the server), + defining what actions the server can take on behalf of logged-in users. + + Default is an empty list, meaning minimal permissions to identify users, + no actions can be taken on their behalf. + """, + ).tag(config=True) + + oauth_client_allowed_scopes = Union( + [Callable(), List()], + help="""Allowed scopes for oauth tokens issued by this server's oauth client. + + This sets the maximum and default scopes + assigned to oauth tokens issued by a single-user server's + oauth client (i.e. 
tokens stored in browsers after authenticating with the server), + defining what actions the server can take on behalf of logged-in users. + + Default is an empty list, meaning minimal permissions to identify users, + no actions can be taken on their behalf. + + If callable, will be called with the Spawner as a single argument. + Callables may be async. + """, + ).tag(config=True) + + async def _get_oauth_client_allowed_scopes(self): + """Private method: get oauth allowed scopes + + Handle: + + - oauth_client_allowed_scopes + - callable config + - deprecated oauth_roles config + - access_scopes + """ + # cases: + # 1. only scopes + # 2. only roles + # 3. both! (conflict, favor scopes) + scopes = [] + if self.oauth_client_allowed_scopes: + allowed_scopes = self.oauth_client_allowed_scopes + if callable(allowed_scopes): + allowed_scopes = allowed_scopes(self) + if inspect.isawaitable(allowed_scopes): + allowed_scopes = await allowed_scopes + scopes.extend(allowed_scopes) + + if self.oauth_roles: + if scopes: + # both defined! Warn + warnings.warn( + f"Ignoring deprecated Spawner.oauth_roles={self.oauth_roles} in favor of Spawner.oauth_client_allowed_scopes.", + ) + else: + role_names = self.oauth_roles + if callable(role_names): + role_names = role_names(self) + roles = list( + self.db.query(orm.Role).filter(orm.Role.name.in_(role_names)) + ) + if len(roles) != len(role_names): + missing_roles = set(role_names).difference( + {role.name for role in roles} + ) + raise ValueError(f"No such role(s): {', '.join(missing_roles)}") + scopes.extend(roles_to_scopes(roles)) + + # always add access scope + scopes.append(f"access:servers!server={self.user.name}/{self.name}") + return sorted(set(scopes)) + will_resume = Bool( False, help="""Whether the Spawner will resume on next start @@ -231,11 +395,22 @@ class Spawner(LoggingConfigurable): ) ip = Unicode( - '', + '127.0.0.1', help=""" The IP address (or hostname) the single-user server should listen on. 
+ Usually either '127.0.0.1' (default) or '0.0.0.0'. + The JupyterHub proxy implementation should be able to send packets to this interface. + + Subclasses which launch remotely or in containers + should override the default to '0.0.0.0'. + + .. versionchanged:: 2.0 + Default changed to '127.0.0.1', from ''. + In most cases, this does not result in a change in behavior, + as '' was interpreted as 'unspecified', + which used the subprocesses' own default, itself usually '127.0.0.1'. """, ).tag(config=True) @@ -353,8 +528,9 @@ class Spawner(LoggingConfigurable): return options_form - def options_from_form(self, form_data): - """Interpret HTTP form data + options_from_form = Callable( + help=""" + Interpret HTTP form data Form data will always arrive as a dict of lists of strings. Override this function to understand single-values, numbers, etc. @@ -378,9 +554,54 @@ class Spawner(LoggingConfigurable): (with additional support for bytes in case of uploaded file data), and any non-bytes non-jsonable values will be replaced with None if the user_options are re-used. - """ + """, + ).tag(config=True) + + @default("options_from_form") + def _options_from_form(self): + return self._default_options_from_form + + def _default_options_from_form(self, form_data): return form_data + def run_options_from_form(self, form_data): + sig = signature(self.options_from_form) + if 'spawner' in sig.parameters: + return self.options_from_form(form_data, spawner=self) + else: + return self.options_from_form(form_data) + + def options_from_query(self, query_data): + """Interpret query arguments passed to /spawn + + Query arguments will always arrive as a dict of unicode strings. + Override this function to understand single-values, numbers, etc. + + By default, options_from_form is called from this function. You can however override + this function if you need to process the query arguments differently. 
+ + This should coerce form data into the structure expected by self.user_options, + which must be a dict, and should be JSON-serializeable, + though it can contain bytes in addition to standard JSON data types. + + This method should not have any side effects. + Any handling of `user_options` should be done in `.start()` + to ensure consistent behavior across servers + spawned via the API and form submission page. + + Instances will receive this data on self.user_options, after passing through this function, + prior to `Spawner.start`. + + .. versionadded:: 1.2 + user_options are persisted in the JupyterHub database to be reused + on subsequent spawns if no options are given. + user_options is serialized to JSON as part of this persistence + (with additional support for bytes in case of uploaded file data), + and any non-bytes non-jsonable values will be replaced with None + if the user_options are re-used. + """ + return self.options_from_form(query_data) + user_options = Dict( help=""" Dict of user specified options for the user's spawned instance of a single-user server. @@ -399,11 +620,12 @@ class Spawner(LoggingConfigurable): 'VIRTUAL_ENV', 'LANG', 'LC_ALL', + 'JUPYTERHUB_SINGLEUSER_APP', ], help=""" - Whitelist of environment variables for the single-user server to inherit from the JupyterHub process. + List of environment variables for the single-user server to inherit from the JupyterHub process. - This whitelist is used to ensure that sensitive information in the JupyterHub process's environment + This list is used to ensure that sensitive information in the JupyterHub process's environment (such as `CONFIGPROXY_AUTH_TOKEN`) is not passed to the single-user server's process. 
""", ).tag(config=True) @@ -422,7 +644,7 @@ class Spawner(LoggingConfigurable): Environment variables that end up in the single-user server's process come from 3 sources: - This `environment` configurable - - The JupyterHub process' environment variables that are whitelisted in `env_keep` + - The JupyterHub process' environment variables that are listed in `env_keep` - Variables to establish contact between the single-user notebook and the hub (such as JUPYTERHUB_API_TOKEN) The `environment` configurable should be set by JupyterHub administrators to add @@ -433,6 +655,11 @@ class Spawner(LoggingConfigurable): Note that the spawner class' interface is not guaranteed to be exactly same across upgrades, so if you are using the callable take care to verify it continues to work after upgrades! + + .. versionchanged:: 1.2 + environment from this configuration has highest priority, + allowing override of 'default' env variables, + such as JUPYTERHUB_API_URL. """ ).tag(config=True) @@ -627,6 +854,37 @@ class Spawner(LoggingConfigurable): """ ).tag(config=True) + auth_state_hook = Any( + help=""" + An optional hook function that you can implement to pass `auth_state` + to the spawner after it has been initialized but before it starts. + The `auth_state` dictionary may be set by the `.authenticate()` + method of the authenticator. This hook enables you to pass some + or all of that information to your spawner. + + Example:: + + def userdata_hook(spawner, auth_state): + spawner.userdata = auth_state["userdata"] + + c.Spawner.auth_state_hook = userdata_hook + + """ + ).tag(config=True) + + hub_connect_url = Unicode( + None, + allow_none=True, + help=""" + The URL the single-user server should connect to the Hub. + + If the Hub URL set in your JupyterHub config is not reachable + from spawned notebooks, you can set differnt URL by this config. + + Is None if you don't need to change the URL. 
+ """, + ).tag(config=True) + def load_state(self, state): """Restore state of spawner from database. @@ -638,7 +896,6 @@ class Spawner(LoggingConfigurable): Override in subclasses to restore any extra state that is needed to track the single-user server for that user. Subclasses should call super(). """ - pass def get_state(self): """Save state of spawner into database. @@ -652,7 +909,7 @@ class Spawner(LoggingConfigurable): Returns ------- state: dict - a JSONable dict of state + a JSONable dict of state """ state = {} return state @@ -688,16 +945,6 @@ class Spawner(LoggingConfigurable): if key in os.environ: env[key] = os.environ[key] - # config overrides. If the value is a callable, it will be called with - # one parameter - the current spawner instance - and the return value - # will be assigned to the environment variable. This will be called at - # spawn time. - for key, value in self.environment.items(): - if callable(value): - env[key] = value(self) - else: - env[key] = value - env['JUPYTERHUB_API_TOKEN'] = self.api_token # deprecated (as of 0.7.2), for old versions of singleuser env['JPY_API_TOKEN'] = self.api_token @@ -709,23 +956,47 @@ class Spawner(LoggingConfigurable): env['JUPYTERHUB_COOKIE_OPTIONS'] = json.dumps(self.cookie_options) env['JUPYTERHUB_HOST'] = self.hub.public_host env['JUPYTERHUB_OAUTH_CALLBACK_URL'] = url_path_join( - self.user.url, self.name, 'oauth_callback' + self.user.url, url_escape_path(self.name), 'oauth_callback' + ) + + # deprecated env, renamed in 3.0 for disambiguation + env['JUPYTERHUB_OAUTH_SCOPES'] = json.dumps(self.oauth_access_scopes) + env['JUPYTERHUB_OAUTH_ACCESS_SCOPES'] = json.dumps(self.oauth_access_scopes) + + # added in 3.0 + env['JUPYTERHUB_OAUTH_CLIENT_ALLOWED_SCOPES'] = json.dumps( + self.oauth_client_allowed_scopes ) # Info previously passed on args env['JUPYTERHUB_USER'] = self.user.name env['JUPYTERHUB_SERVER_NAME'] = self.name - env['JUPYTERHUB_API_URL'] = self.hub.api_url + if self.hub_connect_url is not 
None: + hub_api_url = url_path_join( + self.hub_connect_url, urlparse(self.hub.api_url).path + ) + else: + hub_api_url = self.hub.api_url + env['JUPYTERHUB_API_URL'] = hub_api_url env['JUPYTERHUB_ACTIVITY_URL'] = url_path_join( - self.hub.api_url, + hub_api_url, 'users', # tolerate mocks defining only user.name getattr(self.user, 'escaped_name', self.user.name), 'activity', ) env['JUPYTERHUB_BASE_URL'] = self.hub.base_url[:-4] + if self.server: + base_url = self.server.base_url env['JUPYTERHUB_SERVICE_PREFIX'] = self.server.base_url + else: + # this should only occur in mock/testing scenarios + base_url = '/' + + proto = 'https' if self.internal_ssl else 'http' + bind_url = f"{proto}://{self.ip}:{self.port}{base_url}" + env["JUPYTERHUB_SERVICE_URL"] = bind_url # Put in limit and guarantee info if they exist. # Note that this is for use by the humans / notebook extensions in the @@ -745,6 +1016,31 @@ class Spawner(LoggingConfigurable): env['JUPYTERHUB_SSL_CERTFILE'] = self.cert_paths['certfile'] env['JUPYTERHUB_SSL_CLIENT_CA'] = self.cert_paths['cafile'] + if self.notebook_dir: + notebook_dir = self.format_string(self.notebook_dir) + env["JUPYTERHUB_ROOT_DIR"] = notebook_dir + + if self.default_url: + default_url = self.format_string(self.default_url) + env["JUPYTERHUB_DEFAULT_URL"] = default_url + + if self.debug: + env["JUPYTERHUB_DEBUG"] = "1" + + if self.disable_user_config: + env["JUPYTERHUB_DISABLE_USER_CONFIG"] = "1" + + # env overrides from config. If the value is a callable, it will be called with + # one parameter - the current spawner instance - and the return value + # will be assigned to the environment variable. This will be called at + # spawn time. 
+ # Called last to ensure highest priority, in case of overriding other + # 'default' variables like the API url + for key, value in self.environment.items(): + if callable(value): + env[key] = value(self) + else: + env[key] = value return env async def get_url(self): @@ -885,14 +1181,13 @@ class Spawner(LoggingConfigurable): Arguments: paths (dict): a list of paths for key, cert, and CA. - These paths will be resolvable and readable by the Hub process, - but not necessarily by the notebook server. + These paths will be resolvable and readable by the Hub process, + but not necessarily by the notebook server. Returns: - dict: a list (potentially altered) of paths for key, cert, - and CA. - These paths should be resolvable and readable - by the notebook server to be launched. + dict: a list (potentially altered) of paths for key, cert, and CA. + These paths should be resolvable and readable by the notebook + server to be launched. `.move_certs` is called after certs for the singleuser notebook have @@ -912,33 +1207,16 @@ class Spawner(LoggingConfigurable): """Return the arguments to be passed after self.cmd Doesn't expect shell expansion to happen. + + .. versionchanged:: 2.0 + Prior to 2.0, JupyterHub passed some options such as + ip, port, and default_url to the command-line. + JupyterHub 2.0 no longer builds any CLI args + other than `Spawner.cmd` and `Spawner.args`. + All values that come from jupyterhub itself + will be passed via environment variables. """ - args = [] - - if self.ip: - args.append('--ip=%s' % _quote_safe(self.ip)) - - if self.port: - args.append('--port=%i' % self.port) - elif self.server and self.server.port: - self.log.warning( - "Setting port from user.server is deprecated as of JupyterHub 0.7." 
- ) - args.append('--port=%i' % self.server.port) - - if self.notebook_dir: - notebook_dir = self.format_string(self.notebook_dir) - args.append('--notebook-dir=%s' % _quote_safe(notebook_dir)) - if self.default_url: - default_url = self.format_string(self.default_url) - args.append('--NotebookApp.default_url=%s' % _quote_safe(default_url)) - - if self.debug: - args.append('--debug') - if self.disable_user_config: - args.append('--disable-user-config') - args.extend(self.args) - return args + return self.args def run_pre_spawn_hook(self): """Run the pre_spawn_hook if defined""" @@ -953,11 +1231,15 @@ class Spawner(LoggingConfigurable): except Exception: self.log.exception("post_stop_hook failed with exception: %s", self) + async def run_auth_state_hook(self, auth_state): + """Run the auth_state_hook if defined""" + if self.auth_state_hook is not None: + await maybe_future(self.auth_state_hook(self, auth_state)) + @property def _progress_url(self): return self.user.progress_url(self.name) - @async_generator async def _generate_progress(self): """Private wrapper of progress generator @@ -969,21 +1251,17 @@ class Spawner(LoggingConfigurable): ) return - await yield_({"progress": 0, "message": "Server requested"}) - from async_generator import aclosing + yield {"progress": 0, "message": "Server requested"} async with aclosing(self.progress()) as progress: async for event in progress: - await yield_(event) + yield event - @async_generator async def progress(self): """Async generator for progress events Must be an async generator - For Python 3.5-compatibility, use the async_generator package - Should yield messages of the form: :: @@ -1000,7 +1278,7 @@ class Spawner(LoggingConfigurable): .. versionadded:: 0.9 """ - await yield_({"progress": 50, "message": "Spawning server..."}) + yield {"progress": 50, "message": "Spawning server..."} async def start(self): """Start the single-user server @@ -1011,9 +1289,7 @@ class Spawner(LoggingConfigurable): .. 
versionchanged:: 0.7 Return ip, port instead of setting on self.user.server directly. """ - raise NotImplementedError( - "Override in subclass. Must be a Tornado gen.coroutine." - ) + raise NotImplementedError("Override in subclass. Must be a coroutine.") async def stop(self, now=False): """Stop the single-user server @@ -1026,9 +1302,7 @@ class Spawner(LoggingConfigurable): Must be a coroutine. """ - raise NotImplementedError( - "Override in subclass. Must be a Tornado gen.coroutine." - ) + raise NotImplementedError("Override in subclass. Must be a coroutine.") async def poll(self): """Check if the single-user process is running @@ -1054,9 +1328,18 @@ class Spawner(LoggingConfigurable): process has not yet completed. """ - raise NotImplementedError( - "Override in subclass. Must be a Tornado gen.coroutine." - ) + raise NotImplementedError("Override in subclass. Must be a coroutine.") + + def delete_forever(self): + """Called when a user or server is deleted. + + This can do things like request removal of resources such as persistent storage. + Only called on stopped spawners, and is usually the last action ever taken for the user. + + Will only be called once on each Spawner, immediately prior to removal. + + Stopping a server does *not* call this method. 
+ """ def add_poll_callback(self, callback, *args, **kwargs): """Add a callback to fire when the single-user server stops""" @@ -1121,12 +1404,12 @@ class Spawner(LoggingConfigurable): try: r = await exponential_backoff( _wait_for_death, - 'Process did not die in {timeout} seconds'.format(timeout=timeout), + f'Process did not die in {timeout} seconds', start_wait=self.death_interval, timeout=timeout, ) return r - except TimeoutError: + except AnyTimeoutError: return False @@ -1140,7 +1423,7 @@ def _try_setcwd(path): os.chdir(path) except OSError as e: exc = e # break exception instance out of except scope - print("Couldn't set CWD to %s (%s)" % (path, e), file=sys.stderr) + print(f"Couldn't set CWD to {path} ({e})", file=sys.stderr) path, _ = os.path.split(path) else: return @@ -1286,7 +1569,7 @@ class LocalProcessSpawner(Spawner): Local processes only need the process id. """ - super(LocalProcessSpawner, self).load_state(state) + super().load_state(state) if 'pid' in state: self.pid = state['pid'] @@ -1295,14 +1578,14 @@ class LocalProcessSpawner(Spawner): Local processes only need the process id. 
""" - state = super(LocalProcessSpawner, self).get_state() + state = super().get_state() if self.pid: state['pid'] = self.pid return state def clear_state(self): """Clear stored state about this spawner (pid)""" - super(LocalProcessSpawner, self).clear_state() + super().clear_state() self.pid = 0 def user_env(self, env): @@ -1351,7 +1634,8 @@ class LocalProcessSpawner(Spawner): home = user.pw_dir # Create dir for user's certs wherever we're starting - out_dir = "{home}/.jupyterhub/jupyterhub-certs".format(home=home) + hub_dir = f"{home}/.jupyterhub" + out_dir = f"{hub_dir}/jupyterhub-certs" shutil.rmtree(out_dir, ignore_errors=True) os.makedirs(out_dir, 0o700, exist_ok=True) @@ -1365,14 +1649,15 @@ class LocalProcessSpawner(Spawner): ca = os.path.join(out_dir, os.path.basename(paths['cafile'])) # Set cert ownership to user - for f in [out_dir, key, cert, ca]: + for f in [hub_dir, out_dir, key, cert, ca]: shutil.chown(f, user=uid, group=gid) return {"keyfile": key, "certfile": cert, "cafile": ca} async def start(self): """Start the single-user server.""" - self.port = random_port() + if self.port == 0: + self.port = random_port() cmd = [] env = self.get_env() @@ -1407,16 +1692,6 @@ class LocalProcessSpawner(Spawner): self.pid = self.proc.pid - if self.__class__ is not LocalProcessSpawner: - # subclasses may not pass through return value of super().start, - # relying on deprecated 0.6 way of setting ip, port, - # so keep a redundant copy here for now. - # A deprecation warning will be shown if the subclass - # does not return ip, port. 
- if self.ip: - self.server.ip = self.ip - self.server.port = self.port - self.db.commit() return (self.ip or '127.0.0.1', self.port) async def poll(self): @@ -1429,8 +1704,11 @@ class LocalProcessSpawner(Spawner): if self.proc is not None: status = self.proc.poll() if status is not None: - # clear state if the process is done - self.clear_state() + # handle SIGCHILD to avoid zombie processes + # and also close stdout/stderr file descriptors + with self.proc: + # clear state if the process is done + self.clear_state() return status # if we resumed from stored state, @@ -1440,9 +1718,11 @@ class LocalProcessSpawner(Spawner): self.clear_state() return 0 - # send signal 0 to check if PID exists - # this doesn't work on Windows, but that's okay because we don't support Windows. - alive = await self._signal(0) + # We use pustil.pid_exists on windows + if os.name == 'nt': + alive = psutil.pid_exists(self.pid) + else: + alive = await self._signal(0) if not alive: self.clear_state() return 0 @@ -1458,11 +1738,10 @@ class LocalProcessSpawner(Spawner): """ try: os.kill(self.pid, sig) + except ProcessLookupError: + return False # process is gone except OSError as e: - if e.errno == errno.ESRCH: - return False # process is gone - else: - raise + raise # Can be EPERM or EINVAL return True # process exists async def stop(self, now=False): @@ -1549,5 +1828,5 @@ class SimpleLocalProcessSpawner(LocalProcessSpawner): return env def move_certs(self, paths): - """No-op for installing certs""" + """No-op for installing certs.""" return paths diff --git a/jupyterhub/tests/conftest.py b/jupyterhub/tests/conftest.py index 7126baa7..b880017b 100644 --- a/jupyterhub/tests/conftest.py +++ b/jupyterhub/tests/conftest.py @@ -26,45 +26,32 @@ Fixtures to add functionality or spawning behavior # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. 
import asyncio -import inspect -import logging +import copy import os import sys +from functools import partial from getpass import getuser from subprocess import TimeoutExpired from unittest import mock -from pytest import fixture -from pytest import raises -from tornado import gen -from tornado import ioloop +from pytest import fixture, raises from tornado.httpclient import HTTPError from tornado.platform.asyncio import AsyncIOMainLoop import jupyterhub.services.service -from . import mocking -from .. import crypto -from .. import orm + +from .. import crypto, orm, scopes +from ..roles import create_role, get_default_roles, mock_roles, update_roles from ..utils import random_port +from . import mocking from .mocking import MockHub from .test_services import mockservice_cmd from .utils import add_user -from .utils import ssl_setup # global db session object _db = None -def pytest_collection_modifyitems(items): - """add asyncio marker to all async tests""" - for item in items: - if inspect.iscoroutinefunction(item.obj): - item.add_marker('asyncio') - if hasattr(inspect, 'isasyncgenfunction'): - # double-check that we aren't mixing yield and async def - assert not inspect.isasyncgenfunction(item.obj) - - @fixture(scope='module') def ssl_tmpdir(tmpdir_factory): return tmpdir_factory.mktemp('ssl') @@ -74,7 +61,9 @@ def ssl_tmpdir(tmpdir_factory): def app(request, io_loop, ssl_tmpdir): """Mock a jupyterhub app for testing""" mocked_app = None - ssl_enabled = getattr(request.module, "ssl_enabled", False) + ssl_enabled = getattr( + request.module, 'ssl_enabled', os.environ.get('SSL_ENABLED', False) + ) kwargs = dict() if ssl_enabled: kwargs.update(dict(internal_ssl=True, internal_certs_location=str(ssl_tmpdir))) @@ -119,7 +108,13 @@ def db(): """Get a db session""" global _db if _db is None: - _db = orm.new_session_factory('sqlite:///:memory:')() + # make sure some initial db contents are filled out + # specifically, the 'default' jupyterhub oauth client + app = 
MockHub(db_url='sqlite:///:memory:') + app.init_db() + _db = app.db + for role in get_default_roles(): + create_role(_db, role) user = orm.User(name=getuser()) _db.add(user) _db.commit() @@ -135,16 +130,13 @@ def event_loop(request): @fixture(scope='module') -def io_loop(event_loop, request): +async def io_loop(event_loop, request): """Same as pytest-tornado.io_loop, but re-scoped to module-level""" - ioloop.IOLoop.configure(AsyncIOMainLoop) io_loop = AsyncIOMainLoop() - io_loop.make_current() assert asyncio.get_event_loop() is event_loop assert io_loop.asyncio_loop is event_loop def _close(): - io_loop.clear_current() io_loop.close(all_fds=True) request.addfinalizer(_close) @@ -158,13 +150,20 @@ def cleanup_after(request, io_loop): allows cleanup of servers between tests without having to launch a whole new app """ + try: yield finally: + if _db is not None: + # cleanup after failed transactions + _db.rollback() + if not MockHub.initialized(): return app = MockHub.instance() - for uid, user in app.users.items(): + if app.db_file.closed: + return + for uid, user in list(app.users.items()): for name, spawner in list(user.spawners.items()): if spawner.active: try: @@ -172,6 +171,11 @@ def cleanup_after(request, io_loop): except HTTPError: pass io_loop.run_sync(lambda: user.stop(name)) + if user.name not in {'admin', 'user'}: + app.users.delete(uid) + # delete groups + for group in app.db.query(orm.Group): + app.db.delete(group) app.db.commit() @@ -182,7 +186,7 @@ def new_username(prefix='testuser'): """Return a new unique username""" global _username_counter _username_counter += 1 - return '{}-{}'.format(prefix, _username_counter) + return f'{prefix}-{_username_counter}' @fixture @@ -211,10 +215,43 @@ def admin_user(app, username): yield user +_groupname_counter = 0 + + +def new_group_name(prefix='testgroup'): + """Return a new unique group name""" + global _groupname_counter + _groupname_counter += 1 + return f'{prefix}-{_groupname_counter}' + + +@fixture +def 
groupname(): + """allocate a temporary group name + + unique each time the fixture is used + """ + yield new_group_name() + + +@fixture +def group(app): + """Fixture for creating a temporary group + + Each time the fixture is used, a new group is created + + The group is deleted after the test + """ + group = orm.Group(name=new_group_name()) + app.db.add(group) + app.db.commit() + yield group + + class MockServiceSpawner(jupyterhub.services.service._ServiceSpawner): """mock services for testing. - Shorter intervals, etc. + Shorter intervals, etc. """ poll_interval = 1 @@ -223,7 +260,22 @@ class MockServiceSpawner(jupyterhub.services.service._ServiceSpawner): _mock_service_counter = 0 -def _mockservice(request, app, url=False): +def _mockservice(request, app, external=False, url=False): + """ + Add a service to the application + + Args: + request: pytest request fixture + app: MockHub application + external (bool): + If False (default), launch the service. + Otherwise, consider it 'external, + registering a service in the database, + but don't start it. + url (bool): + If True, register the service at a URL + (as opposed to headless, API-only). 
+ """ global _mock_service_counter _mock_service_counter += 1 name = 'mock-service-%i' % _mock_service_counter @@ -234,6 +286,10 @@ def _mockservice(request, app, url=False): else: spec['url'] = 'http://127.0.0.1:%i' % random_port() + if external: + + spec['oauth_redirect_uri'] = 'http://127.0.0.1:%i' % random_port() + io_loop = app.io_loop with mock.patch.object( @@ -241,30 +297,32 @@ def _mockservice(request, app, url=False): ): app.services = [spec] app.init_services() + mock_roles(app, name, 'services') assert name in app._service_map service = app._service_map[name] + token = service.orm.api_tokens[0] - @gen.coroutine - def start(): + async def start(): # wait for proxy to be updated before starting the service - yield app.proxy.add_all_services(app._service_map) - service.start() + await app.proxy.add_all_services(app._service_map) + await service.start() - io_loop.run_sync(start) + if not external: + io_loop.run_sync(start) def cleanup(): - import asyncio - - asyncio.get_event_loop().run_until_complete(service.stop()) + if not external: + asyncio.get_event_loop().run_until_complete(service.stop()) app.services[:] = [] app._service_map.clear() request.addfinalizer(cleanup) # ensure process finishes starting - with raises(TimeoutExpired): - service.proc.wait(1) + if not external: + with raises(TimeoutExpired): + service.proc.wait(1) if url: - io_loop.run_sync(service.server.wait_up) + io_loop.run_sync(partial(service.server.wait_up, http=True)) return service @@ -274,6 +332,12 @@ def mockservice(request, app): yield _mockservice(request, app, url=False) +@fixture +def mockservice_external(request, app): + """Mock an externally managed service (don't start anything)""" + yield _mockservice(request, app, external=True, url=False) + + @fixture def mockservice_url(request, app): """Mock a service with its own url to test external services""" @@ -324,3 +388,87 @@ def slow_bad_spawn(app): app.tornado_settings, {'spawner_class': mocking.SlowBadSpawner} ): yield + + 
+@fixture +def create_temp_role(app): + """Generate a temporary role with certain scopes. + Convenience function that provides setup, database handling and teardown""" + temp_roles = [] + index = [1] + + def temp_role_creator(scopes, role_name=None): + if not role_name: + role_name = f'temp_role_{index[0]}' + index[0] += 1 + temp_role = orm.Role(name=role_name, scopes=list(scopes)) + temp_roles.append(temp_role) + app.db.add(temp_role) + app.db.commit() + return temp_role + + yield temp_role_creator + for role in temp_roles: + app.db.delete(role) + app.db.commit() + + +@fixture +def create_user_with_scopes(app, create_temp_role): + """Generate a temporary user with specific scopes. + Convenience function that provides setup, database handling and teardown""" + temp_users = [] + counter = 0 + get_role = create_temp_role + + def temp_user_creator(*scopes, name=None): + nonlocal counter + if name is None: + counter += 1 + name = f"temp_user_{counter}" + role = get_role(scopes) + orm_user = orm.User(name=name) + app.db.add(orm_user) + app.db.commit() + temp_users.append(orm_user) + update_roles(app.db, orm_user, roles=[role.name]) + return app.users[orm_user.id] + + yield temp_user_creator + for user in temp_users: + app.users.delete(user) + + +@fixture +def create_service_with_scopes(app, create_temp_role): + """Generate a temporary service with specific scopes. 
+ Convenience function that provides setup, database handling and teardown""" + temp_service = [] + counter = 0 + role_function = create_temp_role + + def temp_service_creator(*scopes, name=None): + nonlocal counter + if name is None: + counter += 1 + name = f"temp_service_{counter}" + role = role_function(scopes) + app.services.append({'name': name}) + app.init_services() + orm_service = orm.Service.find(app.db, name) + app.db.commit() + update_roles(app.db, orm_service, roles=[role.name]) + return orm_service + + yield temp_service_creator + for service in temp_service: + app.db.delete(service) + app.db.commit() + + +@fixture +def preserve_scopes(): + """Revert any custom scopes after test""" + scope_definitions = copy.deepcopy(scopes.scope_definitions) + yield scope_definitions + scopes.scope_definitions = scope_definitions diff --git a/jupyterhub/tests/mocking.py b/jupyterhub/tests/mocking.py index 5ac2406d..8aed2bfb 100644 --- a/jupyterhub/tests/mocking.py +++ b/jupyterhub/tests/mocking.py @@ -36,26 +36,15 @@ from unittest import mock from urllib.parse import urlparse from pamela import PAMError -from tornado import gen -from tornado.concurrent import Future -from tornado.ioloop import IOLoop -from traitlets import Bool -from traitlets import default -from traitlets import Dict +from traitlets import Bool, Dict, default -from .. import orm +from .. 
import metrics, orm, roles from ..app import JupyterHub from ..auth import PAMAuthenticator -from ..objects import Server from ..singleuser import SingleUserNotebookApp -from ..spawner import LocalProcessSpawner from ..spawner import SimpleLocalProcessSpawner -from ..utils import random_port -from ..utils import url_path_join -from .utils import async_requests -from .utils import public_host -from .utils import public_url -from .utils import ssl_setup +from ..utils import random_port, utcnow +from .utils import async_requests, public_url, ssl_setup def mock_authenticate(username, password, service, encoding): @@ -110,19 +99,17 @@ class SlowSpawner(MockSpawner): delay = 2 _start_future = None - @gen.coroutine - def start(self): - (ip, port) = yield super().start() + async def start(self): + (ip, port) = await super().start() if self._start_future is not None: - yield self._start_future + await self._start_future else: - yield gen.sleep(self.delay) + await asyncio.sleep(self.delay) return ip, port - @gen.coroutine - def stop(self): - yield gen.sleep(self.delay) - yield super().stop() + async def stop(self): + await asyncio.sleep(self.delay) + await super().stop() class NeverSpawner(MockSpawner): @@ -134,14 +121,12 @@ class NeverSpawner(MockSpawner): def start(self): """Return a Future that will never finish""" - return Future() + return asyncio.Future() - @gen.coroutine - def stop(self): + async def stop(self): pass - @gen.coroutine - def poll(self): + async def poll(self): return 0 @@ -173,6 +158,9 @@ class FormSpawner(MockSpawner): options['energy'] = form_data['energy'][0] if 'hello_file' in form_data: options['hello'] = form_data['hello_file'][0] + + if 'illegal_argument' in form_data: + raise ValueError("You are not allowed to specify 'illegal_argument'") return options @@ -212,8 +200,7 @@ class MockPAMAuthenticator(PAMAuthenticator): # skip the add-system-user bit return not user.name.startswith('dne') - @gen.coroutine - def authenticate(self, *args, **kwargs): 
+ async def authenticate(self, *args, **kwargs): with mock.patch.multiple( 'pamela', authenticate=mock_authenticate, @@ -221,9 +208,7 @@ class MockPAMAuthenticator(PAMAuthenticator): close_session=mock_open_session, check_account=mock_check_account, ): - username = yield super(MockPAMAuthenticator, self).authenticate( - *args, **kwargs - ) + username = await super().authenticate(*args, **kwargs) if username is None: return elif self.auth_state: @@ -308,61 +293,68 @@ class MockHub(JupyterHub): test_clean_db = Bool(True) def init_db(self): - """Ensure we start with a clean user list""" + """Ensure we start with a clean user & role list""" super().init_db() if self.test_clean_db: for user in self.db.query(orm.User): self.db.delete(user) for group in self.db.query(orm.Group): self.db.delete(group) + for role in self.db.query(orm.Role): + self.db.delete(role) self.db.commit() - @gen.coroutine - def initialize(self, argv=None): + async def initialize(self, argv=None): self.pid_file = NamedTemporaryFile(delete=False).name self.db_file = NamedTemporaryFile() self.db_url = os.getenv('JUPYTERHUB_TEST_DB_URL') or self.db_file.name if 'mysql' in self.db_url: self.db_kwargs['connect_args'] = {'auth_plugin': 'mysql_native_password'} - yield super().initialize([]) + await super().initialize([]) # add an initial user user = self.db.query(orm.User).filter(orm.User.name == 'user').first() if user is None: user = orm.User(name='user') + # avoid initial state inconsistency by setting initial activity + user.last_activity = utcnow() self.db.add(user) self.db.commit() + metrics.TOTAL_USERS.inc() + roles.assign_default_roles(self.db, entity=user) + self.db.commit() + + _stop_called = False def stop(self): - super().stop() - + if self._stop_called: + return + self._stop_called = True # run cleanup in a background thread # to avoid multiple eventloops in the same thread errors from asyncio def cleanup(): - asyncio.set_event_loop(asyncio.new_event_loop()) - loop = IOLoop.current() - 
loop.run_sync(self.cleanup) + loop = asyncio.new_event_loop() + loop.run_until_complete(self.cleanup()) loop.close() - pool = ThreadPoolExecutor(1) - f = pool.submit(cleanup) - # wait for cleanup to finish - f.result() - pool.shutdown() + with ThreadPoolExecutor(1) as pool: + f = pool.submit(cleanup) + # wait for cleanup to finish + f.result() - # ignore the call that will fire in atexit - self.cleanup = lambda: None + # prevent redundant atexit from running + self._atexit_ran = True + super().stop() self.db_file.close() - @gen.coroutine - def login_user(self, name): + async def login_user(self, name): """Login a user by name, returning her cookies.""" base_url = public_url(self) external_ca = None if self.internal_ssl: external_ca = self.external_certs['files']['ca'] - r = yield async_requests.post( + r = await async_requests.post( base_url + 'hub/login', data={'username': name, 'password': name}, allow_redirects=False, @@ -387,14 +379,29 @@ class MockSingleUserServer(SingleUserNotebookApp): def init_signal(self): pass + @default("log_level") + def _default_log_level(self): + return 10 + class StubSingleUserSpawner(MockSpawner): """Spawner that starts a MockSingleUserServer in a thread.""" + @default("default_url") + def _default_url(self): + """Use a default_url that any jupyter server will provide + + Should be: + + - authenticated, so we are testing auth + - always available (i.e. 
in mocked ServerApp and NotebookApp) + - *not* an API handler that raises 403 instead of redirecting + """ + return "/tree" + _thread = None - @gen.coroutine - def start(self): + async def start(self): ip = self.ip = '127.0.0.1' port = self.port = random_port() env = self.get_env() @@ -403,16 +410,13 @@ class StubSingleUserSpawner(MockSpawner): print(args, env) def _run(): - asyncio.set_event_loop(asyncio.new_event_loop()) - io_loop = IOLoop() - io_loop.make_current() - io_loop.add_callback(lambda: evt.set()) - with mock.patch.dict(os.environ, env): app = self._app = MockSingleUserServer() app.initialize(args) + app.io_loop.add_callback(lambda: evt.set()) assert app.hub_auth.oauth_client_id assert app.hub_auth.api_token + assert app.hub_auth.oauth_scopes app.start() self._thread = threading.Thread(target=_run) @@ -421,14 +425,12 @@ class StubSingleUserSpawner(MockSpawner): assert ready return (ip, port) - @gen.coroutine - def stop(self): + async def stop(self): self._app.stop() self._thread.join(timeout=30) assert not self._thread.is_alive() - @gen.coroutine - def poll(self): + async def poll(self): if self._thread is None: return 0 if self._thread.is_alive(): diff --git a/jupyterhub/tests/mockserverapp.py b/jupyterhub/tests/mockserverapp.py new file mode 100644 index 00000000..84479e4b --- /dev/null +++ b/jupyterhub/tests/mockserverapp.py @@ -0,0 +1,17 @@ +"""Example JupyterServer app subclass""" +from jupyter_server.base.handlers import JupyterHandler +from jupyter_server.serverapp import ServerApp +from tornado import web + + +class TreeHandler(JupyterHandler): + @web.authenticated + def get(self): + self.write("OK!") + + +class MockServerApp(ServerApp): + def initialize(self, argv=None): + self.default_url = "/tree" + super().initialize(argv) + self.web_app.add_handlers(".*$", [(self.base_url + "tree/?", TreeHandler)]) diff --git a/jupyterhub/tests/mockservice.py b/jupyterhub/tests/mockservice.py index 6194d844..2685cee4 100644 --- 
a/jupyterhub/tests/mockservice.py +++ b/jupyterhub/tests/mockservice.py @@ -15,17 +15,19 @@ Handlers and their purpose include: import json import os import pprint +import ssl import sys from urllib.parse import urlparse import requests -from tornado import httpserver -from tornado import ioloop -from tornado import web +from tornado import httpserver, ioloop, log, web +from tornado.httputil import url_concat -from jupyterhub.services.auth import HubAuthenticated -from jupyterhub.services.auth import HubOAuthCallbackHandler -from jupyterhub.services.auth import HubOAuthenticated +from jupyterhub.services.auth import ( + HubAuthenticated, + HubOAuthCallbackHandler, + HubOAuthenticated, +) from jupyterhub.utils import make_ssl_context @@ -60,24 +62,31 @@ class APIHandler(web.RequestHandler): class WhoAmIHandler(HubAuthenticated, web.RequestHandler): """Reply with the name of the user who made the request. - + Uses "deprecated" cookie login """ @web.authenticated def get(self): - self.write(self.get_current_user()) + self.write(json.dumps(self.get_current_user())) class OWhoAmIHandler(HubOAuthenticated, web.RequestHandler): """Reply with the name of the user who made the request. 
- + Uses OAuth login flow """ + def get_login_url(self): + login_url = super().get_login_url() + scopes = self.get_argument("request-scope", None) + if scopes is not None: + login_url = url_concat(login_url, {"scope": scopes}) + return login_url + @web.authenticated def get(self): - self.write(self.get_current_user()) + self.write(json.dumps(self.get_current_user())) def main(): @@ -103,7 +112,9 @@ def main(): ca = os.environ.get('JUPYTERHUB_SSL_CLIENT_CA') or '' if key and cert and ca: - ssl_context = make_ssl_context(key, cert, cafile=ca, check_hostname=False) + ssl_context = make_ssl_context( + key, cert, cafile=ca, purpose=ssl.Purpose.CLIENT_AUTH + ) server = httpserver.HTTPServer(app, ssl_options=ssl_context) server.listen(url.port, url.hostname) @@ -114,7 +125,9 @@ def main(): if __name__ == '__main__': - from tornado.options import parse_command_line + from tornado.options import options, parse_command_line parse_command_line() + options.logging = 'debug' + log.enable_pretty_logging() main() diff --git a/jupyterhub/tests/mocksu.py b/jupyterhub/tests/mocksu.py index c5714cff..e3683161 100644 --- a/jupyterhub/tests/mocksu.py +++ b/jupyterhub/tests/mocksu.py @@ -11,15 +11,12 @@ Handlers and their purpose include: - ArgsHandler: allowing retrieval of `sys.argv`. 
""" -import argparse import json import os import sys +from urllib.parse import urlparse -from tornado import httpserver -from tornado import ioloop -from tornado import log -from tornado import web +from tornado import httpserver, ioloop, log, web from tornado.options import options from ..utils import make_ssl_context @@ -36,7 +33,8 @@ class ArgsHandler(web.RequestHandler): self.write(json.dumps(sys.argv)) -def main(args): +def main(): + url = urlparse(os.environ["JUPYTERHUB_SERVICE_URL"]) options.logging = 'debug' log.enable_pretty_logging() app = web.Application( @@ -49,11 +47,16 @@ def main(args): ca = os.environ.get('JUPYTERHUB_SSL_CLIENT_CA') or '' if key and cert and ca: - ssl_context = make_ssl_context(key, cert, cafile=ca, check_hostname=False) + import ssl + + ssl_context = make_ssl_context( + key, cert, cafile=ca, purpose=ssl.Purpose.CLIENT_AUTH + ) + assert url.scheme == "https" server = httpserver.HTTPServer(app, ssl_options=ssl_context) - log.app_log.info("Starting mock singleuser server at 127.0.0.1:%s", args.port) - server.listen(args.port, '127.0.0.1') + log.app_log.info(f"Starting mock singleuser server at {url.hostname}:{url.port}") + server.listen(url.port, url.hostname) try: ioloop.IOLoop.instance().start() except KeyboardInterrupt: @@ -61,7 +64,4 @@ def main(args): if __name__ == '__main__': - parser = argparse.ArgumentParser() - parser.add_argument('--port', type=int) - args, extra = parser.parse_known_args() - main(args) + main() diff --git a/jupyterhub/tests/populate_db.py b/jupyterhub/tests/populate_db.py index 2b5c6007..33b5247b 100644 --- a/jupyterhub/tests/populate_db.py +++ b/jupyterhub/tests/populate_db.py @@ -4,8 +4,8 @@ Run with old versions of jupyterhub to test upgrade/downgrade used in test_db.py """ -import os from datetime import datetime +from functools import partial import jupyterhub from jupyterhub import orm @@ -17,6 +17,32 @@ def populate_db(url): if 'mysql' in url: connect_args['auth_plugin'] = 'mysql_native_password' 
db = orm.new_session_factory(url, connect_args=connect_args)() + + if jupyterhub.version_info >= (2,): + if ( + not db.query(orm.OAuthClient) + .filter_by(identifier="jupyterhub") + .one_or_none() + ): + # create the oauth client for jupyterhub itself + # this allows us to distinguish between orphaned tokens + # (failed cascade deletion) and tokens issued by the hub + # it has no client_secret, which means it cannot be used + # to make requests + client = orm.OAuthClient( + identifier="jupyterhub", + secret="", + redirect_uri="", + description="JupyterHub", + ) + db.add(client) + db.commit() + + from jupyterhub import roles + + for role in roles.get_default_roles(): + roles.create_role(db, role) + # create some users admin = orm.User(name='admin', admin=True) db.add(admin) @@ -62,32 +88,40 @@ def populate_db(url): db.commit() # create some oauth objects - if jupyterhub.version_info >= (0, 8): - # create oauth client - client = orm.OAuthClient(identifier='oauth-client') - db.add(client) - db.commit() - code = orm.OAuthCode(client_id=client.identifier) - db.add(code) - db.commit() - access_token = orm.OAuthAccessToken( - client_id=client.identifier, - user_id=user.id, + client = orm.OAuthClient(identifier='oauth-client') + db.add(client) + db.commit() + code = orm.OAuthCode(client_id=client.identifier) + db.add(code) + db.commit() + if jupyterhub.version_info < (2, 0): + Token = partial( + orm.OAuthAccessToken, grant_type=orm.GrantType.authorization_code, ) - db.add(access_token) - db.commit() + else: + Token = orm.APIToken + access_token = Token( + client_id=client.identifier, + user_id=user.id, + ) + if jupyterhub.version_info >= (2,): + if jupyterhub.version_info < (2, 2): + access_token.roles = [db.query(orm.Role).filter_by(name="server").one()] + else: + access_token.scopes = [f"read:users!user={user.name}"] + db.add(access_token) + db.commit() # set some timestamps added in 0.9 - if jupyterhub.version_info >= (0, 9): - assert user.created - assert admin.created 
- # set last_activity - user.last_activity = datetime.utcnow() - spawner = user.orm_spawners[''] - spawner.started = datetime.utcnow() - spawner.last_activity = datetime.utcnow() - db.commit() + assert user.created + assert admin.created + # set last_activity + user.last_activity = datetime.utcnow() + spawner = user.orm_spawners[''] + spawner.started = datetime.utcnow() + spawner.last_activity = datetime.utcnow() + db.commit() if __name__ == '__main__': diff --git a/.github/PULL_REQUEST_TEMPLATE/.keep b/jupyterhub/tests/selenium/__init__.py similarity index 100% rename from .github/PULL_REQUEST_TEMPLATE/.keep rename to jupyterhub/tests/selenium/__init__.py diff --git a/jupyterhub/tests/selenium/conftest.py b/jupyterhub/tests/selenium/conftest.py new file mode 100644 index 00000000..a68ce6e3 --- /dev/null +++ b/jupyterhub/tests/selenium/conftest.py @@ -0,0 +1,12 @@ +import pytest +from selenium import webdriver + + +@pytest.fixture() +def browser(): + options = webdriver.FirefoxOptions() + options.headless = True + driver = webdriver.Firefox(options=options) + yield driver + driver.close() + driver.quit() diff --git a/jupyterhub/tests/selenium/locators.py b/jupyterhub/tests/selenium/locators.py new file mode 100644 index 00000000..9a5657f7 --- /dev/null +++ b/jupyterhub/tests/selenium/locators.py @@ -0,0 +1,47 @@ +from selenium.webdriver.common.by import By + + +class LoginPageLocators: + """class for handling the login page locators""" + + FORM_LOGIN = (By.XPATH, '//*[@id="login-main"]/form') + SIGN_IN = (By.CLASS_NAME, 'auth-form-header') + ACCOUNT = (By.ID, "username_input") + PASSWORD = (By.ID, "password_input") + LOGIN_BUTTON = (By.ID, "login_submit") + LOGO = (By.ID, "jupyterhub-logo") + LOGO_LINK = (By.XPATH, '//*[@id="jupyterhub-logo"]/a') + LOGO_TITLE = (By.XPATH, '//*[@id="jupyterhub-logo"]/a/img') + ERROR_INVALID_CREDANTIALS = (By.CSS_SELECTOR, "p.login_error") + PAGE_TITLE = 'JupyterHub' + ERROR_MESSAGES_LOGIN = "Invalid username or password" + ERROR_403 
= (By.CLASS_NAME, "error") + ERROR_MESSAGES_403 = ( + "Action is not authorized with current scopes; requires any of [admin-ui]" + ) + + +class HomePageLocators: + """class for handling the home page locators""" + + LINK_HOME_BAR = (By.CSS_SELECTOR, "div.container-fluid a") + LINK_HOME = (By.CSS_SELECTOR, "a[href*='/hub/home']") + LINK_TOKEN = (By.CSS_SELECTOR, "a[href*='/hub/token']") + BUTTON_LOGOUT = (By.ID, "logout") + BUTTON_START_SERVER = (By.ID, "start") + BUTTON_STOP_SERVER = (By.ID, "stop") + + +class TokenPageLocators: + """class for handling the Token page locators""" + + BUTTON_API_REQ = (By.XPATH, '//*[@id="request-token-form"]/div[1]/button') + INPUT_TOKEN = (By.ID, "token-note") + LIST_EXP_TOKEN_FIELD = (By.ID, "token-expiration-seconds") + LIST_EXP_TOKEN_OPT = (By.XPATH, '//option') + NEVER_EXP = (By.ID, "Never") + DAY1 = (By.ID, "3600") + PANEL_AREA = (By.ID, 'token-area') + PANEL_TOKEN = (By.CLASS_NAME, 'panel-heading') + RESULT_TOKEN = (By.ID, 'token-result') + TEXT = "Copy this token. You won't be able to see it again, but you can always come back here to get a new one." 
diff --git a/jupyterhub/tests/selenium/test_browser.py b/jupyterhub/tests/selenium/test_browser.py new file mode 100644 index 00000000..d02348cb --- /dev/null +++ b/jupyterhub/tests/selenium/test_browser.py @@ -0,0 +1,237 @@ +import asyncio +from functools import partial + +import pytest +from selenium.common.exceptions import NoSuchElementException +from selenium.webdriver.support import expected_conditions as EC +from selenium.webdriver.support.ui import WebDriverWait +from tornado.escape import url_escape +from tornado.httputil import url_concat + +from jupyterhub.tests.selenium.locators import LoginPageLocators +from jupyterhub.utils import exponential_backoff + +from ...utils import url_path_join +from ..utils import public_host, public_url, ujoin + +pytestmark = pytest.mark.selenium + + +async def webdriver_wait(driver, condition, timeout=30): + """an async wrapper for selenium's wait function; + the condition is one of selenium's expected_conditions""" + + return await exponential_backoff( + partial(condition, driver), + timeout=timeout, + fail_message=f"WebDriver condition not met: {condition}", + ) + + +def in_thread(f, *args, **kwargs): + """Run a function in a background thread + + via current event loop's run_in_executor + + Returns asyncio.Future + """ + + return asyncio.get_event_loop().run_in_executor(None, partial(f, *args, **kwargs)) + + +async def open_url(app, browser, url="login"): + """open the given hub page (default: the login page) in the browser""" + + url = url_path_join(public_host(app), app.hub.base_url, url) + await in_thread(browser.get, url) + return url + + +def click(browser, by_locator): + """wait for element to be visible, then click on it""" + + WebDriverWait(browser, 10).until( + EC.visibility_of_element_located(by_locator) + ).click() + + +def is_displayed(browser, by_locator): + """Whether the element is visible or not""" + + return ( + WebDriverWait(browser, 10) + .until(EC.visibility_of_element_located(by_locator)) + .is_displayed() + ) 
+ + +def send_text(browser, by_locator, text): + """wait for element to be presented, then put the text in it""" + + return ( + WebDriverWait(browser, 10) + .until(EC.presence_of_element_located(by_locator)) + .send_keys(text) + ) + + +def clear(browser, by_locator): + """wait for element to be presented, then clear the text in it""" + + return ( + WebDriverWait(browser, 10) + .until(EC.presence_of_element_located(by_locator)) + .clear() + ) + + +# LOGIN PAGE +async def test_elements_of_login_page(app, browser): + await open_url(app, browser) + assert is_displayed(browser, LoginPageLocators.LOGO) + logo_text = browser.find_element(*LoginPageLocators.LOGO).get_attribute("innerHTML") + + +async def login(browser, user, pass_w): + # fill in username field + send_text(browser, LoginPageLocators.ACCOUNT, user) + # fill in password field + send_text(browser, LoginPageLocators.PASSWORD, pass_w) + # click submit button + click(browser, LoginPageLocators.LOGIN_BUTTON) + await webdriver_wait(browser, EC.url_changes(browser.current_url)) + + +async def test_submit_login_form(app, browser): + user = "test_user" + pass_w = "test_user" + + await open_url(app, browser, url="login") + redirected_url = ujoin(public_url(app), f"/user/{user}/") + await login(browser, user, pass_w) + # verify url contains username + if f"/user/{user}/" not in browser.current_url: + await webdriver_wait(browser, EC.url_to_be(redirected_url)) + else: + pass + assert browser.current_url == redirected_url + + +@pytest.mark.parametrize( + 'url, params, redirected_url, form_action', + [ + ( + # spawn?param=value + # will encode given parameters for an unauthenticated URL in the next url + # the next parameter will contain the app base URL (replaces BASE_URL in tests) + 'spawn', + [('param', 'value')], + '/hub/login?next={{BASE_URL}}hub%2Fspawn%3Fparam%3Dvalue', + '/hub/login?next={{BASE_URL}}hub%2Fspawn%3Fparam%3Dvalue', + ), + ( + # login?param=fromlogin&next=encoded(/hub/spawn?param=value) + # will drop 
parameters given to the login page, passing only the next url + 'login', + [('param', 'fromlogin'), ('next', '/hub/spawn?param=value')], + '/hub/login?param=fromlogin&next=%2Fhub%2Fspawn%3Fparam%3Dvalue', + '/hub/login?next=%2Fhub%2Fspawn%3Fparam%3Dvalue', + ), + ( + # login?param=value&anotherparam=anothervalue + # will drop parameters given to the login page, and use an empty next url + 'login', + [('param', 'value'), ('anotherparam', 'anothervalue')], + '/hub/login?param=value&anotherparam=anothervalue', + '/hub/login?next=', + ), + ( + # login + # simplest case, accessing the login URL, gives an empty next url + 'login', + [], + '/hub/login', + '/hub/login?next=', + ), + ], +) +async def test_open_url_login( + app, + browser, + url, + params, + redirected_url, + form_action, + user='test_user', + pass_w='test_user', +): + url = url_path_join(public_host(app), app.hub.base_url, url) + url_new = url_concat(url, params) + await in_thread(browser.get, url_new) + redirected_url = redirected_url.replace('{{BASE_URL}}', url_escape(app.base_url)) + form_action = form_action.replace('{{BASE_URL}}', url_escape(app.base_url)) + form = browser.find_element(*LoginPageLocators.FORM_LOGIN).get_attribute('action') + + # verify title / url + assert browser.title == LoginPageLocators.PAGE_TITLE + assert form.endswith(form_action) + # login in with params + await login(browser, user, pass_w) + # verify next url + params + next_url = browser.current_url + if url_escape(app.base_url) in form_action: + assert next_url.endswith("param=value") + elif "next=%2Fhub" in form_action: + assert next_url.endswith("spawn?param=value") + assert f"user/{user}/" not in next_url + else: + if not next_url.endswith(f"/user/{user}/"): + await webdriver_wait( + browser, EC.url_to_be(ujoin(public_url(app), f"/user/{user}/")) + ) + next_url = browser.current_url + assert next_url.endswith(f"/user/{user}/") + + +@pytest.mark.parametrize( + "user, pass_w", + [ + (" ", ""), + ("user", ""), + (" ", 
"password"), + ("user", "password"), + ], +) +async def test_invalid_credantials(app, browser, user, pass_w): + await open_url(app, browser) + await login(browser, user, pass_w) + await asyncio.sleep(0.1) + """wait briefly so the reflected login error has time to render""" + try: + error = browser.find_element(*LoginPageLocators.ERROR_INVALID_CREDANTIALS) + await webdriver_wait(browser, EC.visibility_of(error)) + except NoSuchElementException: + error = None + + # verify the error message and that the url still equals the login page + assert LoginPageLocators.ERROR_MESSAGES_LOGIN == error.text + assert 'hub/login' in browser.current_url + + +# HOME PAGE +async def open_home_page(app, browser, user="test_user", pass_w="test_user"): + url = url_path_join(public_host(app), app.hub.base_url, "/login?next=/hub/home") + await in_thread(browser.get, url) + redirected_url = url_path_join(public_host(app), app.base_url, '/hub/home') + await login(browser, user, pass_w) + await in_thread(browser.get, redirected_url) + + +# TOKEN PAGE +async def open_token_page(app, browser, user="test_user", pass_w="test_user"): + + url = url_path_join(public_host(app), app.hub.base_url, "/login?next=/hub/token") + await in_thread(browser.get, url) + redirected_url = url_path_join(public_host(app), app.base_url, '/hub/token') + await login(browser, user, pass_w) + await in_thread(browser.get, redirected_url) diff --git a/jupyterhub/tests/test_api.py b/jupyterhub/tests/test_api.py index aedb4426..2bd8c811 100644 --- a/jupyterhub/tests/test_api.py +++ b/jupyterhub/tests/test_api.py @@ -4,30 +4,30 @@ import json import re import sys import uuid -from concurrent.futures import Future -from datetime import datetime -from datetime import timedelta +from datetime import datetime, timedelta from unittest import mock -from urllib.parse import quote -from urllib.parse import urlparse +from urllib.parse import quote, urlparse, urlunparse -from async_generator import async_generator -from async_generator import yield_ -from 
pytest import mark -from tornado import gen +from pytest import fixture, mark +from tornado.httputil import url_concat import jupyterhub + from .. import orm +from ..apihandlers.base import PAGINATION_MEDIA_TYPE +from ..objects import Server from ..utils import url_path_join as ujoin from ..utils import utcnow -from .mocking import public_host -from .mocking import public_url -from .utils import add_user -from .utils import api_request -from .utils import async_requests -from .utils import auth_header -from .utils import find_user - +from .conftest import new_username +from .utils import ( + add_user, + api_request, + async_requests, + auth_header, + find_user, + public_host, + public_url, +) # -------------------- # Authentication tests @@ -64,49 +64,139 @@ async def test_auth_api(app): assert r.status_code == 403 -async def test_referer_check(app): +@mark.parametrize( + "content_type, status", + [ + ("text/plain", 403), + # accepted, but invalid + ("application/json; charset=UTF-8", 400), + ], +) +async def test_post_content_type(app, content_type, status): url = ujoin(public_host(app), app.hub.base_url) host = urlparse(url).netloc + # add admin user user = find_user(app.db, 'admin') if user is None: user = add_user(app.db, name='admin', admin=True) cookies = await app.login_user('admin') - r = await api_request( - app, 'users', headers={'Authorization': '', 'Referer': 'null'}, cookies=cookies - ) - assert r.status_code == 403 - r = await api_request( app, 'users', + method='post', + data='{}', headers={ - 'Authorization': '', - 'Referer': 'http://attack.com/csrf/vulnerability', + "Authorization": "", + "Content-Type": content_type, }, cookies=cookies, ) - assert r.status_code == 403 + assert r.status_code == status + + +@mark.parametrize( + "host, referer, extraheaders, status", + [ + ('$host', '$url', {}, 200), + (None, None, {}, 200), + (None, 'null', {}, 403), + (None, 'http://attack.com/csrf/vulnerability', {}, 403), + ('$host', {"path": "/user/someuser"}, 
{}, 403), + ('$host', {"path": "{path}/foo/bar/subpath"}, {}, 200), + # mismatch host + ("mismatch.com", "$url", {}, 403), + # explicit host, matches + ("fake.example", {"netloc": "fake.example"}, {}, 200), + # explicit port, matches implicit port + ("fake.example:80", {"netloc": "fake.example"}, {}, 200), + # explicit port, mismatch + ("fake.example:81", {"netloc": "fake.example"}, {}, 403), + # implicit ports, mismatch proto + ("fake.example", {"netloc": "fake.example", "scheme": "https"}, {}, 403), + # explicit ports, match + ("fake.example:81", {"netloc": "fake.example:81"}, {}, 200), + # Test proxy protocol defined headers taken into account by utils.get_browser_protocol + ( + "fake.example", + {"netloc": "fake.example", "scheme": "https"}, + {'X-Scheme': 'https'}, + 200, + ), + ( + "fake.example", + {"netloc": "fake.example", "scheme": "https"}, + {'X-Forwarded-Proto': 'https'}, + 200, + ), + ( + "fake.example", + {"netloc": "fake.example", "scheme": "https"}, + { + 'Forwarded': 'host=fake.example;proto=https,for=1.2.34;proto=http', + 'X-Scheme': 'http', + }, + 200, + ), + ( + "fake.example", + {"netloc": "fake.example", "scheme": "https"}, + { + 'Forwarded': 'host=fake.example;proto=http,for=1.2.34;proto=http', + 'X-Scheme': 'https', + }, + 403, + ), + ("fake.example", {"netloc": "fake.example"}, {'X-Scheme': 'https'}, 403), + ("fake.example", {"netloc": "fake.example"}, {'X-Scheme': 'https, http'}, 403), + ], +) +async def test_cors_check(request, app, host, referer, extraheaders, status): + url = ujoin(public_host(app), app.hub.base_url) + real_host = urlparse(url).netloc + if host == "$host": + host = real_host + + if referer == '$url': + referer = url + elif isinstance(referer, dict): + parsed_url = urlparse(url) + # apply {} + url_ns = {key: getattr(parsed_url, key) for key in parsed_url._fields} + for key, value in referer.items(): + referer[key] = value.format(**url_ns) + referer = urlunparse(parsed_url._replace(**referer)) + + # disable default auth 
header, cors is for cookie auth + headers = {"Authorization": ""} + if host is not None: + headers['X-Forwarded-Host'] = host + if referer is not None: + headers['Referer'] = referer + headers.update(extraheaders) + + # add admin user + user = find_user(app.db, 'admin') + if user is None: + user = add_user(app.db, name='admin', admin=True) + cookies = await app.login_user('admin') + + # test custom forwarded_host_header behavior + app.forwarded_host_header = 'X-Forwarded-Host' + + # reset the config after the test to avoid leaking state + def reset_header(): + app.forwarded_host_header = "" + + request.addfinalizer(reset_header) r = await api_request( app, 'users', - headers={'Authorization': '', 'Referer': url, 'Host': host}, + headers=headers, cookies=cookies, ) - assert r.status_code == 200 - - r = await api_request( - app, - 'users', - headers={ - 'Authorization': '', - 'Referer': ujoin(url, 'foo/bar/baz/bat'), - 'Host': host, - }, - cookies=cookies, - ) - assert r.status_code == 200 + assert r.status_code == status # -------------- @@ -132,6 +222,8 @@ def normalize_user(user): """ for key in ('created', 'last_activity'): user[key] = normalize_timestamp(user[key]) + if 'roles' in user: + user['roles'] = sorted(user['roles']) if 'servers' in user: for server in user['servers'].values(): for key in ('started', 'last_activity'): @@ -153,9 +245,9 @@ def fill_user(model): """ model.setdefault('server', None) model.setdefault('kind', 'user') + model.setdefault('roles', []) model.setdefault('groups', []) model.setdefault('admin', False) - model.setdefault('server', None) model.setdefault('pending', None) model.setdefault('created', TIMESTAMP) model.setdefault('last_activity', TIMESTAMP) @@ -167,22 +259,252 @@ TIMESTAMP = normalize_timestamp(datetime.now().isoformat() + 'Z') @mark.user +@mark.role async def test_get_users(app): db = app.db - r = await api_request(app, 'users') + + r = await api_request(app, 'users', headers=auth_header(db, 'admin')) assert 
r.status_code == 200 users = sorted(r.json(), key=lambda d: d['name']) users = [normalize_user(u) for u in users] + user_model = { + 'name': 'user', + 'admin': False, + 'roles': ['user'], + 'auth_state': None, + } assert users == [ - fill_user({'name': 'admin', 'admin': True}), - fill_user({'name': 'user', 'admin': False, 'last_activity': None}), + fill_user( + { + 'name': 'admin', + 'admin': True, + 'roles': ['admin', 'user'], + 'auth_state': None, + } + ), + fill_user(user_model), ] - r = await api_request(app, 'users', headers=auth_header(db, 'user')) assert r.status_code == 403 +@fixture +def default_page_limit(app): + """Set and return low default page size for testing""" + n = 10 + with mock.patch.dict(app.tornado_settings, {"api_page_default_limit": n}): + yield n + + +@fixture +def max_page_limit(app): + """Set and return low max page size for testing""" + n = 20 + with mock.patch.dict(app.tornado_settings, {"api_page_max_limit": n}): + yield n + + +@mark.user +@mark.role +@mark.parametrize( + "n, offset, limit, accepts_pagination, expected_count", + [ + (10, None, None, False, 10), + (10, None, None, True, 10), + (10, 5, None, True, 5), + (10, 5, None, False, 5), + (10, 5, 1, True, 1), + (10, 10, 10, True, 0), + ( # default page limit, pagination expected + 30, + None, + None, + True, + 'default', + ), + ( + # default max page limit, pagination not expected + 30, + None, + None, + False, + 'max', + ), + ( + # limit exceeded + 30, + None, + 500, + False, + 'max', + ), + ], +) +async def test_get_users_pagination( + app, + n, + offset, + limit, + accepts_pagination, + expected_count, + default_page_limit, + max_page_limit, +): + db = app.db + + if expected_count == 'default': + expected_count = default_page_limit + elif expected_count == 'max': + expected_count = max_page_limit + # populate users + usernames = [] + + existing_users = db.query(orm.User).order_by(orm.User.id.asc()) + usernames.extend(u.name for u in existing_users) + + for i in range(n - 
existing_users.count()): + name = new_username() + usernames.append(name) + add_user(db, app, name=name) + print(f"{db.query(orm.User).count()} total users") + + url = 'users' + params = {} + if offset: + params['offset'] = offset + if limit: + params['limit'] = limit + url = url_concat(url, params) + headers = auth_header(db, 'admin') + if accepts_pagination: + headers['Accept'] = PAGINATION_MEDIA_TYPE + r = await api_request(app, url, headers=headers) + assert r.status_code == 200 + response = r.json() + if accepts_pagination: + assert set(response) == { + "items", + "_pagination", + } + pagination = response["_pagination"] + users = response["items"] + else: + users = response + assert len(users) == expected_count + expected_usernames = usernames + if offset: + expected_usernames = expected_usernames[offset:] + expected_usernames = expected_usernames[:expected_count] + + got_usernames = [u['name'] for u in users] + assert got_usernames == expected_usernames + + +@mark.user +@mark.parametrize( + "state", + ("inactive", "active", "ready", "invalid"), +) +async def test_get_users_state_filter(app, state): + db = app.db + + # has_one_active: one active, one inactive, zero ready + has_one_active = add_user(db, app=app, name='has_one_active') + # has_two_active: two active, ready servers + has_two_active = add_user(db, app=app, name='has_two_active') + # has_two_inactive: two spawners, neither active + has_two_inactive = add_user(db, app=app, name='has_two_inactive') + # has_zero: no Spawners registered at all + has_zero = add_user(db, app=app, name='has_zero') + + test_usernames = { + "has_one_active", + "has_two_active", + "has_two_inactive", + "has_zero", + } + + user_states = { + "inactive": ["has_two_inactive", "has_zero"], + "ready": ["has_two_active"], + "active": ["has_one_active", "has_two_active"], + "invalid": [], + } + expected = user_states[state] + + def add_spawner(user, name='', active=True, ready=True): + """Add a spawner in a requested state + + If 
active, should turn up in an active query + If active and ready, should turn up in a ready query + If not active, should turn up in an inactive query + """ + spawner = user.spawners[name] + db.commit() + if active: + orm_server = orm.Server() + db.add(orm_server) + db.commit() + spawner.server = Server(orm_server=orm_server) + db.commit() + if not ready: + spawner._spawn_pending = True + return spawner + + for name in ("", "secondary"): + add_spawner(has_two_active, name, active=True) + add_spawner(has_two_inactive, name, active=False) + + add_spawner(has_one_active, active=True, ready=False) + add_spawner(has_one_active, "inactive", active=False) + + r = await api_request(app, f'users?state={state}') + if state == "invalid": + assert r.status_code == 400 + return + assert r.status_code == 200 + + usernames = sorted(u["name"] for u in r.json() if u["name"] in test_usernames) + assert usernames == expected + + +@mark.user +async def test_get_users_name_filter(app): + db = app.db + + add_user(db, app=app, name='q') + add_user(db, app=app, name='qr') + add_user(db, app=app, name='qrs') + add_user(db, app=app, name='qrst') + added_usernames = {'q', 'qr', 'qrs', 'qrst'} + + r = await api_request(app, 'users') + assert r.status_code == 200 + response_users = [u.get("name") for u in r.json()] + assert added_usernames.intersection(response_users) == added_usernames + + r = await api_request(app, 'users?name_filter=q') + assert r.status_code == 200 + response_users = [u.get("name") for u in r.json()] + assert response_users == ['q', 'qr', 'qrs', 'qrst'] + + r = await api_request(app, 'users?name_filter=qr') + assert r.status_code == 200 + response_users = [u.get("name") for u in r.json()] + assert response_users == ['qr', 'qrs', 'qrst'] + + r = await api_request(app, 'users?name_filter=qrs') + assert r.status_code == 200 + response_users = [u.get("name") for u in r.json()] + assert response_users == ['qrs', 'qrst'] + + r = await api_request(app, 'users?name_filter=qrst') + 
assert r.status_code == 200 + response_users = [u.get("name") for u in r.json()] + assert response_users == ['qrst'] + + @mark.user async def test_get_self(app): db = app.db @@ -198,25 +520,44 @@ async def test_get_self(app): oauth_client = orm.OAuthClient(identifier='eurydice') db.add(oauth_client) db.commit() - oauth_token = orm.OAuthAccessToken( + oauth_token = orm.APIToken( user=u.orm_user, - client=oauth_client, + oauth_client=oauth_client, token=token, - grant_type=orm.GrantType.authorization_code, ) db.add(oauth_token) db.commit() - r = await api_request(app, 'user', headers={'Authorization': 'token ' + token}) + r = await api_request( + app, + 'user', + headers={'Authorization': 'token ' + token}, + ) r.raise_for_status() model = r.json() assert model['name'] == u.name # invalid auth gets 403 - r = await api_request(app, 'user', headers={'Authorization': 'token notvalid'}) + r = await api_request( + app, + 'user', + headers={'Authorization': 'token notvalid'}, + ) assert r.status_code == 403 +async def test_get_self_service(app, mockservice): + r = await api_request( + app, "user", headers={"Authorization": f"token {mockservice.api_token}"} + ) + r.raise_for_status() + service_info = r.json() + + assert service_info['kind'] == 'service' + assert service_info['name'] == mockservice.name + + @mark.user +@mark.role async def test_add_user(app): db = app.db name = 'newuser' @@ -226,16 +567,54 @@ async def test_add_user(app): assert user is not None assert user.name == name assert not user.admin + # assert newuser has default 'user' role + assert orm.Role.find(db, 'user') in user.roles + assert orm.Role.find(db, 'admin') not in user.roles @mark.user +@mark.role async def test_get_user(app): name = 'user' - r = await api_request(app, 'users', name) - assert r.status_code == 200 + # get own model + r = await api_request(app, 'users', name, headers=auth_header(app.db, name)) + r.raise_for_status() + # admin request + r = await api_request( + app, + 'users', + name, 
+ ) + r.raise_for_status() user = normalize_user(r.json()) - assert user == fill_user({'name': name, 'auth_state': None}) + assert user == fill_user({'name': name, 'roles': ['user'], 'auth_state': None}) + + # admin request, no such user + r = await api_request( + app, + 'users', + 'nosuchuser', + ) + assert r.status_code == 404 + + # unauthorized request, no such user + r = await api_request( + app, + 'users', + 'nosuchuser', + headers=auth_header(app.db, name), + ) + assert r.status_code == 404 + + # unauthorized request for existing user + r = await api_request( + app, + 'users', + 'admin', + headers=auth_header(app.db, name), + ) + assert r.status_code == 404 @mark.user @@ -263,6 +642,7 @@ async def test_add_multi_user_invalid(app): @mark.user +@mark.role async def test_add_multi_user(app): db = app.db names = ['a', 'b'] @@ -279,6 +659,9 @@ async def test_add_multi_user(app): assert user is not None assert user.name == name assert not user.admin + # assert default 'user' role added + assert orm.Role.find(db, 'user') in user.roles + assert orm.Role.find(db, 'admin') not in user.roles # try to create the same users again r = await api_request( @@ -299,6 +682,7 @@ async def test_add_multi_user(app): @mark.user +@mark.role async def test_add_multi_user_admin(app): db = app.db names = ['c', 'd'] @@ -318,6 +702,8 @@ async def test_add_multi_user_admin(app): assert user is not None assert user.name == name assert user.admin + assert orm.Role.find(db, 'user') in user.roles + assert orm.Role.find(db, 'admin') in user.roles @mark.user @@ -343,6 +729,7 @@ async def test_add_user_duplicate(app): @mark.user +@mark.role async def test_add_admin(app): db = app.db name = 'newadmin' @@ -354,6 +741,9 @@ async def test_add_admin(app): assert user is not None assert user.name == name assert user.admin + # assert newadmin has default 'admin' role + assert orm.Role.find(db, 'user') in user.roles + assert orm.Role.find(db, 'admin') in user.roles @mark.user @@ -365,6 +755,7 @@ async 
def test_delete_user(app): @mark.user +@mark.role async def test_make_admin(app): db = app.db name = 'admin2' @@ -374,15 +765,20 @@ async def test_make_admin(app): assert user is not None assert user.name == name assert not user.admin + assert orm.Role.find(db, 'user') in user.roles + assert orm.Role.find(db, 'admin') not in user.roles r = await api_request( app, 'users', name, method='patch', data=json.dumps({'admin': True}) ) + assert r.status_code == 200 user = find_user(db, name) assert user is not None assert user.name == name assert user.admin + assert orm.Role.find(db, 'user') in user.roles + assert orm.Role.find(db, 'admin') in user.roles @mark.user @@ -413,7 +809,6 @@ async def test_user_set_auth_state(app, auth_state_enabled): assert user.name == name user_auth_state = await user.get_auth_state() assert user_auth_state is None - r = await api_request( app, 'users', @@ -422,7 +817,6 @@ async def test_user_set_auth_state(app, auth_state_enabled): data=json.dumps({'auth_state': auth_state}), headers=auth_header(app.db, name), ) - assert r.status_code == 403 user_auth_state = await user.get_auth_state() assert user_auth_state is None @@ -493,10 +887,17 @@ async def test_spawn(app): r = await async_requests.get(ujoin(url, 'args'), **kwargs) assert r.status_code == 200 argv = r.json() - assert '--port' in ' '.join(argv) + assert '--port' not in ' '.join(argv) + # we pass no CLI args anymore: + assert len(argv) == 1 r = await async_requests.get(ujoin(url, 'env'), **kwargs) env = r.json() - for expected in ['JUPYTERHUB_USER', 'JUPYTERHUB_BASE_URL', 'JUPYTERHUB_API_TOKEN']: + for expected in [ + 'JUPYTERHUB_USER', + 'JUPYTERHUB_BASE_URL', + 'JUPYTERHUB_API_TOKEN', + 'JUPYTERHUB_SERVICE_URL', + ]: assert expected in env if app.subdomain_host: assert env['JUPYTERHUB_HOST'] == app.subdomain_host @@ -614,7 +1015,7 @@ async def test_slow_spawn(app, no_patience, slow_spawn): async def wait_spawn(): while not app_user.running: - await gen.sleep(0.1) + await 
asyncio.sleep(0.1) await wait_spawn() assert not app_user.spawner._spawn_pending @@ -623,7 +1024,7 @@ async def test_slow_spawn(app, no_patience, slow_spawn): async def wait_stop(): while app_user.spawner._stop_pending: - await gen.sleep(0.1) + await asyncio.sleep(0.1) r = await api_request(app, 'users', name, 'server', method='delete') r.raise_for_status() @@ -657,13 +1058,13 @@ async def test_never_spawn(app, no_patience, never_spawn): assert app.users.count_active_users()['pending'] == 1 while app_user.spawner.pending: - await gen.sleep(0.1) + await asyncio.sleep(0.1) print(app_user.spawner.pending) assert not app_user.spawner._spawn_pending status = await app_user.spawner.poll() assert status is not None - # failed spawn should decrements pending count + # failed spawn should decrement pending count assert app.users.count_active_users()['pending'] == 0 @@ -672,9 +1073,21 @@ async def test_bad_spawn(app, bad_spawn): name = 'prim' user = add_user(db, app=app, name=name) r = await api_request(app, 'users', name, 'server', method='post') + # check that we don't re-use spawners that failed + user.spawners[''].reused = True assert r.status_code == 500 assert app.users.count_active_users()['pending'] == 0 + r = await api_request(app, 'users', name, 'server', method='post') + # check that we don't re-use spawners that failed + spawner = user.spawners[''] + assert not getattr(spawner, 'reused', False) + + +async def test_spawn_nosuch_user(app): + r = await api_request(app, 'users', "nosuchuser", 'server', method='post') + assert r.status_code == 404 + async def test_slow_bad_spawn(app, no_patience, slow_bad_spawn): db = app.db @@ -683,7 +1096,7 @@ async def test_slow_bad_spawn(app, no_patience, slow_bad_spawn): r = await api_request(app, 'users', name, 'server', method='post') r.raise_for_status() while user.spawner.pending: - await gen.sleep(0.1) + await asyncio.sleep(0.1) # spawn failed assert not user.running assert app.users.count_active_users()['pending'] == 0 @@ 
-722,7 +1135,7 @@ async def test_progress(request, app, no_patience, slow_spawn): url = app_user.url assert evt == { 'progress': 100, - 'message': 'Server ready at {}'.format(url), + 'message': f'Server ready at {url}', 'html_message': 'Server ready at {0}'.format(url), 'url': url, 'ready': True, @@ -819,32 +1232,12 @@ async def test_progress_bad_slow(request, app, no_patience, slow_bad_spawn): } -@async_generator async def progress_forever(): """progress function that yields messages forever""" for i in range(1, 10): - await yield_({'progress': i, 'message': 'Stage %s' % i}) + yield {'progress': i, 'message': 'Stage %s' % i} # wait a long time before the next event - await gen.sleep(10) - - -if sys.version_info >= (3, 6): - # additional progress_forever defined as native - # async generator - # to test for issues with async_generator wrappers - exec( - """ -async def progress_forever_native(): - for i in range(1, 10): - yield { - 'progress': i, - 'message': 'Stage %s' % i, - } - # wait a long time before the next event - await gen.sleep(10) -""", - globals(), - ) + await asyncio.sleep(10) async def test_spawn_progress_cutoff(request, app, no_patience, slow_spawn): @@ -855,11 +1248,7 @@ async def test_spawn_progress_cutoff(request, app, no_patience, slow_spawn): db = app.db name = 'geddy' app_user = add_user(db, app=app, name=name) - if sys.version_info >= (3, 6): - # Python >= 3.6, try native async generator - app_user.spawner.progress = globals()['progress_forever_native'] - else: - app_user.spawner.progress = progress_forever + app_user.spawner.progress = progress_forever app_user.spawner.delay = 1 r = await api_request(app, 'users', name, 'server', method='post') @@ -886,8 +1275,8 @@ async def test_spawn_limit(app, no_patience, slow_spawn, request): # start two pending spawns names = ['ykka', 'hjarka'] users = [add_user(db, app=app, name=name) for name in names] - users[0].spawner._start_future = Future() - users[1].spawner._start_future = Future() + 
users[0].spawner._start_future = asyncio.Future() + users[1].spawner._start_future = asyncio.Future() for name in names: await api_request(app, 'users', name, 'server', method='post') assert app.users.count_active_users()['pending'] == 2 @@ -895,7 +1284,7 @@ async def test_spawn_limit(app, no_patience, slow_spawn, request): # ykka and hjarka's spawns are both pending. Essun should fail with 429 name = 'essun' user = add_user(db, app=app, name=name) - user.spawner._start_future = Future() + user.spawner._start_future = asyncio.Future() r = await api_request(app, 'users', name, 'server', method='post') assert r.status_code == 429 @@ -903,7 +1292,7 @@ async def test_spawn_limit(app, no_patience, slow_spawn, request): users[0].spawner._start_future.set_result(None) # wait for ykka to finish while not users[0].running: - await gen.sleep(0.1) + await asyncio.sleep(0.1) assert app.users.count_active_users()['pending'] == 1 r = await api_request(app, 'users', name, 'server', method='post') @@ -914,7 +1303,7 @@ async def test_spawn_limit(app, no_patience, slow_spawn, request): for user in users[1:]: user.spawner._start_future.set_result(None) while not all(u.running for u in users): - await gen.sleep(0.1) + await asyncio.sleep(0.1) # everybody's running, pending count should be back to 0 assert app.users.count_active_users()['pending'] == 0 @@ -923,7 +1312,7 @@ async def test_spawn_limit(app, no_patience, slow_spawn, request): r = await api_request(app, 'users', u.name, 'server', method='delete') r.raise_for_status() while any(u.spawner.active for u in users): - await gen.sleep(0.1) + await asyncio.sleep(0.1) @mark.slow @@ -1001,7 +1390,7 @@ async def test_start_stop_race(app, no_patience, slow_spawn): r = await api_request(app, 'users', user.name, 'server', method='delete') assert r.status_code == 400 while not spawner.ready: - await gen.sleep(0.1) + await asyncio.sleep(0.1) spawner.delay = 3 # stop the spawner @@ -1009,7 +1398,7 @@ async def test_start_stop_race(app, 
no_patience, slow_spawn): assert r.status_code == 202 assert spawner.pending == 'stop' # make sure we get past deleting from the proxy - await gen.sleep(1) + await asyncio.sleep(1) # additional stops while stopping shouldn't trigger a new stop with mock.patch.object(spawner, 'stop') as m: r = await api_request(app, 'users', user.name, 'server', method='delete') @@ -1021,7 +1410,7 @@ async def test_start_stop_race(app, no_patience, slow_spawn): assert r.status_code == 400 while spawner.active: - await gen.sleep(0.1) + await asyncio.sleep(0.1) # start after stop is okay r = await api_request(app, 'users', user.name, 'server', method='post') assert r.status_code == 202 @@ -1034,6 +1423,17 @@ async def test_get_proxy(app): assert list(reply.keys()) == [app.hub.routespec] +@mark.parametrize("offset", (0, 1)) +async def test_get_proxy_pagination(app, offset): + r = await api_request( + app, f'proxy?offset={offset}', headers={"Accept": PAGINATION_MEDIA_TYPE} + ) + r.raise_for_status() + reply = r.json() + assert set(reply) == {"items", "_pagination"} + assert list(reply["items"].keys()) == [app.hub.routespec][offset:] + + async def test_cookie(app): db = app.db name = 'patience' @@ -1082,83 +1482,20 @@ async def test_check_token(app): assert r.status_code == 404 -@mark.parametrize("headers, status", [({}, 200), ({'Authorization': 'token bad'}, 403)]) +@mark.parametrize("headers, status", [({}, 404), ({'Authorization': 'token bad'}, 404)]) async def test_get_new_token_deprecated(app, headers, status): # request a new token r = await api_request( app, 'authorizations', 'token', method='post', headers=headers ) assert r.status_code == status - if status != 200: - return - reply = r.json() - assert 'token' in reply - r = await api_request(app, 'authorizations', 'token', reply['token']) - r.raise_for_status() - reply = r.json() - assert reply['name'] == 'admin' - - -async def test_token_formdata_deprecated(app): - """Create a token for a user with formdata and no auth 
header""" - data = {'username': 'fake', 'password': 'fake'} - r = await api_request( - app, - 'authorizations', - 'token', - method='post', - data=json.dumps(data) if data else None, - noauth=True, - ) - assert r.status_code == 200 - reply = r.json() - assert 'token' in reply - r = await api_request(app, 'authorizations', 'token', reply['token']) - r.raise_for_status() - reply = r.json() - assert reply['name'] == data['username'] - - -@mark.parametrize( - "as_user, for_user, status", - [ - ('admin', 'other', 200), - ('admin', 'missing', 400), - ('user', 'other', 403), - ('user', 'user', 200), - ], -) -async def test_token_as_user_deprecated(app, as_user, for_user, status): - # ensure both users exist - u = add_user(app.db, app, name=as_user) - if for_user != 'missing': - add_user(app.db, app, name=for_user) - data = {'username': for_user} - headers = {'Authorization': 'token %s' % u.new_api_token()} - r = await api_request( - app, - 'authorizations', - 'token', - method='post', - data=json.dumps(data), - headers=headers, - ) - assert r.status_code == status - reply = r.json() - if status != 200: - return - assert 'token' in reply - r = await api_request(app, 'authorizations', 'token', reply['token']) - r.raise_for_status() - reply = r.json() - assert reply['name'] == data['username'] @mark.parametrize( "headers, status, note, expires_in", [ - ({}, 200, 'test note', None), - ({}, 200, '', 100), + ({}, 201, 'test note', None), + ({}, 201, '', 100), ({'Authorization': 'token bad'}, 403, '', None), ], ) @@ -1177,7 +1514,7 @@ async def test_get_new_token(app, headers, status, note, expires_in): app, 'users/admin/tokens', method='post', headers=headers, data=body ) assert r.status_code == status - if status != 200: + if status != 201: return # check the new-token reply reply = r.json() @@ -1215,17 +1552,17 @@ async def test_get_new_token(app, headers, status, note, expires_in): @mark.parametrize( "as_user, for_user, status", [ - ('admin', 'other', 200), - ('admin', 
'missing', 404), + ('admin', 'other', 201), + ('admin', 'missing', 403), ('user', 'other', 403), - ('user', 'user', 200), + ('user', 'user', 201), ], ) async def test_token_for_user(app, as_user, for_user, status): # ensure both users exist u = add_user(app.db, app, name=as_user) if for_user != 'missing': - add_user(app.db, app, name=for_user) + for_user_obj = add_user(app.db, app, name=for_user) data = {'username': for_user} headers = {'Authorization': 'token %s' % u.new_api_token()} r = await api_request( @@ -1239,9 +1576,10 @@ async def test_token_for_user(app, as_user, for_user, status): ) assert r.status_code == status reply = r.json() - if status != 200: + if status != 201: return assert 'token' in reply + token_id = reply['id'] r = await api_request(app, 'users', for_user, 'tokens', token_id, headers=headers) r.raise_for_status() @@ -1276,7 +1614,7 @@ async def test_token_authenticator_noauth(app): data=json.dumps(data) if data else None, noauth=True, ) - assert r.status_code == 200 + assert r.status_code == 201 reply = r.json() assert 'token' in reply r = await api_request(app, 'authorizations', 'token', reply['token']) @@ -1299,7 +1637,7 @@ async def test_token_authenticator_dict_noauth(app): data=json.dumps(data) if data else None, noauth=True, ) - assert r.status_code == 200 + assert r.status_code == 201 reply = r.json() assert 'token' in reply r = await api_request(app, 'authorizations', 'token', reply['token']) @@ -1313,7 +1651,7 @@ async def test_token_authenticator_dict_noauth(app): [ ('admin', 'other', 200), ('admin', 'missing', 404), - ('user', 'other', 403), + ('user', 'other', 404), ('user', 'user', 200), ], ) @@ -1327,12 +1665,11 @@ async def test_token_list(app, as_user, for_user, status): if status != 200: return reply = r.json() - assert sorted(reply) == ['api_tokens', 'oauth_tokens'] + assert sorted(reply) == ['api_tokens'] assert len(reply['api_tokens']) == len(for_user_obj.api_tokens) assert all(token['user'] == for_user for token in 
reply['api_tokens']) - assert all(token['user'] == for_user for token in reply['oauth_tokens']) # validate individual token ids - for token in reply['api_tokens'] + reply['oauth_tokens']: + for token in reply['api_tokens']: r = await api_request( app, 'users', for_user, 'tokens', token['id'], headers=headers ) @@ -1353,15 +1690,45 @@ async def test_groups_list(app): reply = r.json() assert reply == [] - # create a group + # create two groups group = orm.Group(name='alphaflight') + group_2 = orm.Group(name='betaflight') app.db.add(group) + app.db.add(group_2) app.db.commit() r = await api_request(app, 'groups') r.raise_for_status() reply = r.json() - assert reply == [{'kind': 'group', 'name': 'alphaflight', 'users': []}] + assert reply == [ + {'kind': 'group', 'name': 'alphaflight', 'users': [], 'roles': []}, + {'kind': 'group', 'name': 'betaflight', 'users': [], 'roles': []}, + ] + + # Test offset for pagination + r = await api_request(app, "groups?offset=1") + r.raise_for_status() + reply = r.json() + assert r.status_code == 200 + assert reply == [{'kind': 'group', 'name': 'betaflight', 'users': [], 'roles': []}] + + r = await api_request(app, "groups?offset=10") + r.raise_for_status() + reply = r.json() + assert reply == [] + + # Test limit for pagination + r = await api_request(app, "groups?limit=1") + r.raise_for_status() + reply = r.json() + assert r.status_code == 200 + assert reply == [{'kind': 'group', 'name': 'alphaflight', 'users': [], 'roles': []}] + + # 0 is rounded up to 1 + r = await api_request(app, "groups?limit=0") + r.raise_for_status() + reply = r.json() + assert reply == [{'kind': 'group', 'name': 'alphaflight', 'users': [], 'roles': []}] @mark.group @@ -1385,6 +1752,9 @@ async def test_add_multi_group(app): @mark.group async def test_group_get(app): + group = orm.Group(name='alphaflight') + app.db.add(group) + app.db.commit() group = orm.Group.find(app.db, name='alphaflight') user = add_user(app.db, app=app, name='sasquatch') 
group.users.append(user) @@ -1396,12 +1766,18 @@ async def test_group_get(app): r = await api_request(app, 'groups/alphaflight') r.raise_for_status() reply = r.json() - assert reply == {'kind': 'group', 'name': 'alphaflight', 'users': ['sasquatch']} + assert reply == { + 'kind': 'group', + 'name': 'alphaflight', + 'users': ['sasquatch'], + 'roles': [], + } @mark.group async def test_group_create_delete(app): db = app.db + user = add_user(app.db, app=app, name='sasquatch') r = await api_request(app, 'groups/runaways', method='delete') assert r.status_code == 404 @@ -1439,16 +1815,17 @@ async def test_group_create_delete(app): @mark.group -async def test_group_add_users(app): +async def test_group_add_delete_users(app): db = app.db + group = orm.Group(name='alphaflight') + app.db.add(group) + app.db.commit() # must specify users r = await api_request(app, 'groups/alphaflight/users', method='post', data='{}') assert r.status_code == 400 names = ['aurora', 'guardian', 'northstar', 'sasquatch', 'shaman', 'snowbird'] - users = [ - find_user(db, name=name) or add_user(db, app=app, name=name) for name in names - ] + users = [add_user(db, app=app, name=name) for name in names] r = await api_request( app, 'groups/alphaflight/users', @@ -1462,18 +1839,8 @@ async def test_group_add_users(app): assert [g.name for g in user.groups] == ['alphaflight'] group = orm.Group.find(db, name='alphaflight') - assert sorted([u.name for u in group.users]) == sorted(names) + assert sorted(u.name for u in group.users) == sorted(names) - -@mark.group -async def test_group_delete_users(app): - db = app.db - # must specify users - r = await api_request(app, 'groups/alphaflight/users', method='delete', data='{}') - assert r.status_code == 400 - - names = ['aurora', 'guardian', 'northstar', 'sasquatch', 'shaman', 'snowbird'] - users = [find_user(db, name=name) for name in names] r = await api_request( app, 'groups/alphaflight/users', @@ -1488,7 +1855,39 @@ async def test_group_delete_users(app): 
assert [g.name for g in user.groups] == ['alphaflight'] group = orm.Group.find(db, name='alphaflight') - assert sorted([u.name for u in group.users]) == sorted(names[2:]) + assert sorted(u.name for u in group.users) == sorted(names[2:]) + + +@mark.group +async def test_auth_managed_groups(request, app, group, user): + group.users.append(user) + app.db.commit() + app.authenticator.manage_groups = True + request.addfinalizer(lambda: setattr(app.authenticator, "manage_groups", False)) + # create groups + r = await api_request(app, 'groups', method='post') + assert r.status_code == 400 + r = await api_request(app, 'groups/newgroup', method='post') + assert r.status_code == 400 + # delete groups + r = await api_request(app, f'groups/{group.name}', method='delete') + assert r.status_code == 400 + # add users to group + r = await api_request( + app, + f'groups/{group.name}/users', + method='post', + data=json.dumps({"users": [user.name]}), + ) + assert r.status_code == 400 + # remove users from group + r = await api_request( + app, + f'groups/{group.name}/users', + method='delete', + data=json.dumps({"users": [user.name]}), + ) + assert r.status_code == 400 # ----------------- @@ -1507,16 +1906,18 @@ async def test_get_services(app, mockservice_url): services = r.json() assert services == { mockservice.name: { + 'kind': 'service', 'name': mockservice.name, 'admin': True, + 'roles': ['admin'], 'command': mockservice.command, 'pid': mockservice.proc.pid, 'prefix': mockservice.server.base_url, 'url': mockservice.url, 'info': {}, + 'display': True, } } - r = await api_request(app, 'services', headers=auth_header(db, 'user')) assert r.status_code == 403 @@ -1531,15 +1932,17 @@ async def test_get_service(app, mockservice_url): service = r.json() assert service == { + 'kind': 'service', 'name': mockservice.name, 'admin': True, + 'roles': ['admin'], 'command': mockservice.command, 'pid': mockservice.proc.pid, 'prefix': mockservice.server.base_url, 'url': mockservice.url, 'info': 
{}, + 'display': True, } - r = await api_request( app, 'services/%s' % mockservice.name, @@ -1559,7 +1962,7 @@ async def test_root_api(app): if app.internal_ssl: kwargs['cert'] = (app.internal_ssl_cert, app.internal_ssl_key) kwargs["verify"] = app.internal_ssl_ca - r = await async_requests.get(url, **kwargs) + r = await api_request(app, bypass_proxy=True) r.raise_for_status() expected = {'version': jupyterhub.__version__} assert r.json() == expected @@ -1598,20 +2001,20 @@ async def test_update_activity_403(app, user, admin_user): token = user.new_api_token() r = await api_request( app, - "users/{}/activity".format(admin_user.name), - headers={"Authorization": "token {}".format(token)}, + f"users/{admin_user.name}/activity", + headers={"Authorization": f"token {token}"}, data="{}", method="post", ) - assert r.status_code == 403 + assert r.status_code == 404 async def test_update_activity_admin(app, user, admin_user): - token = admin_user.new_api_token() + token = admin_user.new_api_token(roles=['admin']) r = await api_request( app, - "users/{}/activity".format(user.name), - headers={"Authorization": "token {}".format(token)}, + f"users/{user.name}/activity", + headers={"Authorization": f"token {token}"}, data=json.dumps({"last_activity": utcnow().isoformat()}), method="post", ) @@ -1647,8 +2050,8 @@ async def test_update_server_activity(app, user, server_name, fresh): r = await api_request( app, - "users/{}/activity".format(user.name), - headers={"Authorization": "token {}".format(token)}, + f"users/{user.name}/activity", + headers={"Authorization": f"token {token}"}, data=json.dumps( {"servers": {server_name: {"last_activity": activity.isoformat()}}} ), @@ -1710,14 +2113,23 @@ def test_shutdown(app): ) return r - real_stop = loop.stop + real_stop = loop.asyncio_loop.stop def stop(): stop.called = True loop.call_later(1, real_stop) - with mock.patch.object(loop, 'stop', stop): + real_cleanup = app.cleanup + + def cleanup(): + cleanup.called = True + return 
real_cleanup() + + app.cleanup = cleanup + + with mock.patch.object(loop.asyncio_loop, 'stop', stop): r = loop.run_sync(shutdown, timeout=5) r.raise_for_status() reply = r.json() + assert cleanup.called assert stop.called diff --git a/jupyterhub/tests/test_app.py b/jupyterhub/tests/test_app.py index 0fb3abb6..9595c6a1 100644 --- a/jupyterhub/tests/test_app.py +++ b/jupyterhub/tests/test_app.py @@ -1,22 +1,22 @@ """Test the JupyterHub entry point""" +import asyncio import binascii +import json +import logging import os import re import sys -from subprocess import check_output -from subprocess import PIPE -from subprocess import Popen -from tempfile import NamedTemporaryFile -from tempfile import TemporaryDirectory +import time +from subprocess import PIPE, Popen, check_output +from tempfile import NamedTemporaryFile, TemporaryDirectory from unittest.mock import patch import pytest -from tornado import gen +import traitlets from traitlets.config import Config from .. import orm -from ..app import COOKIE_SECRET_BYTES -from ..app import JupyterHub +from ..app import COOKIE_SECRET_BYTES, JupyterHub from .mocking import MockHub from .test_api import add_user @@ -29,6 +29,27 @@ def test_help_all(): assert '--JupyterHub.ip' in out +@pytest.mark.skipif(traitlets.version_info < (5,), reason="requires traitlets 5") +def test_show_config(tmpdir): + tmpdir.chdir() + p = Popen( + [sys.executable, '-m', 'jupyterhub', '--show-config', '--debug'], stdout=PIPE + ) + p.wait(timeout=10) + out = p.stdout.read().decode('utf8', 'replace') + assert 'log_level' in out + + p = Popen( + [sys.executable, '-m', 'jupyterhub', '--show-config-json', '--debug'], + stdout=PIPE, + ) + p.wait(timeout=10) + out = p.stdout.read().decode('utf8', 'replace') + config = json.loads(out) + assert 'JupyterHub' in config + assert config["JupyterHub"]["log_level"] == 10 + + def test_token_app(): cmd = [sys.executable, '-m', 'jupyterhub', 'token'] out = check_output(cmd + ['--help-all']).decode('utf8', 
'replace') @@ -39,6 +60,28 @@ def test_token_app(): assert re.match(r'^[a-z0-9]+$', out) +def test_raise_error_on_missing_specified_config(): + """ + Using the -f or --config flag when starting JupyterHub should require the + file to be found and exit if it isn't. + """ + # subprocess.run doesn't have a timeout flag, so if this test would fail by + # not letting jupyterhub error out, we would wait forever. subprocess.Popen + # allow us to manually timeout. + process = Popen( + [sys.executable, '-m', 'jupyterhub', '--config', 'not-available.py'] + ) + # wait impatiently for the process to exit like we want it to + for i in range(100): + time.sleep(0.1) + returncode = process.poll() + if returncode is not None: + break + else: + process.kill() + assert returncode == 1 + + def test_generate_config(): with NamedTemporaryFile(prefix='jupyterhub_config', suffix='.py') as tf: cfg_file = tf.name @@ -69,7 +112,7 @@ def test_generate_config(): os.remove(cfg_file) assert cfg_file in out assert 'Spawner.cmd' in cfg_text - assert 'Authenticator.whitelist' in cfg_text + assert 'Authenticator.allowed_users' in cfg_text async def test_init_tokens(request): @@ -177,6 +220,18 @@ def test_cookie_secret_env(tmpdir, request): assert not os.path.exists(hub.cookie_secret_file) +def test_cookie_secret_string_(): + cfg = Config() + + cfg.JupyterHub.cookie_secret = "not hex" + with pytest.raises(ValueError): + JupyterHub(config=cfg) + + cfg.JupyterHub.cookie_secret = "abc123" + app = JupyterHub(config=cfg) + assert app.cookie_secret == binascii.a2b_hex('abc123') + + async def test_load_groups(tmpdir, request): to_load = { 'blue': ['cyclops', 'rogue', 'wolverine'], @@ -188,15 +243,16 @@ async def test_load_groups(tmpdir, request): kwargs['internal_certs_location'] = str(tmpdir) hub = MockHub(**kwargs) hub.init_db() + await hub.init_role_creation() await hub.init_users() await hub.init_groups() db = hub.db blue = orm.Group.find(db, name='blue') assert blue is not None - assert sorted([u.name 
for u in blue.users]) == sorted(to_load['blue']) + assert sorted(u.name for u in blue.users) == sorted(to_load['blue']) gold = orm.Group.find(db, name='gold') assert gold is not None - assert sorted([u.name for u in gold.users]) == sorted(to_load['gold']) + assert sorted(u.name for u in gold.users) == sorted(to_load['gold']) async def test_resume_spawners(tmpdir, request): @@ -295,3 +351,71 @@ def test_url_config(hub_config, expected): # validate additional properties for key, value in expected.items(): assert getattr(app, key) == value + + +@pytest.mark.parametrize( + "base_url, hub_routespec, expected_routespec, should_warn, bad_prefix", + [ + (None, None, "/", False, False), + ("/", "/", "/", False, False), + ("/base", "/base", "/base/", False, False), + ("/", "/hub", "/hub/", True, False), + (None, "hub/api", "/hub/api/", True, False), + ("/base", "/hub/", "/hub/", True, True), + (None, "/hub/api/health", "/hub/api/health/", True, True), + ], +) +def test_hub_routespec( + base_url, hub_routespec, expected_routespec, should_warn, bad_prefix, caplog +): + cfg = Config() + if base_url: + cfg.JupyterHub.base_url = base_url + if hub_routespec: + cfg.JupyterHub.hub_routespec = hub_routespec + with caplog.at_level(logging.WARNING): + app = JupyterHub(config=cfg, log=logging.getLogger()) + app.init_hub() + hub = app.hub + assert hub.routespec == expected_routespec + + if should_warn: + assert "custom route for Hub" in caplog.text + assert hub_routespec in caplog.text + else: + assert "custom route for Hub" not in caplog.text + + if bad_prefix: + assert "may not receive" in caplog.text + else: + assert "may not receive" not in caplog.text + + +@pytest.mark.parametrize( + "argv, sys_argv", + [ + (None, ["jupyterhub", "--debug", "--port=1234"]), + (["--log-level=INFO"], ["jupyterhub"]), + ], +) +def test_launch_instance(request, argv, sys_argv): + class DummyHub(JupyterHub): + def launch_instance_async(self, argv): + # short-circuit initialize + # by indicating we are 
going to generate config in start + self.generate_config = True + return super().launch_instance_async(argv) + + async def start(self): + asyncio.get_running_loop().stop() + + DummyHub.clear_instance() + request.addfinalizer(DummyHub.clear_instance) + + with patch.object(sys, "argv", sys_argv): + DummyHub.launch_instance(argv) + hub = DummyHub.instance() + if argv is None: + assert hub.argv == sys_argv[1:] + else: + assert hub.argv == argv diff --git a/jupyterhub/tests/test_auth.py b/jupyterhub/tests/test_auth.py index 10ae0b1a..db4fd8bc 100644 --- a/jupyterhub/tests/test_auth.py +++ b/jupyterhub/tests/test_auth.py @@ -1,19 +1,19 @@ """Tests for PAM authentication""" # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. -import os +import logging from unittest import mock +from urllib.parse import urlparse import pytest from requests import HTTPError +from traitlets import Any +from traitlets.config import Config -from .mocking import MockPAMAuthenticator -from .mocking import MockStructGroup -from .mocking import MockStructPasswd -from .utils import add_user -from jupyterhub import auth -from jupyterhub import crypto -from jupyterhub import orm +from jupyterhub import auth, crypto, orm + +from .mocking import MockPAMAuthenticator, MockStructGroup, MockStructPasswd +from .utils import add_user, async_requests, get_page, public_url async def test_pam_auth(): @@ -137,8 +137,8 @@ async def test_pam_auth_admin_groups(): assert authorized['admin'] is False -async def test_pam_auth_whitelist(): - authenticator = MockPAMAuthenticator(whitelist={'wash', 'kaylee'}) +async def test_pam_auth_allowed(): + authenticator = MockPAMAuthenticator(allowed_users={'wash', 'kaylee'}) authorized = await authenticator.get_authenticated_user( None, {'username': 'kaylee', 'password': 'kaylee'} ) @@ -155,11 +155,11 @@ async def test_pam_auth_whitelist(): assert authorized is None -async def test_pam_auth_group_whitelist(): +async def 
test_pam_auth_allowed_groups(): def getgrnam(name): return MockStructGroup('grp', ['kaylee']) - authenticator = MockPAMAuthenticator(group_whitelist={'group'}) + authenticator = MockPAMAuthenticator(allowed_groups={'group'}) with mock.patch.object(authenticator, '_getgrnam', getgrnam): authorized = await authenticator.get_authenticated_user( @@ -174,7 +174,7 @@ async def test_pam_auth_group_whitelist(): assert authorized is None -async def test_pam_auth_blacklist(): +async def test_pam_auth_blocked(): # Null case compared to next case authenticator = MockPAMAuthenticator() authorized = await authenticator.get_authenticated_user( @@ -183,33 +183,33 @@ async def test_pam_auth_blacklist(): assert authorized['name'] == 'wash' # Blacklist basics - authenticator = MockPAMAuthenticator(blacklist={'wash'}) + authenticator = MockPAMAuthenticator(blocked_users={'wash'}) authorized = await authenticator.get_authenticated_user( None, {'username': 'wash', 'password': 'wash'} ) assert authorized is None - # User in both white and blacklists: default deny. Make error someday? + # User in both allowed and blocked: default deny. Make error someday? 
authenticator = MockPAMAuthenticator( - blacklist={'wash'}, whitelist={'wash', 'kaylee'} + blocked_users={'wash'}, allowed_users={'wash', 'kaylee'} ) authorized = await authenticator.get_authenticated_user( None, {'username': 'wash', 'password': 'wash'} ) assert authorized is None - # User not in blacklist can log in + # User not in blocked set can log in authenticator = MockPAMAuthenticator( - blacklist={'wash'}, whitelist={'wash', 'kaylee'} + blocked_users={'wash'}, allowed_users={'wash', 'kaylee'} ) authorized = await authenticator.get_authenticated_user( None, {'username': 'kaylee', 'password': 'kaylee'} ) assert authorized['name'] == 'kaylee' - # User in whitelist, blacklist irrelevent + # User in allowed, blocked irrelevent authenticator = MockPAMAuthenticator( - blacklist={'mal'}, whitelist={'wash', 'kaylee'} + blocked_users={'mal'}, allowed_users={'wash', 'kaylee'} ) authorized = await authenticator.get_authenticated_user( None, {'username': 'wash', 'password': 'wash'} @@ -218,15 +218,16 @@ async def test_pam_auth_blacklist(): # User in neither list authenticator = MockPAMAuthenticator( - blacklist={'mal'}, whitelist={'wash', 'kaylee'} + blocked_users={'mal'}, allowed_users={'wash', 'kaylee'} ) authorized = await authenticator.get_authenticated_user( None, {'username': 'simon', 'password': 'simon'} ) assert authorized is None - # blacklist == {} - authenticator = MockPAMAuthenticator(blacklist=set(), whitelist={'wash', 'kaylee'}) + authenticator = MockPAMAuthenticator( + blocked_users=set(), allowed_users={'wash', 'kaylee'} + ) authorized = await authenticator.get_authenticated_user( None, {'username': 'kaylee', 'password': 'kaylee'} ) @@ -253,7 +254,7 @@ async def test_deprecated_signatures(): async def test_pam_auth_no_such_group(): - authenticator = MockPAMAuthenticator(group_whitelist={'nosuchcrazygroup'}) + authenticator = MockPAMAuthenticator(allowed_groups={'nosuchcrazygroup'}) authorized = await authenticator.get_authenticated_user( None, 
{'username': 'kaylee', 'password': 'kaylee'} ) @@ -262,7 +263,7 @@ async def test_pam_auth_no_such_group(): async def test_wont_add_system_user(): user = orm.User(name='lioness4321') - authenticator = auth.PAMAuthenticator(whitelist={'mal'}) + authenticator = auth.PAMAuthenticator(allowed_users={'mal'}) authenticator.create_system_users = False with pytest.raises(KeyError): await authenticator.add_user(user) @@ -270,7 +271,7 @@ async def test_wont_add_system_user(): async def test_cant_add_system_user(): user = orm.User(name='lioness4321') - authenticator = auth.PAMAuthenticator(whitelist={'mal'}) + authenticator = auth.PAMAuthenticator(allowed_users={'mal'}) authenticator.add_user_cmd = ['jupyterhub-fake-command'] authenticator.create_system_users = True @@ -296,7 +297,7 @@ async def test_cant_add_system_user(): async def test_add_system_user(): user = orm.User(name='lioness4321') - authenticator = auth.PAMAuthenticator(whitelist={'mal'}) + authenticator = auth.PAMAuthenticator(allowed_users={'mal'}) authenticator.create_system_users = True authenticator.add_user_cmd = ['echo', '/home/USERNAME'] @@ -317,13 +318,13 @@ async def test_add_system_user(): async def test_delete_user(): user = orm.User(name='zoe') - a = MockPAMAuthenticator(whitelist={'mal'}) + a = MockPAMAuthenticator(allowed_users={'mal'}) - assert 'zoe' not in a.whitelist + assert 'zoe' not in a.allowed_users await a.add_user(user) - assert 'zoe' in a.whitelist + assert 'zoe' in a.allowed_users a.delete_user(user) - assert 'zoe' not in a.whitelist + assert 'zoe' not in a.allowed_users def test_urls(): @@ -461,3 +462,132 @@ async def test_post_auth_hook(): ) assert authorized['testkey'] == 'testvalue' + + +class MyAuthenticator(auth.Authenticator): + def check_whitelist(self, username, authentication=None): + return username == "subclass-allowed" + + +def test_deprecated_config(caplog): + cfg = Config() + cfg.Authenticator.whitelist = {'user'} + log = logging.getLogger("testlog") + authenticator = 
auth.Authenticator(config=cfg, log=log) + assert caplog.record_tuples == [ + ( + log.name, + logging.WARNING, + 'Authenticator.whitelist is deprecated in JupyterHub 1.2, use ' + 'Authenticator.allowed_users instead', + ) + ] + assert authenticator.allowed_users == {'user'} + + +def test_deprecated_methods(): + cfg = Config() + cfg.Authenticator.whitelist = {'user'} + authenticator = auth.Authenticator(config=cfg) + + assert authenticator.check_allowed("user") + with pytest.deprecated_call(): + assert authenticator.check_whitelist("user") + assert not authenticator.check_allowed("otheruser") + with pytest.deprecated_call(): + assert not authenticator.check_whitelist("otheruser") + + +def test_deprecated_config_subclass(): + cfg = Config() + cfg.MyAuthenticator.whitelist = {'user'} + with pytest.deprecated_call(): + authenticator = MyAuthenticator(config=cfg) + assert authenticator.allowed_users == {'user'} + + +def test_deprecated_methods_subclass(): + with pytest.deprecated_call(): + authenticator = MyAuthenticator() + + assert authenticator.check_allowed("subclass-allowed") + assert authenticator.check_whitelist("subclass-allowed") + assert not authenticator.check_allowed("otheruser") + assert not authenticator.check_whitelist("otheruser") + + +async def test_nullauthenticator(app): + with mock.patch.dict( + app.tornado_settings, {"authenticator": auth.NullAuthenticator(parent=app)} + ): + r = await async_requests.get(public_url(app)) + assert urlparse(r.url).path.endswith("/hub/login") + assert r.status_code == 403 + + +class MockGroupsAuthenticator(auth.Authenticator): + authenticated_groups = Any() + refresh_groups = Any() + + manage_groups = True + + def authenticate(self, handler, data): + return { + "name": data["username"], + "groups": self.authenticated_groups, + } + + async def refresh_user(self, user, handler): + return { + "name": user.name, + "groups": self.refresh_groups, + } + + +@pytest.mark.parametrize( + "authenticated_groups, refresh_groups", + [ 
+ (None, None), + (["auth1"], None), + (None, ["auth1"]), + (["auth1"], ["auth1", "auth2"]), + (["auth1", "auth2"], ["auth1"]), + (["auth1", "auth2"], ["auth3"]), + (["auth1", "auth2"], ["auth3"]), + ], +) +async def test_auth_managed_groups( + app, user, group, authenticated_groups, refresh_groups +): + + authenticator = MockGroupsAuthenticator( + parent=app, + authenticated_groups=authenticated_groups, + refresh_groups=refresh_groups, + ) + + user.groups.append(group) + app.db.commit() + before_groups = [group.name] + if authenticated_groups is None: + expected_authenticated_groups = before_groups + else: + expected_authenticated_groups = authenticated_groups + if refresh_groups is None: + expected_refresh_groups = expected_authenticated_groups + else: + expected_refresh_groups = refresh_groups + + with mock.patch.dict(app.tornado_settings, {"authenticator": authenticator}): + cookies = await app.login_user(user.name) + assert not app.db.dirty + groups = sorted(g.name for g in user.groups) + assert groups == expected_authenticated_groups + + # force refresh_user on next request + user._auth_refreshed -= 10 + app.authenticator.auth_refresh_age + r = await get_page('home', app, cookies=cookies, allow_redirects=False) + assert r.status_code == 200 + assert not app.db.dirty + groups = sorted(g.name for g in user.groups) + assert groups == expected_refresh_groups diff --git a/jupyterhub/tests/test_auth_expiry.py b/jupyterhub/tests/test_auth_expiry.py index 781c751b..876f85b8 100644 --- a/jupyterhub/tests/test_auth_expiry.py +++ b/jupyterhub/tests/test_auth_expiry.py @@ -7,16 +7,12 @@ authentication can expire in a number of ways: - doesn't need refresh - needs refresh and cannot be refreshed without new login """ -import asyncio -from contextlib import contextmanager from unittest import mock -from urllib.parse import parse_qs -from urllib.parse import urlparse +from urllib.parse import parse_qs, urlparse import pytest -from .utils import api_request -from .utils 
import get_page +from .utils import api_request, get_page async def refresh_expired(authenticator, user): @@ -128,7 +124,7 @@ async def test_refresh_pre_spawn(app, user, refresh_pre_spawn): # auth is fresh, but should be forced to refresh by spawn r = await api_request( - app, 'users/{}/server'.format(user.name), method='post', name=user.name + app, f'users/{user.name}/server', method='post', name=user.name ) assert 200 <= r.status_code < 300 assert user._auth_refreshed > before @@ -142,7 +138,7 @@ async def test_refresh_pre_spawn_expired(app, user, refresh_pre_spawn, disable_r # auth is fresh, doesn't trigger expiry r = await api_request( - app, 'users/{}/server'.format(user.name), method='post', name=user.name + app, f'users/{user.name}/server', method='post', name=user.name ) assert r.status_code == 403 assert user._auth_refreshed == before diff --git a/jupyterhub/tests/test_crypto.py b/jupyterhub/tests/test_crypto.py index 31e888ad..20d5f62b 100644 --- a/jupyterhub/tests/test_crypto.py +++ b/jupyterhub/tests/test_crypto.py @@ -1,13 +1,11 @@ import os -from binascii import b2a_base64 -from binascii import b2a_hex +from binascii import b2a_base64, b2a_hex from unittest.mock import patch import pytest from .. import crypto -from ..crypto import decrypt -from ..crypto import encrypt +from ..crypto import decrypt, encrypt keys = [('%i' % i).encode('ascii') * 32 for i in range(3)] hex_keys = [b2a_hex(key).decode('ascii') for key in keys] diff --git a/jupyterhub/tests/test_db.py b/jupyterhub/tests/test_db.py index 9369cd77..9284df0d 100644 --- a/jupyterhub/tests/test_db.py +++ b/jupyterhub/tests/test_db.py @@ -8,10 +8,9 @@ import pytest from pytest import raises from traitlets.config import Config -from ..app import JupyterHub -from ..app import NewToken -from ..app import UpgradeDB - +from .. 
import orm +from ..app import NewToken, UpgradeDB +from ..scopes import _check_scopes_exist here = os.path.abspath(os.path.dirname(__file__)) populate_db = os.path.join(here, 'populate_db.py') @@ -26,7 +25,8 @@ def generate_old_db(env_dir, hub_version, db_url): env_pip = os.path.join(env_dir, 'bin', 'pip') env_py = os.path.join(env_dir, 'bin', 'python') check_call([sys.executable, '-m', 'virtualenv', env_dir]) - pkgs = ['jupyterhub==' + hub_version] + # older jupyterhub needs older sqlachemy version + pkgs = ['jupyterhub==' + hub_version, 'sqlalchemy<1.4'] if 'mysql' in db_url: pkgs.append('mysql-connector-python') elif 'postgres' in db_url: @@ -35,7 +35,7 @@ def generate_old_db(env_dir, hub_version, db_url): check_call([env_py, populate_db, db_url]) -@pytest.mark.parametrize('hub_version', ['0.7.2', '0.8.1', '0.9.4']) +@pytest.mark.parametrize('hub_version', ['1.0.0', "1.2.2", "1.3.0", "1.5.0", "2.1.1"]) async def test_upgrade(tmpdir, hub_version): db_url = os.getenv('JUPYTERHUB_TEST_DB_URL') if db_url: @@ -74,3 +74,10 @@ async def test_upgrade(tmpdir, hub_version): # run tokenapp again, it should work tokenapp.start() + + db = orm.new_session_factory(db_url)() + query = db.query(orm.APIToken) + assert query.count() >= 1 + for token in query: + assert token.scopes, f"Upgraded token {token} has no scopes" + _check_scopes_exist(token.scopes) diff --git a/jupyterhub/tests/test_dummyauth.py b/jupyterhub/tests/test_dummyauth.py index 3f34c343..dbeaf583 100644 --- a/jupyterhub/tests/test_dummyauth.py +++ b/jupyterhub/tests/test_dummyauth.py @@ -1,8 +1,6 @@ """Tests for dummy authentication""" # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. 
-import pytest - from jupyterhub.auth import DummyAuthenticator diff --git a/jupyterhub/tests/test_eventlog.py b/jupyterhub/tests/test_eventlog.py new file mode 100644 index 00000000..5b6649cd --- /dev/null +++ b/jupyterhub/tests/test_eventlog.py @@ -0,0 +1,77 @@ +"""Tests for Eventlogging in JupyterHub. + +To test a new schema or event, simply add it to the +`valid_events` and `invalid_events` variables below. + +You *shouldn't* need to write new tests. +""" +import io +import json +import logging +from unittest import mock + +import jsonschema +import pytest +from traitlets.config import Config + +# To test new schemas, add them to the `valid_events` +# and `invalid_events` dictionary below. + +# To test valid events, add event item with the form: +# { ( '', ) : { } } +valid_events = [ + ( + 'hub.jupyter.org/server-action', + 1, + dict(action='start', username='test-username', servername='test-servername'), + ) +] + +# To test invalid events, add event item with the form: +# { ( '', ) : { } } +invalid_events = [ + # Missing required keys + ('hub.jupyter.org/server-action', 1, dict(action='start')) +] + + +@pytest.fixture +def eventlog_sink(app): + """Return eventlog and sink objects""" + sink = io.StringIO() + handler = logging.StreamHandler(sink) + # Update the EventLog config with handler + cfg = Config() + cfg.EventLog.handlers = [handler] + + with mock.patch.object(app.config, 'EventLog', cfg.EventLog): + # recreate the eventlog object with our config + app.init_eventlog() + # return the sink from the fixture + yield app.eventlog, sink + # reset eventlog with original config + app.init_eventlog() + + +@pytest.mark.parametrize('schema, version, event', valid_events) +def test_valid_events(eventlog_sink, schema, version, event): + eventlog, sink = eventlog_sink + eventlog.allowed_schemas = [schema] + # Record event + eventlog.record_event(schema, version, event) + # Inspect consumed event + output = sink.getvalue() + assert output + data = json.loads(output) + 
# Verify event data was recorded + assert data is not None + + +@pytest.mark.parametrize('schema, version, event', invalid_events) +def test_invalid_events(eventlog_sink, schema, version, event): + eventlog, sink = eventlog_sink + eventlog.allowed_schemas = [schema] + + # Make sure an error is thrown when bad events are recorded + with pytest.raises(jsonschema.ValidationError): + recorded_event = eventlog.record_event(schema, version, event) diff --git a/jupyterhub/tests/test_internal_ssl_api.py b/jupyterhub/tests/test_internal_ssl_api.py deleted file mode 100644 index 17349ae6..00000000 --- a/jupyterhub/tests/test_internal_ssl_api.py +++ /dev/null @@ -1,6 +0,0 @@ -"""Tests for the SSL enabled REST API.""" -# Copyright (c) Jupyter Development Team. -# Distributed under the terms of the Modified BSD License. -from jupyterhub.tests.test_api import * - -ssl_enabled = True diff --git a/jupyterhub/tests/test_internal_ssl_app.py b/jupyterhub/tests/test_internal_ssl_app.py deleted file mode 100644 index 95b382c4..00000000 --- a/jupyterhub/tests/test_internal_ssl_app.py +++ /dev/null @@ -1,9 +0,0 @@ -"""Test the JupyterHub entry point with internal ssl""" -# Copyright (c) Jupyter Development Team. -# Distributed under the terms of the Modified BSD License. 
-import sys - -import jupyterhub.tests.mocking -from jupyterhub.tests.test_app import * - -ssl_enabled = True diff --git a/jupyterhub/tests/test_internal_ssl_connections.py b/jupyterhub/tests/test_internal_ssl_connections.py index a253367e..b2563578 100644 --- a/jupyterhub/tests/test_internal_ssl_connections.py +++ b/jupyterhub/tests/test_internal_ssl_connections.py @@ -1,23 +1,22 @@ """Tests for jupyterhub internal_ssl connections""" import sys import time -from subprocess import check_output from unittest import mock -from urllib.parse import urlparse import pytest -from requests.exceptions import SSLError -from tornado import gen +from requests.exceptions import ConnectionError, SSLError -import jupyterhub +from ..utils import AnyTimeoutError from .test_api import add_user from .utils import async_requests ssl_enabled = True +# possible errors raised by ssl failures +SSL_ERROR = (SSLError, ConnectionError) -@gen.coroutine -def wait_for_spawner(spawner, timeout=10): + +async def wait_for_spawner(spawner, timeout=10): """Wait for an http server to show up polling at shorter intervals for early termination @@ -28,20 +27,20 @@ def wait_for_spawner(spawner, timeout=10): return spawner.server.wait_up(timeout=1, http=True) while time.monotonic() < deadline: - status = yield spawner.poll() + status = await spawner.poll() assert status is None try: - yield wait() - except TimeoutError: + await wait() + except AnyTimeoutError: continue else: break - yield wait() + await wait() async def test_connection_hub_wrong_certs(app): """Connecting to the internal hub url fails without correct certs""" - with pytest.raises(SSLError): + with pytest.raises(SSL_ERROR): kwargs = {'verify': False} r = await async_requests.get(app.hub.url, **kwargs) r.raise_for_status() @@ -49,7 +48,7 @@ async def test_connection_hub_wrong_certs(app): async def test_connection_proxy_api_wrong_certs(app): """Connecting to the proxy api fails without correct certs""" - with pytest.raises(SSLError): + with 
pytest.raises(SSL_ERROR): kwargs = {'verify': False} r = await async_requests.get(app.proxy.api_url, **kwargs) r.raise_for_status() @@ -68,7 +67,7 @@ async def test_connection_notebook_wrong_certs(app): status = await spawner.poll() assert status is None - with pytest.raises(SSLError): + with pytest.raises(SSL_ERROR): kwargs = {'verify': False} r = await async_requests.get(spawner.server.url, **kwargs) r.raise_for_status() diff --git a/jupyterhub/tests/test_internal_ssl_spawner.py b/jupyterhub/tests/test_internal_ssl_spawner.py deleted file mode 100644 index 85ea5ccd..00000000 --- a/jupyterhub/tests/test_internal_ssl_spawner.py +++ /dev/null @@ -1,6 +0,0 @@ -"""Tests for process spawning with internal_ssl""" -# Copyright (c) Jupyter Development Team. -# Distributed under the terms of the Modified BSD License. -from jupyterhub.tests.test_spawner import * - -ssl_enabled = True diff --git a/jupyterhub/tests/test_memoize.py b/jupyterhub/tests/test_memoize.py new file mode 100644 index 00000000..c37942a8 --- /dev/null +++ b/jupyterhub/tests/test_memoize.py @@ -0,0 +1,94 @@ +import pytest + +from jupyterhub._memoize import DoNotCache, FrozenDict, LRUCache, lru_cache_key + + +def test_lru_cache(): + cache = LRUCache(maxsize=2) + cache["a"] = 1 + assert "a" in cache + assert "b" not in cache + cache["b"] = 2 + assert cache["b"] == 2 + + # accessing a makes it more recent than b + assert cache["a"] == 1 + assert "b" in cache + assert "a" in cache + + # storing c pushes oldest ('b') out of cache + cache["c"] = 3 + assert len(cache._cache) == 2 + assert "a" in cache + assert "c" in cache + assert "b" not in cache + + +def test_lru_cache_key(): + + call_count = 0 + + @lru_cache_key(frozenset) + def reverse(arg): + nonlocal call_count + call_count += 1 + return list(reversed(arg)) + + in1 = [1, 2] + before = call_count + out1 = reverse(in1) + assert call_count == before + 1 + assert out1 == [2, 1] + + before = call_count + out2 = reverse(in1) + assert call_count == before + 
assert out2 is out1 + + +def test_do_not_cache(): + + call_count = 0 + + @lru_cache_key(lambda arg: arg) + def is_even(arg): + nonlocal call_count + call_count += 1 + if arg % 2: + return DoNotCache(False) + return True + + before = call_count + assert is_even(0) == True + assert call_count == before + 1 + + # caches even results + before = call_count + assert is_even(0) == True + assert call_count == before + + before = call_count + assert is_even(1) == False + assert call_count == before + 1 + + # doesn't cache odd results + before = call_count + assert is_even(1) == False + assert call_count == before + 1 + + +@pytest.mark.parametrize( + "d", + [ + {"key": "value"}, + {"key": ["list"]}, + {"key": {"set"}}, + {"key": ("tu", "ple")}, + {"key": {"nested": ["dict"]}}, + ], +) +def test_frozen_dict(d): + frozen_1 = FrozenDict(d) + frozen_2 = FrozenDict(d) + assert hash(frozen_1) == hash(frozen_2) + assert frozen_1 == frozen_2 diff --git a/jupyterhub/tests/test_metrics.py b/jupyterhub/tests/test_metrics.py new file mode 100644 index 00000000..072dba7b --- /dev/null +++ b/jupyterhub/tests/test_metrics.py @@ -0,0 +1,75 @@ +import json +from unittest import mock + +import pytest + +from jupyterhub import metrics, orm, roles + +from .utils import api_request, get_page + + +async def test_total_users(app): + num_users = app.db.query(orm.User).count() + sample = metrics.TOTAL_USERS.collect()[0].samples[0] + assert sample.value == num_users + + await api_request( + app, "/users", method="post", data=json.dumps({"usernames": ["incrementor"]}) + ) + + sample = metrics.TOTAL_USERS.collect()[0].samples[0] + assert sample.value == num_users + 1 + + # GET /users used to double-count + await api_request(app, "/users") + + # populate the Users cache dict if any are missing: + for user in app.db.query(orm.User): + _ = app.users[user.id] + + sample = metrics.TOTAL_USERS.collect()[0].samples[0] + assert sample.value == num_users + 1 + + await api_request(app, "/users/incrementor", 
method="delete") + + sample = metrics.TOTAL_USERS.collect()[0].samples[0] + assert sample.value == num_users + + +@pytest.mark.parametrize( + "authenticate_prometheus, authenticated, authorized, success", + [ + (True, True, True, True), + (True, True, False, False), + (True, False, False, False), + (False, True, True, True), + (False, False, False, True), + ], +) +async def test_metrics_auth( + app, + authenticate_prometheus, + authenticated, + authorized, + success, + create_temp_role, + user, +): + if authorized: + role = create_temp_role(["read:metrics"]) + roles.grant_role(app.db, user, role) + + headers = {} + if authenticated: + token = user.new_api_token() + headers["Authorization"] = f"token {token}" + + with mock.patch.dict( + app.tornado_settings, {"authenticate_prometheus": authenticate_prometheus} + ): + r = await get_page("metrics", app, headers=headers) + if success: + assert r.status_code == 200 + else: + assert r.status_code == 403 + assert 'read:metrics' in r.text diff --git a/jupyterhub/tests/test_named_servers.py b/jupyterhub/tests/test_named_servers.py index 0f6809c1..d5f03054 100644 --- a/jupyterhub/tests/test_named_servers.py +++ b/jupyterhub/tests/test_named_servers.py @@ -1,21 +1,19 @@ """Tests for named servers""" +import asyncio import json +import time from unittest import mock -from urllib.parse import urlparse +from urllib.parse import unquote, urlencode, urlparse import pytest +from requests.exceptions import HTTPError from tornado.httputil import url_concat -from ..utils import url_path_join +from .. 
import orm +from ..utils import url_escape_path, url_path_join from .mocking import FormSpawner -from .mocking import public_url -from .test_api import add_user -from .test_api import api_request -from .test_api import fill_user -from .test_api import normalize_user -from .test_api import TIMESTAMP -from .utils import async_requests -from .utils import get_page +from .test_api import TIMESTAMP, add_user, api_request, fill_user, normalize_user +from .utils import async_requests, get_page, public_url @pytest.fixture @@ -27,6 +25,36 @@ def named_servers(app): yield +@pytest.fixture +def named_servers_with_callable_limit(app): + def named_server_limit_per_user_fn(handler): + """Limit number of named servers to `2` for non-admin users. No limit for admin users.""" + user = handler.current_user + if user and user.admin: + return 0 + return 2 + + with mock.patch.dict( + app.tornado_settings, + { + 'allow_named_servers': True, + 'named_server_limit_per_user': named_server_limit_per_user_fn, + }, + ): + yield + + +@pytest.fixture +def default_server_name(app, named_servers): + """configure app to use a default server name""" + server_name = 'myserver' + try: + app.default_server_name = server_name + yield server_name + finally: + app.default_server_name = '' + + async def test_default_server(app, named_servers): """Test the default /users/:user/server handler when named servers are enabled""" username = 'rosie' @@ -39,10 +67,10 @@ async def test_default_server(app, named_servers): r.raise_for_status() user_model = normalize_user(r.json()) - print(user_model) assert user_model == fill_user( { 'name': username, + 'roles': ['user'], 'auth_state': None, 'server': user.url, 'servers': { @@ -53,10 +81,12 @@ async def test_default_server(app, named_servers): 'url': user.url, 'pending': None, 'ready': True, + 'stopped': False, 'progress_url': 'PREFIX/hub/api/users/{}/server/progress'.format( username ), 'state': {'pid': 0}, + 'user_options': {}, } }, } @@ -72,50 +102,79 @@ async 
def test_default_server(app, named_servers): user_model = normalize_user(r.json()) assert user_model == fill_user( - {'name': username, 'servers': {}, 'auth_state': None} + {'name': username, 'roles': ['user'], 'auth_state': None} ) -async def test_create_named_server(app, named_servers): +@pytest.mark.parametrize( + 'servername,escapedname,caller_escape', + [ + ('trevor', 'trevor', False), + ('$p~c|a! ch@rs', '%24p~c%7Ca%21%20ch@rs', False), + ('$p~c|a! ch@rs', '%24p~c%7Ca%21%20ch@rs', True), + ('hash#?question', 'hash%23%3Fquestion', True), + ], +) +async def test_create_named_server( + app, named_servers, servername, escapedname, caller_escape +): username = 'walnut' user = add_user(app.db, app, name=username) # assert user.allow_named_servers == True cookies = await app.login_user(username) - servername = 'trevor' - r = await api_request(app, 'users', username, 'servers', servername, method='post') + request_servername = servername + if caller_escape: + request_servername = url_escape_path(servername) + + r = await api_request( + app, 'users', username, 'servers', request_servername, method='post' + ) r.raise_for_status() assert r.status_code == 201 assert r.text == '' - url = url_path_join(public_url(app, user), servername, 'env') + url = url_path_join(public_url(app, user), request_servername, 'env') + expected_url = url_path_join(public_url(app, user), escapedname, 'env') r = await async_requests.get(url, cookies=cookies) r.raise_for_status() - assert r.url == url + # requests doesn't fully encode the servername: "$p~c%7Ca!%20ch@rs". + # Since this is the internal requests representation and not the JupyterHub + # representation it just needs to be equivalent. 
+ assert unquote(r.url) == unquote(expected_url) env = r.json() prefix = env.get('JUPYTERHUB_SERVICE_PREFIX') assert prefix == user.spawners[servername].server.base_url - assert prefix.endswith('/user/%s/%s/' % (username, servername)) + assert prefix.endswith(f'/user/{username}/{escapedname}/') r = await api_request(app, 'users', username) r.raise_for_status() + # Ensure the unescaped name is stored in the DB + db_server_names = set( + app.db.query(orm.User).filter_by(name=username).first().orm_spawners.keys() + ) + assert db_server_names == {"", servername} + user_model = normalize_user(r.json()) assert user_model == fill_user( { 'name': username, + 'roles': ['user'], 'auth_state': None, 'servers': { servername: { 'name': name, 'started': TIMESTAMP, 'last_activity': TIMESTAMP, - 'url': url_path_join(user.url, name, '/'), + 'url': url_path_join(user.url, escapedname, '/'), 'pending': None, 'ready': True, + 'stopped': False, 'progress_url': 'PREFIX/hub/api/users/{}/servers/{}/progress'.format( - username, servername + username, escapedname ), 'state': {'pid': 0}, + 'user_options': {}, } for name in [servername] }, @@ -123,11 +182,31 @@ async def test_create_named_server(app, named_servers): ) +async def test_create_invalid_named_server(app, named_servers): + username = 'walnut' + user = add_user(app.db, app, name=username) + # assert user.allow_named_servers == True + cookies = await app.login_user(username) + server_name = "a$/b" + request_servername = 'a%24%2fb' + + r = await api_request( + app, 'users', username, 'servers', request_servername, method='post' + ) + + with pytest.raises(HTTPError) as exc: + r.raise_for_status() + assert exc.value.response.json() == { + 'status': 400, + 'message': "Invalid server_name (may not contain '/'): a$/b", + } + + async def test_delete_named_server(app, named_servers): username = 'donaar' user = add_user(app.db, app, name=username) assert user.allow_named_servers - cookies = app.login_user(username) + cookies = await 
app.login_user(username) servername = 'splugoth' r = await api_request(app, 'users', username, 'servers', servername, method='post') r.raise_for_status() @@ -144,7 +223,7 @@ async def test_delete_named_server(app, named_servers): user_model = normalize_user(r.json()) assert user_model == fill_user( - {'name': username, 'auth_state': None, 'servers': {}} + {'name': username, 'roles': ['user'], 'auth_state': None} ) # wrapper Spawner is gone assert servername not in user.spawners @@ -232,17 +311,66 @@ async def test_named_server_limit(app, named_servers): assert r.text == '' -async def test_named_server_spawn_form(app, username): +@pytest.mark.parametrize( + 'username,admin', + [ + ('nonsuperfoo', False), + ('superfoo', True), + ], +) +async def test_named_server_limit_as_callable( + app, named_servers_with_callable_limit, username, admin +): + """Test named server limit based on `named_server_limit_per_user_fn` callable""" + user = add_user(app.db, app, name=username, admin=admin) + cookies = await app.login_user(username) + + # Create 1st named server + servername1 = 'bar-1' + r = await api_request( + app, 'users', username, 'servers', servername1, method='post', cookies=cookies + ) + r.raise_for_status() + assert r.status_code == 201 + assert r.text == '' + + # Create 2nd named server + servername2 = 'bar-2' + r = await api_request( + app, 'users', username, 'servers', servername2, method='post', cookies=cookies + ) + r.raise_for_status() + assert r.status_code == 201 + assert r.text == '' + + # Create 3rd named server + servername3 = 'bar-3' + r = await api_request( + app, 'users', username, 'servers', servername3, method='post', cookies=cookies + ) + + # No named server limit for admin users as in `named_server_limit_per_user_fn` callable + if admin: + r.raise_for_status() + assert r.status_code == 201 + assert r.text == '' + else: + assert r.status_code == 400 + assert r.json() == { + "status": 400, + "message": f"User {username} already has the maximum of 2 
named servers. One must be deleted before a new server can be created", + } + + +async def test_named_server_spawn_form(app, username, named_servers): server_name = "myserver" base_url = public_url(app) cookies = await app.login_user(username) user = app.users[username] with mock.patch.dict(app.users.settings, {'spawner_class': FormSpawner}): - r = await get_page( - 'spawn/%s/%s' % (username, server_name), app, cookies=cookies - ) + r = await get_page(f'spawn/{username}/{server_name}', app, cookies=cookies) r.raise_for_status() - assert r.url.endswith('/spawn/%s/%s' % (username, server_name)) + assert r.url.endswith(f'/spawn/{username}/{server_name}') assert FormSpawner.options_form in r.text # submit the form @@ -265,3 +393,174 @@ async def test_named_server_spawn_form(app, username): assert server_name in user.spawners spawner = user.spawners[server_name] spawner.user_options == {'energy': '938MeV', 'bounds': [-10, 10], 'notspecified': 5} + + +async def test_user_redirect_default_server_name( + app, username, named_servers, default_server_name +): + name = username + server_name = default_server_name + cookies = await app.login_user(name) + + r = await api_request(app, 'users', username, 'servers', server_name, method='post') + r.raise_for_status() + assert r.status_code == 201 + assert r.text == '' + + r = await get_page('/user-redirect/tree/top/', app) + r.raise_for_status() + print(urlparse(r.url)) + path = urlparse(r.url).path + assert path == url_path_join(app.base_url, '/hub/login') + query = urlparse(r.url).query + assert query == urlencode( + {'next': url_path_join(app.hub.base_url, '/user-redirect/tree/top/')} + ) + + r = await get_page('/user-redirect/notebooks/test.ipynb', app, cookies=cookies) + r.raise_for_status() + print(urlparse(r.url)) + path = urlparse(r.url).path + while '/spawn-pending/' in path: + await asyncio.sleep(0.1) + r = await async_requests.get(r.url, cookies=cookies) + path = urlparse(r.url).path + assert path == url_path_join( + 
app.base_url, f'/user/{name}/{server_name}/notebooks/test.ipynb' + ) + + +async def test_user_redirect_hook_default_server_name( + app, username, named_servers, default_server_name +): + """ + Test proper behavior of user_redirect_hook when c.JupyterHub.default_server_name is set + """ + name = username + server_name = default_server_name + cookies = await app.login_user(name) + + r = await api_request(app, 'users', username, 'servers', server_name, method='post') + r.raise_for_status() + assert r.status_code == 201 + assert r.text == '' + + async def dummy_redirect(path, request, user, base_url): + assert base_url == app.base_url + assert path == 'redirect-to-terminal' + assert request.uri == url_path_join( + base_url, 'hub', 'user-redirect', 'redirect-to-terminal' + ) + # exclude custom server_name + # custom hook is respected exactly + url = url_path_join(user.url, '/terminals/1') + return url + + app.user_redirect_hook = dummy_redirect + + r = await get_page('/user-redirect/redirect-to-terminal', app) + r.raise_for_status() + print(urlparse(r.url)) + path = urlparse(r.url).path + assert path == url_path_join(app.base_url, '/hub/login') + query = urlparse(r.url).query + assert query == urlencode( + {'next': url_path_join(app.hub.base_url, '/user-redirect/redirect-to-terminal')} + ) + + # We don't actually want to start the server by going through spawn - just want to make sure + # the redirect is to the right place + r = await get_page( + '/user-redirect/redirect-to-terminal', + app, + cookies=cookies, + allow_redirects=False, + ) + r.raise_for_status() + redirected_url = urlparse(r.headers['Location']) + assert redirected_url.path == url_path_join( + app.base_url, 'user', username, 'terminals/1' + ) + + +async def test_named_server_stop_server(app, username, named_servers): + server_name = "myserver" + await app.login_user(username) + user = app.users[username] + + r = await api_request(app, 'users', username, 'server', method='post') + assert r.status_code == 
201 + assert r.text == '' + assert user.spawners[''].server + + with mock.patch.object( + app.proxy, 'add_user', side_effect=Exception('mock exception') + ): + r = await api_request( + app, 'users', username, 'servers', server_name, method='post' + ) + r.raise_for_status() + assert r.status_code == 201 + assert r.text == '' + + assert user.spawners[server_name].server is None + assert user.spawners[''].server + assert user.running + + +@pytest.mark.parametrize( + "include_stopped_servers", + [True, False], +) +async def test_stopped_servers(app, user, named_servers, include_stopped_servers): + r = await api_request(app, 'users', user.name, 'server', method='post') + r.raise_for_status() + r = await api_request(app, 'users', user.name, 'servers', "named", method='post') + r.raise_for_status() + + # wait for starts + for i in range(60): + r = await api_request(app, 'users', user.name) + r.raise_for_status() + user_model = r.json() + if not all(s["ready"] for s in user_model["servers"].values()): + time.sleep(1) + else: + break + else: + raise TimeoutError(f"User never stopped: {user_model}") + + r = await api_request(app, 'users', user.name, 'server', method='delete') + r.raise_for_status() + r = await api_request(app, 'users', user.name, 'servers', "named", method='delete') + r.raise_for_status() + + # wait for stops + for i in range(60): + r = await api_request(app, 'users', user.name) + r.raise_for_status() + user_model = r.json() + if not all(s["stopped"] for s in user_model["servers"].values()): + time.sleep(1) + else: + break + else: + raise TimeoutError(f"User never stopped: {user_model}") + + # we have two stopped servers + path = f"users/{user.name}" + if include_stopped_servers: + path = f"{path}?include_stopped_servers" + r = await api_request(app, path) + r.raise_for_status() + user_model = r.json() + servers = list(user_model["servers"].values()) + if include_stopped_servers: + assert len(servers) == 2 + assert all(s["last_activity"] for s in servers) + 
assert all(s["started"] is None for s in servers) + assert all(s["stopped"] for s in servers) + assert not any(s["ready"] for s in servers) + assert not any(s["pending"] for s in servers) + else: + assert user_model["servers"] == {} diff --git a/jupyterhub/tests/test_objects.py b/jupyterhub/tests/test_objects.py index 8f298a42..f24de313 100644 --- a/jupyterhub/tests/test_objects.py +++ b/jupyterhub/tests/test_objects.py @@ -48,8 +48,8 @@ _hostname = socket.gethostname() { 'ip': '', 'port': 123, - 'host': 'http://{}:123'.format(_hostname), - 'url': 'http://{}:123/x/'.format(_hostname), + 'host': f'http://{_hostname}:123', + 'url': f'http://{_hostname}:123/x/', 'bind_url': 'http://*:123/x/', }, ), diff --git a/jupyterhub/tests/test_orm.py b/jupyterhub/tests/test_orm.py index d609b27b..3009a11d 100644 --- a/jupyterhub/tests/test_orm.py +++ b/jupyterhub/tests/test_orm.py @@ -3,16 +3,12 @@ # Distributed under the terms of the Modified BSD License. import os import socket -from datetime import datetime -from datetime import timedelta +from datetime import datetime, timedelta from unittest import mock import pytest -from tornado import gen -from .. import crypto -from .. import objects -from .. import orm +from .. 
import crypto, objects, orm, roles from ..emptyclass import EmptyClass from ..user import User from .mocking import MockSpawner @@ -74,6 +70,16 @@ def test_user(db): assert found is None +def test_user_escaping(db): + orm_user = orm.User(name='company\\user@company.com,\"quoted\"') + db.add(orm_user) + db.commit() + user = User(orm_user) + assert user.name == 'company\\user@company.com,\"quoted\"' + assert user.escaped_name == 'company%5Cuser@company.com%2C%22quoted%22' + assert user.json_escaped_name == 'company\\\\user@company.com,\\\"quoted\\\"' + + def test_tokens(db): user = orm.User(name='inara') db.add(user) @@ -124,7 +130,7 @@ def test_token_expiry(db): assert orm_token.expires_at > now + timedelta(seconds=50) assert orm_token.expires_at < now + timedelta(seconds=70) the_future = mock.patch( - 'jupyterhub.orm.utcnow', lambda: now + timedelta(seconds=70) + 'jupyterhub.orm.APIToken.now', lambda: now + timedelta(seconds=70) ) with the_future: found = orm.APIToken.find(db, token=token) @@ -210,10 +216,13 @@ async def test_spawn_fails(db): orm_user = orm.User(name='aeofel') db.add(orm_user) db.commit() + def_roles = roles.get_default_roles() + for role in def_roles: + roles.create_role(db, role) + roles.assign_default_roles(db, orm_user) class BadSpawner(MockSpawner): - @gen.coroutine - def start(self): + async def start(self): raise RuntimeError("Split the party") user = User( @@ -235,10 +244,12 @@ def test_groups(db): db.commit() assert group.users == [] assert user.groups == [] + group.users.append(user) db.commit() assert group.users == [user] assert user.groups == [group] + db.delete(user) db.commit() assert group.users == [] @@ -344,8 +355,9 @@ def test_user_delete_cascade(db): spawner.server = server = orm.Server() oauth_code = orm.OAuthCode(client=oauth_client, user=user) db.add(oauth_code) - oauth_token = orm.OAuthAccessToken( - client=oauth_client, user=user, grant_type=orm.GrantType.authorization_code + oauth_token = orm.APIToken( + 
oauth_client=oauth_client, + user=user, ) db.add(oauth_token) db.commit() @@ -366,7 +378,7 @@ def test_user_delete_cascade(db): assert_not_found(db, orm.Spawner, spawner_id) assert_not_found(db, orm.Server, server_id) assert_not_found(db, orm.OAuthCode, oauth_code_id) - assert_not_found(db, orm.OAuthAccessToken, oauth_token_id) + assert_not_found(db, orm.APIToken, oauth_token_id) def test_oauth_client_delete_cascade(db): @@ -380,12 +392,13 @@ def test_oauth_client_delete_cascade(db): # these should all be deleted automatically when the user goes away oauth_code = orm.OAuthCode(client=oauth_client, user=user) db.add(oauth_code) - oauth_token = orm.OAuthAccessToken( - client=oauth_client, user=user, grant_type=orm.GrantType.authorization_code + oauth_token = orm.APIToken( + oauth_client=oauth_client, + user=user, ) db.add(oauth_token) db.commit() - assert user.oauth_tokens == [oauth_token] + assert user.api_tokens == [oauth_token] # record all of the ids oauth_code_id = oauth_code.id @@ -397,8 +410,8 @@ def test_oauth_client_delete_cascade(db): # verify that everything gets deleted assert_not_found(db, orm.OAuthCode, oauth_code_id) - assert_not_found(db, orm.OAuthAccessToken, oauth_token_id) - assert user.oauth_tokens == [] + assert_not_found(db, orm.APIToken, oauth_token_id) + assert user.api_tokens == [] assert user.oauth_codes == [] @@ -450,7 +463,7 @@ def test_group_delete_cascade(db): assert group2 in user2.groups # now start deleting - # 1. remove group via user.groups + # 1. remove group via user.group user1.groups.remove(group2) db.commit() assert user1 not in group2.users @@ -470,5 +483,80 @@ def test_group_delete_cascade(db): # 4. 
delete user object db.delete(user1) + db.delete(user2) db.commit() assert user1 not in group1.users + + +def test_expiring_api_token(app, user): + db = app.db + token = orm.APIToken.new(expires_in=30, user=user) + orm_token = orm.APIToken.find(db, token, kind='user') + assert orm_token + + # purge_expired doesn't delete non-expired + orm.APIToken.purge_expired(db) + found = orm.APIToken.find(db, token) + assert found is orm_token + + with mock.patch.object( + orm.APIToken, 'now', lambda: datetime.utcnow() + timedelta(seconds=60) + ): + found = orm.APIToken.find(db, token) + assert found is None + assert orm_token in db.query(orm.APIToken) + orm.APIToken.purge_expired(db) + assert orm_token not in db.query(orm.APIToken) + + +def test_expiring_oauth_token(app, user): + db = app.db + token = "abc123" + now = orm.APIToken.now + client = orm.OAuthClient(identifier="xxx", secret="yyy") + db.add(client) + orm_token = orm.APIToken( + token=token, + oauth_client=client, + user=user, + expires_at=now() + timedelta(seconds=30), + ) + db.add(orm_token) + db.commit() + + found = orm.APIToken.find(db, token) + assert found is orm_token + # purge_expired doesn't delete non-expired + orm.APIToken.purge_expired(db) + found = orm.APIToken.find(db, token) + assert found is orm_token + + with mock.patch.object(orm.APIToken, 'now', lambda: now() + timedelta(seconds=60)): + found = orm.APIToken.find(db, token) + assert found is None + assert orm_token in db.query(orm.APIToken) + orm.APIToken.purge_expired(db) + assert orm_token not in db.query(orm.APIToken) + + +def test_expiring_oauth_code(app, user): + db = app.db + code = "abc123" + now = orm.OAuthCode.now + orm_code = orm.OAuthCode(code=code, expires_at=now() + 30) + db.add(orm_code) + db.commit() + + found = orm.OAuthCode.find(db, code) + assert found is orm_code + # purge_expired doesn't delete non-expired + orm.OAuthCode.purge_expired(db) + found = orm.OAuthCode.find(db, code) + assert found is orm_code + + with 
mock.patch.object(orm.OAuthCode, 'now', lambda: now() + 60): + found = orm.OAuthCode.find(db, code) + assert found is None + assert orm_code in db.query(orm.OAuthCode) + orm.OAuthCode.purge_expired(db) + assert orm_code not in db.query(orm.OAuthCode) diff --git a/jupyterhub/tests/test_pages.py b/jupyterhub/tests/test_pages.py index 61a98065..0b622097 100644 --- a/jupyterhub/tests/test_pages.py +++ b/jupyterhub/tests/test_pages.py @@ -2,35 +2,35 @@ import asyncio import sys from unittest import mock -from urllib.parse import parse_qs -from urllib.parse import urlencode -from urllib.parse import urlparse +from urllib.parse import parse_qs, urlencode, urlparse import pytest from bs4 import BeautifulSoup -from tornado import gen +from tornado.escape import url_escape from tornado.httputil import url_concat -from .. import orm +from .. import orm, roles, scopes from ..auth import Authenticator from ..handlers import BaseHandler +from ..utils import url_path_join from ..utils import url_path_join as ujoin -from .mocking import FalsyCallableFormSpawner -from .mocking import FormSpawner +from .mocking import FalsyCallableFormSpawner, FormSpawner from .test_api import next_event -from .utils import add_user -from .utils import api_request -from .utils import async_requests -from .utils import get_page -from .utils import public_host -from .utils import public_url +from .utils import ( + AsyncSession, + api_request, + async_requests, + get_page, + public_host, + public_url, +) async def test_root_no_auth(app): url = ujoin(public_host(app), app.hub.base_url) r = await async_requests.get(url) r.raise_for_status() - assert r.url == ujoin(url, 'login') + assert r.url == url_concat(ujoin(url, 'login'), dict(next=app.hub.base_url)) async def test_root_auth(app): @@ -45,16 +45,16 @@ async def test_root_auth(app): # if spawning was quick, there will be one more entry that's public_url(user) -async def test_root_redirect(app): +async def test_root_redirect(app, user): name = 'wash' 
cookies = await app.login_user(name) - next_url = ujoin(app.base_url, 'user/other/test.ipynb') + next_url = ujoin(app.base_url, f'user/{user.name}/test.ipynb') url = '/?' + urlencode({'next': next_url}) r = await get_page(url, app, cookies=cookies) path = urlparse(r.url).path - assert path == ujoin(app.base_url, 'hub/user/%s/test.ipynb' % name) - # serve "server not running" page, which has status 503 - assert r.status_code == 503 + assert path == ujoin(app.base_url, f'hub/user/{user.name}/test.ipynb') + # preserves choice to requested user, which 404s as unavailable without access + assert r.status_code == 404 async def test_root_default_url_noauth(app): @@ -92,8 +92,9 @@ async def test_home_auth(app): async def test_admin_no_auth(app): - r = await get_page('admin', app) - assert r.status_code == 403 + r = await get_page('admin', app, allow_redirects=False) + assert r.status_code == 302 + assert '/hub/login' in r.headers['Location'] async def test_admin_not_admin(app): @@ -109,6 +110,13 @@ async def test_admin(app): assert r.url.endswith('/admin') +async def test_admin_version(app): + cookies = await app.login_user('admin') + r = await get_page('admin', app, cookies=cookies, allow_redirects=False) + r.raise_for_status() + assert "version_footer" in r.text + + @pytest.mark.parametrize('sort', ['running', 'last_activity', 'admin', 'name']) async def test_admin_sort(app, sort): cookies = await app.login_user('admin') @@ -117,11 +125,20 @@ async def test_admin_sort(app, sort): assert r.status_code == 200 -async def test_spawn_redirect(app): +@pytest.mark.parametrize("last_failed", [True, False]) +async def test_spawn_redirect(app, last_failed): name = 'wash' cookies = await app.login_user(name) u = app.users[orm.User.find(app.db, name)] + if last_failed: + # mock a failed spawn + last_spawner = u.spawners[''] + last_spawner._spawn_future = asyncio.Future() + last_spawner._spawn_future.set_exception(RuntimeError("I failed!")) + else: + last_spawner = None + status = 
await u.spawner.poll() assert status is not None @@ -130,6 +147,10 @@ async def test_spawn_redirect(app): r.raise_for_status() print(urlparse(r.url)) path = urlparse(r.url).path + + # ensure we got a new spawner + assert u.spawners[''] is not last_spawner + # make sure we visited hub/spawn-pending after spawn # if spawn was really quick, we might get redirected all the way to the running server, # so check history instead of r.url @@ -161,7 +182,7 @@ async def test_spawn_redirect(app): r = await get_page('user/' + name, app, hub=False, cookies=cookies) path = urlparse(r.url).path assert path == ujoin(app.base_url, 'hub/user/%s/' % name) - assert r.status_code == 503 + assert r.status_code == 424 async def test_spawn_handler_access(app): @@ -192,13 +213,34 @@ async def test_spawn_handler_access(app): r.raise_for_status() -async def test_spawn_admin_access(app, admin_access): - """GET /user/:name as admin with admin-access spawns user's server""" - cookies = await app.login_user('admin') - name = 'mariel' - user = add_user(app.db, app=app, name=name) - app.db.commit() +@pytest.mark.parametrize("has_access", ["all", "user", "group", False]) +async def test_spawn_other_user( + app, user, username, group, create_temp_role, has_access +): + """GET /user/:name as another user with access to spawns user's server""" + cookies = await app.login_user(username) + requester = app.users[username] + name = user.name + + if has_access: + if has_access == "group": + group.users.append(user) + app.db.commit() + scopes = [ + f"access:servers!group={group.name}", + f"servers!group={group.name}", + ] + elif has_access == "all": + scopes = ["access:servers", "servers"] + elif has_access == "user": + scopes = [f"access:servers!user={user.name}", f"servers!user={user.name}"] + role = create_temp_role(scopes) + roles.grant_role(app.db, requester, role) + r = await get_page('spawn/' + name, app, cookies=cookies) + if not has_access: + assert r.status_code == 404 + return 
r.raise_for_status() while '/spawn-pending/' in r.url: @@ -207,7 +249,7 @@ async def test_spawn_admin_access(app, admin_access): r.raise_for_status() assert (r.url.split('?')[0] + '/').startswith(public_url(app, user)) - r = await get_page('user/{}/env'.format(name), app, hub=False, cookies=cookies) + r = await get_page(f'user/{name}/env', app, hub=False, cookies=cookies) r.raise_for_status() env = r.json() @@ -226,6 +268,25 @@ async def test_spawn_page(app): assert FormSpawner.options_form in r.text +async def test_spawn_page_after_failed(app, user): + cookies = await app.login_user(user.name) + + # mock a failed spawn + last_spawner = user.spawners[''] + last_spawner._spawn_future = asyncio.Future() + last_spawner._spawn_future.set_exception(RuntimeError("I failed!")) + + with mock.patch.dict(app.users.settings, {'spawner_class': FormSpawner}): + r = await get_page('spawn', app, cookies=cookies) + spawner = user.spawners[''] + # make sure we didn't reuse last spawner + assert isinstance(spawner, FormSpawner) + assert spawner is not last_spawner + assert r.url.endswith('/spawn') + spawner = user.spawners[''] + assert FormSpawner.options_form in r.text + + async def test_spawn_page_falsy_callable(app): with mock.patch.dict( app.users.settings, {'spawner_class': FalsyCallableFormSpawner} @@ -237,14 +298,78 @@ async def test_spawn_page_falsy_callable(app): assert history[1] == ujoin(public_url(app), "hub/spawn-pending/erik") -async def test_spawn_page_admin(app, admin_access): +@pytest.mark.parametrize("has_access", ["all", "user", "group", False]) +async def test_spawn_page_access( + app, has_access, group, username, user, create_temp_role +): + cookies = await app.login_user(username) + requester = app.users[username] + if has_access: + if has_access == "group": + group.users.append(user) + app.db.commit() + scopes = [ + f"access:servers!group={group.name}", + f"servers!group={group.name}", + ] + elif has_access == "all": + scopes = ["access:servers", "servers"] + 
elif has_access == "user": + scopes = [f"access:servers!user={user.name}", f"servers!user={user.name}"] + role = create_temp_role(scopes) + roles.grant_role(app.db, requester, role) + with mock.patch.dict(app.users.settings, {'spawner_class': FormSpawner}): - cookies = await app.login_user('admin') - u = add_user(app.db, app=app, name='melanie') - r = await get_page('spawn/' + u.name, app, cookies=cookies) - assert r.url.endswith('/spawn/' + u.name) + r = await get_page('spawn/' + user.name, app, cookies=cookies) + if not has_access: + assert r.status_code == 404 + return + assert r.status_code == 200 + assert r.url.endswith('/spawn/' + user.name) assert FormSpawner.options_form in r.text - assert "Spawning server for {}".format(u.name) in r.text + assert f"Spawning server for {user.name}" in r.text + + +async def test_spawn_with_query_arguments(app): + with mock.patch.dict(app.users.settings, {'spawner_class': FormSpawner}): + base_url = ujoin(public_host(app), app.hub.base_url) + cookies = await app.login_user('jones') + orm_u = orm.User.find(app.db, 'jones') + u = app.users[orm_u] + await u.stop() + next_url = ujoin(app.base_url, 'user/jones/tree') + r = await async_requests.get( + url_concat( + ujoin(base_url, 'spawn'), + {'next': next_url, 'energy': '510keV'}, + ), + cookies=cookies, + ) + r.raise_for_status() + assert r.history + assert u.spawner.user_options == { + 'energy': '510keV', + 'notspecified': 5, + } + + +async def test_spawn_with_query_arguments_error(app): + with mock.patch.dict(app.users.settings, {'spawner_class': FormSpawner}): + base_url = ujoin(public_host(app), app.hub.base_url) + cookies = await app.login_user('jones') + orm_u = orm.User.find(app.db, 'jones') + u = app.users[orm_u] + await u.stop() + next_url = ujoin(app.base_url, 'user/jones/tree') + r = await async_requests.get( + url_concat( + ujoin(base_url, 'spawn'), + {'next': next_url, 'energy': '510keV', 'illegal_argument': '42'}, + ), + cookies=cookies, + ) + r.raise_for_status() + 
assert "You are not allowed to specify " in r.text async def test_spawn_form(app): @@ -269,22 +394,49 @@ async def test_spawn_form(app): } -async def test_spawn_form_admin_access(app, admin_access): +@pytest.mark.parametrize("has_access", ["all", "user", "group", False]) +async def test_spawn_form_other_user( + app, username, user, group, create_temp_role, has_access +): + cookies = await app.login_user(username) + requester = app.users[username] + if has_access: + if has_access == "group": + group.users.append(user) + app.db.commit() + scopes = [ + f"access:servers!group={group.name}", + f"servers!group={group.name}", + ] + elif has_access == "all": + scopes = ["access:servers", "servers"] + elif has_access == "user": + scopes = [f"access:servers!user={user.name}", f"servers!user={user.name}"] + role = create_temp_role(scopes) + roles.grant_role(app.db, requester, role) + with mock.patch.dict(app.tornado_settings, {'spawner_class': FormSpawner}): base_url = ujoin(public_host(app), app.hub.base_url) - cookies = await app.login_user('admin') - u = add_user(app.db, app=app, name='martha') - next_url = ujoin(app.base_url, 'user', u.name, 'tree') + next_url = ujoin(app.base_url, 'user', user.name, 'tree') r = await async_requests.post( - url_concat(ujoin(base_url, 'spawn', u.name), {'next': next_url}), + url_concat(ujoin(base_url, 'spawn', user.name), {'next': next_url}), cookies=cookies, data={'bounds': ['-3', '3'], 'energy': '938MeV'}, ) + if not has_access: + assert r.status_code == 404 + return r.raise_for_status() + + while '/spawn-pending/' in r.url: + await asyncio.sleep(0.1) + r = await async_requests.get(r.url, cookies=cookies) + r.raise_for_status() + assert r.history - assert r.url.startswith(public_url(app, u)) - assert u.spawner.user_options == { + assert r.url.startswith(public_url(app, user)) + assert user.spawner.user_options == { 'energy': '938MeV', 'bounds': [-3, 3], 'notspecified': 5, @@ -346,7 +498,7 @@ async def test_spawn_pending(app, username, 
slow_spawn): assert page.find('div', {'class': 'progress'}) # validate event source url by consuming it - script = page.body.find('script').text + script = page.body.find('script').string assert 'EventSource' in script # find EventSource url in javascript # maybe not the most robust way to check this? @@ -398,33 +550,184 @@ async def test_user_redirect(app, username): assert path == ujoin(app.base_url, '/user/%s/notebooks/test.ipynb' % name) -async def test_user_redirect_deprecated(app, username): - """redirecting from /user/someonelse/ URLs (deprecated)""" +async def test_user_redirect_hook(app, username): + """ + Test proper behavior of user_redirect_hook + """ name = username cookies = await app.login_user(name) - r = await get_page('/user/baduser', app, cookies=cookies, hub=False) - print(urlparse(r.url)) - path = urlparse(r.url).path - assert path == ujoin(app.base_url, 'hub/user/%s/' % name) - assert r.status_code == 503 + async def dummy_redirect(path, request, user, base_url): + assert base_url == app.base_url + assert path == 'redirect-to-terminal' + assert request.uri == ujoin( + base_url, 'hub', 'user-redirect', 'redirect-to-terminal' + ) + url = ujoin(user.url, '/terminals/1') + return url - r = await get_page('/user/baduser/test.ipynb', app, cookies=cookies, hub=False) - print(urlparse(r.url)) - path = urlparse(r.url).path - assert path == ujoin(app.base_url, 'hub/user/%s/test.ipynb' % name) - assert r.status_code == 503 + app.user_redirect_hook = dummy_redirect - r = await get_page('/user/baduser/test.ipynb', app, hub=False) + r = await get_page('/user-redirect/redirect-to-terminal', app) r.raise_for_status() print(urlparse(r.url)) path = urlparse(r.url).path assert path == ujoin(app.base_url, '/hub/login') query = urlparse(r.url).query assert query == urlencode( - {'next': ujoin(app.base_url, '/hub/user/baduser/test.ipynb')} + {'next': ujoin(app.hub.base_url, '/user-redirect/redirect-to-terminal')} ) + # We don't actually want to start the server by 
going through spawn - just want to make sure + # the redirect is to the right place + r = await get_page( + '/user-redirect/redirect-to-terminal', + app, + cookies=cookies, + allow_redirects=False, + ) + r.raise_for_status() + redirected_url = urlparse(r.headers['Location']) + assert redirected_url.path == ujoin(app.base_url, 'user', username, 'terminals/1') + + +@pytest.mark.parametrize("has_access", ["all", "user", "group", False]) +async def test_other_user_url(app, username, user, group, create_temp_role, has_access): + """Test accessing /user/someonelse/ URLs when the server is not running + + Used to redirect to your own server, + which produced inconsistent behavior depending on whether the server was running. + """ + name = username + cookies = await app.login_user(name) + other_user = user + requester = app.users[name] + other_user_url = f"/user/{other_user.name}" + if has_access: + if has_access == "group": + group.users.append(other_user) + app.db.commit() + scopes = [f"access:servers!group={group.name}"] + elif has_access == "all": + scopes = ["access:servers"] + elif has_access == "user": + scopes = [f"access:servers!user={other_user.name}"] + role = create_temp_role(scopes) + roles.grant_role(app.db, requester, role) + status = 424 + else: + # 404 - access denied without revealing if the user exists + status = 404 + + r = await get_page(other_user_url, app, cookies=cookies, hub=False) + print(urlparse(r.url)) + path = urlparse(r.url).path + assert path == ujoin(app.base_url, f'hub/user/{other_user.name}/') + assert r.status_code == status + + r = await get_page(f'{other_user_url}/test.ipynb', app, cookies=cookies, hub=False) + print(urlparse(r.url)) + path = urlparse(r.url).path + assert path == ujoin(app.base_url, f'hub/user/{other_user.name}/test.ipynb') + assert r.status_code == status + + r = await get_page(f'{other_user_url}/test.ipynb', app, hub=False) + r.raise_for_status() + print(urlparse(r.url)) + path = urlparse(r.url).path + assert path == 
ujoin(app.base_url, '/hub/login') + query = urlparse(r.url).query + assert query == urlencode( + {'next': ujoin(app.base_url, f'/hub/user/{other_user.name}/test.ipynb')} + ) + + +@pytest.mark.parametrize( + 'url, params, redirected_url, form_action', + [ + ( + # spawn?param=value + # will encode given parameters for an unauthenticated URL in the next url + # the next parameter will contain the app base URL (replaces BASE_URL in tests) + 'spawn', + [('param', 'value')], + '/hub/login?next={{BASE_URL}}hub%2Fspawn%3Fparam%3Dvalue', + '/hub/login?next={{BASE_URL}}hub%2Fspawn%3Fparam%3Dvalue', + ), + ( + # login?param=fromlogin&next=encoded(/hub/spawn?param=value) + # will drop parameters given to the login page, passing only the next url + 'login', + [('param', 'fromlogin'), ('next', '/hub/spawn?param=value')], + '/hub/login?param=fromlogin&next=%2Fhub%2Fspawn%3Fparam%3Dvalue', + '/hub/login?next=%2Fhub%2Fspawn%3Fparam%3Dvalue', + ), + ( + # login?param=value&anotherparam=anothervalue + # will drop parameters given to the login page, and use an empty next url + 'login', + [('param', 'value'), ('anotherparam', 'anothervalue')], + '/hub/login?param=value&anotherparam=anothervalue', + '/hub/login?next=', + ), + ( + # login + # simplest case, accessing the login URL, gives an empty next url + 'login', + [], + '/hub/login', + '/hub/login?next=', + ), + ], +) +async def test_login_page(app, url, params, redirected_url, form_action): + url = url_concat(url, params) + r = await get_page(url, app) + redirected_url = redirected_url.replace('{{BASE_URL}}', url_escape(app.base_url)) + assert r.url.endswith(redirected_url) + # now the login.html rendered template must include the given parameters in the form + # action URL, including the next URL + page = BeautifulSoup(r.text, "html.parser") + form = page.find("form", method="post") + action = form.attrs['action'] + form_action = form_action.replace('{{BASE_URL}}', url_escape(app.base_url)) + assert action.endswith(form_action) + + 
+@pytest.mark.parametrize( + "url, token_in", + [ + ("/home", "url"), + ("/home", "header"), + ("/login", "url"), + ("/login", "header"), + ], +) +async def test_page_with_token(app, user, url, token_in): + cookies = await app.login_user(user.name) + token = user.new_api_token() + if token_in == "url": + url = url_concat(url, {"token": token}) + headers = None + elif token_in == "header": + headers = { + "Authorization": f"token {token}", + } + + # request a page with ?token= in URL shouldn't be allowed + r = await get_page( + url, + app, + headers=headers, + allow_redirects=False, + ) + if "/hub/login" in r.url: + assert r.status_code == 200 + else: + assert r.status_code == 302 + assert r.headers["location"].partition("?")[0].endswith("/hub/login") + assert not r.cookies + async def test_login_fail(app): name = 'wash' @@ -437,14 +740,21 @@ async def test_login_fail(app): assert not r.cookies -async def test_login_strip(app): - """Test that login form doesn't strip whitespace from passwords""" - form_data = {'username': 'spiff', 'password': ' space man '} +@pytest.mark.parametrize( + "form_user, auth_user, form_password", + [ + ("spiff", "spiff", " space man "), + (" spiff ", "spiff", " space man "), + ], +) +async def test_login_strip(app, form_user, auth_user, form_password): + """Test that login form strips space form usernames, but not passwords""" + form_data = {"username": form_user, "password": form_password} + expected_auth = {"username": auth_user, "password": form_password} base_url = public_url(app) called_with = [] - @gen.coroutine - def mock_authenticate(handler, data): + async def mock_authenticate(handler, data): called_with.append(data) with mock.patch.object(app.authenticator, 'authenticate', mock_authenticate): @@ -452,43 +762,66 @@ async def test_login_strip(app): base_url + 'hub/login', data=form_data, allow_redirects=False ) - assert called_with == [form_data] + assert called_with == [expected_auth] @pytest.mark.parametrize( - 'running, 
next_url, location', + 'running, next_url, location, params', [ # default URL if next not specified, for both running and not - (True, '', ''), - (False, '', ''), + (True, '', '', None), + (False, '', '', None), # next_url is respected - (False, '/hub/admin', '/hub/admin'), - (False, '/user/other', '/hub/user/other'), - (False, '/absolute', '/absolute'), - (False, '/has?query#andhash', '/has?query#andhash'), + (False, '/hub/admin', '/hub/admin', None), + (False, '/user/other', '/hub/user/other', None), + (False, '/absolute', '/absolute', None), + (False, '/has?query#andhash', '/has?query#andhash', None), + # :// in query string or fragment + (False, '/has?repo=https/host.git', '/has?repo=https/host.git', None), + (False, '/has?repo=https://host.git', '/has?repo=https://host.git', None), + (False, '/has#repo=https://host.git', '/has#repo=https://host.git', None), # next_url outside is not allowed - (False, 'relative/path', ''), - (False, 'https://other.domain', ''), - (False, 'ftp://other.domain', ''), - (False, '//other.domain', ''), - (False, '///other.domain/triple', ''), - (False, '\\\\other.domain/backslashes', ''), + (False, 'relative/path', '', None), + (False, 'https://other.domain', '', None), + (False, 'ftp://other.domain', '', None), + (False, '//other.domain', '', None), + (False, '///other.domain/triple', '', None), + (False, '\\\\other.domain/backslashes', '', None), + # params are handled correctly (ignored if ?next= specified) + ( + True, + '/hub/admin?left=1&right=2', + 'hub/admin?left=1&right=2', + {"left": "abc"}, + ), + (False, '/hub/admin', 'hub/admin', [('left', 1), ('right', 2)]), + (True, '', '', {"keep": "yes"}), + (False, '', '', {"keep": "yes"}), ], ) -async def test_login_redirect(app, running, next_url, location): +async def test_login_redirect(app, running, next_url, location, params): cookies = await app.login_user('river') user = app.users['river'] if location: location = ujoin(app.base_url, location) elif running: + # location not 
specified, location = user.url + if params: + location = url_concat(location, params) else: # use default url location = ujoin(app.base_url, 'hub/spawn') + if params: + location = url_concat(location, params) url = 'login' + if params: + url = url_concat(url, params) if next_url: - if '//' not in next_url and next_url.startswith('/'): + if next_url.startswith('/') and not ( + next_url.startswith("//") or urlparse(next_url).netloc + ): next_url = ujoin(app.base_url, next_url, '') url = url_concat(url, dict(next=next_url)) @@ -501,7 +834,73 @@ async def test_login_redirect(app, running, next_url, location): r = await get_page(url, app, cookies=cookies, allow_redirects=False) r.raise_for_status() assert r.status_code == 302 - assert location == r.headers['Location'] + assert r.headers["Location"] == location + + +@pytest.mark.parametrize( + 'location, next, extra_params', + [ + ( + "{base_url}hub/spawn?a=5", + None, + {"a": "5"}, + ), # no ?next= given, preserve params + ("/x", "/x", {"a": "5"}), # ?next=given, params ignored + ( + "/x?b=10", + "/x?b=10", + {"a": "5"}, + ), # ?next=given with params, additional params ignored + ], +) +async def test_next_url(app, user, location, next, extra_params): + params = {} + if extra_params: + params.update(extra_params) + if next: + params["next"] = next + url = url_concat("/", params) + cookies = await app.login_user("monster") + + # location can be a string template + location = location.format(base_url=app.base_url) + + r = await get_page(url, app, cookies=cookies, allow_redirects=False) + r.raise_for_status() + assert r.status_code == 302 + assert r.headers["Location"] == location + + +async def test_next_url_params_sequence(app, user): + """Test each step of / -> login -> spawn + + and whether they preserve url params + """ + params = {"xyz": "5"} + # first request: root page, with params, not logged in + r = await get_page("/?xyz=5", app, allow_redirects=False) + r.raise_for_status() + location = r.headers["Location"] + 
+ # next page: login + cookies = await app.login_user(user.name) + assert location == url_concat( + ujoin(app.base_url, "/hub/login"), {"next": ujoin(app.base_url, "/hub/?xyz=5")} + ) + r = await async_requests.get( + public_host(app) + location, cookies=cookies, allow_redirects=False + ) + r.raise_for_status() + location = r.headers["Location"] + + # after login, redirect back + assert location == ujoin(app.base_url, "/hub/?xyz=5") + r = await async_requests.get( + public_host(app) + location, cookies=cookies, allow_redirects=False + ) + r.raise_for_status() + location = r.headers["Location"] + assert location == ujoin(app.base_url, "/hub/spawn?xyz=5") async def test_auto_login(app, request): @@ -515,14 +914,18 @@ async def test_auto_login(app, request): ) # no auto_login: end up at /hub/login r = await async_requests.get(base_url) - assert r.url == public_url(app, path='hub/login') + assert r.url == url_concat( + public_url(app, path="hub/login"), {"next": app.hub.base_url} + ) # enable auto_login: redirect from /hub/login to /hub/dummy authenticator = Authenticator(auto_login=True) authenticator.login_url = lambda base_url: ujoin(base_url, 'dummy') with mock.patch.dict(app.tornado_settings, {'authenticator': authenticator}): r = await async_requests.get(base_url) - assert r.url == public_url(app, path='hub/dummy') + assert r.url == url_concat( + public_url(app, path="hub/dummy"), {"next": app.hub.base_url} + ) async def test_auto_login_logout(app): @@ -596,7 +999,7 @@ async def test_shutdown_on_logout(app, shutdown_on_logout): assert spawner.ready == (not shutdown_on_logout) -async def test_login_no_whitelist_adds_user(app): +async def test_login_no_allowed_adds_user(app): auth = app.authenticator mock_add_user = mock.Mock() with mock.patch.object(auth, 'add_user', mock_add_user): @@ -611,7 +1014,9 @@ async def test_static_files(app): r = await async_requests.get(ujoin(base_url, 'logo')) r.raise_for_status() assert r.headers['content-type'] == 'image/png' - r = 
await async_requests.get(ujoin(base_url, 'static', 'images', 'jupyter.png')) + r = await async_requests.get( + ujoin(base_url, 'static', 'images', 'jupyterhub-80.png') + ) r.raise_for_status() assert r.headers['content-type'] == 'image/png' r = await async_requests.get(ujoin(base_url, 'static', 'css', 'style.min.css')) @@ -632,8 +1037,9 @@ async def test_oauth_token_page(app): user = app.users[orm.User.find(app.db, name)] client = orm.OAuthClient(identifier='token') app.db.add(client) - oauth_token = orm.OAuthAccessToken( - client=client, user=user, grant_type=orm.GrantType.authorization_code + oauth_token = orm.APIToken( + oauth_client=client, + user=user, ) app.db.add(oauth_token) app.db.commit() @@ -708,6 +1114,73 @@ async def test_bad_oauth_get(app, params): assert r.status_code == 400 +@pytest.mark.parametrize( + "scopes, has_access", + [ + (["users"], False), + (["admin:users"], False), + (["users", "admin:users", "admin:servers"], False), + (["admin-ui"], True), + ], +) +async def test_admin_page_access(app, scopes, has_access, create_user_with_scopes): + user = create_user_with_scopes(*scopes) + cookies = await app.login_user(user.name) + home_resp = await get_page("/home", app, cookies=cookies) + admin_resp = await get_page("/admin", app, cookies=cookies) + assert home_resp.status_code == 200 + soup = BeautifulSoup(home_resp.text, "html.parser") + nav = soup.find("div", id="thenavbar") + links = [a["href"] for a in nav.find_all("a")] + + admin_url = app.base_url + "hub/admin" + if has_access: + assert admin_resp.status_code == 200 + assert admin_url in links + else: + assert admin_resp.status_code == 403 + assert admin_url not in links + + +async def test_oauth_page_scope_appearance( + app, mockservice_url, create_user_with_scopes, create_temp_role +): + service_role = create_temp_role( + [ + 'self', + 'read:users!user=gawain', + 'read:tokens', + 'read:groups!group=mythos', + ] + ) + service = mockservice_url + user = 
create_user_with_scopes("access:services") + roles.grant_role(app.db, user, service_role) + oauth_client = ( + app.db.query(orm.OAuthClient) + .filter_by(identifier=service.oauth_client_id) + .one() + ) + oauth_client.allowed_scopes = sorted(roles.roles_to_scopes([service_role])) + app.db.commit() + + s = AsyncSession() + s.cookies = await app.login_user(user.name) + url = url_path_join(public_url(app, service) + 'owhoami/?arg=x') + r = await s.get(url) + r.raise_for_status() + soup = BeautifulSoup(r.text, "html.parser") + scopes_block = soup.find('form') + for scope in service_role.scopes: + base_scope, _, filter_ = scope.partition('!') + scope_def = scopes.scope_definitions[base_scope] + assert scope_def['description'] in scopes_block.text + if filter_: + kind, _, name = filter_.partition('=') + assert kind in scopes_block.text + assert name in scopes_block.text + + async def test_token_page(app): name = "cake" cookies = await app.login_user(name) @@ -764,26 +1237,57 @@ async def test_token_page(app): async def test_server_not_running_api_request(app): cookies = await app.login_user("bees") r = await get_page("user/bees/api/status", app, hub=False, cookies=cookies) - assert r.status_code == 503 + assert r.status_code == 424 assert r.headers["content-type"] == "application/json" message = r.json()['message'] assert ujoin(app.base_url, "hub/spawn/bees") in message assert " /user/bees" in message -async def test_metrics_no_auth(app): - r = await get_page("metrics", app) - assert r.status_code == 403 - - -async def test_metrics_auth(app): - cookies = await app.login_user('river') - metrics_url = ujoin(public_host(app), app.hub.base_url, 'metrics') - r = await get_page("metrics", app, cookies=cookies) - assert r.status_code == 200 - assert r.url == metrics_url +async def test_server_not_running_api_request_legacy_status(app): + app.use_legacy_stopped_server_status_code = True + cookies = await app.login_user("bees") + r = await get_page("user/bees/api/status", app, 
hub=False, cookies=cookies) + assert r.status_code == 503 async def test_health_check_request(app): r = await get_page('health', app) assert r.status_code == 200 + + +async def test_pre_spawn_start_exc_no_form(app): + exc = "Unhandled error starting server" + + # throw exception from pre_spawn_start + async def mock_pre_spawn_start(user, spawner): + raise Exception(exc) + + with mock.patch.object(app.authenticator, 'pre_spawn_start', mock_pre_spawn_start): + cookies = await app.login_user('summer') + # spawn page should thow a 500 error and show the pre_spawn_start error message + r = await get_page('spawn', app, cookies=cookies) + assert r.status_code == 500 + assert exc in r.text + + +async def test_pre_spawn_start_exc_options_form(app): + exc = "pre_spawn_start error" + + # throw exception from pre_spawn_start + async def mock_pre_spawn_start(user, spawner): + raise Exception(exc) + + with mock.patch.dict( + app.users.settings, {'spawner_class': FormSpawner} + ), mock.patch.object(app.authenticator, 'pre_spawn_start', mock_pre_spawn_start): + cookies = await app.login_user('spring') + user = app.users['spring'] + # spawn page shouldn't throw any error until the spawn is started + r = await get_page('spawn', app, cookies=cookies) + assert r.url.endswith('/spawn') + r.raise_for_status() + assert FormSpawner.options_form in r.text + # spawning the user server should throw the pre_spawn_start error + with pytest.raises(Exception, match="%s" % exc): + await user.spawn() diff --git a/jupyterhub/tests/test_proxy.py b/jupyterhub/tests/test_proxy.py index 0de5748b..d4e6b247 100644 --- a/jupyterhub/tests/test_proxy.py +++ b/jupyterhub/tests/test_proxy.py @@ -2,20 +2,18 @@ import json import os from contextlib import contextmanager -from queue import Queue from subprocess import Popen -from urllib.parse import quote -from urllib.parse import urlparse +from urllib.parse import quote, urlparse import pytest +from traitlets import TraitError from traitlets.config import 
Config -from .. import orm from ..utils import url_path_join as ujoin from ..utils import wait_for_http_server from .mocking import MockHub -from .test_api import add_user -from .test_api import api_request +from .test_api import add_user, api_request +from .utils import skip_if_ssl @pytest.fixture @@ -28,6 +26,7 @@ def disable_check_routes(app): app.last_activity_callback.start() +@skip_if_ssl async def test_external_proxy(request): auth_token = 'secret!' proxy_ip = '127.0.0.1' @@ -101,7 +100,7 @@ async def test_external_proxy(request): print(app.base_url, user_path) host = '' if app.subdomain_host: - host = '%s.%s' % (name, urlparse(app.subdomain_host).hostname) + host = f'{name}.{urlparse(app.subdomain_host).hostname}' user_spec = host + user_path assert sorted(routes.keys()) == [app.hub.routespec, user_spec] @@ -148,7 +147,7 @@ async def test_external_proxy(request): await wait_for_proxy() # tell the hub where the new proxy is - new_api_url = 'http://{}:{}'.format(proxy_ip, proxy_port) + new_api_url = f'http://{proxy_ip}:{proxy_port}' r = await api_request( app, 'proxy', @@ -195,6 +194,98 @@ async def test_check_routes(app, username, disable_check_routes): assert before == after +@pytest.mark.parametrize( + "routespec", + [ + '/has%20space/foo/', + '/missing-trailing/slash', + '/has/@/', + '/has/' + quote('üñîçø∂é'), + 'host.name/path/', + 'other.host/path/no/slash', + ], +) +async def test_extra_routes(app, routespec): + proxy = app.proxy + # When using host_routing, it's up to the admin to + # provide routespecs that have a domain in them. + # We don't explicitly validate that here. 
+ if app.subdomain_host and routespec.startswith("/"): + routespec = 'example.com/' + routespec + elif not app.subdomain_host and not routespec.startswith("/"): + pytest.skip("requires subdomains") + validated_routespec = routespec + if not routespec.endswith("/"): + validated_routespec = routespec + "/" + target = 'http://localhost:9999/test' + proxy.extra_routes = {routespec: target} + + await proxy.check_routes(app.users, app._service_map) + + routes = await app.proxy.get_all_routes() + print(routes) + assert validated_routespec in routes + assert routes[validated_routespec]['target'] == target + assert routes[validated_routespec]['data']['extra'] + + +@pytest.mark.parametrize( + "needs_subdomain, routespec, expected", + [ + (False, "/prefix/", "/prefix/"), + (False, "/prefix", "/prefix/"), + (False, "prefix/", ValueError), + (True, "/prefix/", ValueError), + (True, "example.com/prefix/", "example.com/prefix/"), + (True, "example.com/prefix", "example.com/prefix/"), + (False, 100, TraitError), + ], +) +def test_extra_routes_validate_routespec( + request, app, needs_subdomain, routespec, expected +): + save_host = app.subdomain_host + request.addfinalizer(lambda: setattr(app, "subdomain_host", save_host)) + if needs_subdomain: + app.subdomain_host = "localhost.jovyan.org" + else: + app.subdomain_host = "" + + proxy = app.proxy + + extra_routes = {routespec: "https://127.0.0.1"} + if isinstance(expected, type) and issubclass(expected, BaseException): + with pytest.raises(expected): + proxy.extra_routes = extra_routes + return + proxy.extra_routes = extra_routes + assert list(proxy.extra_routes) == [expected] + + +@pytest.mark.parametrize( + "target, expected", + [ + ("http://host", "http://host"), + ("https://host", "https://host"), + ("/missing-host", ValueError), + ("://missing-scheme", ValueError), + (100, TraitError), + ], +) +def test_extra_routes_validate_target(app, target, expected): + proxy = app.proxy + routespec = "/prefix/" + if app.subdomain_host: + 
routespec = f"host.tld{routespec}" + extra_routes = {routespec: target} + if isinstance(expected, type) and issubclass(expected, BaseException): + with pytest.raises(expected): + proxy.extra_routes = extra_routes + return + proxy.extra_routes = extra_routes + assert list(proxy.extra_routes.values()) == [expected] + + @pytest.mark.parametrize( "routespec", [ diff --git a/jupyterhub/tests/test_roles.py b/jupyterhub/tests/test_roles.py new file mode 100644 index 00000000..6dd5bdfe --- /dev/null +++ b/jupyterhub/tests/test_roles.py @@ -0,0 +1,1338 @@ +"""Test roles""" +# Copyright (c) Jupyter Development Team. +# Distributed under the terms of the Modified BSD License. +import json +import os + +import pytest +from pytest import mark +from tornado.log import app_log + +from .. import orm, roles +from ..scopes import get_scopes_for, scope_definitions +from ..utils import utcnow +from .mocking import MockHub +from .utils import add_user, api_request + + +@mark.role +def test_orm_roles(db): + """Test orm roles setup""" + user_role = orm.Role.find(db, name='user') + token_role = orm.Role.find(db, name='token') + service_role = orm.Role.find(db, name='service') + if not user_role: + user_role = orm.Role(name='user', scopes=['self']) + db.add(user_role) + if not token_role: + token_role = orm.Role(name='token', scopes=['inherit']) + db.add(token_role) + if not service_role: + service_role = orm.Role(name='service', scopes=[]) + db.add(service_role) + db.commit() + + group_role = orm.Role(name='group', scopes=['read:users']) + db.add(group_role) + db.commit() + + user = orm.User(name='falafel') + db.add(user) + db.commit() + + service = orm.Service(name='kebab') + db.add(service) + db.commit() + + group = orm.Group(name='fast-food') + db.add(group) + db.commit() + + assert user_role.users == [] + assert user_role.services == [] + assert user_role.groups == [] + assert service_role.users == [] + assert service_role.services == [] + assert service_role.groups == [] + assert 
user.roles == [] + assert service.roles == [] + assert group.roles == [] + + user_role.users.append(user) + service_role.services.append(service) + group_role.groups.append(group) + db.commit() + assert user_role.users == [user] + assert user.roles == [user_role] + assert service_role.services == [service] + assert service.roles == [service_role] + assert group_role.groups == [group] + assert group.roles == [group_role] + + # check token creation without specifying its role + # assigns it the default 'token' role + token = user.new_api_token() + user_token = orm.APIToken.find(db, token=token) + assert set(user_token.scopes) == set(token_role.scopes) + + # check creating token with a specific role + token = service.new_api_token(roles=['service']) + service_token = orm.APIToken.find(db, token=token) + assert set(service_token.scopes) == set(service_role.scopes) + + # check deleting user removes the user from roles + db.delete(user) + db.commit() + assert user_role.users == [] + # check deleting the service_role removes it from service.roles + db.delete(service_role) + db.commit() + assert service.roles == [] + # check deleting the group removes it from group_roles + db.delete(group) + db.commit() + assert group_role.groups == [] + + # clean up db + db.delete(service) + db.delete(group_role) + db.commit() + + +@mark.role +def test_orm_roles_delete_cascade(db): + """Orm roles cascade""" + user1 = orm.User(name='user1') + user2 = orm.User(name='user2') + role1 = orm.Role(name='role1') + role2 = orm.Role(name='role2') + db.add(user1) + db.add(user2) + db.add(role1) + db.add(role2) + db.commit() + # add user to role via user.roles + user1.roles.append(role1) + db.commit() + assert user1 in role1.users + assert role1 in user1.roles + + # add user to role via roles.users + role1.users.append(user2) + db.commit() + assert user2 in role1.users + assert role1 in user2.roles + + # fill role2 and check role1 again + role2.users.append(user1) + role2.users.append(user2) + 
db.commit() + assert user1 in role1.users + assert user2 in role1.users + assert user1 in role2.users + assert user2 in role2.users + assert role1 in user1.roles + assert role1 in user2.roles + assert role2 in user1.roles + assert role2 in user2.roles + + # now start deleting + # 1. remove role via user.roles + user1.roles.remove(role2) + db.commit() + assert user1 not in role2.users + assert role2 not in user1.roles + + # 2. remove user via role.users + role1.users.remove(user2) + db.commit() + assert user2 not in role1.users + assert role1 not in user2.roles + + # 3. delete role object + db.delete(role2) + db.commit() + assert role2 not in user1.roles + assert role2 not in user2.roles + + # 4. delete user object + db.delete(user1) + db.delete(user2) + db.commit() + assert user1 not in role1.users + + +@mark.role +@mark.parametrize( + "scopes, expected_scopes", + [ + ( + ['admin:users'], + { + 'admin:users', + 'admin:auth_state', + 'users', + 'delete:users', + 'list:users', + 'read:users', + 'users:activity', + 'read:users:name', + 'read:users:groups', + 'read:roles:users', + 'read:users:activity', + }, + ), + ( + ['users'], + { + 'users', + 'list:users', + 'read:users', + 'users:activity', + 'read:users:name', + 'read:users:groups', + 'read:users:activity', + }, + ), + ( + ['read:users'], + { + 'read:users', + 'read:users:name', + 'read:users:groups', + 'read:users:activity', + }, + ), + (['read:servers'], {'read:servers', 'read:users:name'}), + ( + ['admin:groups'], + { + 'admin:groups', + 'groups', + 'delete:groups', + 'list:groups', + 'read:groups', + 'read:roles:groups', + 'read:groups:name', + }, + ), + ( + ['admin:groups', 'read:servers'], + { + 'admin:groups', + 'groups', + 'delete:groups', + 'list:groups', + 'read:groups', + 'read:roles:groups', + 'read:groups:name', + 'read:servers', + 'read:users:name', + }, + ), + ( + ['tokens!group=hobbits'], + {'tokens!group=hobbits', 'read:tokens!group=hobbits'}, + ), + ], +) +def test_get_expanded_scopes(db, 
scopes, expected_scopes): + """Test role scopes expansion into their fully expanded scopes""" + roles.create_role(db, {'name': 'testing_scopes', 'scopes': scopes}) + role = orm.Role.find(db, name='testing_scopes') + expanded_scopes = roles.roles_to_expanded_scopes([role], owner=None) + assert expanded_scopes == expected_scopes + db.delete(role) + + +@mark.role +async def test_load_default_roles(tmpdir, request): + """Test loading default roles in app.py""" + kwargs = {} + ssl_enabled = getattr(request.module, "ssl_enabled", False) + if ssl_enabled: + kwargs['internal_certs_location'] = str(tmpdir) + hub = MockHub(**kwargs) + hub.init_db() + db = hub.db + await hub.init_role_creation() + # test default roles loaded to database + default_roles = roles.get_default_roles() + for role in default_roles: + assert orm.Role.find(db, role['name']) is not None + + +@mark.role +@mark.parametrize( + "role, role_def, response_type, response", + [ + ( + 'new-role', + { + 'name': 'new-role', + 'description': 'Some description', + 'scopes': ['groups'], + }, + 'info', + app_log.info('Role new-role added to database'), + ), + ( + 'the-same-role', + { + 'name': 'new-role', + 'description': 'Some description', + 'scopes': ['groups'], + }, + 'no-log', + None, + ), + ('no_name', {'scopes': ['users']}, 'error', KeyError), + ( + 'no_scopes', + {'name': 'no-permissions'}, + 'warning', + app_log.warning('Warning: New defined role no-permissions has no scopes'), + ), + ( + 'admin', + {'name': 'admin', 'scopes': ['admin:users']}, + 'error', + ValueError, + ), + ( + 'admin', + {'name': 'admin', 'description': 'New description'}, + 'error', + ValueError, + ), + ( + 'user', + {'name': 'user', 'scopes': ['read:users:name']}, + 'info', + app_log.info('Role user scopes attribute has been changed'), + ), + # rewrite the user role back to 'default' + ( + 'user', + {'name': 'user', 'scopes': ['self']}, + 'info', + app_log.info('Role user scopes attribute has been changed'), + ), + ], +) +async def 
test_creating_roles(app, role, role_def, response_type, response): + """Test raising errors and warnings when creating/modifying roles""" + + db = app.db + + if response_type == 'error': + with pytest.raises(response): + roles.create_role(db, role_def) + + elif response_type == 'warning' or response_type == 'info': + with pytest.warns(response): + roles.create_role(db, role_def) + # check the role has been created/modified + role = orm.Role.find(db, role_def['name']) + assert role is not None + if 'description' in role_def.keys(): + assert role.description == role_def['description'] + if 'scopes' in role_def.keys(): + assert role.scopes == role_def['scopes'] + + # make sure no warnings/info logged when the role exists and its definition hasn't been changed + elif response_type == 'no-log': + with pytest.warns(response) as record: + roles.create_role(db, role_def) + assert not record.list + role = orm.Role.find(db, role_def['name']) + assert role is not None + + +@mark.role +@mark.parametrize( + "role_type, rolename, response_type, response", + [ + ( + 'existing', + 'test-role1', + 'info', + app_log.info('Role user scopes attribute has been changed'), + ), + ('non-existing', 'test-role2', 'error', KeyError), + ('default', 'user', 'error', ValueError), + ], +) +async def test_delete_roles(db, role_type, rolename, response_type, response): + """Test raising errors and info when deleting roles""" + + if response_type == 'info': + # add the role to db + test_role = orm.Role(name=rolename) + db.add(test_role) + db.commit() + check_role = orm.Role.find(db, rolename) + assert check_role is not None + # check the role is deleted and info raised + with pytest.warns(response): + roles.delete_role(db, rolename) + check_role = orm.Role.find(db, rolename) + assert check_role is None + + elif response_type == 'error': + with pytest.raises(response): + roles.delete_role(db, rolename) + + +@mark.role +@mark.parametrize( + "role, response", + [ + ( + { + 'name': 'test-scopes-1', + 
'scopes': [ + 'users', + 'users!user=charlie', + 'admin:groups', + 'read:tokens', + ], + }, + 'existing', + ), + ({'name': 'test-scopes-2', 'scopes': ['uses']}, KeyError), + ({'name': 'test-scopes-3', 'scopes': ['users:activities']}, KeyError), + ({'name': 'test-scopes-4', 'scopes': ['groups!goup=class-A']}, KeyError), + ], +) +async def test_scope_existence(tmpdir, request, role, response): + """Test checking of scopes provided in role definitions""" + kwargs = {'load_roles': [role]} + ssl_enabled = getattr(request.module, "ssl_enabled", False) + if ssl_enabled: + kwargs['internal_certs_location'] = str(tmpdir) + hub = MockHub(**kwargs) + hub.init_db() + db = hub.db + + if response == 'existing': + roles.create_role(db, role) + added_role = orm.Role.find(db, role['name']) + assert added_role is not None + assert added_role.scopes == role['scopes'] + + elif response == KeyError: + with pytest.raises(response): + roles.create_role(db, role) + added_role = orm.Role.find(db, role['name']) + assert added_role is None + + # delete the tested roles + if added_role: + roles.delete_role(db, added_role.name) + + +@mark.role +@mark.parametrize( + "explicit_allowed_users", + [ + (True,), + (False,), + ], +) +async def test_load_roles_users(tmpdir, request, explicit_allowed_users): + """Test loading predefined roles for users in app.py""" + roles_to_load = [ + { + 'name': 'teacher', + 'description': 'Access users information, servers and groups without create/delete privileges', + 'scopes': ['users', 'groups'], + 'users': ['cyclops', 'gandalf'], + }, + ] + kwargs = {'load_roles': roles_to_load} + ssl_enabled = getattr(request.module, "ssl_enabled", False) + if ssl_enabled: + kwargs['internal_certs_location'] = str(tmpdir) + hub = MockHub(**kwargs) + hub.init_db() + db = hub.db + hub.authenticator.admin_users = ['admin'] + if explicit_allowed_users: + hub.authenticator.allowed_users = ['cyclops', 'gandalf', 'bilbo', 'gargamel'] + await hub.init_role_creation() + await 
hub.init_users() + await hub.init_role_assignment() + admin_role = orm.Role.find(db, 'admin') + user_role = orm.Role.find(db, 'user') + # test if every user has a role (and no duplicates) + # and admins have admin role + for user in db.query(orm.User): + assert len(user.roles) > 0 + assert len(user.roles) == len(set(user.roles)) + if user.admin: + assert admin_role in user.roles + assert user_role in user.roles + + # test if predefined roles loaded and assigned + teacher_role = orm.Role.find(db, name='teacher') + assert teacher_role is not None + gandalf_user = orm.User.find(db, name='gandalf') + assert teacher_role in gandalf_user.roles + cyclops_user = orm.User.find(db, name='cyclops') + assert teacher_role in cyclops_user.roles + + # delete the test roles + for role in roles_to_load: + roles.delete_role(db, role['name']) + + +@mark.role +async def test_load_roles_services(tmpdir, request, preserve_scopes): + services = [ + {'name': 'idle-culler', 'api_token': 'some-token'}, + {'name': 'user_service', 'api_token': 'some-other-token'}, + {'name': 'admin_service', 'api_token': 'secret-token'}, + ] + service_tokens = { + 'some-token': 'idle-culler', + 'some-other-token': 'user_service', + 'secret-token': 'admin_service', + } + custom_scopes = { + "custom:empty-scope": { + "description": "empty custom scope", + } + } + roles_to_load = [ + { + 'name': 'idle-culler', + 'description': 'Cull idle servers', + 'scopes': [ + 'read:users:name', + 'read:users:activity', + 'read:servers', + 'servers', + 'custom:empty-scope', + ], + 'services': ['idle-culler'], + }, + ] + kwargs = { + 'custom_scopes': custom_scopes, + 'load_roles': roles_to_load, + 'services': services, + 'service_tokens': service_tokens, + } + ssl_enabled = getattr(request.module, "ssl_enabled", False) + if ssl_enabled: + kwargs['internal_certs_location'] = str(tmpdir) + hub = MockHub(**kwargs) + hub.init_db() + db = hub.db + await hub.init_role_creation() + await hub.init_api_tokens() + # make 'admin_service' 
admin + admin_service = orm.Service.find(db, 'admin_service') + admin_service.admin = True + db.commit() + await hub.init_role_assignment() + # test if every service has a role (and no duplicates) + admin_role = orm.Role.find(db, name='admin') + user_role = orm.Role.find(db, name='user') + + # test if predefined roles loaded and assigned + culler_role = orm.Role.find(db, name='idle-culler') + culler_service = orm.Service.find(db, name='idle-culler') + assert culler_service.roles == [culler_role] + user_service = orm.Service.find(db, name='user_service') + assert not user_service.roles + assert admin_service.roles == [admin_role] + + # delete the test services + for service in db.query(orm.Service): + db.delete(service) + db.commit() + + # delete the test tokens + for token in db.query(orm.APIToken): + db.delete(token) + db.commit() + + # delete the test roles + for role in roles_to_load: + roles.delete_role(db, role['name']) + + +@mark.role +async def test_load_roles_groups(tmpdir, request): + """Test loading predefined roles for groups in app.py""" + groups_to_load = { + 'group1': ['gandalf'], + 'group2': ['bilbo', 'gargamel'], + 'group3': ['cyclops'], + } + roles_to_load = [ + { + 'name': 'assistant', + 'description': 'Access users information only', + 'scopes': ['read:users'], + 'groups': ['group2'], + }, + { + 'name': 'head', + 'description': 'Whole user access', + 'scopes': ['users', 'admin:users'], + 'groups': ['group3', "group4"], + }, + ] + kwargs = {'load_groups': groups_to_load, 'load_roles': roles_to_load} + ssl_enabled = getattr(request.module, "ssl_enabled", False) + if ssl_enabled: + kwargs['internal_certs_location'] = str(tmpdir) + hub = MockHub(**kwargs) + hub.init_db() + db = hub.db + await hub.init_role_creation() + await hub.init_groups() + await hub.init_role_assignment() + + assist_role = orm.Role.find(db, name='assistant') + head_role = orm.Role.find(db, name='head') + + group1 = orm.Group.find(db, name='group1') + group2 = orm.Group.find(db, 
name='group2') + group3 = orm.Group.find(db, name='group3') + group4 = orm.Group.find(db, name='group4') + + # test group roles + assert group1.roles == [] + assert group2 in assist_role.groups + assert group3 in head_role.groups + assert group4 in head_role.groups + + # delete the test roles + for role in roles_to_load: + roles.delete_role(db, role['name']) + + +@mark.role +async def test_load_roles_user_tokens(tmpdir, request): + user_tokens = { + 'secret-token': 'cyclops', + 'secrety-token': 'gandalf', + 'super-secret-token': 'admin', + } + roles_to_load = [ + { + 'name': 'reader', + 'description': 'Read all users models', + 'scopes': ['read:users'], + }, + ] + kwargs = { + 'load_roles': roles_to_load, + 'api_tokens': user_tokens, + } + ssl_enabled = getattr(request.module, "ssl_enabled", False) + if ssl_enabled: + kwargs['internal_certs_location'] = str(tmpdir) + hub = MockHub(**kwargs) + hub.init_db() + db = hub.db + hub.authenticator.admin_users = ['admin'] + hub.authenticator.allowed_users = ['cyclops', 'gandalf'] + await hub.init_role_creation() + await hub.init_users() + await hub.init_api_tokens() + await hub.init_role_assignment() + # test if all other tokens have default 'user' role + token_role = orm.Role.find(db, 'token') + secret_token = orm.APIToken.find(db, 'secret-token') + assert set(secret_token.scopes) == set(token_role.scopes) + secrety_token = orm.APIToken.find(db, 'secrety-token') + assert set(secrety_token.scopes) == set(token_role.scopes) + + # delete the test tokens + for token in db.query(orm.APIToken): + db.delete(token) + db.commit() + + # delete the test roles + for role in roles_to_load: + roles.delete_role(db, role['name']) + + +@mark.role +@mark.parametrize( + "headers, rolename, scopes, status", + [ + # no role requested - gets default 'token' role + ({}, None, None, 201), + # role scopes within the user's default 'user' role + ({}, 'self-reader', ['read:users!user'], 201), + # role scopes within the user's default 'user' role, 
but with disjoint filter + ({}, 'other-reader', ['read:users!user=other'], 400), + # role scopes within the user's default 'user' role, without filter + ({}, 'other-reader', ['read:users'], 400), + # role scopes outside of the user's role but within the group's role scopes of which the user is a member + ({}, 'groups-reader', ['read:groups'], 201), + # non-existing role request + ({}, 'non-existing', [], 400), + # role scopes outside of both user's role and group's role scopes + ({}, 'users-creator', ['admin:users'], 400), + ], +) +async def test_get_new_token_via_api(app, headers, rolename, scopes, status): + """Test requesting a token via API with and without roles""" + + user = add_user(app.db, app, name='user') + if rolename and rolename != 'non-existing': + roles.create_role(app.db, {'name': rolename, 'scopes': scopes}) + if rolename == 'groups-reader': + # add role for a group + roles.create_role(app.db, {'name': 'group-role', 'scopes': ['groups']}) + # create a group and add the user and group_role + group = orm.Group.find(app.db, 'test-group') + if not group: + group = orm.Group(name='test-group') + app.db.add(group) + group_role = orm.Role.find(app.db, 'group-role') + group.roles.append(group_role) + user.groups.append(group) + app.db.commit() + if rolename: + body = json.dumps({'roles': [rolename]}) + else: + body = '' + # request a new token + r = await api_request( + app, 'users/user/tokens', method='post', headers=headers, data=body + ) + assert r.status_code == status + if status != 200: + return + # check the new-token reply for roles + reply = r.json() + assert 'token' in reply + assert reply['user'] == user.name + if not rolename: + assert reply['roles'] == ['token'] + else: + assert reply['roles'] == [rolename] + token_id = reply['id'] + + # delete the token + r = await api_request(app, 'users/user/tokens', token_id, method='delete') + assert r.status_code == 204 + # verify deletion + r = await api_request(app, 'users/user/tokens', token_id) + 
assert r.status_code == 404 + + +@mark.role +@mark.parametrize( + "kind, has_user_scopes", + [ + ('users', True), + ('services', False), + ], +) +async def test_self_expansion(app, kind, has_user_scopes): + Class = orm.get_class(kind) + orm_obj = Class(name=f'test_{kind}') + app.db.add(orm_obj) + app.db.commit() + test_role = orm.Role(name='test_role', scopes=['self']) + orm_obj.roles.append(test_role) + # test expansion of user/service scopes + scopes = get_scopes_for(orm_obj) + assert bool(scopes) == has_user_scopes + if kind == 'users': + for scope in scopes: + assert scope.endswith(f"!user={orm_obj.name}") + base_scope = scope.split("!", 1)[0] + assert base_scope in scope_definitions + + # test expansion of token scopes + orm_obj.new_api_token() + print(orm_obj.api_tokens[0]) + token_scopes = get_scopes_for(orm_obj.api_tokens[0]) + print(token_scopes) + assert bool(token_scopes) == has_user_scopes + app.db.delete(orm_obj) + app.db.delete(test_role) + + +@mark.role +@mark.parametrize( + "scope_list, kind, test_for_token", + [ + (['users:activity!user'], 'users', False), + (['users:activity!user', 'read:users'], 'users', False), + (['users:activity!user=otheruser', 'read:users'], 'users', False), + (['users:activity!user'], 'users', True), + (['users:activity!user=otheruser', 'groups'], 'users', True), + ], +) +async def test_user_filter_expansion(app, scope_list, kind, test_for_token): + Class = orm.get_class(kind) + orm_obj = Class(name=f'test_{kind}') + app.db.add(orm_obj) + app.db.commit() + + test_role = orm.Role(name='test_role', scopes=scope_list) + orm_obj.roles.append(test_role) + + if test_for_token: + token = orm_obj.new_api_token(roles=['test_role']) + orm_token = orm.APIToken.find(app.db, token) + expanded_scopes = get_scopes_for(orm_token) + else: + expanded_scopes = get_scopes_for(orm_obj) + + for scope in scope_list: + base, _, filter = scope.partition('!') + for ex_scope in expanded_scopes: + ex_base, ex__, ex_filter = ex_scope.partition('!') + # 
check that the filter has been expanded to include the username if '!user' filter + if scope in ex_scope and filter == 'user': + assert ex_filter == f'{filter}={orm_obj.name}' + # make sure the filter has been left unchanged if other filter provided + elif scope in ex_scope and '=' in filter: + assert ex_filter == filter + + app.db.delete(orm_obj) + app.db.delete(test_role) + + +@mark.role +@mark.parametrize( + "name, valid", + [ + ('abc', True), + ('group', True), + ("a-pretty-long-name-with-123", True), + ("0-abc", False), # starts with number + ("role-", False), # ends with - + ("has-Uppercase", False), # Uppercase + ("a" * 256, False), # too long + ("has space", False), # space is illegal + ], +) +async def test_valid_names(name, valid): + if valid: + assert roles._validate_role_name(name) + else: + with pytest.raises(ValueError): + roles._validate_role_name(name) + + +@mark.role +async def test_server_token_role(app): + user = add_user(app.db, app, name='test_user') + assert user.api_tokens == [] + spawner = user.spawner + spawner.cmd = ['jupyterhub-singleuser'] + await user.spawn() + + server_token = spawner.api_token + orm_server_token = orm.APIToken.find(app.db, server_token) + assert orm_server_token + + server_role = orm.Role.find(app.db, 'server') + assert set(server_role.scopes) == set(orm_server_token.scopes) + + assert orm_server_token.user.name == user.name + assert user.api_tokens == [orm_server_token] + + await user.stop() + + +@mark.role +@mark.parametrize( + "token_role, api_method, api_endpoint, for_user, response", + [ + ('server', 'post', 'activity', 'same_user', 200), + ('server', 'post', 'activity', 'other_user', 404), + ('server', 'get', 'users', 'same_user', 403), + ('token', 'post', 'activity', 'same_user', 200), + ('no_role', 'post', 'activity', 'same_user', 403), + ], +) +async def test_server_role_api_calls( + app, token_role, api_method, api_endpoint, for_user, response +): + user = add_user(app.db, app, name='test_user') + 
roles.grant_role(app.db, user, 'user') + app_log.debug(user.roles) + app_log.debug(get_scopes_for(user.orm_user)) + if token_role == 'no_role': + api_token = user.new_api_token(roles=[]) + else: + api_token = user.new_api_token(roles=[token_role]) + + if for_user == 'same_user': + username = user.name + else: + username = 'otheruser' + + if api_endpoint == 'activity': + path = f"users/{username}/activity" + data = json.dumps({"servers": {"": {"last_activity": utcnow().isoformat()}}}) + elif api_endpoint == 'users': + path = "users" + data = "" + + r = await api_request( + app, + path, + headers={"Authorization": f"token {api_token}"}, + data=data, + method=api_method, + ) + assert r.status_code == response + + +async def test_oauth_client_allowed_scopes(app): + allowed_scopes = ['read:users', 'read:groups'] + service = { + 'name': 'oas1', + 'api_token': 'some-token', + 'oauth_client_allowed_scopes': allowed_scopes, + } + app.services.append(service) + app.init_services() + app_service = app.services[0] + assert app_service['name'] == 'oas1' + assert set(app_service['oauth_client_allowed_scopes']) == set(allowed_scopes) + + +async def test_user_group_roles(app, create_temp_role): + user = add_user(app.db, app, name='jack') + another_user = add_user(app.db, app, name='jill') + + group = orm.Group.find(app.db, name='A') + if not group: + group = orm.Group(name='A') + app.db.add(group) + app.db.commit() + + if group not in user.groups: + user.groups.append(group) + app.db.commit() + + if group not in another_user.groups: + another_user.groups.append(group) + app.db.commit() + + group_role = orm.Role.find(app.db, 'student-a') + if not group_role: + create_temp_role(['read:groups!group=A', 'list:groups!group=A'], 'student-a') + roles.grant_role(app.db, group, rolename='student-a') + group_role = orm.Role.find(app.db, 'student-a') + + # repeat check to ensure group roles don't get added to the user at all + # regression test for #3472 + roles_before = list(user.roles) + 
for i in range(3): + get_scopes_for(user.orm_user) + user_roles = list(user.roles) + assert user_roles == roles_before + + # jack's API token + token = user.new_api_token() + + headers = {'Authorization': 'token %s' % token} + r = await api_request(app, f'users/{user.name}', method='get', headers=headers) + assert r.status_code == 200 + r.raise_for_status() + reply = r.json() + + print(reply) + + assert reply['name'] == 'jack' + assert len(reply['roles']) == 1 + assert group_role.name not in reply['roles'] + + headers = {'Authorization': 'token %s' % token} + r = await api_request(app, 'groups', method='get', headers=headers) + assert r.status_code == 200 + r.raise_for_status() + reply = r.json() + + print(reply) + assert len(reply) == 1 + assert reply[0]['name'] == 'A' + + headers = {'Authorization': 'token %s' % token} + r = await api_request(app, f'users/{user.name}', method='get', headers=headers) + assert r.status_code == 200 + r.raise_for_status() + reply = r.json() + + print(reply) + + assert reply['name'] == 'jack' + assert len(reply['roles']) == 1 + assert group_role.name not in reply['roles'] + + +async def test_config_role_list(): + roles_to_load = [ + { + 'name': 'elephant', + 'description': 'pacing about', + 'scopes': ['read:hub'], + }, + { + 'name': 'tiger', + 'description': 'pouncing stuff', + 'scopes': ['shutdown'], + }, + ] + hub = MockHub(load_roles=roles_to_load) + hub.init_db() + hub.authenticator.admin_users = ['admin'] + await hub.init_role_creation() + for role_conf in roles_to_load: + assert orm.Role.find(hub.db, name=role_conf['name']) + # Now remove elephant from config and see if it is removed from database + roles_to_load.pop(0) + hub.load_roles = roles_to_load + await hub.init_role_creation() + assert orm.Role.find(hub.db, name='tiger') + assert not orm.Role.find(hub.db, name='elephant') + + +async def test_config_role_users(): + role_name = 'painter' + user_name = 'benny' + user_names = ['agnetha', 'bjorn', 'anni-frid', user_name] + 
roles_to_load = [ + { + 'name': role_name, + 'description': 'painting with colors', + 'scopes': ['users', 'groups'], + 'users': user_names, + }, + ] + hub = MockHub(load_roles=roles_to_load) + hub.init_db() + hub.authenticator.admin_users = ['admin'] + hub.authenticator.allowed_users = user_names + await hub.init_role_creation() + await hub.init_users() + await hub.init_role_assignment() + user = orm.User.find(hub.db, name=user_name) + role = orm.Role.find(hub.db, name=role_name) + assert role in user.roles + # Now reload and see if user is removed from role list + roles_to_load[0]['users'].remove(user_name) + hub.load_roles = roles_to_load + await hub.init_role_creation() + await hub.init_users() + await hub.init_role_assignment() + user = orm.User.find(hub.db, name=user_name) + role = orm.Role.find(hub.db, name=role_name) + assert role not in user.roles + + +async def test_duplicate_role_users(): + role_name = 'painter' + user_name = 'benny' + user_names = ['agnetha', 'bjorn', 'anni-frid', user_name] + roles_to_load = [ + { + 'name': role_name, + 'description': 'painting with colors', + 'scopes': ['users', 'groups'], + 'users': user_names, + }, + { + 'name': role_name, + 'description': 'painting with colors', + 'scopes': ['users', 'groups'], + 'users': user_names, + }, + ] + hub = MockHub(load_roles=roles_to_load) + hub.init_db() + with pytest.raises(ValueError): + await hub.init_role_creation() + + +async def test_admin_role_and_flag(): + admin_role_spec = [ + { + 'name': 'admin', + 'users': ['eddy'], + } + ] + hub = MockHub(load_roles=admin_role_spec) + hub.init_db() + hub.authenticator.admin_users = ['admin'] + hub.authenticator.allowed_users = ['eddy'] + await hub.init_role_creation() + await hub.init_users() + await hub.init_role_assignment() + admin_role = orm.Role.find(hub.db, name='admin') + for user_name in ['eddy', 'admin']: + user = orm.User.find(hub.db, name=user_name) + assert user.admin + assert admin_role in user.roles + 
admin_role_spec[0]['users'].remove('eddy') + hub.load_roles = admin_role_spec + await hub.init_users() + await hub.init_role_assignment() + user = orm.User.find(hub.db, name='eddy') + assert not user.admin + assert admin_role not in user.roles + + +async def test_custom_role_reset(): + user_role_spec = [ + { + 'name': 'user', + 'scopes': ['self', 'shutdown'], + 'users': ['eddy'], + } + ] + hub = MockHub(load_roles=user_role_spec) + hub.init_db() + hub.authenticator.allowed_users = ['eddy'] + await hub.init_role_creation() + await hub.init_users() + await hub.init_role_assignment() + user_role = orm.Role.find(hub.db, name='user') + user = orm.User.find(hub.db, name='eddy') + assert user_role in user.roles + assert 'shutdown' in user_role.scopes + hub.load_roles = [] + await hub.init_role_creation() + await hub.init_users() + await hub.init_role_assignment() + user_role = orm.Role.find(hub.db, name='user') + user = orm.User.find(hub.db, name='eddy') + assert user_role in user.roles + assert 'shutdown' not in user_role.scopes + + +async def test_removal_config_to_db(): + role_spec = [ + { + 'name': 'user', + 'scopes': ['self', 'shutdown'], + }, + { + 'name': 'wizard', + 'scopes': ['self', 'read:groups'], + }, + ] + hub = MockHub(load_roles=role_spec) + hub.init_db() + await hub.init_role_creation() + assert orm.Role.find(hub.db, 'user') + assert orm.Role.find(hub.db, 'wizard') + hub.load_roles = [] + await hub.init_role_creation() + assert orm.Role.find(hub.db, 'user') + assert not orm.Role.find(hub.db, 'wizard') + + +async def test_no_admin_role_change(): + role_spec = [{'name': 'admin', 'scopes': ['shutdown']}] + hub = MockHub(load_roles=role_spec) + hub.init_db() + with pytest.raises(ValueError): + await hub.init_role_creation() + + +@pytest.mark.parametrize( + "in_db, role_users, allowed_users, expected_members", + [ + # users in the db, not specified in custom user role + # no change to membership + (["alpha", "beta"], None, None, ["alpha", "beta"]), + # 
allowed_users is additive, not strict + (["alpha", "beta"], None, {"gamma"}, ["alpha", "beta", "gamma"]), + # explicit empty revokes all assignments + (["alpha", "beta"], [], None, []), + # explicit value is respected exactly + (["alpha", "beta"], ["alpha", "gamma"], None, ["alpha", "gamma"]), + ], +) +async def test_user_role_from_config( + in_db, role_users, allowed_users, expected_members +): + role_spec = { + 'name': 'user', + 'scopes': ['self', 'shutdown'], + } + if role_users is not None: + role_spec['users'] = role_users + hub = MockHub(load_roles=[role_spec]) + hub.init_db() + db = hub.db + hub.authenticator.admin_users = set() + if allowed_users: + hub.authenticator.allowed_users = allowed_users + await hub.init_role_creation() + + +async def test_user_config_creates_default_role(): + role_spec = [ + { + 'name': 'new-role', + 'scopes': ['read:users'], + 'users': ['not-yet-created-user'], + } + ] + user_names = [] + hub = MockHub(load_roles=role_spec) + hub.init_db() + hub.authenticator.allowed_users = user_names + await hub.init_role_creation() + await hub.init_users() + await hub.init_role_assignment() + user_role = orm.Role.find(hub.db, 'user') + new_role = orm.Role.find(hub.db, 'new-role') + assert orm.User.find(hub.db, 'not-yet-created-user') in new_role.users + assert orm.User.find(hub.db, 'not-yet-created-user') in user_role.users + + +async def test_admin_role_respects_config(): + role_spec = [ + { + 'name': 'admin', + } + ] + admin_users = ['eddy', 'carol'] + hub = MockHub(load_roles=role_spec) + hub.init_db() + hub.authenticator.admin_users = admin_users + await hub.init_role_creation() + await hub.init_users() + await hub.init_role_assignment() + admin_role = orm.Role.find(hub.db, 'admin') + for user_name in admin_users: + user = orm.User.find(hub.db, user_name) + assert user in admin_role.users + + +@pytest.mark.parametrize( + "in_db, role_users, admin_users, expected_members", + [ + # users in the db, not specified in custom user role + # no 
change to membership + (["alpha", "beta"], None, None, ["alpha", "beta"]), + # admin_users is additive, not strict + (["alpha", "beta"], None, {"gamma"}, ["alpha", "beta", "gamma"]), + # explicit empty revokes all assignments + (["alpha", "beta"], [], None, []), + # explicit value is respected exactly + (["alpha", "beta"], ["alpha", "gamma"], None, ["alpha", "gamma"]), + ], +) +async def test_admin_role_membership(in_db, role_users, admin_users, expected_members): + + load_roles = [] + if role_users is not None: + load_roles.append({"name": "admin", "users": role_users}) + if not admin_users: + admin_users = set() + hub = MockHub(load_roles=load_roles, db_url="sqlite:///:memory:") + hub.init_db() + await hub.init_role_creation() + db = hub.db + hub.authenticator.admin_users = admin_users + # add in_db users to the database + # this is the 'before' state of who had the role before startup + for username in in_db or []: + user = orm.User(name=username) + db.add(user) + db.commit() + roles.grant_role(db, user, "admin") + db.commit() + await hub.init_users() + await hub.init_role_assignment() + admin_role = orm.Role.find(db, 'admin') + role_members = sorted(user.name for user in admin_role.users) + assert role_members == expected_members + + +async def test_no_default_service_role(): + services = [ + { + 'name': 'minesweeper', + 'api_token': 'some-token', + } + ] + hub = MockHub(services=services) + await hub.initialize() + service = orm.Service.find(hub.db, 'minesweeper') + assert not service.roles + + +async def test_hub_upgrade_detection(tmpdir): + db_url = f"sqlite:///{tmpdir.join('jupyterhub.sqlite')}" + os.environ['JUPYTERHUB_TEST_DB_URL'] = db_url + # Create hub with users and tokens + hub = MockHub(db_url=db_url) + await hub.initialize() + user_names = ['patricia', 'quentin'] + user_role = orm.Role.find(hub.db, 'user') + for name in user_names: + user = add_user(hub.db, name=name) + user.new_api_token() + assert user_role in user.roles + for role in 
hub.db.query(orm.Role): + hub.db.delete(role) + hub.db.commit() + # Restart hub in emulated upgrade mode: default roles for all entities + hub.test_clean_db = False + await hub.initialize() + assert getattr(hub, '_rbac_upgrade', False) + user_role = orm.Role.find(hub.db, 'user') + token_role = orm.Role.find(hub.db, 'token') + for name in user_names: + user = orm.User.find(hub.db, name) + assert user_role in user.roles + assert set(user.api_tokens[0].scopes) == set(token_role.scopes) + # Strip all roles and see if it sticks + user_role.users = [] + token_role.tokens = [] + hub.db.commit() + + hub.init_db() + hub.init_hub() + await hub.init_role_creation() + await hub.init_users() + hub.authenticator.allowed_users = ['patricia'] + await hub.init_api_tokens() + await hub.init_role_assignment() + assert not getattr(hub, '_rbac_upgrade', False) + user_role = orm.Role.find(hub.db, 'user') + token_role = orm.Role.find(hub.db, 'token') + allowed_user = orm.User.find(hub.db, 'patricia') + rem_user = orm.User.find(hub.db, 'quentin') + assert user_role in allowed_user.roles + assert user_role not in rem_user.roles + assert token_role not in rem_user.roles + + +async def test_login_default_role(app, username): + cookies = await app.login_user(username) + user = app.users[username] + # assert login new user gets 'user' role + assert [role.name for role in user.roles] == ["user"] + + # clear roles, keep user + user.roles = [] + app.db.commit() + + # login *again*; user exists, + # login should always trigger "user" role assignment + cookies = await app.login_user(username) + user = app.users[username] + assert [role.name for role in user.roles] == ["user"] diff --git a/jupyterhub/tests/test_scopes.py b/jupyterhub/tests/test_scopes.py new file mode 100644 index 00000000..c4792472 --- /dev/null +++ b/jupyterhub/tests/test_scopes.py @@ -0,0 +1,1295 @@ +"""Test scopes for API handlers""" +from operator import itemgetter +from unittest import mock + +import pytest +from pytest import 
mark +from tornado import web +from tornado.httputil import HTTPServerRequest + +from .. import orm, roles, scopes +from .._memoize import FrozenDict +from ..handlers import BaseHandler +from ..scopes import ( + Scope, + _check_scope_access, + _expand_self_scope, + _intersect_expanded_scopes, + _resolve_requested_scopes, + expand_scopes, + get_scopes_for, + identify_scopes, + needs_scope, + parse_scopes, +) +from .utils import add_user, api_request, auth_header + + +def get_handler_with_scopes(scopes): + handler = mock.Mock(spec=BaseHandler) + handler.parsed_scopes = parse_scopes(scopes) + return handler + + +def test_scope_constructor(): + user1 = 'george' + user2 = 'michael' + scope_list = [ + 'users', + f'read:users!user={user1}', + f'read:users!user={user2}', + ] + parsed_scopes = parse_scopes(scope_list) + assert isinstance(parsed_scopes, FrozenDict) + + assert 'read:users' in parsed_scopes + assert parsed_scopes['users'] + assert set(parsed_scopes['read:users']['user']) == {user1, user2} + + +def test_scope_precendence(): + scope_list = ['read:users!user=maeby', 'read:users'] + parsed_scopes = parse_scopes(scope_list) + assert parsed_scopes['read:users'] == Scope.ALL + + +def test_scope_check_present(): + handler = get_handler_with_scopes(['read:users']) + assert _check_scope_access(handler, 'read:users') + assert _check_scope_access(handler, 'read:users', user='maeby') + + +def test_scope_check_not_present(): + handler = get_handler_with_scopes(['read:users!user=maeby']) + assert _check_scope_access(handler, 'read:users') + with pytest.raises(web.HTTPError): + _check_scope_access(handler, 'read:users', user='gob') + with pytest.raises(web.HTTPError): + _check_scope_access(handler, 'read:users', user='gob', server='server') + + +def test_scope_filters(): + handler = get_handler_with_scopes( + ['read:users', 'read:users!group=bluths', 'read:users!user=maeby'] + ) + assert _check_scope_access(handler, 'read:users', group='bluth') + assert 
_check_scope_access(handler, 'read:users', user='maeby') + + +def test_scope_multiple_filters(): + handler = get_handler_with_scopes(['read:users!user=george_michael']) + assert _check_scope_access( + handler, 'read:users', user='george_michael', group='bluths' + ) + + +def test_scope_parse_server_name(): + handler = get_handler_with_scopes( + ['servers!server=maeby/server1', 'read:users!user=maeby'] + ) + assert _check_scope_access(handler, 'servers', user='maeby', server='server1') + + +class MockAPIHandler: + def __init__(self): + self.expanded_scopes = {'users'} + self.parsed_scopes = {} + self.request = mock.Mock(spec=HTTPServerRequest) + self.request.path = '/path' + + def set_scopes(self, *scopes): + self.expanded_scopes = set(scopes) + self.parsed_scopes = parse_scopes(self.expanded_scopes) + + @needs_scope('users') + def user_thing(self, user_name): + return True + + @needs_scope('servers') + def server_thing(self, user_name, server_name): + return True + + @needs_scope('read:groups') + def group_thing(self, group_name): + return True + + @needs_scope('read:services') + def service_thing(self, service_name): + return True + + @needs_scope('users') + def other_thing(self, non_filter_argument): + # Rely on inner vertical filtering + return True + + @needs_scope('users') + @needs_scope('read:services') + def secret_thing(self): + return True + + +@pytest.fixture +def mock_handler(): + obj = MockAPIHandler() + return obj + + +@mark.parametrize( + "scopes, method, arguments, is_allowed", + [ + (['users'], 'user_thing', ('user',), True), + (['users'], 'user_thing', ('michael',), True), + ([''], 'user_thing', ('michael',), False), + (['read:users'], 'user_thing', ('gob',), False), + (['read:users'], 'user_thing', ('michael',), False), + (['users!user=george'], 'user_thing', ('george',), True), + (['users!user=george'], 'user_thing', ('fake_user',), False), + (['users!user=george'], 'user_thing', ('oscar',), False), + (['users!user=george', 'users!user=oscar'], 
'user_thing', ('oscar',), True), + (['servers'], 'server_thing', ('user1', 'server_1'), True), + (['servers'], 'server_thing', ('user1', ''), True), + (['servers'], 'server_thing', ('user1', None), True), + ( + ['servers!server=maeby/bluth'], + 'server_thing', + ('maeby', 'bluth'), + True, + ), + (['servers!server=maeby/bluth'], 'server_thing', ('gob', 'bluth'), False), + ( + ['servers!server=maeby/bluth'], + 'server_thing', + ('maybe', 'bluth2'), + False, + ), + (['read:services'], 'service_thing', ('service1',), True), + ( + ['users!user=george', 'read:groups!group=bluths'], + 'group_thing', + ('bluths',), + True, + ), + ( + ['users!user=george', 'read:groups!group=bluths'], + 'group_thing', + ('george',), + False, + ), + ( + ['groups!group=george', 'read:groups!group=bluths'], + 'group_thing', + ('george',), + False, + ), + (['users'], 'other_thing', ('gob',), True), + (['read:users'], 'other_thing', ('gob',), False), + (['users!user=gob'], 'other_thing', ('gob',), True), + (['users!user=gob'], 'other_thing', ('maeby',), True), + ], +) +def test_scope_method_access(mock_handler, scopes, method, arguments, is_allowed): + mock_handler.current_user = mock.Mock(name=arguments[0]) + mock_handler.set_scopes(*scopes) + api_call = getattr(mock_handler, method) + if is_allowed: + assert api_call(*arguments) + else: + with pytest.raises(web.HTTPError): + api_call(*arguments) + + +def test_double_scoped_method_succeeds(mock_handler): + mock_handler.current_user = mock.Mock(name='lucille') + mock_handler.set_scopes('users', 'read:services') + mock_handler.parsed_scopes = parse_scopes(mock_handler.expanded_scopes) + assert mock_handler.secret_thing() + + +def test_double_scoped_method_denials(mock_handler): + mock_handler.current_user = mock.Mock(name='lucille2') + mock_handler.set_scopes('users', 'read:groups') + with pytest.raises(web.HTTPError): + mock_handler.secret_thing() + + +@mark.parametrize( + "user_name, in_group, status_code", + [ + ('martha', False, 200), + 
('michael', True, 200), + ('gob', True, 200), + ('tobias', False, 404), + ('ann', False, 404), + ], +) +async def test_expand_groups(app, user_name, in_group, status_code): + test_role = { + 'name': 'test', + 'description': '', + 'users': [user_name], + 'scopes': [ + 'read:users!user=martha', + 'read:users!group=bluth', + 'read:groups', + ], + } + roles.create_role(app.db, test_role) + user = add_user(app.db, name=user_name) + group_name = 'bluth' + group = orm.Group.find(app.db, name=group_name) + if not group: + group = orm.Group(name=group_name) + app.db.add(group) + if in_group and user not in group.users: + group.users.append(user) + roles.update_roles(app.db, user, roles=['test']) + roles.strip_role(app.db, user, 'user') + app.db.commit() + r = await api_request( + app, 'users', user_name, headers=auth_header(app.db, user_name) + ) + assert r.status_code == status_code + app.db.delete(group) + app.db.commit() + + +async def test_by_fake_user(app): + user_name = 'shade' + user = add_user(app.db, name=user_name) + auth_ = auth_header(app.db, user_name) + app.users.delete(user) + app.db.commit() + r = await api_request(app, 'users', headers=auth_) + assert r.status_code == 403 + + +err_message = "No access to resources or resources not found" + + +async def test_request_fake_user(app, create_user_with_scopes): + fake_user = 'annyong' + user = create_user_with_scopes('read:users!group=stuff') + r = await api_request( + app, 'users', fake_user, headers=auth_header(app.db, user.name) + ) + assert r.status_code == 404 + # Consistency between no user and user not accessible + assert r.json()['message'] == err_message + + +async def test_refuse_exceeding_token_permissions( + app, create_user_with_scopes, create_temp_role +): + user = create_user_with_scopes('self') + user.new_api_token() + with pytest.raises(ValueError): + user.api_tokens[0].update_scopes(["admin:users"]) + + +async def test_exceeding_user_permissions( + app, + create_user_with_scopes, +): + user = 
create_user_with_scopes('list:users', 'read:users:groups') + api_token = user.new_api_token() + orm_api_token = orm.APIToken.find(app.db, token=api_token) + # store scopes user does not have + orm_api_token.scopes = orm_api_token.scopes + ['list:users', 'read:users'] + headers = {'Authorization': 'token %s' % api_token} + r = await api_request(app, 'users', headers=headers) + assert r.status_code == 200 + keys = {key for user in r.json() for key in user.keys()} + assert 'groups' in keys + assert 'last_activity' not in keys + + +async def test_user_service_separation(app, mockservice_url, create_temp_role): + name = mockservice_url.name + user = add_user(app.db, name=name) + + create_temp_role(['read:users', 'list:users'], 'reader_role') + create_temp_role(['read:users:groups', 'list:users'], 'subreader_role') + roles.update_roles(app.db, user, roles=['subreader_role']) + roles.update_roles(app.db, mockservice_url.orm, roles=['reader_role']) + user.roles.remove(orm.Role.find(app.db, name='user')) + api_token = user.new_api_token() + headers = {'Authorization': 'token %s' % api_token} + r = await api_request(app, 'users', headers=headers) + assert r.status_code == 200 + keys = {key for user in r.json() for key in user.keys()} + assert 'groups' in keys + assert 'last_activity' not in keys + + +async def test_request_user_outside_group(app, create_user_with_scopes): + outside_user = 'hello' + user = create_user_with_scopes('read:users!group=stuff') + add_user(app.db, name=outside_user) + r = await api_request( + app, 'users', outside_user, headers=auth_header(app.db, user.name) + ) + assert r.status_code == 404 + # Consistency between no user and user not accessible + assert r.json()['message'] == err_message + + +async def test_user_filter(app, create_user_with_scopes): + name_in_scope = {'lindsay', 'oscar', 'gob'} + user = create_user_with_scopes( + *(f'list:users!user={name}' for name in name_in_scope) + ) + outside_scope = {'maeby', 'marta'} + group_name = 'bluth' 
+ group = orm.Group.find(app.db, name=group_name) + if not group: + group = orm.Group(name=group_name) + app.db.add(group) + for name in name_in_scope | outside_scope: + group_user = add_user(app.db, name=name) + if name not in group.users: + group.users.append(group_user) + app.db.commit() + r = await api_request(app, 'users', headers=auth_header(app.db, user.name)) + assert r.status_code == 200 + result_names = {user['name'] for user in r.json()} + assert result_names == name_in_scope + app.db.delete(group) + app.db.commit() + + +async def test_service_filter(app, create_user_with_scopes): + services = [ + {'name': 'cull_idle', 'api_token': 'some-token'}, + {'name': 'user_service', 'api_token': 'some-other-token'}, + ] + for service in services: + app.services.append(service) + app.init_services() + user = create_user_with_scopes('list:services!service=cull_idle') + r = await api_request(app, 'services', headers=auth_header(app.db, user.name)) + assert r.status_code == 200 + service_names = set(r.json().keys()) + assert service_names == {'cull_idle'} + + +async def test_user_filter_with_group(app, create_user_with_scopes): + group_name = 'sitwell' + user1 = create_user_with_scopes(f'list:users!group={group_name}') + user2 = create_user_with_scopes('self') + external_user = create_user_with_scopes('self') + name_set = {user1.name, user2.name} + group = orm.Group.find(app.db, name=group_name) + if not group: + group = orm.Group(name=group_name) + app.db.add(group) + for user in {user1, user2}: + group.users.append(user) + app.db.commit() + + r = await api_request(app, 'users', headers=auth_header(app.db, user1.name)) + assert r.status_code == 200 + result_names = {user['name'] for user in r.json()} + assert result_names == name_set + assert external_user.name not in result_names + app.db.delete(group) + app.db.commit() + + +async def test_group_scope_filter(app, create_user_with_scopes): + in_groups = {'sitwell', 'bluth'} + out_groups = {'austero'} + user = 
create_user_with_scopes( + *(f'list:groups!group={group}' for group in in_groups) + ) + for group_name in in_groups | out_groups: + group = orm.Group.find(app.db, name=group_name) + if not group: + group = orm.Group(name=group_name) + app.db.add(group) + app.db.commit() + r = await api_request(app, 'groups', headers=auth_header(app.db, user.name)) + assert r.status_code == 200 + result_names = {user['name'] for user in r.json()} + assert result_names == in_groups + for group_name in in_groups | out_groups: + group = orm.Group.find(app.db, name=group_name) + app.db.delete(group) + app.db.commit() + + +async def test_vertical_filter(app, create_user_with_scopes): + user = create_user_with_scopes('list:users') + r = await api_request(app, 'users', headers=auth_header(app.db, user.name)) + assert r.status_code == 200 + allowed_keys = {'name', 'kind', 'admin'} + assert {key for user in r.json() for key in user.keys()} == allowed_keys + + +async def test_stacked_vertical_filter(app, create_user_with_scopes): + user = create_user_with_scopes( + 'list:users', 'read:users:activity', 'read:users:groups' + ) + r = await api_request(app, 'users', headers=auth_header(app.db, user.name)) + assert r.status_code == 200 + allowed_keys = {'admin', 'name', 'kind', 'groups', 'last_activity'} + for user in r.json(): + result_model = set(user) + assert result_model == allowed_keys + + +async def test_cross_filter(app, create_user_with_scopes): + user = create_user_with_scopes('read:users:activity', 'self', 'list:users') + new_users = {'britta', 'jeff', 'annie'} + for new_user_name in new_users: + add_user(app.db, name=new_user_name) + app.db.commit() + r = await api_request(app, 'users', headers=auth_header(app.db, user.name)) + assert r.status_code == 200 + restricted_keys = {'admin', 'name', 'kind', 'last_activity'} + key_in_full_model = 'created' + for model_user in r.json(): + if model_user['name'] == user.name: + assert key_in_full_model in model_user + else: + assert 
set(model_user.keys()) == restricted_keys + + +@mark.parametrize( + "kind, has_user_scopes", + [ + ('users', True), + ('services', False), + ], +) +async def test_metascope_self_expansion( + app, kind, has_user_scopes, create_user_with_scopes, create_service_with_scopes +): + if kind == 'users': + orm_obj = create_user_with_scopes('self').orm_user + else: + orm_obj = create_service_with_scopes('self') + # test expansion of user/service scopes + scopes = get_scopes_for(orm_obj) + assert isinstance(scopes, frozenset) + assert bool(scopes) == has_user_scopes + + # test expansion of token scopes + orm_obj.new_api_token() + token_scopes = get_scopes_for(orm_obj.api_tokens[0]) + assert bool(token_scopes) == has_user_scopes + + +async def test_metascope_inherit_expansion(app, create_user_with_scopes): + user = create_user_with_scopes('self') + user.new_api_token(scopes=["inherit"]) + token = user.api_tokens[0] + # Check 'inherit' expansion + token_scope_set = get_scopes_for(token) + user_scope_set = get_scopes_for(user) + assert user_scope_set == token_scope_set + + # Check no roles means no permissions + token.scopes.clear() + app.db.commit() + token_scope_set = get_scopes_for(token) + assert isinstance(token_scope_set, frozenset) + + assert token_scope_set.issubset(identify_scopes(user.orm_user)) + + +@mark.parametrize( + "scopes, can_stop ,num_servers, keys_in, keys_out", + [ + (['read:servers!user=almond'], False, 2, {'name'}, {'state'}), + (['admin:users', 'read:users'], False, 0, set(), set()), + ( + ['read:servers!group=nuts', 'servers'], + True, + 2, + {'name'}, + {'state'}, + ), + ( + ['admin:server_state', 'read:servers'], + False, + 2, + {'name', 'state'}, + set(), + ), + ( + [ + 'read:servers!server=almond/bianca', + 'admin:server_state!server=almond/bianca', + ], + False, + 1, + {'name', 'state'}, + set(), + ), + ], +) +async def test_server_state_access( + app, + create_user_with_scopes, + create_service_with_scopes, + scopes, + can_stop, + num_servers, + 
keys_in, + keys_out, +): + with mock.patch.dict( + app.tornado_settings, + {'allow_named_servers': True, 'named_server_limit_per_user': 2}, + ): + user = create_user_with_scopes('self', name='almond') + group_name = 'nuts' + group = orm.Group.find(app.db, name=group_name) + if not group: + group = orm.Group(name=group_name) + app.db.add(group) + group.users.append(user) + app.db.commit() + server_names = ['bianca', 'terry'] + for server_name in server_names: + await api_request( + app, 'users', user.name, 'servers', server_name, method='post' + ) + service = create_service_with_scopes("read:users:name!user=", *scopes) + api_token = service.new_api_token() + headers = {'Authorization': 'token %s' % api_token} + + # can I get the user model? + r = await api_request(app, 'users', user.name, headers=headers) + can_read_user_model = num_servers > 1 or 'read:users' in scopes + if can_read_user_model: + r.raise_for_status() + user_model = r.json() + if num_servers > 1: + assert 'servers' in user_model + server_models = user_model['servers'] + assert len(server_models) == num_servers + for server, server_model in server_models.items(): + assert keys_in.issubset(server_model) + assert keys_out.isdisjoint(server_model) + else: + assert 'servers' not in user_model + else: + assert r.status_code == 404 + + r = await api_request( + app, + 'users', + user.name, + 'servers', + server_names[0], + method='delete', + headers=headers, + ) + if can_stop: + assert r.status_code == 204 + else: + assert r.status_code == 403 + app.db.delete(group) + app.db.commit() + + +@mark.parametrize( + "name, user_scopes, token_scopes, intersection_scopes", + [ + ( + 'no_filter', + ['users:activity'], + ['users:activity'], + {'users:activity', 'read:users:activity'}, + ), + ( + 'valid_own_filter', + ['read:users:activity'], + ['read:users:activity!user'], + {'read:users:activity!user=temp_user_1'}, + ), + ( + 'valid_other_filter', + ['read:users:activity'], + ['read:users:activity!user=otheruser'], + 
{'read:users:activity!user=otheruser'}, + ), + ( + 'no_filter_owner_filter', + ['read:users:activity!user'], + ['read:users:activity'], + {'read:users:activity!user=temp_user_1'}, + ), + ( + 'valid_own_filter', + ['read:users:activity!user'], + ['read:users:activity!user'], + {'read:users:activity!user=temp_user_1'}, + ), + ( + 'invalid_filter', + ['read:users:activity!user'], + ['read:users:activity!user=otheruser'], + set(), + ), + ( + 'subscopes_cross_filter', + ['users!user=x'], + ['read:users:name'], + {'read:users:name!user=x'}, + ), + ( + 'multiple_user_filter', + ['users!user=x', 'users!user=y'], + ['read:users:name!user=x'], + {'read:users:name!user=x'}, + ), + ( + 'no_intersection_group_user', + ['users!group=y'], + ['users!user=x'], + set(), + ), + ( + 'no_intersection_user_server', + ['servers!user=y'], + ['servers!server=x'], + set(), + ), + ( + 'users_and_groups_both', + ['users!group=x', 'users!user=y'], + ['read:users:name!group=x', 'read:users!user=y'], + { + 'read:users:name!group=x', + 'read:users!user=y', + 'read:users:name!user=y', + 'read:users:groups!user=y', + 'read:users:activity!user=y', + }, + ), + ( + 'users_and_groups_user_only', + ['users!group=x', 'users!user=y'], + ['read:users:name!group=z', 'read:users!user=y'], + { + 'read:users!user=y', + 'read:users:name!user=y', + 'read:users:groups!user=y', + 'read:users:activity!user=y', + }, + ), + ], +) +async def test_resolve_token_permissions( + app, + create_user_with_scopes, + create_temp_role, + name, + user_scopes, + token_scopes, + intersection_scopes, +): + orm_user = create_user_with_scopes(*user_scopes).orm_user + # ensure user has full permissions when token is created + # to create tokens with permissions exceeding their owner + roles.grant_role(app.db, orm_user, "admin") + create_temp_role(token_scopes, 'active-posting') + api_token = orm_user.new_api_token(roles=['active-posting']) + orm_api_token = orm.APIToken.find(app.db, token=api_token) + # drop admin so that filtering 
can be applied + roles.strip_role(app.db, orm_user, "admin") + + # get expanded !user filter scopes for check + user_scopes = get_scopes_for(orm_user) + token_scopes = get_scopes_for(orm_api_token) + + token_retained_scopes = get_scopes_for(orm_api_token) + + assert token_retained_scopes == intersection_scopes + + +@mark.parametrize( + "scopes, model_keys", + [ + ( + {'read:services'}, + { + 'command', + 'name', + 'kind', + 'info', + 'display', + 'pid', + 'admin', + 'prefix', + 'url', + }, + ), + ( + {'read:roles:services', 'read:services:name'}, + {'name', 'kind', 'roles', 'admin'}, + ), + ({'read:services:name'}, {'name', 'kind', 'admin'}), + ], +) +async def test_service_model_filtering( + app, scopes, model_keys, create_user_with_scopes, create_service_with_scopes +): + user = create_user_with_scopes(*scopes, name='teddy') + service = create_service_with_scopes() + r = await api_request( + app, 'services', service.name, headers=auth_header(app.db, user.name) + ) + assert r.status_code == 200 + assert model_keys == r.json().keys() + + +@mark.parametrize( + "scopes, model_keys", + [ + ( + {'read:groups'}, + { + 'name', + 'kind', + 'users', + }, + ), + ( + {'read:roles:groups', 'read:groups:name'}, + {'name', 'kind', 'roles'}, + ), + ({'read:groups:name'}, {'name', 'kind'}), + ], +) +async def test_group_model_filtering( + app, scopes, model_keys, create_user_with_scopes, create_service_with_scopes +): + user = create_user_with_scopes(*scopes, name='teddy') + group_name = 'baker_street' + group = orm.Group.find(app.db, name=group_name) + if not group: + group = orm.Group(name=group_name) + app.db.add(group) + app.db.commit() + r = await api_request( + app, 'groups', group_name, headers=auth_header(app.db, user.name) + ) + assert r.status_code == 200 + assert model_keys == r.json().keys() + app.db.delete(group) + app.db.commit() + + +async def test_roles_access(app, create_service_with_scopes, create_user_with_scopes): + user = add_user(app.db, name='miranda') + 
read_user = create_user_with_scopes('read:roles:users') + r = await api_request( + app, 'users', user.name, headers=auth_header(app.db, read_user.name) + ) + assert r.status_code == 200 + model_keys = {'kind', 'name', 'roles', 'admin'} + assert model_keys == r.json().keys() + + +@pytest.mark.parametrize( + "left, right, expected, should_warn", + [ + (set(), set(), set(), False), + (set(), {"users"}, set(), False), + # no warning if users and groups only on the same side + ( + {"users!user=x", "users!group=y"}, + set(), + set(), + False, + ), + # no warning if users are on both sizes + ( + {"users!user=x", "users!user=y", "users!group=y"}, + {"users!user=x"}, + {"users!user=x"}, + False, + ), + # no warning if users and groups are both defined + # on both sides + ( + {"users!user=x", "users!group=y"}, + {"users!user=x", "users!group=y", "users!user=z"}, + {"users!user=x", "users!group=y"}, + False, + ), + # warn if there's a user on one side and a group on the other + # which *may* intersect + ( + {"users!group=y", "users!user=z"}, + {"users!user=x"}, + set(), + True, + ), + # same for group->server + ( + {"users!group=y", "users!user=z"}, + {"users!server=x/y"}, + set(), + True, + ), + # this one actually shouldn't warn because server=x/y is under user=x, + # but we don't need to overcomplicate things just for a warning + ( + {"users!group=y", "users!user=x"}, + {"users!server=x/y"}, + {"users!server=x/y"}, + True, + ), + # resolves server under user, without warning + ( + {"read:servers!user=abc"}, + {"read:servers!server=abc/xyz"}, + {"read:servers!server=abc/xyz"}, + False, + ), + # user->server, no match + ( + {"read:servers!user=abc"}, + {"read:servers!server=abcd/xyz"}, + set(), + False, + ), + ], +) +def test_intersect_expanded_scopes(left, right, expected, should_warn, recwarn): + # run every test in both directions, to ensure symmetry of the inputs + for a, b in [(left, right), (right, left)]: + intersection = _intersect_expanded_scopes(set(left), 
set(right)) + assert intersection == set(expected) + + if should_warn: + assert len(recwarn) == 1 + else: + assert len(recwarn) == 0 + + +@pytest.mark.parametrize( + "left, right, expected, groups", + [ + ( + ["users!group=gx"], + ["users!user=ux"], + ["users!user=ux"], + {"gx": ["ux"]}, + ), + ( + ["read:users!group=gx"], + ["read:users!user=nosuchuser"], + [], + {}, + ), + ( + ["read:users!group=gx"], + ["read:users!server=nosuchuser/server"], + [], + {}, + ), + ( + ["read:users!group=gx"], + ["read:users!server=ux/server"], + ["read:users!server=ux/server"], + {"gx": ["ux"]}, + ), + ( + ["read:users!group=gx"], + ["read:users!server=ux/server", "read:users!user=uy"], + ["read:users!server=ux/server"], + {"gx": ["ux"], "gy": ["uy"]}, + ), + ( + ["read:users!group=gy"], + ["read:users!server=ux/server", "read:users!user=uy"], + ["read:users!user=uy"], + {"gx": ["ux"], "gy": ["uy"]}, + ), + ], +) +def test_intersect_groups(request, db, left, right, expected, groups): + if isinstance(left, str): + left = {left} + if isinstance(right, str): + right = {right} + + # if we have a db connection, we can actually resolve + created = [] + for groupname, members in groups.items(): + group = orm.Group.find(db, name=groupname) + if not group: + group = orm.Group(name=groupname) + db.add(group) + created.append(group) + db.commit() + for username in members: + user = orm.User.find(db, name=username) + if user is None: + user = orm.User(name=username) + db.add(user) + created.append(user) + user.groups.append(group) + db.commit() + + def _cleanup(): + for obj in created: + db.delete(obj) + db.commit() + + request.addfinalizer(_cleanup) + + # run every test in both directions, to ensure symmetry of the inputs + for a, b in [(left, right), (right, left)]: + intersection = _intersect_expanded_scopes(set(left), set(right), db) + assert intersection == set(expected) + + +@mark.user +@mark.parametrize( + "scopes, expected", + [ + ("list:users", ['in-1', 'in-2', 'out-1', 'out-2', 
'admin', 'user']), + ("read:users", 403), + ("list:users!server=irrelevant", 403), + ("list:users!user=nosuchuser", []), + ("list:users!group=nosuchgroup", []), + ("list:users!user=out-2", ['out-2']), + ("list:users!group=GROUP", ['in-1', 'in-2']), + ( + ["list:users!group=GROUP", "list:users!user=out-2"], + ['in-1', 'in-2', 'out-2'], + ), + ], +) +async def test_list_users_filter( + app, group, create_service_with_scopes, scopes, expected +): + + # create users: + for i in (1, 2): + user = add_user(app.db, app, name=f'in-{i}') + group.users.append(user) + add_user(app.db, app, name=f'out-{i}') + app.db.commit() + + if isinstance(scopes, str): + scopes = [scopes] + + # in-group are in the group + # out-group are not in the group + scopes = [s.replace("GROUP", group.name).replace("IN", "ingroup") for s in scopes] + + orm_service = create_service_with_scopes(*scopes) + token = orm_service.new_api_token() + r = await api_request(app, 'users', headers={"Authorization": f"token {token}"}) + if isinstance(expected, int): + assert r.status_code == expected + return + r.raise_for_status() + + expected_models = [ + { + 'name': name, + 'admin': name == 'admin', + 'kind': 'user', + } + for name in sorted(expected) + ] + assert sorted(r.json(), key=itemgetter('name')) == expected_models + + +@mark.group +@mark.parametrize( + "scopes, expected", + [ + ("list:groups", ['group1', 'group2', 'group3']), + ("read:groups", 403), + ("list:groups!user=irrelevant", 403), + ("list:groups!group=nosuchgroup", []), + ("list:groups!group=group1", ['group1']), + ( + ["list:groups!group=group1", "list:groups!group=group2"], + ['group1', 'group2'], + ), + ( + # prefix match shouldn't match! 
+ "list:groups!group=group", + [], + ), + ], +) +async def test_list_groups_filter( + request, app, create_service_with_scopes, scopes, expected +): + + # create groups: + groups = [] + for i in (1, 2, 3): + group = orm.Group(name=f'group{i}') + groups.append(group) + app.db.add(group) + app.db.commit() + + def cleanup_groups(): + for g in groups: + app.db.delete(g) + app.db.commit() + + request.addfinalizer(cleanup_groups) + + if isinstance(scopes, str): + scopes = [scopes] + + orm_service = create_service_with_scopes(*scopes) + token = orm_service.new_api_token() + r = await api_request(app, 'groups', headers={"Authorization": f"token {token}"}) + if isinstance(expected, int): + assert r.status_code == expected + return + r.raise_for_status() + + expected_models = [ + { + 'name': name, + 'kind': 'group', + } + for name in sorted(expected) + ] + assert sorted(r.json(), key=itemgetter('name')) == expected_models + + +@pytest.mark.parametrize( + "custom_scopes", + [ + {"custom:okay": {"description": "simple custom scope"}}, + { + "custom:parent": { + "description": "parent", + "subscopes": ["custom:child"], + }, + "custom:child": {"description": "child"}, + }, + { + "custom:extra": { + "description": "I have extra info", + "extra": "warn about me", + } + }, + ], +) +def test_custom_scopes(preserve_scopes, custom_scopes): + scopes.define_custom_scopes(custom_scopes) + for name, scope_def in custom_scopes.items(): + assert name in scopes.scope_definitions + assert scopes.scope_definitions[name] == scope_def + + # make sure describe works after registering custom scopes + scopes.describe_raw_scopes(list(custom_scopes.keys())) + + +@pytest.mark.parametrize( + "custom_scopes", + [ + { + "read:users": { + "description": "Can't override", + }, + }, + { + "custom:empty": {}, + }, + { + "notcustom:prefix": {"descroption": "bad prefix"}, + }, + { + "custom:!illegal": {"descroption": "bad character"}, + }, + { + "custom:badsubscope": { + "description": "non-custom subscope not 
allowed", + "subscopes": [ + "read:users", + ], + }, + }, + { + "custom:nosubscope": { + "description": "subscope not defined", + "subscopes": [ + "custom:undefined", + ], + }, + }, + { + "custom:badsubscope": { + "description": "subscope not a list", + "subscopes": "custom:notalist", + }, + "custom:notalist": { + "description": "the subscope", + }, + }, + ], +) +def test_custom_scopes_bad(preserve_scopes, custom_scopes): + with pytest.raises(ValueError): + scopes.define_custom_scopes(custom_scopes) + assert scopes.scope_definitions == preserve_scopes + + +async def test_user_filter_expansion(app, create_user_with_scopes): + scope_list = _expand_self_scope('ignored') + # turn !user=ignored into !user + # Mimic the role 'self' based on '!user' filter for tokens + scope_list = [scope.partition("=")[0] for scope in scope_list] + user = create_user_with_scopes('self') + user.new_api_token(scopes=scope_list) + user.new_api_token() + manual_scope_set = get_scopes_for(user.api_tokens[0]) + auto_scope_set = get_scopes_for(user.api_tokens[1]) + assert manual_scope_set == auto_scope_set + + +@pytest.mark.parametrize( + "scopes, expected", + [ + ("read:users:name!user", ["read:users:name!user={user}"]), + ( + "users:activity!user", + [ + "read:users:activity!user={user}", + "users:activity!user={user}", + ], + ), + ("self", ["*"]), + (["access:services", "access:services!service=x"], ["access:services"]), + ("access:services!service", ["access:services!service={service}"]), + ("access:servers!server", ["access:servers!server={server}"]), + ], +) +def test_expand_scopes(app, user, scopes, expected, mockservice_external): + if isinstance(scopes, str): + scopes = [scopes] + + db = app.db + service = mockservice_external + spawner_name = "salmon" + server_name = f"{user.name}/{spawner_name}" + if 'server' in str(scopes): + oauth_client = orm.OAuthClient() + db.add(oauth_client) + spawner = user.spawners[spawner_name] + spawner.orm_spawner.oauth_client = oauth_client + db.commit() 
+ assert oauth_client.spawner is spawner.orm_spawner + else: + oauth_client = service.oauth_client + assert oauth_client is not None + + def format_scopes(scopes): + return { + s.format(service=service.name, server=server_name, user=user.name) + for s in scopes + } + + scopes = format_scopes(scopes) + expected = format_scopes(expected) + + if "*" in expected: + expected.remove("*") + expected.update(_expand_self_scope(user.name)) + + expanded = expand_scopes(scopes, owner=user.orm_user, oauth_client=oauth_client) + assert isinstance(expanded, frozenset) + assert sorted(expanded) == sorted(expected) + + +@pytest.mark.parametrize( + "requested_scopes, have_scopes, expected_allowed, expected_disallowed", + [ + ( + ["read:users:name!user"], + ["read:users:name!user={user}"], + ["read:users:name!user"], + [], + ), + ( + ["read:servers!server"], + ["read:servers!user"], + ["read:servers!server"], + [], + ), + ( + ["read:servers!server={server}"], + ["read:servers"], + ["read:servers!server={server}"], + [], + ), + ( + ["admin:servers!server"], + ["read:servers"], + ["read:servers!server={server}"], + ["admin:servers!server"], + ), + ( + ["admin:servers", "read:users"], + ["read:users"], + ["read:users"], + ["admin:servers"], + ), + ], +) +def test_resolve_requested_scopes( + app, + user, + group, + requested_scopes, + have_scopes, + expected_allowed, + expected_disallowed, + mockservice_external, +): + if isinstance(requested_scopes, str): + requested_scopes = [requested_scopes] + + db = app.db + service = mockservice_external + spawner_name = "salmon" + server_name = f"{user.name}/{spawner_name}" + if '!server' in str(requested_scopes + have_scopes): + oauth_client = orm.OAuthClient() + db.add(oauth_client) + spawner = user.spawners[spawner_name] + spawner.orm_spawner.oauth_client = oauth_client + db.commit() + assert oauth_client.spawner is spawner.orm_spawner + else: + oauth_client = service.oauth_client + assert oauth_client is not None + + def format_scopes(scopes): 
+ return { + s.format(service=service.name, server=server_name, user=user.name) + for s in scopes + } + + requested_scopes = format_scopes(requested_scopes) + have_scopes = format_scopes(have_scopes) + expected_allowed = format_scopes(expected_allowed) + expected_disallowed = format_scopes(expected_disallowed) + + allowed, disallowed = _resolve_requested_scopes( + requested_scopes, + have_scopes, + user=user.orm_user, + client=oauth_client, + db=db, + ) + assert allowed == expected_allowed + assert disallowed == expected_disallowed diff --git a/jupyterhub/tests/test_services.py b/jupyterhub/tests/test_services.py index 248de1b1..b1e9fe9f 100644 --- a/jupyterhub/tests/test_services.py +++ b/jupyterhub/tests/test_services.py @@ -1,27 +1,20 @@ """Tests for services""" -import asyncio import os import sys -import time from binascii import hexlify -from contextlib import contextmanager from subprocess import Popen -from threading import Event -import pytest -import requests -from async_generator import async_generator from async_generator import asynccontextmanager -from async_generator import yield_ -from tornado import gen -from tornado.ioloop import IOLoop -from ..utils import maybe_future -from ..utils import random_port -from ..utils import url_path_join -from ..utils import wait_for_http_server +from ..utils import ( + exponential_backoff, + maybe_future, + random_port, + url_path_join, + wait_for_http_server, +) from .mocking import public_url -from .utils import async_requests +from .utils import async_requests, skip_if_ssl mockservice_path = os.path.dirname(os.path.abspath(__file__)) mockservice_py = os.path.join(mockservice_path, 'mockservice.py') @@ -29,7 +22,6 @@ mockservice_cmd = [sys.executable, mockservice_py] @asynccontextmanager -@async_generator async def external_service(app, name='mockservice'): env = { 'JUPYTERHUB_API_TOKEN': hexlify(os.urandom(5)), @@ -40,7 +32,7 @@ async def external_service(app, name='mockservice'): proc = Popen(mockservice_cmd, 
env=env) try: await wait_for_http_server(env['JUPYTERHUB_SERVICE_URL']) - await yield_(env) + yield env finally: proc.terminate() @@ -58,29 +50,31 @@ async def test_managed_service(mockservice): assert proc.poll() is not None # ensure Hub notices service is down and brings it back up: - for i in range(20): - if service.proc is not proc: - break - else: - await asyncio.sleep(0.2) + await exponential_backoff( + lambda: service.proc is not proc, + "Process was never replaced", + timeout=20, + ) assert service.proc.pid != first_pid assert service.proc.poll() is None +@skip_if_ssl async def test_proxy_service(app, mockservice_url): service = mockservice_url name = service.name await app.proxy.get_all_routes() url = public_url(app, service) + '/foo' r = await async_requests.get(url, allow_redirects=False) - path = '/services/{}/foo'.format(name) + path = f'/services/{name}/foo' r.raise_for_status() assert r.status_code == 200 assert r.text.endswith(path) +@skip_if_ssl async def test_external_service(app): name = 'external' async with external_service(app, name=name) as env: @@ -90,13 +84,22 @@ async def test_external_service(app): 'admin': True, 'url': env['JUPYTERHUB_SERVICE_URL'], 'api_token': env['JUPYTERHUB_API_TOKEN'], + 'oauth_roles': ['user'], } ] await maybe_future(app.init_services()) await app.init_api_tokens() await app.proxy.add_all_services(app._service_map) + await app.init_role_assignment() service = app._service_map[name] + assert service.oauth_available + assert service.oauth_client is not None + assert set(service.oauth_client.allowed_scopes) == { + "self", + f"access:services!service={name}", + } + api_token = service.orm.api_tokens[0] url = public_url(app, service) + '/api/users' r = await async_requests.get(url, allow_redirects=False) r.raise_for_status() @@ -107,3 +110,51 @@ async def test_external_service(app): assert len(resp) >= 1 assert isinstance(resp[0], dict) assert 'name' in resp[0] + + +async def 
test_external_services_without_api_token_set(app): + """ + This test was made to reproduce an error like this: + + ValueError: Tokens must be at least 8 characters, got '' + + The error had the following stack trace in 1.4.1: + + jupyterhub/app.py:2213: in init_api_tokens + await self._add_tokens(self.service_tokens, kind='service') + jupyterhub/app.py:2182: in _add_tokens + obj.new_api_token( + jupyterhub/orm.py:424: in new_api_token + return APIToken.new(token=token, service=self, **kwargs) + jupyterhub/orm.py:699: in new + cls.check_token(db, token) + + This test also make _add_tokens receive a token_dict that is buggy: + + {"": "external_2"} + + It turned out that whatever passes token_dict to _add_tokens failed to + ignore service's api_tokens that were None, and instead passes them as blank + strings. + + It turned out that init_api_tokens was passing self.service_tokens, and that + self.service_tokens had been populated with blank string tokens for external + services registered with JupyterHub. 
+ """ + name_1 = 'external_1' + name_2 = 'external_2' + async with external_service(app, name=name_1) as env_1, external_service( + app, name=name_2 + ) as env_2: + app.services = [ + { + 'name': name_1, + 'url': "http://irrelevant", + }, + { + 'name': name_2, + 'url': "http://irrelevant", + }, + ] + await maybe_future(app.init_services()) + await app.init_api_tokens() diff --git a/jupyterhub/tests/test_services_auth.py b/jupyterhub/tests/test_services_auth.py index de4d73e9..c558252a 100644 --- a/jupyterhub/tests/test_services_auth.py +++ b/jupyterhub/tests/test_services_auth.py @@ -1,41 +1,27 @@ """Tests for service authentication""" -import asyncio import copy -import json import os import sys from binascii import hexlify -from functools import partial -from queue import Queue -from threading import Thread from unittest import mock -from urllib.parse import urlparse +from urllib.parse import parse_qs, urlparse import pytest -import requests -import requests_mock +from bs4 import BeautifulSoup from pytest import raises -from tornado.httpserver import HTTPServer from tornado.httputil import url_concat -from tornado.ioloop import IOLoop -from tornado.web import Application -from tornado.web import authenticated -from tornado.web import HTTPError -from tornado.web import RequestHandler +from tornado.log import app_log -from .. import orm +from .. 
import orm, roles, scopes +from ..roles import roles_to_scopes from ..services.auth import _ExpiringDict -from ..services.auth import HubAuth -from ..services.auth import HubAuthenticated from ..utils import url_path_join -from .mocking import public_host from .mocking import public_url -from .test_api import add_user -from .utils import async_requests -from .utils import AsyncSession +from .utils import AsyncSession, async_requests # mock for sending monotonic counter way into the future monotonic_future = mock.patch('time.monotonic', lambda: sys.maxsize) +ssl_enabled = False def test_expiring_dict(): @@ -74,192 +60,29 @@ def test_expiring_dict(): assert cache.get('key', 'default') == 'cached value' -def test_hub_auth(): - auth = HubAuth(cookie_name='foo') - mock_model = {'name': 'onyxia'} - url = url_path_join(auth.api_url, "authorizations/cookie/foo/bar") - with requests_mock.Mocker() as m: - m.get(url, text=json.dumps(mock_model)) - user_model = auth.user_for_cookie('bar') - assert user_model == mock_model - # check cache - user_model = auth.user_for_cookie('bar') - assert user_model == mock_model - - with requests_mock.Mocker() as m: - m.get(url, status_code=404) - user_model = auth.user_for_cookie('bar', use_cache=False) - assert user_model is None - - # invalidate cache with timer - mock_model = {'name': 'willow'} - with monotonic_future, requests_mock.Mocker() as m: - m.get(url, text=json.dumps(mock_model)) - user_model = auth.user_for_cookie('bar') - assert user_model == mock_model - - with requests_mock.Mocker() as m: - m.get(url, status_code=500) - with raises(HTTPError) as exc_info: - user_model = auth.user_for_cookie('bar', use_cache=False) - assert exc_info.value.status_code == 502 - - with requests_mock.Mocker() as m: - m.get(url, status_code=400) - with raises(HTTPError) as exc_info: - user_model = auth.user_for_cookie('bar', use_cache=False) - assert exc_info.value.status_code == 500 - - -def test_hub_authenticated(request): - auth = 
HubAuth(cookie_name='jubal') - mock_model = {'name': 'jubalearly', 'groups': ['lions']} - cookie_url = url_path_join(auth.api_url, "authorizations/cookie", auth.cookie_name) - good_url = url_path_join(cookie_url, "early") - bad_url = url_path_join(cookie_url, "late") - - class TestHandler(HubAuthenticated, RequestHandler): - hub_auth = auth - - @authenticated - def get(self): - self.finish(self.get_current_user()) - - # start hub-authenticated service in a thread: - port = 50505 - q = Queue() - - def run(): - asyncio.set_event_loop(asyncio.new_event_loop()) - app = Application([('/*', TestHandler)], login_url=auth.login_url) - - http_server = HTTPServer(app) - http_server.listen(port) - loop = IOLoop.current() - loop.add_callback(lambda: q.put(loop)) - loop.start() - - t = Thread(target=run) - t.start() - - def finish_thread(): - loop.add_callback(loop.stop) - t.join(timeout=30) - assert not t.is_alive() - - request.addfinalizer(finish_thread) - - # wait for thread to start - loop = q.get(timeout=10) - - with requests_mock.Mocker(real_http=True) as m: - # no cookie - r = requests.get('http://127.0.0.1:%i' % port, allow_redirects=False) - r.raise_for_status() - assert r.status_code == 302 - assert auth.login_url in r.headers['Location'] - - # wrong cookie - m.get(bad_url, status_code=404) - r = requests.get( - 'http://127.0.0.1:%i' % port, - cookies={'jubal': 'late'}, - allow_redirects=False, - ) - r.raise_for_status() - assert r.status_code == 302 - assert auth.login_url in r.headers['Location'] - - # clear the cache because we are going to request - # the same url again with a different result - auth.cache.clear() - - # upstream 403 - m.get(bad_url, status_code=403) - r = requests.get( - 'http://127.0.0.1:%i' % port, - cookies={'jubal': 'late'}, - allow_redirects=False, - ) - assert r.status_code == 500 - - m.get(good_url, text=json.dumps(mock_model)) - - # no whitelist - r = requests.get( - 'http://127.0.0.1:%i' % port, - cookies={'jubal': 'early'}, - 
allow_redirects=False, - ) - r.raise_for_status() - assert r.status_code == 200 - - # pass whitelist - TestHandler.hub_users = {'jubalearly'} - r = requests.get( - 'http://127.0.0.1:%i' % port, - cookies={'jubal': 'early'}, - allow_redirects=False, - ) - r.raise_for_status() - assert r.status_code == 200 - - # no pass whitelist - TestHandler.hub_users = {'kaylee'} - r = requests.get( - 'http://127.0.0.1:%i' % port, - cookies={'jubal': 'early'}, - allow_redirects=False, - ) - assert r.status_code == 403 - - # pass group whitelist - TestHandler.hub_groups = {'lions'} - r = requests.get( - 'http://127.0.0.1:%i' % port, - cookies={'jubal': 'early'}, - allow_redirects=False, - ) - r.raise_for_status() - assert r.status_code == 200 - - # no pass group whitelist - TestHandler.hub_groups = {'tigers'} - r = requests.get( - 'http://127.0.0.1:%i' % port, - cookies={'jubal': 'early'}, - allow_redirects=False, - ) - assert r.status_code == 403 - - -async def test_hubauth_cookie(app, mockservice_url): - """Test HubAuthenticated service with user cookies""" - cookies = await app.login_user('badger') - r = await async_requests.get( - public_url(app, mockservice_url) + '/whoami/', cookies=cookies - ) - r.raise_for_status() - print(r.text) - reply = r.json() - sub_reply = {key: reply.get(key, 'missing') for key in ['name', 'admin']} - assert sub_reply == {'name': 'badger', 'admin': False} - - -async def test_hubauth_token(app, mockservice_url): +async def test_hubauth_token(app, mockservice_url, create_user_with_scopes): """Test HubAuthenticated service with user API tokens""" - u = add_user(app.db, name='river') + u = create_user_with_scopes("access:services") token = u.new_api_token() + no_access_token = u.new_api_token(roles=[]) app.db.commit() + # token without sufficient permission in Authorization header + r = await async_requests.get( + public_url(app, mockservice_url) + '/whoami/', + headers={'Authorization': f'token {no_access_token}'}, + ) + assert r.status_code == 403 + # 
token in Authorization header r = await async_requests.get( public_url(app, mockservice_url) + '/whoami/', - headers={'Authorization': 'token %s' % token}, + headers={'Authorization': f'token {token}'}, ) + r.raise_for_status() reply = r.json() sub_reply = {key: reply.get(key, 'missing') for key in ['name', 'admin']} - assert sub_reply == {'name': 'river', 'admin': False} + assert sub_reply == {'name': u.name, 'admin': False} # token in ?token parameter r = await async_requests.get( @@ -268,7 +91,7 @@ async def test_hubauth_token(app, mockservice_url): r.raise_for_status() reply = r.json() sub_reply = {key: reply.get(key, 'missing') for key in ['name', 'admin']} - assert sub_reply == {'name': 'river', 'admin': False} + assert sub_reply == {'name': u.name, 'admin': False} r = await async_requests.get( public_url(app, mockservice_url) + '/whoami/?token=no-such-token', @@ -281,34 +104,95 @@ async def test_hubauth_token(app, mockservice_url): assert path.endswith('/hub/login') -async def test_hubauth_service_token(app, mockservice_url): +@pytest.mark.parametrize( + "scopes, allowed", + [ + ( + [ + "access:services", + ], + True, + ), + ( + [ + "access:services!service=$service", + ], + True, + ), + ( + [ + "access:services!service=other-service", + ], + False, + ), + ( + [ + "access:servers!user=$service", + ], + False, + ), + ], +) +async def test_hubauth_service_token(request, app, mockservice_url, scopes, allowed): """Test HubAuthenticated service with service API tokens""" + scopes = [scope.replace('$service', mockservice_url.name) for scope in scopes] + token = hexlify(os.urandom(5)).decode('utf8') name = 'test-api-service' app.service_tokens[token] = name await app.init_api_tokens() + orm_service = app.db.query(orm.Service).filter_by(name=name).one() + role_name = "test-hubauth-service-token" + + roles.create_role( + app.db, + { + "name": role_name, + "description": "role for test", + "scopes": scopes, + }, + ) + request.addfinalizer(lambda: 
roles.delete_role(app.db, role_name)) + roles.grant_role(app.db, orm_service, role_name) + # token in Authorization header r = await async_requests.get( - public_url(app, mockservice_url) + '/whoami/', + public_url(app, mockservice_url) + 'whoami/', headers={'Authorization': 'token %s' % token}, + allow_redirects=False, ) - r.raise_for_status() - reply = r.json() - assert reply == {'kind': 'service', 'name': name, 'admin': False} - assert not r.cookies + service_model = { + 'kind': 'service', + 'name': name, + 'admin': False, + 'scopes': scopes, + } + if allowed: + r.raise_for_status() + assert r.status_code == 200 + reply = r.json() + assert service_model.items() <= reply.items() + assert not r.cookies + else: + assert r.status_code == 403 # token in ?token parameter r = await async_requests.get( - public_url(app, mockservice_url) + '/whoami/?token=%s' % token + public_url(app, mockservice_url) + 'whoami/?token=%s' % token ) - r.raise_for_status() - reply = r.json() - assert reply == {'kind': 'service', 'name': name, 'admin': False} + if allowed: + r.raise_for_status() + assert r.status_code == 200 + reply = r.json() + assert service_model.items() <= reply.items() + assert not r.cookies + else: + assert r.status_code == 403 r = await async_requests.get( - public_url(app, mockservice_url) + '/whoami/?token=no-such-token', + public_url(app, mockservice_url) + 'whoami/?token=no-such-token', allow_redirects=False, ) assert r.status_code == 302 @@ -318,23 +202,144 @@ async def test_hubauth_service_token(app, mockservice_url): assert path.endswith('/hub/login') -async def test_oauth_service(app, mockservice_url): +@pytest.mark.parametrize( + "client_allowed_roles, request_scopes, expected_scopes", + [ + # allow empty permissions + ([], [], []), + # allow original 'identify' scope to map to no role + ([], ["identify"], []), + # requesting roles outside client list doesn't work + ([], ["admin"], None), + ([], ["read:users"], None), + # requesting nonexistent roles or 
scopes fails in the same way (no server error) + ([], ["nosuchscope"], None), + ([], ["admin:invalid!no=bo!"], None), + # requesting role exactly client allow list works + (["user"], ["user"], ["user"]), + # Request individual scope, held by user, not listed in allowed role + # no explicit request, defaults to all + (["token", "user"], [], ["token", "user"]), + # explicit 'identify' maps to read:users:name!user + (["token", "user"], ["identify"], ["read:users:name!user=$user"]), + # any item outside the list isn't allowed + (["token", "server"], ["token", "user"], None), + (["read-only"], ["access:services"], None), + # requesting subset + (["admin", "user"], ["user"], ["user"]), + (["user", "token", "server"], ["token", "user"], ["token", "user"]), + (["admin", "user", "read-only"], ["read-only"], ["read-only"]), + # Request individual scopes, listed in allowed role + (["read-only"], ["access:servers"], ["access:servers"]), + # requesting valid subset, some not held by user + ( + ["admin", "user"], + ["admin:users", "access:servers", "self"], + ["access:servers", "user"], + ), + (["other"], ["other"], []), + # custom scopes + (["user"], ["custom:jupyter_server:read:*"], None), + ( + ["read-only"], + ["custom:jupyter_server:read:*"], + ["custom:jupyter_server:read:*"], + ), + # this one _should_ work, but doesn't until we implement expanded_scope filtering + ( + ["read-only"], + ["custom:jupyter_server:read:*!user=$user"], + ["custom:jupyter_server:read:*!user=$user"], + ), + ], +) +async def test_oauth_service_roles( + app, + mockservice_url, + create_user_with_scopes, + client_allowed_roles, + request_scopes, + expected_scopes, + preserve_scopes, +): service = mockservice_url + oauth_client = ( + app.db.query(orm.OAuthClient) + .filter_by(identifier=service.oauth_client_id) + .one() + ) + scopes.define_custom_scopes( + { + "custom:jupyter_server:read:*": { + "description": "read-only access to jupyter server", + }, + }, + ) + roles.create_role( + app.db, + { + 
"name": "read-only", + "description": "read-only access to servers", + "scopes": [ + "access:servers", + "custom:jupyter_server:read:*", + ], + }, + ) + + roles.create_role( + app.db, + { + "name": "other", + "description": "A role not held by our test user", + "scopes": [ + "admin-ui", + ], + }, + ) + oauth_client.allowed_scopes = sorted( + roles_to_scopes( + [orm.Role.find(app.db, role_name) for role_name in client_allowed_roles] + ) + ) + app.db.commit() + user = create_user_with_scopes("access:services") url = url_path_join(public_url(app, mockservice_url) + 'owhoami/?arg=x') + if request_scopes: + request_scopes = {s.replace("$user", user.name) for s in request_scopes} + url = url_concat(url, {"request-scope": " ".join(request_scopes)}) # first request is only going to login and get us to the oauth form page s = AsyncSession() - name = 'link' + roles.grant_role(app.db, user, "user") + roles.grant_role(app.db, user, "read-only") + name = user.name s.cookies = await app.login_user(name) r = await s.get(url) + if expected_scopes is None: + # expected failed auth, stop here + # verify expected 'invalid scope' error, not server error + dest_url, _, query = r.url.partition("?") + assert dest_url == public_url(app, mockservice_url) + "oauth_callback" + assert parse_qs(query).get("error") == ["invalid_scope"] + assert r.status_code == 400 + return + r.raise_for_status() # we should be looking at the oauth confirmation page assert urlparse(r.url).path == app.base_url + 'hub/api/oauth2/authorize' # verify oauth state cookie was set at some point assert set(r.history[0].cookies.keys()) == {'service-%s-oauth-state' % service.name} + page = BeautifulSoup(r.text, "html.parser") + scope_inputs = page.find_all("input", {"name": "scopes"}) + scope_values = [input["value"] for input in scope_inputs] + app_log.info(f"Submitting request with scope values {scope_values}") # submit the oauth form to complete authorization - r = await s.post(r.url, data={'scopes': ['identify']}, 
headers={'Referer': r.url}) + data = {} + if scope_values: + data["scopes"] = scope_values + r = await s.post(r.url, data=data, headers={'Referer': r.url}) r.raise_for_status() assert r.url == url # verify oauth cookie is set @@ -346,9 +351,34 @@ async def test_oauth_service(app, mockservice_url): r = await s.get(url, allow_redirects=False) r.raise_for_status() assert r.status_code == 200 + assert len(r.history) == 0 reply = r.json() sub_reply = {key: reply.get(key, 'missing') for key in ('kind', 'name')} - assert sub_reply == {'name': 'link', 'kind': 'user'} + assert sub_reply == {'name': user.name, 'kind': 'user'} + + expected_scopes = {s.replace("$user", user.name) for s in expected_scopes} + + # expand roles to scopes (shortcut) + for scope in list(expected_scopes): + role = orm.Role.find(app.db, scope) + if role: + expected_scopes.discard(role.name) + expected_scopes.update( + roles.roles_to_expanded_scopes([role], owner=user.orm_user) + ) + + if 'inherit' in expected_scopes: + expected_scopes = set(scopes.get_scopes_for(user.orm_user)) + + # always expect identify/access scopes + # on successful authentication + expected_scopes.update(scopes.identify_scopes(user.orm_user)) + expected_scopes.update(scopes.access_scopes(oauth_client)) + expected_scopes = scopes.reduce_scopes(expected_scopes) + have_scopes = scopes.reduce_scopes(set(reply['scopes'])) + # pytest is better at reporting list differences + # than set differences, especially with `-vv` + assert sorted(have_scopes) == sorted(expected_scopes) # token-authenticated request to HubOAuth token = app.users[name].new_api_token() @@ -368,12 +398,133 @@ async def test_oauth_service(app, mockservice_url): assert reply['name'] == name -async def test_oauth_cookie_collision(app, mockservice_url): +@pytest.mark.parametrize( + "access_scopes, expect_success", + [ + (["access:services"], True), + (["access:services!service=$service"], True), + (["access:services!service=other-service"], False), + (["self"], False), 
+ ([], False), + ], +) +async def test_oauth_access_scopes( + app, + mockservice_url, + create_user_with_scopes, + access_scopes, + expect_success, +): + """Check that oauth/authorize validates access scopes""" + service = mockservice_url + access_scopes = [s.replace("$service", service.name) for s in access_scopes] + url = url_path_join(public_url(app, mockservice_url) + 'owhoami/?arg=x') + # first request is only going to login and get us to the oauth form page + s = AsyncSession() + user = create_user_with_scopes(*access_scopes) + name = user.name + s.cookies = await app.login_user(name) + + r = await s.get(url) + if not expect_success: + assert r.status_code == 403 + return + r.raise_for_status() + # we should be looking at the oauth confirmation page + assert urlparse(r.url).path == app.base_url + 'hub/api/oauth2/authorize' + # verify oauth state cookie was set at some point + assert set(r.history[0].cookies.keys()) == {'service-%s-oauth-state' % service.name} + + # submit the oauth form to complete authorization + r = await s.post(r.url, headers={'Referer': r.url}) + r.raise_for_status() + assert r.url == url + # verify oauth cookie is set + assert 'service-%s' % service.name in set(s.cookies.keys()) + # verify oauth state cookie has been consumed + assert 'service-%s-oauth-state' % service.name not in set(s.cookies.keys()) + + # second request should be authenticated, which means no redirects + r = await s.get(url, allow_redirects=False) + r.raise_for_status() + assert r.status_code == 200 + reply = r.json() + sub_reply = {key: reply.get(key, 'missing') for key in ('kind', 'name')} + assert sub_reply == {'name': name, 'kind': 'user'} + + # revoke user access, should result in 403 + user.roles = [] + app.db.commit() + + # reset session id to avoid cached response + s.cookies.pop('jupyterhub-session-id') + + r = await s.get(url, allow_redirects=False) + assert r.status_code == 403 + + +@pytest.mark.parametrize( + "token_roles, hits_page", + [ + ([], True), + 
(['writer'], True), + (['writer', 'reader'], False), + ], +) +async def test_oauth_page_hit( + app, + mockservice_url, + create_user_with_scopes, + create_temp_role, + token_roles, + hits_page, +): + test_roles = { + 'reader': create_temp_role(['read:users!user'], role_name='reader'), + 'writer': create_temp_role(['users:activity!user'], role_name='writer'), + } + service = mockservice_url + user = create_user_with_scopes("access:services", "self") + for role in test_roles.values(): + roles.grant_role(app.db, user, role) + + # Create a token with the prior authorization + oauth_client = ( + app.db.query(orm.OAuthClient) + .filter_by(identifier=service.oauth_client_id) + .one() + ) + oauth_client.allowed_scopes = sorted(roles_to_scopes(list(test_roles.values()))) + + authorized_scopes = roles.roles_to_scopes([test_roles[t] for t in token_roles]) + authorized_scopes.update(scopes.identify_scopes()) + authorized_scopes.update(scopes.access_scopes(oauth_client)) + user.new_api_token(scopes=authorized_scopes) + token = user.api_tokens[0] + token.client_id = service.oauth_client_id + app.db.commit() + + s = AsyncSession() + s.cookies = await app.login_user(user.name) + url = url_path_join(public_url(app, service) + 'owhoami/?arg=x') + r = await s.get(url) + r.raise_for_status() + if hits_page: + # hit auth page to confirm permissions + assert urlparse(r.url).path == app.base_url + 'hub/api/oauth2/authorize' + else: + # skip auth page, permissions are granted + assert r.status_code == 200 + assert r.url == url + + +async def test_oauth_cookie_collision(app, mockservice_url, create_user_with_scopes): service = mockservice_url url = url_path_join(public_url(app, mockservice_url), 'owhoami/') print(url) s = AsyncSession() name = 'mypha' + user = create_user_with_scopes("access:services", name=name) s.cookies = await app.login_user(name) state_cookie_name = 'service-%s-oauth-state' % service.name service_cookie_name = 'service-%s' % service.name @@ -426,7 +577,7 @@ async def 
test_oauth_cookie_collision(app, mockservice_url): assert state_cookies == [] -async def test_oauth_logout(app, mockservice_url): +async def test_oauth_logout(app, mockservice_url, create_user_with_scopes): """Verify that logout via the Hub triggers logout for oauth services 1. clears session id cookie @@ -440,15 +591,11 @@ async def test_oauth_logout(app, mockservice_url): # first request is only going to set login cookie s = AsyncSession() name = 'propha' - app_user = add_user(app.db, app=app, name=name) + user = create_user_with_scopes("access:services", name=name) def auth_tokens(): """Return list of OAuth access tokens for the user""" - return list( - app.db.query(orm.OAuthAccessToken).filter( - orm.OAuthAccessToken.user_id == app_user.id - ) - ) + return list(app.db.query(orm.APIToken).filter_by(user_id=user.id)) # ensure we start empty assert auth_tokens() == [] @@ -475,6 +622,10 @@ async def test_oauth_logout(app, mockservice_url): session_id = s.cookies['jupyterhub-session-id'] assert len(auth_tokens()) == 1 + token = auth_tokens()[0] + assert token.expires_in is not None + # verify that oauth_token_expires_in has its desired effect + assert abs(app.oauth_token_expires_in - token.expires_in) < 30 # hit hub logout URL r = await s.get(public_url(app, path='hub/logout')) diff --git a/jupyterhub/tests/test_singleuser.py b/jupyterhub/tests/test_singleuser.py index 1bd08696..e139b41f 100644 --- a/jupyterhub/tests/test_singleuser.py +++ b/jupyterhub/tests/test_singleuser.py @@ -1,19 +1,49 @@ """Tests for jupyterhub.singleuser""" +import os import sys -from subprocess import check_output -from urllib.parse import urlparse +from contextlib import nullcontext +from subprocess import CalledProcessError, check_output +from unittest import mock +from urllib.parse import urlencode, urlparse import pytest +from bs4 import BeautifulSoup import jupyterhub + +from .. 
import orm from ..utils import url_path_join -from .mocking import public_url -from .mocking import StubSingleUserSpawner -from .utils import async_requests -from .utils import AsyncSession +from .mocking import StubSingleUserSpawner, public_url +from .utils import AsyncSession, async_requests, get_page -async def test_singleuser_auth(app): +@pytest.mark.parametrize( + "access_scopes, server_name, expect_success", + [ + (["access:servers!group=$group"], "", True), + (["access:servers!group=other-group"], "", False), + (["access:servers"], "", True), + (["access:servers"], "named", True), + (["access:servers!user=$user"], "", True), + (["access:servers!user=$user"], "named", True), + (["access:servers!server=$server"], "", True), + (["access:servers!server=$server"], "named-server", True), + (["access:servers!server=$user/other"], "", False), + (["access:servers!server=$user/other"], "some-name", False), + (["access:servers!user=$other"], "", False), + (["access:servers!user=$other"], "named", False), + (["access:services"], "", False), + (["self"], "named", False), + ([], "", False), + ], +) +async def test_singleuser_auth( + app, + create_user_with_scopes, + access_scopes, + server_name, + expect_success, +): # use StubSingleUserSpawner to launch a single-user app in a thread app.spawner_class = StubSingleUserSpawner app.tornado_settings['spawner_class'] = StubSingleUserSpawner @@ -21,19 +51,44 @@ async def test_singleuser_auth(app): # login, start the server cookies = await app.login_user('nandy') user = app.users['nandy'] - if not user.running: - await user.spawn() - url = public_url(app, user) + + group = orm.Group.find(app.db, name="visitors") + if group is None: + group = orm.Group(name="visitors") + app.db.add(group) + app.db.commit() + if group not in user.groups: + user.groups.append(group) + app.db.commit() + + if server_name not in user.spawners or not user.spawners[server_name].active: + await user.spawn(server_name) + await app.proxy.add_user(user, 
server_name) + spawner = user.spawners[server_name] + url = url_path_join(public_url(app, user), server_name) # no cookies, redirects to login page r = await async_requests.get(url) r.raise_for_status() assert '/hub/login' in r.url + # unauthenticated /api/ should 403, not redirect + api_url = url_path_join(url, "api/status") + r = await async_requests.get(api_url, allow_redirects=False) + assert r.status_code == 403 + # with cookies, login successful r = await async_requests.get(url, cookies=cookies) r.raise_for_status() - assert urlparse(r.url).path.rstrip('/').endswith('/user/nandy/tree') + assert ( + urlparse(r.url) + .path.rstrip('/') + .endswith( + url_path_join( + f'/user/{user.name}', spawner.name, spawner.default_url or "/tree" + ) + ) + ) assert r.status_code == 200 # logout @@ -41,15 +96,40 @@ async def test_singleuser_auth(app): assert len(r.cookies) == 0 # accessing another user's server hits the oauth confirmation page + access_scopes = [s.replace("$user", user.name) for s in access_scopes] + access_scopes = [ + s.replace("$server", f"{user.name}/{server_name}") for s in access_scopes + ] + access_scopes = [s.replace("$group", f"{group.name}") for s in access_scopes] + other_user = create_user_with_scopes(*access_scopes, name="burgess") + cookies = await app.login_user('burgess') s = AsyncSession() s.cookies = cookies r = await s.get(url) assert urlparse(r.url).path.endswith('/oauth2/authorize') + if not expect_success: + # user isn't authorized, should raise 403 + assert r.status_code == 403 + return + r.raise_for_status() # submit the oauth form to complete authorization r = await s.post(r.url, data={'scopes': ['identify']}, headers={'Referer': r.url}) - assert urlparse(r.url).path.rstrip('/').endswith('/user/nandy/tree') - # user isn't authorized, should raise 403 + final_url = urlparse(r.url).path.rstrip('/') + final_path = url_path_join( + '/user/', user.name, spawner.name, spawner.default_url or "/tree" + ) + assert 
final_url.endswith(final_path) + r.raise_for_status() + + # revoke user access, should result in 403 + other_user.roles = [] + app.db.commit() + + # reset session id to avoid cached response + s.cookies.pop('jupyterhub-session-id') + + r = await s.get(r.url, allow_redirects=False) assert r.status_code == 403 assert 'burgess' in r.text @@ -76,7 +156,9 @@ async def test_disable_user_config(app): # with cookies, login successful r = await async_requests.get(url, cookies=cookies) r.raise_for_status() - assert r.url.rstrip('/').endswith('/user/nandy/tree') + assert r.url.rstrip('/').endswith( + url_path_join('/user/nandy', user.spawner.default_url or "/tree") + ) assert r.status_code == 200 @@ -92,3 +174,78 @@ def test_version(): [sys.executable, '-m', 'jupyterhub.singleuser', '--version'] ).decode('utf8', 'replace') assert jupyterhub.__version__ in out + + +@pytest.mark.parametrize( + "JUPYTERHUB_SINGLEUSER_APP", + [ + "", + "notebook.notebookapp.NotebookApp", + "jupyter_server.serverapp.ServerApp", + ], +) +def test_singleuser_app_class(JUPYTERHUB_SINGLEUSER_APP): + try: + import jupyter_server # noqa + except ImportError: + have_server = False + else: + have_server = True + try: + import notebook.notebookapp # noqa + except ImportError: + have_notebook = False + else: + have_notebook = True + + if JUPYTERHUB_SINGLEUSER_APP.startswith("notebook."): + expect_error = not have_notebook + elif JUPYTERHUB_SINGLEUSER_APP.startswith("jupyter_server."): + expect_error = not have_server + else: + # not specified, will try both + expect_error = not (have_server or have_notebook) + + if expect_error: + ctx = pytest.raises(CalledProcessError) + else: + ctx = nullcontext() + + with mock.patch.dict( + os.environ, + { + "JUPYTERHUB_SINGLEUSER_APP": JUPYTERHUB_SINGLEUSER_APP, + }, + ): + with ctx: + out = check_output( + [sys.executable, '-m', 'jupyterhub.singleuser', '--help-all'] + ).decode('utf8', 'replace') + if expect_error: + return + # use help-all output to check inheritance 
+ if 'NotebookApp' in JUPYTERHUB_SINGLEUSER_APP or not have_server: + assert '--NotebookApp.' in out + assert '--ServerApp.' not in out + else: + assert '--ServerApp.' in out + assert '--NotebookApp.' not in out + + +async def test_nbclassic_control_panel(app, user): + # use StubSingleUserSpawner to launch a single-user app in a thread + app.spawner_class = StubSingleUserSpawner + app.tornado_settings['spawner_class'] = StubSingleUserSpawner + + # login, start the server + await user.spawn() + cookies = await app.login_user(user.name) + next_url = url_path_join(user.url, "tree/") + url = '/?' + urlencode({'next': next_url}) + r = await get_page(url, app, cookies=cookies) + r.raise_for_status() + assert urlparse(r.url).path == urlparse(next_url).path + page = BeautifulSoup(r.text, "html.parser") + link = page.find("a", id="jupyterhub-control-panel-link") + assert link, f"Missing jupyterhub-control-panel-link in {page}" + assert link["href"] == url_path_join(app.base_url, "hub/home") diff --git a/jupyterhub/tests/test_spawner.py b/jupyterhub/tests/test_spawner.py index 30d35b8f..cd8dfa73 100644 --- a/jupyterhub/tests/test_spawner.py +++ b/jupyterhub/tests/test_spawner.py @@ -1,6 +1,7 @@ """Tests for process spawning""" # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. +import asyncio import logging import os import signal @@ -12,17 +13,14 @@ from unittest import mock from urllib.parse import urlparse import pytest -from tornado import gen from .. import orm from .. 
import spawner as spawnermod -from ..objects import Hub -from ..objects import Server -from ..spawner import LocalProcessSpawner -from ..spawner import Spawner +from ..objects import Hub, Server +from ..scopes import access_scopes +from ..spawner import LocalProcessSpawner, Spawner from ..user import User -from ..utils import new_token -from ..utils import url_path_join +from ..utils import AnyTimeoutError, new_token, url_path_join from .mocking import public_url from .test_api import add_user from .utils import async_requests @@ -76,7 +74,20 @@ async def test_spawner(db, request): assert status is None await spawner.stop() status = await spawner.poll() - assert status == 1 + assert status is not None + assert isinstance(status, int) + + +def test_spawner_from_db(app, user): + spawner = user.spawners['name'] + user_options = {"test": "value"} + spawner.orm_spawner.user_options = user_options + app.db.commit() + # delete and recreate the spawner from the db + user.spawners.pop('name') + new_spawner = user.spawners['name'] + assert new_spawner.orm_spawner.user_options == user_options + assert new_spawner.user_options == user_options async def wait_for_spawner(spawner, timeout=10): @@ -94,7 +105,7 @@ async def wait_for_spawner(spawner, timeout=10): assert status is None try: await wait() - except TimeoutError: + except AnyTimeoutError: continue else: break @@ -102,7 +113,8 @@ async def wait_for_spawner(spawner, timeout=10): async def test_single_user_spawner(app, request): - user = next(iter(app.users.values()), None) + orm_user = app.db.query(orm.User).first() + user = app.users[orm_user] spawner = user.spawner spawner.cmd = ['jupyterhub-singleuser'] await user.spawn() @@ -121,7 +133,7 @@ async def test_stop_spawner_sigint_fails(db): await spawner.start() # wait for the process to get to the while True: loop - await gen.sleep(1) + await asyncio.sleep(1) status = await spawner.poll() assert status is None @@ -136,7 +148,7 @@ async def test_stop_spawner_stop_now(db): 
await spawner.start() # wait for the process to get to the while True: loop - await gen.sleep(1) + await asyncio.sleep(1) status = await spawner.poll() assert status is None @@ -163,7 +175,7 @@ async def test_spawner_poll(db): spawner.start_polling() # wait for the process to get to the while True: loop - await gen.sleep(1) + await asyncio.sleep(1) status = await spawner.poll() assert status is None @@ -171,12 +183,12 @@ async def test_spawner_poll(db): proc.terminate() for i in range(10): if proc.poll() is None: - await gen.sleep(1) + await asyncio.sleep(1) else: break assert proc.poll() is not None - await gen.sleep(2) + await asyncio.sleep(2) status = await spawner.poll() assert status is not None @@ -256,13 +268,11 @@ async def test_shell_cmd(db, tmpdir, request): def test_inherit_overwrite(): - """On 3.6+ we check things are overwritten at import time - """ - if sys.version_info >= (3, 6): - with pytest.raises(NotImplementedError): + """We check things are overwritten at import time""" + with pytest.raises(NotImplementedError): - class S(Spawner): - pass + class S(Spawner): + pass def test_inherit_ok(): @@ -402,3 +412,126 @@ async def test_spawner_routing(app, name): assert r.url == url assert r.text == urlparse(url).path await user.stop() + + +async def test_spawner_env(db): + env_overrides = { + "JUPYTERHUB_API_URL": "https://test.horse/hub/api", + "TEST_KEY": "value", + } + spawner = new_spawner(db, environment=env_overrides) + env = spawner.get_env() + for key, value in env_overrides.items(): + assert key in env + assert env[key] == value + + +async def test_hub_connect_url(db): + spawner = new_spawner(db, hub_connect_url="https://example.com/") + name = spawner.user.name + env = spawner.get_env() + assert env["JUPYTERHUB_API_URL"] == "https://example.com/api" + assert ( + env["JUPYTERHUB_ACTIVITY_URL"] + == "https://example.com/api/users/%s/activity" % name + ) + + +async def test_spawner_oauth_scopes(app, user): + allowed_scopes = ["read:users"] + 
spawner = user.spawners[''] + spawner.oauth_client_allowed_scopes = allowed_scopes + # exercise start/stop which assign roles to oauth client + await spawner.user.spawn() + oauth_client = spawner.orm_spawner.oauth_client + assert sorted(oauth_client.allowed_scopes) == sorted( + allowed_scopes + list(access_scopes(oauth_client)) + ) + await spawner.user.stop() + + +async def test_spawner_oauth_roles_bad(app, user): + allowed_roles = ["user", "nosuchrole"] + spawner = user.spawners[''] + spawner.oauth_roles = allowed_roles + # exercise start/stop which assign roles + # raises ValueError if we try to assign a role that doesn't exist + with pytest.raises(ValueError): + await spawner.user.spawn() + + +async def test_spawner_options_from_form(db): + def options_from_form(form_data): + return form_data + + spawner = new_spawner(db, options_from_form=options_from_form) + form_data = {"key": ["value"]} + result = spawner.run_options_from_form(form_data) + for key, value in form_data.items(): + assert key in result + assert result[key] == value + + +async def test_spawner_options_from_form_with_spawner(db): + def options_from_form(form_data, spawner): + return form_data + + spawner = new_spawner(db, options_from_form=options_from_form) + form_data = {"key": ["value"]} + result = spawner.run_options_from_form(form_data) + for key, value in form_data.items(): + assert key in result + assert result[key] == value + + +def test_spawner_server(db): + spawner = new_spawner(db) + spawner.orm_spawner = None + orm_spawner = orm.Spawner() + orm_server = orm.Server(base_url="/1/") + orm_spawner.server = orm_server + db.add(orm_spawner) + db.add(orm_server) + db.commit() + # initial: no orm_spawner + assert spawner.server is None + # assigning spawner.orm_spawner updates spawner.server + spawner.orm_spawner = orm_spawner + assert spawner.server is not None + assert spawner.server.orm_server is orm_server + # update orm_spawner.server without direct access on Spawner + orm_spawner.server 
= new_server = orm.Server(base_url="/2/") + db.commit() + assert spawner.server is not None + assert spawner.server.orm_server is not orm_server + assert spawner.server.orm_server is new_server + # clear orm_server via orm_spawner clears spawner.server + orm_spawner.server = None + db.commit() + assert spawner.server is None + # assigning spawner.server updates orm_spawner.server + orm_server = orm.Server(base_url="/3/") + db.add(orm_server) + db.commit() + spawner.server = server = Server(orm_server=orm_server) + db.commit() + assert spawner.server is server + assert spawner.orm_spawner.server is orm_server + # change orm spawner.server + orm_server = orm.Server(base_url="/4/") + db.add(orm_server) + db.commit() + spawner.server = server2 = Server(orm_server=orm_server) + assert spawner.server is server2 + assert spawner.orm_spawner.server is orm_server + # clear server via spawner.server + spawner.server = None + db.commit() + assert spawner.orm_spawner.server is None + + # test with no underlying orm.Spawner + # (only relevant for mocking, never true for actual Spawners) + spawner = Spawner() + spawner.server = Server.from_url("http://1.2.3.4") + assert spawner.server is not None + assert spawner.server.ip == "1.2.3.4" diff --git a/jupyterhub/tests/test_traitlets.py b/jupyterhub/tests/test_traitlets.py index 416b532d..8ee21208 100644 --- a/jupyterhub/tests/test_traitlets.py +++ b/jupyterhub/tests/test_traitlets.py @@ -1,10 +1,7 @@ import pytest -from traitlets import HasTraits -from traitlets import TraitError +from traitlets import HasTraits, TraitError -from jupyterhub.traitlets import ByteSpecification -from jupyterhub.traitlets import Command -from jupyterhub.traitlets import URLPrefix +from jupyterhub.traitlets import ByteSpecification, Command, URLPrefix def test_url_prefix(): diff --git a/jupyterhub/tests/test_user.py b/jupyterhub/tests/test_user.py new file mode 100644 index 00000000..61723a85 --- /dev/null +++ b/jupyterhub/tests/test_user.py @@ -0,0 
+1,55 @@ +import pytest + +from .. import orm +from ..user import UserDict +from .utils import add_user + + +@pytest.mark.parametrize("attr", ["self", "id", "name"]) +async def test_userdict_get(db, attr): + u = add_user(db, name="rey", app=False) + userdict = UserDict(db_factory=lambda: db, settings={}) + + if attr == "self": + key = u + else: + key = getattr(u, attr) + + # `in` checks cache only + assert key not in userdict + assert userdict.get(key) + assert userdict.get(key).id == u.id + # `in` should find it now + assert key in userdict + + +@pytest.mark.parametrize( + "group_names", + [ + ["isin1", "isin2"], + ["isin1"], + ["notin", "isin1"], + ["new-group", "new-group", "isin1"], + [], + ], +) +def test_sync_groups(app, user, group_names): + expected = sorted(set(group_names)) + db = app.db + db.add(orm.Group(name="notin")) + in_groups = [orm.Group(name="isin1"), orm.Group(name="isin2")] + for group in in_groups: + db.add(group) + db.commit() + user.groups = in_groups + db.commit() + user.sync_groups(group_names) + assert not app.db.dirty + after_groups = sorted(g.name for g in user.groups) + assert after_groups == expected + # double-check backref + for group in db.query(orm.Group): + if group.name in expected: + assert user.orm_user in group.users + else: + assert user.orm_user not in group.users diff --git a/jupyterhub/tests/test_utils.py b/jupyterhub/tests/test_utils.py index 49155e92..129bc0fa 100644 --- a/jupyterhub/tests/test_utils.py +++ b/jupyterhub/tests/test_utils.py @@ -1,21 +1,26 @@ """Tests for utilities""" import asyncio +import time +from concurrent.futures import ThreadPoolExecutor +from unittest.mock import Mock import pytest from async_generator import aclosing -from async_generator import async_generator -from async_generator import yield_ +from tornado import gen +from tornado.concurrent import run_on_executor +from tornado.httpserver import HTTPRequest +from tornado.httputil import HTTPHeaders +from .. 
import utils from ..utils import iterate_until -@async_generator async def yield_n(n, delay=0.01): """Yield n items with a delay between each""" for i in range(n): if delay: await asyncio.sleep(delay) - await yield_(i) + yield i def schedule_future(io_loop, *, delay, result=None): @@ -50,13 +55,70 @@ async def test_iterate_until(io_loop, deadline, n, delay, expected): async def test_iterate_until_ready_after_deadline(io_loop): f = schedule_future(io_loop, delay=0) - @async_generator async def gen(): for i in range(5): - await yield_(i) + yield i yielded = [] async with aclosing(iterate_until(f, gen())) as items: async for item in items: yielded.append(item) assert yielded == list(range(5)) + + +@gen.coroutine +def tornado_coroutine(): + yield gen.sleep(0.05) + return "ok" + + +class TornadoCompat: + def __init__(self): + self.executor = ThreadPoolExecutor(1) + + @run_on_executor + def on_executor(self): + time.sleep(0.05) + return "executor" + + @gen.coroutine + def tornado_coroutine(self): + yield gen.sleep(0.05) + return "gen.coroutine" + + +async def test_tornado_coroutines(): + t = TornadoCompat() + # verify that tornado gen and executor methods return awaitables + assert (await t.on_executor()) == "executor" + assert (await t.tornado_coroutine()) == "gen.coroutine" + + +@pytest.mark.parametrize( + "forwarded, x_scheme, x_forwarded_proto, expected", + [ + ("", "", "", "_attr_"), + ("for=1.2.3.4", "", "", "_attr_"), + ("for=1.2.3.4,proto=https", "", "", "_attr_"), + ("", "https", "http", "https"), + ("", "https, http", "", "https"), + ("", "https, http", "http", "https"), + ("proto=http ; for=1.2.3.4, proto=https", "https, http", "", "http"), + ("proto=invalid;for=1.2.3.4,proto=http", "https, http", "", "https"), + ("for=1.2.3.4,proto=http", "https, http", "", "https"), + ("", "invalid, http", "", "_attr_"), + ], +) +def test_browser_protocol(x_scheme, x_forwarded_proto, forwarded, expected): + request = Mock(spec=HTTPRequest) + request.protocol = "_attr_" + 
request.headers = HTTPHeaders() + if x_scheme: + request.headers["X-Scheme"] = x_scheme + if x_forwarded_proto: + request.headers["X-Forwarded-Proto"] = x_forwarded_proto + if forwarded: + request.headers["Forwarded"] = forwarded + + proto = utils.get_browser_protocol(request) + assert proto == expected diff --git a/jupyterhub/tests/test_version.py b/jupyterhub/tests/test_version.py index 974c01e5..789f420b 100644 --- a/jupyterhub/tests/test_version.py +++ b/jupyterhub/tests/test_version.py @@ -3,7 +3,11 @@ import logging import pytest -from .._version import _check_version +from .._version import _check_version, reset_globals + + +def setup_function(function): + reset_globals() @pytest.mark.parametrize( @@ -25,3 +29,27 @@ def test_check_version(hub_version, singleuser_version, log_level, msg, caplog): record = caplog.records[0] assert record.levelno == log_level assert msg in record.getMessage() + + +def test_check_version_singleton(caplog): + """Tests that minor version difference logging is only logged once.""" + # Run test_check_version twice which will assert that the warning is only logged + # once. + for x in range(2): + test_check_version( + '1.2.0', + '1.1.0', + logging.WARNING, + 'This could cause failure to authenticate', + caplog, + ) + # Run it again with a different singleuser_version to make sure that is logged as + # a warning. 
+ caplog.clear() + test_check_version( + '1.2.0', + '1.1.1', + logging.WARNING, + 'This could cause failure to authenticate', + caplog, + ) diff --git a/jupyterhub/tests/utils.py b/jupyterhub/tests/utils.py index 09aeb196..2e535d4b 100644 --- a/jupyterhub/tests/utils.py +++ b/jupyterhub/tests/utils.py @@ -1,11 +1,15 @@ import asyncio +import inspect +import os from concurrent.futures import ThreadPoolExecutor +import pytest import requests from certipy import Certipy -from jupyterhub import orm +from jupyterhub import metrics, orm from jupyterhub.objects import Server +from jupyterhub.roles import assign_default_roles, update_roles from jupyterhub.utils import url_path_join as ujoin @@ -52,6 +56,12 @@ def ssl_setup(cert_dir, authority_name): return external_certs +"""Skip tests that don't work under internal-ssl when testing under internal-ssl""" +skip_if_ssl = pytest.mark.skipif( + os.environ.get('SSL_ENABLED', False), reason="Does not use internal SSL" +) + + def check_db_locks(func): """Decorator that verifies no locks are held on database upon exit. @@ -61,23 +71,34 @@ def check_db_locks(func): The decorator relies on an instance of JupyterHubApp being the first argument to the decorated function. 
- Example - ------- - + Examples + -------- @check_db_locks def api_request(app, *api_path, **kwargs): """ def new_func(app, *args, **kwargs): - retval = func(app, *args, **kwargs) + maybe_future = func(app, *args, **kwargs) - temp_session = app.session_factory() - temp_session.execute('CREATE TABLE dummy (foo INT)') - temp_session.execute('DROP TABLE dummy') - temp_session.close() + def _check(_=None): + temp_session = app.session_factory() + try: + temp_session.execute('CREATE TABLE dummy (foo INT)') + temp_session.execute('DROP TABLE dummy') + finally: + temp_session.close() - return retval + async def await_then_check(): + result = await maybe_future + _check() + return result + + if inspect.isawaitable(maybe_future): + return await_then_check() + else: + _check() + return maybe_future return new_func @@ -97,10 +118,16 @@ def add_user(db, app=None, **kwargs): if orm_user is None: orm_user = orm.User(**kwargs) db.add(orm_user) + metrics.TOTAL_USERS.inc() else: for attr, value in kwargs.items(): setattr(orm_user, attr, value) db.commit() + requested_roles = kwargs.get('roles') + if requested_roles: + update_roles(db, entity=orm_user, roles=requested_roles) + else: + assign_default_roles(db, entity=orm_user) if app: return app.users[orm_user.id] else: @@ -111,7 +138,7 @@ def auth_header(db, name): """Return header with user's API authorization token.""" user = find_user(db, name) if user is None: - user = add_user(db, name=name) + raise KeyError(f"No such user: {name}") token = user.new_api_token() return {'Authorization': 'token %s' % token} @@ -128,7 +155,6 @@ async def api_request( else: base_url = public_url(app, path='hub') headers = kwargs.setdefault('headers', {}) - if 'Authorization' not in headers and not noauth and 'cookies' not in kwargs: # make a copy to avoid modifying arg in-place kwargs['headers'] = h = {} diff --git a/jupyterhub/traitlets.py b/jupyterhub/traitlets.py index af3cd7fa..a7ed5829 100644 --- a/jupyterhub/traitlets.py +++ 
b/jupyterhub/traitlets.py @@ -3,13 +3,15 @@ Traitlets that are used in JupyterHub """ # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. -import entrypoints -from traitlets import Integer -from traitlets import List -from traitlets import TraitError -from traitlets import TraitType -from traitlets import Type -from traitlets import Unicode +import sys + +# See compatibility note on `group` keyword in https://docs.python.org/3/library/importlib.metadata.html#entry-points +if sys.version_info < (3, 10): + from importlib_metadata import entry_points +else: + from importlib.metadata import entry_points + +from traitlets import Integer, List, TraitError, TraitType, Type, Undefined, Unicode class URLPrefix(Unicode): @@ -27,11 +29,15 @@ class Command(List): but allows it to be specified as a single string. """ - def __init__(self, default_value=None, **kwargs): + def __init__(self, default_value=Undefined, **kwargs): kwargs.setdefault('minlen', 1) if isinstance(default_value, str): default_value = [default_value] - super().__init__(Unicode(), default_value, **kwargs) + if default_value is not Undefined and ( + not (default_value is None and not kwargs.get("allow_none", False)) + ): + kwargs["default_value"] = default_value + super().__init__(Unicode(), **kwargs) def validate(self, obj, value): if isinstance(value, str): @@ -126,11 +132,7 @@ class EntryPointType(Type): chunks = [self._original_help] chunks.append("Currently installed: ") for key, entry_point in self.load_entry_points().items(): - chunks.append( - " - {}: {}.{}".format( - key, entry_point.module_name, entry_point.object_name - ) - ) + chunks.append(f" - {key}: {entry_point.module}.{entry_point.attr}") return '\n'.join(chunks) @help.setter @@ -138,11 +140,14 @@ class EntryPointType(Type): self._original_help = value def load_entry_points(self): - """Load my entry point group""" - # load the group - group = entrypoints.get_group_named(self.entry_point_group) - 
# make it case-insensitive - return {key.lower(): value for key, value in group.items()} + """Load my entry point group + + Returns a dict whose keys are lowercase entrypoint names + """ + return { + entry_point.name.lower(): entry_point + for entry_point in entry_points(group=self.entry_point_group) + } def validate(self, obj, value): if isinstance(value, str): diff --git a/jupyterhub/user.py b/jupyterhub/user.py index 333175bb..19e8011a 100644 --- a/jupyterhub/user.py +++ b/jupyterhub/user.py @@ -1,39 +1,117 @@ # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. +import json +import string import warnings from collections import defaultdict -from datetime import datetime -from datetime import timedelta -from urllib.parse import quote -from urllib.parse import urlparse +from datetime import datetime, timedelta +from functools import lru_cache +from urllib.parse import quote, urlparse from sqlalchemy import inspect -from tornado import gen -from tornado import web +from tornado import gen, web from tornado.httputil import urlencode from tornado.log import app_log from . 
import orm -from ._version import __version__ -from ._version import _check_version -from .crypto import CryptKeeper -from .crypto import decrypt -from .crypto import encrypt -from .crypto import EncryptionUnavailable -from .crypto import InvalidToken -from .metrics import RUNNING_SERVERS -from .metrics import TOTAL_USERS +from ._version import __version__, _check_version +from .crypto import CryptKeeper, EncryptionUnavailable, InvalidToken, decrypt, encrypt +from .metrics import RUNNING_SERVERS, TOTAL_USERS from .objects import Server from .spawner import LocalProcessSpawner -from .utils import make_ssl_context -from .utils import maybe_future -from .utils import url_path_join +from .utils import ( + AnyTimeoutError, + make_ssl_context, + maybe_future, + url_escape_path, + url_path_join, +) + +# detailed messages about the most common failure-to-start errors, +# which manifest timeouts during start +start_timeout_message = """ +Common causes of this timeout, and debugging tips: + +1. Everything is working, but it took too long. + To fix: increase `Spawner.start_timeout` configuration + to a number of seconds that is enough for spawners to finish starting. +2. The server didn't finish starting, + or it crashed due to a configuration issue. + Check the single-user server's logs for hints at what needs fixing. +""" + +http_timeout_message = """ +Common causes of this timeout, and debugging tips: + +1. The server didn't finish starting, + or it crashed due to a configuration issue. + Check the single-user server's logs for hints at what needs fixing. +2. The server started, but is not accessible at the specified URL. + This may be a configuration issue specific to your chosen Spawner. + Check the single-user server logs and resource to make sure the URL + is correct and accessible from the Hub. +3. (unlikely) Everything is working, but the server took too long to respond. 
+ To fix: increase `Spawner.http_timeout` configuration + to a number of seconds that is enough for servers to become responsive. +""" + +# set of chars that are safe in dns labels +# (allow '.' because we don't mind multiple levels of subdomains) +_dns_safe = set(string.ascii_letters + string.digits + '-.') +# don't escape % because it's the escape char and we handle it separately +_dns_needs_replace = _dns_safe | {"%"} + + +@lru_cache() +def _dns_quote(name): + """Escape a name for use in a dns label + + this is _NOT_ fully domain-safe, but works often enough for realistic usernames. + Fully safe would be full IDNA encoding, + PLUS escaping non-IDNA-legal ascii, + PLUS some encoding of boundary conditions + """ + # escape name for subdomain label + label = quote(name, safe="").lower() + # some characters are not handled by quote, + # because they are legal in URLs but not domains, + # specifically _ and ~ (starting in 3.7). + # Escape these in the same way (%{hex_codepoint}). + unique_chars = set(label) + for c in unique_chars: + if c not in _dns_needs_replace: + label = label.replace(c, f"%{ord(c):x}") + + # underscore is our escape char - + # it's not officially legal in hostnames, + # but is valid in _domain_ names (?), + # and always works in practice. + # FIXME: We should consider switching to proper IDNA encoding + # for 3.0. + label = label.replace("%", "_") + return label class UserDict(dict): """Like defaultdict, but for users - Getting by a user id OR an orm.User instance returns a User wrapper around the orm user. + Users can be retrieved by: + + - integer database id + - orm.User object + - username str + + A User wrapper object is always returned. + + This dict contains at least all active users, + but not necessarily all users in the database. + + Checking `key in userdict` returns whether + an item is already in the cache, + *not* whether it is in the database. + + .. 
versionchanged:: 1.2 + ``'username' in userdict`` pattern is now supported """ def __init__(self, db_factory, settings): @@ -52,15 +130,31 @@ class UserDict(dict): """Add a user to the UserDict""" if orm_user.id not in self: self[orm_user.id] = self.from_orm(orm_user) - TOTAL_USERS.inc() return self[orm_user.id] def __contains__(self, key): + """key in userdict checks presence in the cache + + it does not check if the user is in the database + """ if isinstance(key, (User, orm.User)): key = key.id - return dict.__contains__(self, key) + elif isinstance(key, str): + # username lookup, O(N) + for user in self.values(): + if user.name == key: + key = user.id + break + return super().__contains__(key) def __getitem__(self, key): + """UserDict allows retrieval of user by any of: + + - User object + - orm.User object + - username (str) + - orm.User.id int (actual key used in underlying dict) + """ if isinstance(key, User): key = key.id elif isinstance(key, str): @@ -68,14 +162,14 @@ class UserDict(dict): if orm_user is None: raise KeyError("No such user: %s" % key) else: - key = orm_user + key = orm_user.id if isinstance(key, orm.User): # users[orm_user] returns User(orm_user) orm_user = key if orm_user.id not in self: user = self[orm_user.id] = User(orm_user, self.settings) return user - user = dict.__getitem__(self, orm_user.id) + user = super().__getitem__(orm_user.id) user.db = self.db return user elif isinstance(key, int): @@ -86,11 +180,25 @@ class UserDict(dict): raise KeyError("No such user: %s" % id) user = self.add(orm_user) else: - user = dict.__getitem__(self, id) + user = super().__getitem__(id) return user else: raise KeyError(repr(key)) + def get(self, key, default=None): + """Retrieve a User object if it can be found, else default + + Lookup can be by User object, id, or name + + .. versionchanged:: 1.2 + ``get()`` accesses the database instead of just the cache by integer id, + so is equivalent to catching KeyErrors on attempted lookup. 
+ """ + try: + return self[key] + except KeyError: + return default + def __delitem__(self, key): user = self[key] for orm_spawner in user.orm_user._orm_spawners: @@ -98,7 +206,7 @@ class UserDict(dict): self.db.expunge(orm_spawner) if user.orm_user in self.db: self.db.expunge(user.orm_user) - dict.__delitem__(self, user.id) + super().__delitem__(user.id) def delete(self, key): """Delete a user from the cache and the database""" @@ -176,6 +284,58 @@ class User: def spawner_class(self): return self.settings.get('spawner_class', LocalProcessSpawner) + def get_spawner(self, server_name="", replace_failed=False): + """Get a spawner by name + + replace_failed governs whether a failed spawner should be replaced + or returned (default: returned). + + .. versionadded:: 2.2 + """ + spawner = self.spawners[server_name] + if replace_failed and spawner._failed: + self.log.debug(f"Discarding failed spawner {spawner._log_name}") + # remove failed spawner, create a new one + self.spawners.pop(server_name) + spawner = self.spawners[server_name] + return spawner + + def sync_groups(self, group_names): + """Synchronize groups with database""" + + current_groups = {g.name for g in self.orm_user.groups} + new_groups = set(group_names) + if current_groups == new_groups: + # no change, nothing to do + return + + # log group changes + added_groups = new_groups.difference(current_groups) + removed_groups = current_groups.difference(group_names) + if added_groups: + self.log.info(f"Adding user {self.name} to group(s): {added_groups}") + if removed_groups: + self.log.info(f"Removing user {self.name} from group(s): {removed_groups}") + + if group_names: + groups = ( + self.db.query(orm.Group).filter(orm.Group.name.in_(new_groups)).all() + ) + existing_groups = {g.name for g in groups} + for group_name in added_groups: + if group_name not in existing_groups: + # create groups that don't exist yet + self.log.info( + f"Creating new group {group_name} for user {self.name}" + ) + group = 
orm.Group(name=group_name) + self.db.add(group) + groups.append(group) + self.orm_user.groups = groups + else: + self.orm_user.groups = [] + self.db.commit() + async def save_auth_state(self, auth_state): """Encrypt and store auth_state""" if auth_state is None: @@ -205,6 +365,35 @@ class User: await self.save_auth_state(auth_state) return auth_state + async def delete_spawners(self): + """Call spawner cleanup methods + + Allows the spawner to cleanup persistent resources + """ + for name in self.orm_user.orm_spawners.keys(): + await self._delete_spawner(name) + + async def _delete_spawner(self, name_or_spawner): + """Delete a single spawner""" + # always ensure full Spawner + # this may instantiate the Spawner if it wasn't already running, + # just to delete it + if isinstance(name_or_spawner, str): + spawner = self.spawners[name_or_spawner] + else: + spawner = name_or_spawner + + if spawner.active: + raise RuntimeError( + f"Spawner {spawner._log_name} is active and cannot be deleted." + ) + try: + await maybe_future(spawner.delete_forever()) + except Exception as e: + self.log.exception( + f"Error cleaning up persistent resources on {spawner._log_name}" + ) + def all_spawners(self, include_default=True): """Generator yielding all my spawners @@ -252,7 +441,7 @@ class User: # self.escaped_name may contain @ which is legal in URLs but not cookie keys client_id = 'jupyterhub-user-%s' % quote(self.name) if server_name: - client_id = '%s-%s' % (client_id, quote(server_name)) + client_id = f'{client_id}-{quote(server_name)}' trusted_alt_names = [] trusted_alt_names.extend(self.settings.get('trusted_alt_names', [])) @@ -265,11 +454,14 @@ class User: hub=self.settings.get('hub'), authenticator=self.authenticator, config=self.settings.get('config'), - proxy_spec=url_path_join(self.proxy_spec, server_name, '/'), - db=self.db, + proxy_spec=url_path_join( + self.proxy_spec, url_escape_path(server_name), '/' + ), + _deprecated_db_session=self.db, oauth_client_id=client_id, 
cookie_options=self.settings.get('cookie_options', {}), trusted_alt_names=trusted_alt_names, + user_options=orm_spawner.user_options or {}, ) if self.settings.get('internal_ssl'): @@ -348,7 +540,12 @@ class User: @property def escaped_name(self): """My name, escaped for use in URLs, cookies, etc.""" - return quote(self.name, safe='@~') + return url_escape_path(self.name) + + @property + def json_escaped_name(self): + """The user name, escaped for use in javascript inserts, etc.""" + return json.dumps(self.name)[1:-1] @property def proxy_spec(self): @@ -361,17 +558,15 @@ class User: @property def domain(self): """Get the domain for my server.""" - # use underscore as escape char for domains - return ( - quote(self.name).replace('%', '_').lower() + '.' + self.settings['domain'] - ) + + return _dns_quote(self.name) + '.' + self.settings['domain'] @property def host(self): """Get the *host* for my server (proto://domain[:port])""" # FIXME: escaped_name probably isn't escaped enough in general for a domain fragment parsed = urlparse(self.settings['subdomain_host']) - h = '%s://%s' % (parsed.scheme, self.domain) + h = f'{parsed.scheme}://{self.domain}' if parsed.port: h += ':%i' % parsed.port return h @@ -383,7 +578,7 @@ class User: Full name.domain/path if using subdomains, otherwise just my /base/url """ if self.settings.get('subdomain_host'): - return '{host}{path}'.format(host=self.host, path=self.base_url) + return f'{self.host}{self.base_url}' else: return self.base_url @@ -392,13 +587,13 @@ class User: if not server_name: return self.url else: - return url_path_join(self.url, server_name) + return url_path_join(self.url, url_escape_path(server_name)) def progress_url(self, server_name=''): """API URL for progress endpoint for a server with a given name""" url_parts = [self.settings['hub'].base_url, 'api/users', self.escaped_name] if server_name: - url_parts.extend(['servers', server_name, 'progress']) + url_parts.extend(['servers', url_escape_path(server_name), 
'progress']) else: url_parts.extend(['server/progress']) return url_path_join(*url_parts) @@ -452,9 +647,7 @@ class User: else: # spawn via POST or on behalf of another user. # nothing we can do here but fail - raise web.HTTPError( - 400, "{}'s authentication has expired".format(self.name) - ) + raise web.HTTPError(400, f"{self.name}'s authentication has expired") async def spawn(self, server_name='', options=None, handler=None): """Start the user's spawner @@ -474,15 +667,15 @@ class User: if handler: await self.refresh_auth(handler) - base_url = url_path_join(self.base_url, server_name) + '/' + base_url = url_path_join(self.base_url, url_escape_path(server_name)) + '/' orm_server = orm.Server(base_url=base_url) db.add(orm_server) note = "Server at %s" % base_url - api_token = self.new_api_token(note=note) + api_token = self.new_api_token(note=note, roles=['server']) db.commit() - spawner = self.spawners[server_name] + spawner = self.get_spawner(server_name, replace_failed=True) spawner.server = server = Server(orm_server=orm_server) assert spawner.orm_spawner.server is orm_server @@ -509,31 +702,38 @@ class User: client_id = spawner.oauth_client_id oauth_provider = self.settings.get('oauth_provider') if oauth_provider: - oauth_client = oauth_provider.fetch_by_client_id(client_id) - # create a new OAuth client + secret on every launch - # containers that resume will be updated below - oauth_provider.add_client( + allowed_scopes = await spawner._get_oauth_client_allowed_scopes() + oauth_client = oauth_provider.add_client( client_id, api_token, - url_path_join(self.url, server_name, 'oauth_callback'), + url_path_join(self.url, url_escape_path(server_name), 'oauth_callback'), + allowed_scopes=allowed_scopes, description="Server at %s" % (url_path_join(self.base_url, server_name) + '/'), ) + spawner.orm_spawner.oauth_client = oauth_client db.commit() # trigger pre-spawn hook on authenticator authenticator = self.authenticator - if authenticator: - await 
maybe_future(authenticator.pre_spawn_start(self, spawner)) - - spawner._start_pending = True - # update spawner start time, and activity for both spawner and user - self.last_activity = ( - spawner.orm_spawner.started - ) = spawner.orm_spawner.last_activity = datetime.utcnow() - db.commit() - # wait for spawner.start to return try: + spawner._start_pending = True + + if authenticator: + # pre_spawn_start can throw errors that can lead to a redirect loop + # if left uncaught (see https://github.com/jupyterhub/jupyterhub/issues/2683) + await maybe_future(authenticator.pre_spawn_start(self, spawner)) + + # trigger auth_state hook + auth_state = await self.get_auth_state() + await spawner.run_auth_state_hook(auth_state) + + # update spawner start time, and activity for both spawner and user + self.last_activity = ( + spawner.orm_spawner.started + ) = spawner.orm_spawner.last_activity = datetime.utcnow() + db.commit() + # wait for spawner.start to return # run optional preparation work to bootstrap the notebook await maybe_future(spawner.run_pre_spawn_hook()) if self.settings.get('internal_ssl'): @@ -553,7 +753,12 @@ class User: else: # >= 0.7 returns (ip, port) proto = 'https' if self.settings['internal_ssl'] else 'http' - url = '%s://%s:%i' % ((proto,) + url) + + # check if spawner returned an IPv6 address + if ':' in url[0]: + url = '%s://[%s]:%i' % ((proto,) + url) + else: + url = '%s://%s:%i' % ((proto,) + url) urlinfo = urlparse(url) server.proto = urlinfo.scheme server.ip = urlinfo.hostname @@ -608,21 +813,23 @@ class User: oauth_provider.add_client( client_id, spawner.api_token, - url_path_join(self.url, server_name, 'oauth_callback'), + url_path_join( + self.url, url_escape_path(server_name), 'oauth_callback' + ), ) db.commit() except Exception as e: - if isinstance(e, gen.TimeoutError): + if isinstance(e, AnyTimeoutError): self.log.warning( - "{user}'s server failed to start in {s} seconds, giving up".format( - user=self.name, s=spawner.start_timeout - ) + 
f"{self.name}'s server failed to start" + f" in {spawner.start_timeout} seconds, giving up." + f"\n{start_timeout_message}" ) e.reason = 'timeout' self.settings['statsd'].incr('spawner.failure.timeout') else: - self.log.error( + self.log.exception( "Unhandled error starting {user}'s server: {error}".format( user=self.name, error=e ) @@ -632,7 +839,7 @@ class User: try: await self.stop(spawner.name) except Exception: - self.log.error( + self.log.exception( "Failed to cleanup {user}'s server that failed to start".format( user=self.name ), @@ -670,20 +877,17 @@ class User: http=True, timeout=spawner.http_timeout, ssl_context=ssl_context ) except Exception as e: - if isinstance(e, TimeoutError): + if isinstance(e, AnyTimeoutError): self.log.warning( - "{user}'s server never showed up at {url} " - "after {http_timeout} seconds. Giving up".format( - user=self.name, - url=server.url, - http_timeout=spawner.http_timeout, - ) + f"{self.name}'s server never showed up at {server.url}" + f" after {spawner.http_timeout} seconds. Giving up." 
+ f"\n{http_timeout_message}" ) e.reason = 'timeout' self.settings['statsd'].incr('spawner.failure.http_timeout') else: e.reason = 'error' - self.log.error( + self.log.exception( "Unhandled error waiting for {user}'s server to show up at {url}: {error}".format( user=self.name, url=server.url, error=e ) @@ -692,7 +896,7 @@ class User: try: await self.stop(spawner.name) except Exception: - self.log.error( + self.log.exception( "Failed to cleanup {user}'s server that failed to start".format( user=self.name ), @@ -719,6 +923,7 @@ class User: spawner = self.spawners[server_name] spawner._spawn_pending = False spawner._start_pending = False + spawner._check_pending = False spawner.stop_polling() spawner._stop_pending = True @@ -729,8 +934,6 @@ class User: status = await spawner.poll() if status is None: await spawner.stop() - spawner.clear_state() - spawner.orm_spawner.state = spawner.get_state() self.last_activity = spawner.orm_spawner.last_activity = datetime.utcnow() # remove server entry from db spawner.server = None @@ -741,14 +944,8 @@ class User: if orm_token: self.db.delete(orm_token) # remove oauth client as well - # handle upgrades from 0.8, where client id will be `user-USERNAME`, - # not just `jupyterhub-user-USERNAME` - client_ids = ( - spawner.oauth_client_id, - spawner.oauth_client_id.split('-', 1)[1], - ) - for oauth_client in self.db.query(orm.OAuthClient).filter( - orm.OAuthClient.identifier.in_(client_ids) + for oauth_client in self.db.query(orm.OAuthClient).filter_by( + identifier=spawner.oauth_client_id, ): self.log.debug("Deleting oauth client %s", oauth_client.identifier) self.db.delete(oauth_client) @@ -756,10 +953,18 @@ class User: self.log.debug("Finished stopping %s", spawner._log_name) RUNNING_SERVERS.dec() finally: + spawner.server = None spawner.orm_spawner.started = None self.db.commit() # trigger post-stop hook - await maybe_future(spawner.run_post_stop_hook()) + try: + await maybe_future(spawner.run_post_stop_hook()) + except: + 
self.log.exception("Error in Spawner.post_stop_hook for %s", self) + spawner.clear_state() + spawner.orm_spawner.state = spawner.get_state() + self.db.commit() + # trigger post-spawner hook on authenticator auth = spawner.authenticator try: diff --git a/jupyterhub/utils.py b/jupyterhub/utils.py index 681e8f9a..291c412b 100644 --- a/jupyterhub/utils.py +++ b/jupyterhub/utils.py @@ -4,10 +4,11 @@ import asyncio import concurrent.futures import errno +import functools import hashlib import inspect -import os import random +import secrets import socket import ssl import sys @@ -15,22 +16,37 @@ import threading import uuid import warnings from binascii import b2a_hex -from datetime import datetime -from datetime import timezone +from datetime import datetime, timezone from hmac import compare_digest from operator import itemgetter +from urllib.parse import quote from async_generator import aclosing -from async_generator import async_generator -from async_generator import asynccontextmanager -from async_generator import yield_ -from tornado import gen -from tornado import ioloop -from tornado import web -from tornado.httpclient import AsyncHTTPClient -from tornado.httpclient import HTTPError +from sqlalchemy.exc import SQLAlchemyError +from tornado import gen, ioloop, web +from tornado.httpclient import AsyncHTTPClient, HTTPError from tornado.log import app_log -from tornado.platform.asyncio import to_asyncio_future + + +# Deprecated aliases: no longer needed now that we require 3.7 +def asyncio_all_tasks(loop=None): + warnings.warn( + "jupyterhub.utils.asyncio_all_tasks is deprecated in JupyterHub 2.4." + " Use asyncio.all_tasks().", + DeprecationWarning, + stacklevel=2, + ) + return asyncio.all_tasks(loop=loop) + + +def asyncio_current_task(loop=None): + warnings.warn( + "jupyterhub.utils.asyncio_current_task is deprecated in JupyterHub 2.4." 
+ " Use asyncio.current_task().", + DeprecationWarning, + stacklevel=2, + ) + return asyncio.current_task(loop=loop) def random_port(): @@ -50,7 +66,7 @@ ISO8601_s = '%Y-%m-%dT%H:%M:%SZ' def isoformat(dt): """Render a datetime object as an ISO 8601 UTC timestamp - Naïve datetime objects are assumed to be UTC + Naive datetime objects are assumed to be UTC """ # allow null timestamps to remain None without # having to check if isoformat should be called @@ -66,11 +82,11 @@ def can_connect(ip, port): Return True if we can connect, False otherwise. """ - if ip in {'', '0.0.0.0'}: + if ip in {'', '0.0.0.0', '::'}: ip = '127.0.0.1' try: socket.create_connection((ip, port)).close() - except socket.error as e: + except OSError as e: if e.errno not in {errno.ECONNREFUSED, errno.ETIMEDOUT}: app_log.error("Unexpected error connecting to %s:%i %s", ip, port, e) return False @@ -78,18 +94,60 @@ def can_connect(ip, port): return True -def make_ssl_context(keyfile, certfile, cafile=None, verify=True, check_hostname=True): +def make_ssl_context( + keyfile, + certfile, + cafile=None, + verify=None, + check_hostname=None, + purpose=ssl.Purpose.SERVER_AUTH, +): """Setup context for starting an https server or making requests over ssl. + + Used for verifying internal ssl connections. + Certificates are always verified in both directions. + Hostnames are checked for client sockets. + + Client sockets are created with `purpose=ssl.Purpose.SERVER_AUTH` (default), + Server sockets are created with `purpose=ssl.Purpose.CLIENT_AUTH`. """ if not keyfile or not certfile: return None - purpose = ssl.Purpose.SERVER_AUTH if verify else ssl.Purpose.CLIENT_AUTH + if verify is not None: + purpose = ssl.Purpose.SERVER_AUTH if verify else ssl.Purpose.CLIENT_AUTH + warnings.warn( + f"make_ssl_context(verify={verify}) is deprecated in jupyterhub 2.4." 
+ f" Use make_ssl_context(purpose={purpose!s}).", + DeprecationWarning, + stacklevel=2, + ) + if check_hostname is not None: + purpose = ssl.Purpose.SERVER_AUTH if check_hostname else ssl.Purpose.CLIENT_AUTH + warnings.warn( + f"make_ssl_context(check_hostname={check_hostname}) is deprecated in jupyterhub 2.4." + f" Use make_ssl_context(purpose={purpose!s}).", + DeprecationWarning, + stacklevel=2, + ) + ssl_context = ssl.create_default_context(purpose, cafile=cafile) + # always verify + ssl_context.verify_mode = ssl.CERT_REQUIRED + + if purpose == ssl.Purpose.SERVER_AUTH: + # SERVER_AUTH is authenticating servers (i.e. for a client) + ssl_context.check_hostname = True + ssl_context.load_default_certs() + ssl_context.load_cert_chain(certfile, keyfile) ssl_context.check_hostname = check_hostname return ssl_context +# AnyTimeoutError catches TimeoutErrors coming from asyncio, tornado, stdlib +AnyTimeoutError = (gen.TimeoutError, asyncio.TimeoutError, TimeoutError) + + async def exponential_backoff( pass_func, fail_message, @@ -99,7 +157,7 @@ async def exponential_backoff( timeout=10, timeout_tolerance=0.1, *args, - **kwargs + **kwargs, ): """ Exponentially backoff until `pass_func` is true. 
@@ -172,14 +230,15 @@ async def exponential_backoff( # this prevents overloading any single tornado loop iteration with # too many things dt = min(max_wait, remaining, random.uniform(0, start_wait * scale)) - scale *= scale_factor - await gen.sleep(dt) - raise TimeoutError(fail_message) + if dt < max_wait: + scale *= scale_factor + await asyncio.sleep(dt) + raise asyncio.TimeoutError(fail_message) async def wait_for_server(ip, port, timeout=10): """Wait for any server to show up at ip:port.""" - if ip in {'', '0.0.0.0'}: + if ip in {'', '0.0.0.0', '::'}: ip = '127.0.0.1' await exponential_backoff( lambda: can_connect(ip, port), @@ -217,7 +276,7 @@ async def wait_for_http_server(url, timeout=10, ssl_context=None): else: app_log.debug("Server at %s responded with %s", url, e.code) return e.response - except (OSError, socket.error) as e: + except OSError as e: if e.errno not in { errno.ECONNABORTED, errno.ECONNREFUSED, @@ -246,9 +305,10 @@ def auth_decorator(check_auth): def decorator(method): def decorated(self, *args, **kwargs): - check_auth(self) + check_auth(self, **kwargs) return method(self, *args, **kwargs) + # Perhaps replace with functools.wrap decorated.__name__ = method.__name__ decorated.__doc__ = method.__doc__ return decorated @@ -279,20 +339,39 @@ def authenticated_403(self): raise web.HTTPError(403) -@auth_decorator -def admin_only(self): - """Decorator for restricting access to admin users""" - user = self.current_user - if user is None or not user.admin: - raise web.HTTPError(403) +def admin_only(f): + """Deprecated!""" + # write it this way to trigger deprecation warning at decoration time, + # not on the method call + warnings.warn( + """@jupyterhub.utils.admin_only is deprecated in JupyterHub 2.0. + + Use the new `@jupyterhub.scopes.needs_scope` decorator to resolve permissions, + or check against `self.current_user.parsed_scopes`. 
+ """, + DeprecationWarning, + stacklevel=2, + ) + + # the original decorator + @auth_decorator + def admin_only(self): + """Decorator for restricting access to admin users""" + user = self.current_user + if user is None or not user.admin: + raise web.HTTPError(403) + + return admin_only(f) @auth_decorator def metrics_authentication(self): """Decorator for restricting access to metrics""" - user = self.current_user - if user is None and self.authenticate_prometheus: - raise web.HTTPError(403) + if not self.authenticate_prometheus: + return + scope = 'read:metrics' + if scope not in self.parsed_scopes: + raise web.HTTPError(403, f"Access to metrics requires scope '{scope}'") # Token utilities @@ -313,7 +392,7 @@ def hash_token(token, salt=8, rounds=16384, algorithm='sha512'): """ h = hashlib.new(algorithm) if isinstance(salt, int): - salt = b2a_hex(os.urandom(salt)) + salt = b2a_hex(secrets.token_bytes(salt)) if isinstance(salt, bytes): bsalt = salt salt = salt.decode('utf8') @@ -325,7 +404,7 @@ def hash_token(token, salt=8, rounds=16384, algorithm='sha512'): h.update(btoken) digest = h.hexdigest() - return "{algorithm}:{rounds}:{salt}:{digest}".format(**locals()) + return f"{algorithm}:{rounds}:{salt}:{digest}" def compare_token(compare, token): @@ -343,6 +422,11 @@ def compare_token(compare, token): return False +def url_escape_path(value): + """Escape a value to be used in URLs, cookies, etc.""" + return quote(value, safe='@~') + + def url_path_join(*pieces): """Join components of url into a relative url. 
@@ -443,9 +527,8 @@ def print_stacks(file=sys.stderr): """ # local imports because these will not be used often, # no need to add them to startup - import asyncio - import resource import traceback + from .log import coroutine_frames print("Active threads: %i" % threading.active_count(), file=file) @@ -478,7 +561,7 @@ def print_stacks(file=sys.stderr): # also show asyncio tasks, if any # this will increase over time as we transition from tornado # coroutines to native `async def` - tasks = asyncio.Task.all_tasks() + tasks = asyncio_all_tasks() if tasks: print("AsyncIO tasks: %i" % len(tasks)) for task in tasks: @@ -506,28 +589,12 @@ def maybe_future(obj): return asyncio.wrap_future(obj) else: # could also check for tornado.concurrent.Future - # but with tornado >= 5 tornado.Future is asyncio.Future + # but with tornado >= 5.1 tornado.Future is asyncio.Future f = asyncio.Future() f.set_result(obj) return f -@asynccontextmanager -@async_generator -async def not_aclosing(coro): - """An empty context manager for Python < 3.5.2 - which lacks the `aclose` method on async iterators - """ - await yield_(await coro) - - -if sys.version_info < (3, 5, 2): - # Python 3.5.1 is missing the aclose method on async iterators, - # so we can't close them - aclosing = not_aclosing - - -@async_generator async def iterate_until(deadline_future, generator): """An async generator that yields items from a generator until a deadline future resolves @@ -552,7 +619,7 @@ async def iterate_until(deadline_future, generator): ) if item_future.done(): try: - await yield_(item_future.result()) + yield item_future.result() except (StopAsyncIteration, asyncio.CancelledError): break elif deadline_future.done(): @@ -580,7 +647,7 @@ def utcnow(): def _parse_accept_header(accept): """ Parse the Accept header *accept* - + Return a list with 3-tuples of [(str(media_type), dict(params), float(q_value)),] ordered by q values. 
If the accept header includes vendor-specific types like:: @@ -618,7 +685,7 @@ def _parse_accept_header(accept): media_params.append(('vendor', vnd)) # and re-write media_type to something like application/json so # it can be used usefully when looking up emitters - media_type = '{}/{}'.format(typ, extra) + media_type = f'{typ}/{extra}' q = 1.0 for part in parts: @@ -652,3 +719,62 @@ def get_accepted_mimetype(accept_header, choices=None): else: return mime return None + + +def catch_db_error(f): + """Catch and rollback database errors""" + + @functools.wraps(f) + async def catching(self, *args, **kwargs): + try: + r = f(self, *args, **kwargs) + if inspect.isawaitable(r): + r = await r + except SQLAlchemyError: + self.log.exception("Rolling back session due to database error") + self.db.rollback() + else: + return r + + return catching + + +def get_browser_protocol(request): + """Get the _protocol_ seen by the browser + + Like tornado's _apply_xheaders, + but in the case of multiple proxy hops, + use the outermost value (what the browser likely sees) + instead of the innermost value, + which is the most trustworthy. + + We care about what the browser sees, + not where the request actually came from, + so trusting possible spoofs is the right thing to do. 
+ """ + headers = request.headers + # first choice: Forwarded header + forwarded_header = headers.get("Forwarded") + if forwarded_header: + first_forwarded = forwarded_header.split(",", 1)[0].strip() + fields = {} + forwarded_dict = {} + for field in first_forwarded.split(";"): + key, _, value = field.partition("=") + fields[key.strip().lower()] = value.strip() + if "proto" in fields and fields["proto"].lower() in {"http", "https"}: + return fields["proto"].lower() + else: + app_log.warning( + f"Forwarded header present without protocol: {forwarded_header}" + ) + + # second choice: X-Scheme or X-Forwarded-Proto + proto_header = headers.get("X-Scheme", headers.get("X-Forwarded-Proto", None)) + if proto_header: + proto_header = proto_header.split(",")[0].strip().lower() + if proto_header in {"http", "https"}: + return proto_header + + # no forwarded headers + return request.protocol diff --git a/onbuild/Dockerfile b/onbuild/Dockerfile index ad941aac..93655f4f 100644 --- a/onbuild/Dockerfile +++ b/onbuild/Dockerfile @@ -1,12 +1,12 @@ # JupyterHub Dockerfile that loads your jupyterhub_config.py # -# Adds ONBUILD step to jupyter/jupyterhub to load your juptyerhub_config.py into the image +# Adds ONBUILD step to jupyter/jupyterhub to load your jupyterhub_config.py into the image # # Derivative images must have jupyterhub_config.py next to the Dockerfile. -ARG BASE_IMAGE=jupyterhub/jupyterhub -FROM ${BASE_IMAGE} +ARG BASE_IMAGE=jupyterhub/jupyterhub:latest +FROM $BASE_IMAGE -ONBUILD ADD jupyterhub_config.py /srv/jupyterhub/jupyterhub_config.py +ONBUILD COPY jupyterhub_config.py /srv/jupyterhub/jupyterhub_config.py CMD ["jupyterhub", "-f", "/srv/jupyterhub/jupyterhub_config.py"] diff --git a/onbuild/README.md b/onbuild/README.md index 8964110d..e1d6696e 100644 --- a/onbuild/README.md +++ b/onbuild/README.md @@ -2,7 +2,7 @@ If you base a Dockerfile on this image: - FROM juptyerhub/jupyterhub-onbuild:0.6 + FROM jupyterhub/jupyterhub-onbuild:1.4.0 ... 
then your `jupyterhub_config.py` adjacent to your Dockerfile will be loaded into the image and used by JupyterHub. diff --git a/package.json b/package.json index ff6c5dab..fab5d46d 100644 --- a/package.json +++ b/package.json @@ -3,13 +3,13 @@ "version": "0.0.0", "description": "JupyterHub nodejs dependencies", "author": "Jupyter Developers", - "license": "BSD", + "license": "BSD-3-Clause", "repository": { "type": "git", "url": "https://github.com/jupyter/jupyterhub.git" }, "scripts": { - "postinstall": "python ./bower-lite", + "postinstall": "python3 ./bower-lite", "fmt": "prettier --write --trailing-comma es5 share/jupyterhub/static/js/*", "lessc": "lessc" }, @@ -21,8 +21,8 @@ "dependencies": { "bootstrap": "^3.4.1", "font-awesome": "^4.7.0", - "jquery": "^3.3.1", - "moment": "^2.24.0", + "jquery": "^3.5.1", + "moment": "^2.29.4", "requirejs": "^2.3.6" } } diff --git a/pyproject.toml b/pyproject.toml index 0097e9f6..054c6f1c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,2 +1,77 @@ +# autoflake is used for autoformatting Python code +# +# ref: https://github.com/PyCQA/autoflake#readme +# +[tool.autoflake] +ignore-init-module-imports = true +remove-all-unused-imports = true +remove-duplicate-keys = true +#remove-unused-variables = true + + +# isort is used for autoformatting Python code +# +# ref: https://pycqa.github.io/isort/ +# +[tool.isort] +profile = "black" + + +# black is used for autoformatting Python code +# +# ref: https://black.readthedocs.io/en/stable/ +# [tool.black] skip-string-normalization = true +# target-version should be all supported versions, see +# https://github.com/psf/black/issues/751#issuecomment-473066811 +target_version = [ + "py37", + "py38", + "py39", + "py310", +] + + +# tbump is used to simplify and standardize the release process when updating +# the version, making a git commit and tag, and pushing changes. 
+# +# ref: https://github.com/your-tools/tbump#readme +# +[tool.tbump] +# Uncomment this if your project is hosted on GitHub: +github_url = "https://github.com/jupyterhub/jupyterhub" + +[tool.tbump.version] +current = "3.1.0.dev" + +# Example of a semver regexp. +# Make sure this matches current_version before +# using tbump +regex = ''' + (?P\d+) + \. + (?P\d+) + \. + (?P\d+) + (?P
((a|b|rc)\d+)|)
+  \.?
+  (?P(?<=\.)dev\d*|)
+  '''
+
+[tool.tbump.git]
+message_template = "Bump to {new_version}"
+tag_template = "{new_version}"
+
+# For each file to patch, add a [[tool.tbump.file]] config
+# section containing the path of the file, relative to the
+# pyproject.toml location.
+
+[[tool.tbump.file]]
+src = "jupyterhub/_version.py"
+version_template = '({major}, {minor}, {patch}, "{pre}", "{dev}")'
+search = "version_info = {current_version}"
+
+[[tool.tbump.file]]
+src = "docs/source/_static/rest-api.yml"
+search = "version: {current_version}"
diff --git a/pytest.ini b/pytest.ini
index e3af6b27..82c19dcd 100644
--- a/pytest.ini
+++ b/pytest.ini
@@ -3,6 +3,12 @@
 # so we have to disable this until pytest 3.11
 # minversion = 3.3
 
+# automatically run coroutine tests with asyncio
+asyncio_mode = auto
+
+# jupyter_server plugin is incompatible with notebook imports
+addopts = -p no:jupyter_server -m 'not selenium'
+
 python_files = test_*.py
 markers =
     gen_test: marks an async tornado test
@@ -10,3 +16,5 @@ markers =
     services: mark as a services test
     user: mark as a test for a user
     slow: mark a test as slow
+    role: mark as a test for roles
+    selenium: web tests that run with selenium
diff --git a/readthedocs.yml b/readthedocs.yml
deleted file mode 100644
index 3a1802a6..00000000
--- a/readthedocs.yml
+++ /dev/null
@@ -1,11 +0,0 @@
-name: jupyterhub
-type: sphinx
-conda:
-     file: docs/environment.yml
-python:
-  version: 3
-formats:
-  - htmlzip
-  - epub
-  # pdf disabled due to bug in sphinx 1.8 + recommonmark
-  # - pdf
diff --git a/requirements.txt b/requirements.txt
index ad40788f..21de4a74 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,13 +1,16 @@
-alembic
-async_generator>=1.8
+alembic>=1.4
+async_generator>=1.9
 certipy>=0.1.2
-entrypoints
-jinja2
+importlib_metadata>=3.6; python_version < '3.10'
+jinja2>=2.11.0
+jupyter_telemetry>=0.1.0
 oauthlib>=3.0
-pamela
-prometheus_client>=0.0.21
+packaging
+pamela; sys_platform != 'win32'
+prometheus_client>=0.4.0
+psutil>=5.6.5; sys_platform == 'win32'
 python-dateutil
 requests
 SQLAlchemy>=1.1
-tornado>=5.0
+tornado>=5.1
 traitlets>=4.3.2
diff --git a/setup.py b/setup.py
index 21b5ab4a..93c6fec7 100755
--- a/setup.py
+++ b/setup.py
@@ -1,27 +1,18 @@
 #!/usr/bin/env python3
-# coding: utf-8
-# Copyright (c) Juptyer Development Team.
+# Copyright (c) Jupyter Development Team.
 # Distributed under the terms of the Modified BSD License.
 # -----------------------------------------------------------------------------
 # Minimal Python version sanity check (from IPython)
 # -----------------------------------------------------------------------------
-from __future__ import print_function
-
 import os
 import shutil
 import sys
-from glob import glob
 from subprocess import check_call
 
-from setuptools import setup
+from setuptools import Command, setup
 from setuptools.command.bdist_egg import bdist_egg
-
-
-v = sys.version_info
-if v[:2] < (3, 5):
-    error = "ERROR: JupyterHub requires Python version 3.5 or above."
-    print(error, file=sys.stderr)
-    sys.exit(1)
+from setuptools.command.build_py import build_py
+from setuptools.command.sdist import sdist
 
 shell = False
 if os.name in ('nt', 'dos'):
@@ -47,10 +38,9 @@ def get_data_files():
     """Get data files in share/jupyter"""
 
     data_files = []
-    ntrim = len(here + os.path.sep)
-
     for (d, dirs, filenames) in os.walk(share_jupyterhub):
-        data_files.append((d[ntrim:], [pjoin(d, f) for f in filenames]))
+        rel_d = os.path.relpath(d, here)
+        data_files.append((rel_d, [os.path.join(rel_d, f) for f in filenames]))
     return data_files
 
 
@@ -95,12 +85,13 @@ setup_args = dict(
     license="BSD",
     platforms="Linux, Mac OS X",
     keywords=['Interactive', 'Interpreter', 'Shell', 'Web'],
-    python_requires=">=3.5",
+    python_requires=">=3.7",
     entry_points={
         'jupyterhub.authenticators': [
             'default = jupyterhub.auth:PAMAuthenticator',
             'pam = jupyterhub.auth:PAMAuthenticator',
             'dummy = jupyterhub.auth:DummyAuthenticator',
+            'null = jupyterhub.auth:NullAuthenticator',
         ],
         'jupyterhub.proxies': [
             'default = jupyterhub.proxy:ConfigurableHTTPProxy',
@@ -130,23 +121,48 @@ setup_args = dict(
         'Source': 'https://github.com/jupyterhub/jupyterhub/',
         'Tracker': 'https://github.com/jupyterhub/jupyterhub/issues',
     },
+    extras_require={
+        "test": [
+            "beautifulsoup4[html5lib]",
+            "coverage",
+            # cryptography is an optional dependency for jupyterhub that we test
+            # against by default
+            "cryptography",
+            "jsonschema",
+            "jupyterlab>=3",
+            "mock",
+            # nbclassic provides the '/tree/' handler that we tests against in
+            # the test test_nbclassic_control_panel.
+            "nbclassic",
+            "pytest>=3.3",
+            "pytest-asyncio>=0.17",
+            "pytest-cov",
+            "requests-mock",
+            "selenium",
+            "virtualenv",
+        ],
+    },
 )
 
-# ---------------------------------------------------------------------------
-# custom distutils commands
-# ---------------------------------------------------------------------------
-
-# imports here, so they are after setuptools import if there was one
-from distutils.cmd import Command
-from distutils.command.build_py import build_py
-from distutils.command.sdist import sdist
-
 
 def mtime(path):
     """shorthand for mtime"""
     return os.stat(path).st_mtime
 
 
+def recursive_mtime(path):
+    """Recursively get newest mtime of files"""
+    if os.path.isfile(path):
+        return mtime(path)
+    current = 0
+    for dirname, _, filenames in os.walk(path):
+        if filenames:
+            current = max(
+                current, max(mtime(os.path.join(dirname, f)) for f in filenames)
+            )
+    return current
+
+
 class BaseCommand(Command):
     """Dumb empty command because Command needs subclasses to override too much"""
 
@@ -173,9 +189,6 @@ class NPM(BaseCommand):
     bower_dir = pjoin(static, 'components')
 
     def should_run(self):
-        if not shutil.which('npm'):
-            print("npm unavailable", file=sys.stderr)
-            return False
         if not os.path.exists(self.bower_dir):
             return True
         if not os.path.exists(self.node_modules):
@@ -200,6 +213,7 @@ class NPM(BaseCommand):
         os.utime(self.bower_dir)
         # update data-files in case this created new files
         self.distribution.data_files = get_data_files()
+        assert not self.should_run(), 'NPM.run failed'
 
 
 class CSS(BaseCommand):
@@ -246,8 +260,8 @@ class CSS(BaseCommand):
             'lessc',
             '--',
             '--clean-css',
-            '--source-map-basepath={}'.format(static),
-            '--source-map={}'.format(sourcemap),
+            f'--source-map-basepath={static}',
+            f'--source-map={sourcemap}',
             '--source-map-rootpath=../',
             style_less,
             style_css,
@@ -260,6 +274,67 @@ class CSS(BaseCommand):
             raise
         # update data-files in case this created new files
         self.distribution.data_files = get_data_files()
+        assert not self.should_run(), 'CSS.run failed'
+
+
+class JSX(BaseCommand):
+    description = "build admin app"
+
+    jsx_dir = pjoin(here, 'jsx')
+    js_target = pjoin(static, 'js', 'admin-react.js')
+
+    def should_run(self):
+        if os.getenv('READTHEDOCS'):
+            # yarn not available on RTD
+            return False
+
+        if not os.path.exists(self.js_target):
+            return True
+
+        js_target_mtime = mtime(self.js_target)
+        jsx_mtime = recursive_mtime(self.jsx_dir)
+        if js_target_mtime < jsx_mtime:
+            return True
+        return False
+
+    def run(self):
+        if not self.should_run():
+            print("JSX admin app is up to date")
+            return
+
+        # jlpm is a version of yarn bundled with JupyterLab
+        if shutil.which('yarn'):
+            yarn = 'yarn'
+        elif shutil.which('jlpm'):
+            print("yarn not found, using jlpm")
+            yarn = 'jlpm'
+        else:
+            raise Exception('JSX needs to be updated but yarn is not installed')
+
+        print("Installing JSX admin app requirements")
+        check_call(
+            [yarn],
+            cwd=self.jsx_dir,
+            shell=shell,
+        )
+
+        print("Building JSX admin app")
+        check_call(
+            [yarn, 'build'],
+            cwd=self.jsx_dir,
+            shell=shell,
+        )
+
+        print("Copying JSX admin app to static/js")
+        check_call(
+            [yarn, 'place'],
+            cwd=self.jsx_dir,
+            shell=shell,
+        )
+
+        # update data-files in case this created new files
+        self.distribution.data_files = get_data_files()
+        assert not self.should_run(), 'JSX.run failed'
 
 
 def js_css_first(cls, strict=True):
@@ -268,6 +343,7 @@ def js_css_first(cls, strict=True):
             try:
                 self.run_command('js')
                 self.run_command('css')
+                self.run_command('jsx')
             except Exception:
                 if strict:
                     raise
@@ -294,6 +370,7 @@ class bdist_egg_disabled(bdist_egg):
 setup_args['cmdclass'] = {
     'js': NPM,
     'css': CSS,
+    'jsx': JSX,
     'build_py': js_css_first(build_py, strict=is_repo),
     'sdist': js_css_first(sdist, strict=True),
     'bdist_egg': bdist_egg if 'bdist_egg' in sys.argv else bdist_egg_disabled,
@@ -311,7 +388,7 @@ class develop_js_css(develop):
         if not self.uninstall:
             self.distribution.run_command('js')
             self.distribution.run_command('css')
-        develop.run(self)
+        super().run()
 
 
 setup_args['cmdclass']['develop'] = develop_js_css
diff --git a/setupegg.py b/setupegg.py
deleted file mode 100755
index fa537b36..00000000
--- a/setupegg.py
+++ /dev/null
@@ -1,7 +0,0 @@
-#!/usr/bin/env python
-"""Wrapper to run setup.py using setuptools."""
-# Import setuptools and call the actual setup
-import setuptools
-
-with open('setup.py', 'rb') as f:
-    exec(compile(f.read(), 'setup.py', 'exec'))
diff --git a/share/jupyterhub/static/js/admin.js b/share/jupyterhub/static/js/admin.js
index 63680b30..89af21ac 100644
--- a/share/jupyterhub/static/js/admin.js
+++ b/share/jupyterhub/static/js/admin.js
@@ -1,12 +1,11 @@
 // Copyright (c) Jupyter Development Team.
 // Distributed under the terms of the Modified BSD License.
 
-require(["jquery", "bootstrap", "moment", "jhapi", "utils"], function(
+require(["jquery", "moment", "jhapi", "utils"], function (
   $,
-  bs,
   moment,
   JHAPI,
-  utils
+  utils,
 ) {
   "use strict";
 
@@ -52,41 +51,41 @@ require(["jquery", "bootstrap", "moment", "jhapi", "utils"], function(
     window.location = window.location.pathname + "?" + query.join("&");
   }
 
-  $("th").map(function(i, th) {
+  $("th").map(function (i, th) {
     th = $(th);
     var col = th.data("sort");
     if (!col || col.length === 0) {
       return;
     }
     var order = th.find("i").hasClass("fa-sort-desc") ? "asc" : "desc";
-    th.find("a").click(function() {
+    th.find("a").click(function () {
       resort(col, order);
     });
   });
 
-  $(".time-col").map(function(i, el) {
+  $(".time-col").map(function (i, el) {
     // convert ISO datestamps to nice momentjs ones
     el = $(el);
     var m = moment(new Date(el.text().trim()));
     el.text(m.isValid() ? m.fromNow() : "Never");
   });
 
-  $(".stop-server").click(function() {
+  $(".stop-server").click(function () {
     var el = $(this);
     var row = getRow(el);
     var serverName = row.data("server-name");
     var user = row.data("user");
     el.text("stopping...");
-    var stop = function(options) {
+    var stop = function (options) {
       return api.stop_server(user, options);
     };
     if (serverName !== "") {
-      stop = function(options) {
+      stop = function (options) {
         return api.stop_named_server(user, serverName, options);
       };
     }
     stop({
-      success: function() {
+      success: function () {
         el.text("stop " + serverName).addClass("hidden");
         row.find(".access-server").addClass("hidden");
         row.find(".start-server").removeClass("hidden");
@@ -94,63 +93,63 @@ require(["jquery", "bootstrap", "moment", "jhapi", "utils"], function(
     });
   });
 
-  $(".delete-server").click(function() {
+  $(".delete-server").click(function () {
     var el = $(this);
     var row = getRow(el);
     var serverName = row.data("server-name");
     var user = row.data("user");
     el.text("deleting...");
     api.delete_named_server(user, serverName, {
-      success: function() {
+      success: function () {
         row.remove();
       },
     });
   });
 
-  $(".access-server").map(function(i, el) {
+  $(".access-server").map(function (i, el) {
     el = $(el);
     var row = getRow(el);
     var user = row.data("user");
     var serverName = row.data("server-name");
     el.attr(
       "href",
-      utils.url_path_join(prefix, "user", user, serverName) + "/"
+      utils.url_path_join(prefix, "user", user, serverName) + "/",
     );
   });
 
   if (admin_access && options_form) {
     // if admin access and options form are enabled
     // link to spawn page instead of making API requests
-    $(".start-server").map(function(i, el) {
+    $(".start-server").map(function (i, el) {
       el = $(el);
       var row = getRow(el);
       var user = row.data("user");
       var serverName = row.data("server-name");
       el.attr(
         "href",
-        utils.url_path_join(prefix, "hub/spawn", user, serverName)
+        utils.url_path_join(prefix, "hub/spawn", user, serverName),
       );
     });
     // cannot start all servers in this case
     // since it would mean opening a bunch of tabs
     $("#start-all-servers").addClass("hidden");
   } else {
-    $(".start-server").click(function() {
+    $(".start-server").click(function () {
       var el = $(this);
       var row = getRow(el);
       var user = row.data("user");
       var serverName = row.data("server-name");
       el.text("starting...");
-      var start = function(options) {
+      var start = function (options) {
         return api.start_server(user, options);
       };
       if (serverName !== "") {
-        start = function(options) {
+        start = function (options) {
           return api.start_named_server(user, serverName, options);
         };
       }
       start({
-        success: function() {
+        success: function () {
           el.text("start " + serverName).addClass("hidden");
           row.find(".stop-server").removeClass("hidden");
           row.find(".access-server").removeClass("hidden");
@@ -159,7 +158,7 @@ require(["jquery", "bootstrap", "moment", "jhapi", "utils"], function(
     });
   }
 
-  $(".edit-user").click(function() {
+  $(".edit-user").click(function () {
     var el = $(this);
     var row = getRow(el);
     var user = row.data("user");
@@ -173,7 +172,7 @@ require(["jquery", "bootstrap", "moment", "jhapi", "utils"], function(
 
   $("#edit-user-dialog")
     .find(".save-button")
-    .click(function() {
+    .click(function () {
       var dialog = $("#edit-user-dialog");
       var user = dialog.data("user");
       var name = dialog.find(".username-input").val();
@@ -185,14 +184,14 @@ require(["jquery", "bootstrap", "moment", "jhapi", "utils"], function(
           name: name,
         },
         {
-          success: function() {
+          success: function () {
             window.location.reload();
           },
-        }
+        },
       );
     });
 
-  $(".delete-user").click(function() {
+  $(".delete-user").click(function () {
     var el = $(this);
     var row = getRow(el);
     var user = row.data("user");
@@ -203,18 +202,18 @@ require(["jquery", "bootstrap", "moment", "jhapi", "utils"], function(
 
   $("#delete-user-dialog")
     .find(".delete-button")
-    .click(function() {
+    .click(function () {
       var dialog = $("#delete-user-dialog");
       var username = dialog.find(".delete-username").text();
       console.log("deleting", username);
       api.delete_user(username, {
-        success: function() {
+        success: function () {
           window.location.reload();
         },
       });
     });
 
-  $("#add-users").click(function() {
+  $("#add-users").click(function () {
     var dialog = $("#add-users-dialog");
     dialog.find(".username-input").val("");
     dialog.find(".admin-checkbox").prop("checked", false);
@@ -223,15 +222,12 @@ require(["jquery", "bootstrap", "moment", "jhapi", "utils"], function(
 
   $("#add-users-dialog")
     .find(".save-button")
-    .click(function() {
+    .click(function () {
       var dialog = $("#add-users-dialog");
-      var lines = dialog
-        .find(".username-input")
-        .val()
-        .split("\n");
+      var lines = dialog.find(".username-input").val().split("\n");
       var admin = dialog.find(".admin-checkbox").prop("checked");
       var usernames = [];
-      lines.map(function(line) {
+      lines.map(function (line) {
         var username = line.trim();
         if (username.length) {
           usernames.push(username);
@@ -242,47 +238,45 @@ require(["jquery", "bootstrap", "moment", "jhapi", "utils"], function(
         usernames,
         { admin: admin },
         {
-          success: function() {
+          success: function () {
             window.location.reload();
           },
-        }
+        },
       );
     });
 
-  $("#stop-all-servers").click(function() {
+  $("#stop-all-servers").click(function () {
     $("#stop-all-servers-dialog").modal();
   });
 
-  $("#start-all-servers").click(function() {
+  $("#start-all-servers").click(function () {
     $("#start-all-servers-dialog").modal();
   });
 
   $("#stop-all-servers-dialog")
     .find(".stop-all-button")
-    .click(function() {
+    .click(function () {
       // stop all clicks all the active stop buttons
-      $(".stop-server")
-        .not(".hidden")
-        .click();
+      $(".stop-server").not(".hidden").click();
     });
 
   function start(el) {
-    return function() {
+    return function () {
       $(el).click();
     };
   }
 
   $("#start-all-servers-dialog")
     .find(".start-all-button")
-    .click(function() {
+    .click(function () {
       $(".start-server")
         .not(".hidden")
-        .each(function(i) {
+        .each(function (i) {
           setTimeout(start(this), i * 500);
         });
     });
 
-  $("#shutdown-hub").click(function() {
+  $("#shutdown-hub").click(function () {
     var dialog = $("#shutdown-hub-dialog");
     dialog.find("input[type=checkbox]").prop("checked", true);
     dialog.modal();
@@ -290,7 +284,7 @@ require(["jquery", "bootstrap", "moment", "jhapi", "utils"], function(
 
   $("#shutdown-hub-dialog")
     .find(".shutdown-button")
-    .click(function() {
+    .click(function () {
       var dialog = $("#shutdown-hub-dialog");
       var servers = dialog.find(".shutdown-servers-checkbox").prop("checked");
       var proxy = dialog.find(".shutdown-proxy-checkbox").prop("checked");
diff --git a/share/jupyterhub/static/js/home.js b/share/jupyterhub/static/js/home.js
index e81b6690..f4b30cf3 100644
--- a/share/jupyterhub/static/js/home.js
+++ b/share/jupyterhub/static/js/home.js
@@ -1,16 +1,10 @@
 // Copyright (c) Jupyter Development Team.
 // Distributed under the terms of the Modified BSD License.
 
-require(["jquery", "moment", "jhapi", "utils"], function(
-  $,
-  moment,
-  JHAPI,
-  utils
-) {
+require(["jquery", "moment", "jhapi"], function ($, moment, JHAPI) {
   "use strict";
 
   var base_url = window.jhdata.base_url;
-  var prefix = window.jhdata.prefix;
   var user = window.jhdata.user;
   var api = new JHAPI(base_url);
 
@@ -24,10 +18,7 @@ require(["jquery", "moment", "jhapi", "utils"], function(
   }
 
   function disableRow(row) {
-    row
-      .find(".btn")
-      .attr("disabled", true)
-      .off("click");
+    row.find(".btn").attr("disabled", true).off("click");
   }
 
   function enableRow(row, running) {
@@ -50,6 +41,17 @@ require(["jquery", "moment", "jhapi", "utils"], function(
     }
   }
 
+  function startServer() {
+    var row = getRow($(this));
+    var serverName = row.find(".new-server-name").val();
+    if (serverName === "") {
+      // ../spawn/user/ causes a 404, ../spawn/user redirects correctly to the default server
+      window.location.href = "./spawn/" + user;
+    } else {
+      window.location.href = "./spawn/" + user + "/" + serverName;
+    }
+  }
+
   function stopServer() {
     var row = getRow($(this));
     var serverName = row.data("server-name");
@@ -59,7 +61,7 @@ require(["jquery", "moment", "jhapi", "utils"], function(
 
     // request
     api.stop_named_server(user, serverName, {
-      success: function() {
+      success: function () {
         enableRow(row, false);
       },
     });
@@ -74,43 +76,45 @@ require(["jquery", "moment", "jhapi", "utils"], function(
 
     // request
     api.delete_named_server(user, serverName, {
-      success: function() {
+      success: function () {
         row.remove();
       },
     });
   }
 
   // initial state: hook up click events
-  $("#stop").click(function() {
+  $("#stop").click(function () {
     $("#start")
       .attr("disabled", true)
       .attr("title", "Your server is stopping")
-      .click(function() {
+      .click(function () {
         return false;
       });
     api.stop_server(user, {
-      success: function() {
+      success: function () {
         $("#stop").hide();
         $("#start")
           .text("Start My Server")
           .attr("title", "Start your default server")
           .attr("disabled", false)
+          .attr("href", base_url + "spawn/" + user)
           .off("click");
       },
     });
   });
 
-  $(".new-server-btn").click(function() {
-    var row = getRow($(this));
-    var serverName = row.find(".new-server-name").val();
-    window.location.href = "../spawn/" + user + "/" + serverName;
+  $(".new-server-btn").click(startServer);
+  $(".new-server-name").on("keypress", function (e) {
+    if (e.which === 13) {
+      startServer.call(this);
+    }
   });
 
   $(".stop-server").click(stopServer);
   $(".delete-server").click(deleteServer);
 
   // render timestamps
-  $(".time-col").map(function(i, el) {
+  $(".time-col").map(function (i, el) {
     // convert ISO datestamps to nice momentjs ones
     el = $(el);
     var m = moment(new Date(el.text().trim()));
diff --git a/share/jupyterhub/static/js/jhapi.js b/share/jupyterhub/static/js/jhapi.js
index c2d597ea..ba3c7a53 100644
--- a/share/jupyterhub/static/js/jhapi.js
+++ b/share/jupyterhub/static/js/jhapi.js
@@ -1,10 +1,10 @@
 // Copyright (c) Jupyter Development Team.
 // Distributed under the terms of the Modified BSD License.
 
-define(["jquery", "utils"], function($, utils) {
+define(["jquery", "utils"], function ($, utils) {
   "use strict";
 
-  var JHAPI = function(base_url) {
+  var JHAPI = function (base_url) {
     this.base_url = base_url;
   };
 
@@ -18,76 +18,76 @@ define(["jquery", "utils"], function($, utils) {
     error: utils.ajax_error_dialog,
   };
 
-  var update = function(d1, d2) {
-    $.map(d2, function(i, key) {
+  var update = function (d1, d2) {
+    $.map(d2, function (i, key) {
       d1[key] = d2[key];
     });
     return d1;
   };
 
-  var ajax_defaults = function(options) {
+  var ajax_defaults = function (options) {
     var d = {};
     update(d, default_options);
     update(d, options);
     return d;
   };
 
-  JHAPI.prototype.api_request = function(path, options) {
+  JHAPI.prototype.api_request = function (path, options) {
     options = options || {};
     options = ajax_defaults(options || {});
     var url = utils.url_path_join(
       this.base_url,
       "api",
-      utils.encode_uri_components(path)
+      utils.encode_uri_components(path),
     );
     $.ajax(url, options);
   };
 
-  JHAPI.prototype.start_server = function(user, options) {
+  JHAPI.prototype.start_server = function (user, options) {
     options = options || {};
     options = update(options, { type: "POST", dataType: null });
     this.api_request(utils.url_path_join("users", user, "server"), options);
   };
 
-  JHAPI.prototype.start_named_server = function(user, server_name, options) {
+  JHAPI.prototype.start_named_server = function (user, server_name, options) {
     options = options || {};
     options = update(options, { type: "POST", dataType: null });
     this.api_request(
       utils.url_path_join("users", user, "servers", server_name),
-      options
+      options,
     );
   };
 
-  JHAPI.prototype.stop_server = function(user, options) {
+  JHAPI.prototype.stop_server = function (user, options) {
     options = options || {};
     options = update(options, { type: "DELETE", dataType: null });
     this.api_request(utils.url_path_join("users", user, "server"), options);
   };
 
-  JHAPI.prototype.stop_named_server = function(user, server_name, options) {
+  JHAPI.prototype.stop_named_server = function (user, server_name, options) {
     options = options || {};
     options = update(options, { type: "DELETE", dataType: null });
     this.api_request(
       utils.url_path_join("users", user, "servers", server_name),
-      options
+      options,
     );
   };
 
-  JHAPI.prototype.delete_named_server = function(user, server_name, options) {
+  JHAPI.prototype.delete_named_server = function (user, server_name, options) {
     options = options || {};
     options.data = JSON.stringify({ remove: true });
     return this.stop_named_server(user, server_name, options);
   };
 
-  JHAPI.prototype.list_users = function(options) {
+  JHAPI.prototype.list_users = function (options) {
     this.api_request("users", options);
   };
 
-  JHAPI.prototype.get_user = function(user, options) {
+  JHAPI.prototype.get_user = function (user, options) {
     this.api_request(utils.url_path_join("users", user), options);
   };
 
-  JHAPI.prototype.add_users = function(usernames, userinfo, options) {
+  JHAPI.prototype.add_users = function (usernames, userinfo, options) {
     options = options || {};
     var data = update(userinfo, { usernames: usernames });
     options = update(options, {
@@ -99,7 +99,7 @@ define(["jquery", "utils"], function($, utils) {
     this.api_request("users", options);
   };
 
-  JHAPI.prototype.edit_user = function(user, userinfo, options) {
+  JHAPI.prototype.edit_user = function (user, userinfo, options) {
     options = options || {};
     options = update(options, {
       type: "PATCH",
@@ -110,7 +110,7 @@ define(["jquery", "utils"], function($, utils) {
     this.api_request(utils.url_path_join("users", user), options);
   };
 
-  JHAPI.prototype.admin_access = function(user, options) {
+  JHAPI.prototype.admin_access = function (user, options) {
     options = options || {};
     options = update(options, {
       type: "POST",
@@ -119,17 +119,17 @@ define(["jquery", "utils"], function($, utils) {
 
     this.api_request(
       utils.url_path_join("users", user, "admin-access"),
-      options
+      options,
     );
   };
 
-  JHAPI.prototype.delete_user = function(user, options) {
+  JHAPI.prototype.delete_user = function (user, options) {
     options = options || {};
     options = update(options, { type: "DELETE", dataType: null });
     this.api_request(utils.url_path_join("users", user), options);
   };
 
-  JHAPI.prototype.request_token = function(user, props, options) {
+  JHAPI.prototype.request_token = function (user, props, options) {
     options = options || {};
     options = update(options, { type: "POST" });
     if (props) {
@@ -138,16 +138,16 @@ define(["jquery", "utils"], function($, utils) {
     this.api_request(utils.url_path_join("users", user, "tokens"), options);
   };
 
-  JHAPI.prototype.revoke_token = function(user, token_id, options) {
+  JHAPI.prototype.revoke_token = function (user, token_id, options) {
     options = options || {};
     options = update(options, { type: "DELETE" });
     this.api_request(
       utils.url_path_join("users", user, "tokens", token_id),
-      options
+      options,
     );
   };
 
-  JHAPI.prototype.shutdown_hub = function(data, options) {
+  JHAPI.prototype.shutdown_hub = function (data, options) {
     options = options || {};
     options = update(options, { type: "POST" });
     if (data) {
diff --git a/share/jupyterhub/static/js/not_running.js b/share/jupyterhub/static/js/not_running.js
new file mode 100644
index 00000000..3195d65d
--- /dev/null
+++ b/share/jupyterhub/static/js/not_running.js
@@ -0,0 +1,13 @@
+// Copyright (c) Jupyter Development Team.
+// Distributed under the terms of the Modified BSD License.
+
+require(["jquery", "utils"], function ($, utils) {
+  "use strict";
+
+  var hash = utils.parse_url(window.location.href).hash;
+  if (hash !== undefined && hash !== "") {
+    var el = $("#start");
+    var current_spawn_url = el.attr("href");
+    el.attr("href", current_spawn_url + hash);
+  }
+});
diff --git a/share/jupyterhub/static/js/token.js b/share/jupyterhub/static/js/token.js
index e7c0a992..96ac559d 100644
--- a/share/jupyterhub/static/js/token.js
+++ b/share/jupyterhub/static/js/token.js
@@ -1,34 +1,39 @@
 // Copyright (c) Jupyter Development Team.
 // Distributed under the terms of the Modified BSD License.
 
-require(["jquery", "jhapi", "moment"], function($, JHAPI, moment) {
+require(["jquery", "jhapi", "moment"], function ($, JHAPI, moment) {
   "use strict";
 
   var base_url = window.jhdata.base_url;
   var user = window.jhdata.user;
   var api = new JHAPI(base_url);
 
-  $(".time-col").map(function(i, el) {
+  $(".time-col").map(function (i, el) {
     // convert ISO datestamps to nice momentjs ones
     el = $(el);
     var m = moment(new Date(el.text().trim()));
     el.text(m.isValid() ? m.fromNow() : el.text());
   });
 
-  $("#request-token-form").submit(function() {
+  $("#request-token-form").submit(function () {
     var note = $("#token-note").val();
     if (!note.length) {
       note = "Requested via token page";
     }
+    var expiration_seconds =
+      parseInt($("#token-expiration-seconds").val()) || null;
     api.request_token(
       user,
-      { note: note },
       {
-        success: function(reply) {
+        note: note,
+        expires_in: expiration_seconds,
+      },
+      {
+        success: function (reply) {
           $("#token-result").text(reply.token);
           $("#token-area").show();
         },
-      }
+      },
     );
     return false;
   });
@@ -40,12 +45,12 @@ require(["jquery", "jhapi", "moment"], function($, JHAPI, moment) {
     return element;
   }
 
-  $(".revoke-token-btn").click(function() {
+  $(".revoke-token-btn").click(function () {
     var el = $(this);
     var row = get_token_row(el);
     el.attr("disabled", true);
     api.revoke_token(user, row.data("token-id"), {
-      success: function(reply) {
+      success: function (reply) {
         row.remove();
       },
     });
diff --git a/share/jupyterhub/static/js/utils.js b/share/jupyterhub/static/js/utils.js
index 63ed0bd0..b57c72a0 100644
--- a/share/jupyterhub/static/js/utils.js
+++ b/share/jupyterhub/static/js/utils.js
@@ -2,13 +2,13 @@
 // Original Copyright (c) IPython Development Team.
 // Distributed under the terms of the Modified BSD License.
 
-// Modifications Copyright (c) Juptyer Development Team.
+// Modifications Copyright (c) Jupyter Development Team.
 // Distributed under the terms of the Modified BSD License.
 
-define(["jquery"], function($) {
+define(["jquery"], function ($) {
   "use strict";
 
-  var url_path_join = function() {
+  var url_path_join = function () {
     // join a sequence of url components with '/'
     var url = "";
     for (var i = 0; i < arguments.length; i++) {
@@ -25,7 +25,7 @@ define(["jquery"], function($) {
     return url;
   };
 
-  var parse_url = function(url) {
+  var parse_url = function (url) {
     // an `a` element with an href allows attr-access to the parsed segments of a URL
     // a = parse_url("http://localhost:8888/path/name#hash")
     // a.protocol = "http:"
@@ -39,29 +39,24 @@ define(["jquery"], function($) {
     return a;
   };
 
-  var encode_uri_components = function(uri) {
+  var encode_uri_components = function (uri) {
     // encode just the components of a multi-segment uri,
     // leaving '/' separators
-    return uri
-      .split("/")
-      .map(encodeURIComponent)
-      .join("/");
+    return uri.split("/").map(encodeURIComponent).join("/");
   };
 
-  var url_join_encode = function() {
+  var url_join_encode = function () {
     // join a sequence of url components with '/',
     // encoding each component with encodeURIComponent
     return encode_uri_components(url_path_join.apply(null, arguments));
   };
 
-  var escape_html = function(text) {
+  var escape_html = function (text) {
     // escape text to HTML
-    return $("
") - .text(text) - .html(); + return $("
").text(text).html(); }; - var get_body_data = function(key) { + var get_body_data = function (key) { // get a url-encoded item from body.data and decode it // we should never have any encoded URLs anywhere else in code // until we are building an actual request @@ -69,7 +64,7 @@ define(["jquery"], function($) { }; // http://stackoverflow.com/questions/2400935/browser-detection-in-javascript - var browser = (function() { + var browser = (function () { if (typeof navigator === "undefined") { // navigator undefined in node return "None"; @@ -78,7 +73,7 @@ define(["jquery"], function($) { ua = navigator.userAgent, tem; var M = ua.match( - /(opera|chrome|safari|firefox|msie)\/?\s*(\.?\d+(\.\d+)*)/i + /(opera|chrome|safari|firefox|msie)\/?\s*(\.?\d+(\.\d+)*)/i, ); if (M && (tem = ua.match(/version\/([\.\d]+)/i)) !== null) M[2] = tem[1]; M = M ? [M[1], M[2]] : [N, navigator.appVersion, "-?"]; @@ -86,7 +81,7 @@ define(["jquery"], function($) { })(); // http://stackoverflow.com/questions/11219582/how-to-detect-my-browser-version-and-operating-system-using-javascript - var platform = (function() { + var platform = (function () { if (typeof navigator === "undefined") { // navigator undefined in node return "None"; @@ -99,7 +94,7 @@ define(["jquery"], function($) { return OSName; })(); - var ajax_error_msg = function(jqXHR) { + var ajax_error_msg = function (jqXHR) { // Return a JSON error message if there is one, // otherwise the basic HTTP status text. 
if (jqXHR.responseJSON && jqXHR.responseJSON.message) { @@ -109,7 +104,7 @@ define(["jquery"], function($) { } }; - var log_ajax_error = function(jqXHR, status, error) { + var log_ajax_error = function (jqXHR, status, error) { // log ajax failures with informative messages var msg = "API request failed (" + jqXHR.status + "): "; console.log(jqXHR); @@ -118,7 +113,7 @@ define(["jquery"], function($) { return msg; }; - var ajax_error_dialog = function(jqXHR, status, error) { + var ajax_error_dialog = function (jqXHR, status, error) { console.log("ajax dialog", arguments); var msg = log_ajax_error(jqXHR, status, error); var dialog = $("#error-dialog"); diff --git a/share/jupyterhub/static/less/admin.less b/share/jupyterhub/static/less/admin.less index 70a262d9..0995b019 100644 --- a/share/jupyterhub/static/less/admin.less +++ b/share/jupyterhub/static/less/admin.less @@ -1,3 +1,12 @@ i.sort-icon { margin-left: 4px; } + +tr.pagination-row > td.pagination-page-info { + vertical-align: middle; +} + +.version_footer { + bottom: 0; + width: 100%; +} diff --git a/share/jupyterhub/static/less/login.less b/share/jupyterhub/static/less/login.less index d0040908..2853749f 100644 --- a/share/jupyterhub/static/less/login.less +++ b/share/jupyterhub/static/less/login.less @@ -1,59 +1,62 @@ #login-main { - display: table; - height: 80vh; + display: table; + height: 80vh; - & #insecure-login-warning{ - .bg-warning(); - padding:10px; - } + & #insecure-login-warning { + .bg-warning(); + padding: 10px; + } - .service-login { - text-align: center; - display: table-cell; - vertical-align: middle; - margin: auto auto 20% auto; - } + .service-login { + text-align: center; + display: table-cell; + vertical-align: middle; + margin: auto auto 20% auto; + } - form { - display: table-cell; - vertical-align: middle; - margin: auto auto 20% auto; - width: 350px; - font-size: large; - } + form { + display: table-cell; + vertical-align: middle; + margin: auto auto 20% auto; + width: 350px; + 
font-size: large; + } - .input-group, input[type=text], button { - width: 100%; - } + .input-group, + input[type="text"], + button { + width: 100%; + } - input[type=submit] { - margin-top: 16px; - } + input[type="submit"] { + margin-top: 0px; + } - .form-control:focus, input[type=submit]:focus { - box-shadow: inset 0 1px 1px rgba(0,0,0,.075), 0 0 8px @jupyter-orange; - border-color: @jupyter-orange; - outline-color: @jupyter-orange; - } + .form-control:focus, + input[type="submit"]:focus { + box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075), 0 0 8px @jupyter-orange; + border-color: @jupyter-orange; + outline-color: @jupyter-orange; + } - .login_error { - color: orangered; - font-weight: bold; - text-align: center; - } + .login_error { + color: orangered; + font-weight: bold; + text-align: center; + } - .auth-form-header { - padding: 10px 20px; - color: #fff; - background: @jupyter-orange; - border-radius: @border-radius-large @border-radius-large 0 0; - } + .auth-form-header { + padding: 10px 20px; + color: #fff; + background: @jupyter-orange; + border-radius: @border-radius-large @border-radius-large 0 0; + } - .auth-form-body { - padding: 20px; - font-size: 14px; - border: thin silver solid; - border-top: none; - border-radius: 0 0 @border-radius-large @border-radius-large; - } + .auth-form-body { + padding: 20px; + font-size: 14px; + border: thin silver solid; + border-top: none; + border-radius: 0 0 @border-radius-large @border-radius-large; + } } diff --git a/share/jupyterhub/static/less/page.less b/share/jupyterhub/static/less/page.less index 6193b357..e8848b4e 100644 --- a/share/jupyterhub/static/less/page.less +++ b/share/jupyterhub/static/less/page.less @@ -1,8 +1,30 @@ +@import "../components/bootstrap/less/variables.less"; + @logo-height: 28px; -.jpy-logo { - height: @logo-height; - margin-top: (@navbar-height - @logo-height) / 2; +#jupyterhub-logo { + @media (max-width: @grid-float-breakpoint) { + // same length as the navbar-toggle element, displayed on 
responsive mode + margin-left: 15px; + } + .jpy-logo { + height: @logo-height; + margin-top: (@navbar-height - @logo-height) / 2; + } +} + +.navbar-right { + li { + span { + // same as .nav > li > a from bootstrap, but applied to the span[id="login_widget"] + // or any other span that matches .nav > li > span, but only in responsive mode + @media (max-width: @grid-float-breakpoint) { + position: relative; + display: block; + padding: 10px 15px; + } + } + } } #header { @@ -26,3 +48,17 @@ // .progress-log-event:hover { // background: rgba(66, 165, 245, 0.2); // } + +.feedback { + &-container { + margin-top: 16px; + } + + &-widget { + padding: 5px 0px 0px 6px; + i { + font-size: 2em; + color: lightgrey; + } + } +} diff --git a/share/jupyterhub/static/less/style.less b/share/jupyterhub/static/less/style.less index d74d7efc..8cd185b8 100644 --- a/share/jupyterhub/static/less/style.less +++ b/share/jupyterhub/static/less/style.less @@ -12,7 +12,7 @@ * */ @import "../components/font-awesome/less/font-awesome.less"; -@fa-font-path: "../components/font-awesome/fonts"; +@fa-font-path: "../components/font-awesome/fonts"; /*! 
* diff --git a/share/jupyterhub/static/less/variables.less b/share/jupyterhub/static/less/variables.less index 88295e47..de11782d 100644 --- a/share/jupyterhub/static/less/variables.less +++ b/share/jupyterhub/static/less/variables.less @@ -4,8 +4,8 @@ @navbar-height: 40px; @grid-float-breakpoint: @screen-xs-min; -@jupyter-orange: #F37524; -@jupyter-red: #E34F21; +@jupyter-orange: #f37524; +@jupyter-red: #e34f21; // color blind-friendly alternative to red/green // from 5-class RdYlBu via colorbrewer.org // eliminate distinction between 'primary' and 'success' diff --git a/share/jupyterhub/templates/admin.html b/share/jupyterhub/templates/admin.html index 12edbdcd..a5ff53ad 100644 --- a/share/jupyterhub/templates/admin.html +++ b/share/jupyterhub/templates/admin.html @@ -1,161 +1,21 @@ {% extends "page.html" %} -{% macro th(label, key='', colspan=1) %} -{{label}} - {% if key %} - - - {% endif %} - -{% endmacro %} - {% block main %} - -
- - - - {% block thead %} - {{ th("User (%i)" % users|length, 'name') }} - {{ th("Admin", 'admin') }} - {{ th("Last Activity", 'last_activity') }} - {{ th("Running (%i)" % running|length, 'running', colspan=2) }} - {% endblock thead %} - - - - - - - {% for user in users %} - {% for spawner in user.all_spawners() %} - - {% block user_row scoped %} - - - - - - - - - - - - - {% endblock user_row %} - {% endfor %} - {% endfor %} - -
- Add Users - - Start All - Stop All - - Shutdown Hub -
{{user.name}} - {%- if spawner.name -%} - /{{ spawner.name }} - {%- endif -%} - - {%- if spawner.name == '' -%} - {% if user.admin %}admin{% endif %} - {%- endif -%} - - {%- if spawner.last_activity -%} - {{ spawner.last_activity.isoformat() + 'Z' }} - {%- else -%} - Never - {%- endif -%} - - - stop server - - - start server - - - {%- if admin_access %} - - access server - - {%- endif %} - - {%- if spawner.name == '' -%} - edit user - {%- endif -%} - - {%- if spawner.name == '' -%} - {#- user row -#} - {%- if user.name != current_user.name -%} - delete user - {%- endif -%} - {%- else -%} - {#- named spawner row -#} - delete server - {%- endif -%} -
+
+ +
+{% endblock %} -{% call modal('Delete User', btn_class='btn-danger delete-button') %} - Are you sure you want to delete user USER? - This operation cannot be undone. -{% endcall %} - -{% call modal('Stop All Servers', btn_label='Stop All', btn_class='btn-danger stop-all-button') %} - Are you sure you want to stop all your users' servers? Kernels will be shutdown and unsaved data may be lost. -{% endcall %} - -{% call modal('Start All Servers', btn_label='Start All', btn_class='btn-primary start-all-button') %} - Are you sure you want to start all servers? This can slam your server resources. -{% endcall %} - -{% call modal('Shutdown Hub', btn_label='Shutdown', btn_class='btn-danger shutdown-button') %} - Are you sure you want to shutdown the Hub? - You can choose to leave the proxy and/or single-user servers running by unchecking the boxes below: -
- +{% block footer %} +
@@ -56,6 +71,7 @@ Note Last used Created + Expires at @@ -77,6 +93,13 @@ N/A {%- endif -%} + + {%- if token.expires_at -%} + {{ token.expires_at.isoformat() + 'Z' }} + {%- else -%} + Never + {%- endif -%} + diff --git a/singleuser/Dockerfile b/singleuser/Dockerfile index 6b9115f2..ef7090ae 100644 --- a/singleuser/Dockerfile +++ b/singleuser/Dockerfile @@ -6,7 +6,6 @@ FROM $BASE_IMAGE MAINTAINER Project Jupyter ADD install_jupyterhub /tmp/install_jupyterhub -ARG JUPYTERHUB_VERSION=master -# install pinned jupyterhub and ensure notebook is installed -RUN python3 /tmp/install_jupyterhub && \ - python3 -m pip install notebook +ARG JUPYTERHUB_VERSION=git:HEAD +# install pinned jupyterhub +RUN python3 /tmp/install_jupyterhub diff --git a/singleuser/README.md b/singleuser/README.md index 9801b6dd..c47d6270 100644 --- a/singleuser/README.md +++ b/singleuser/README.md @@ -5,7 +5,7 @@ Built from the `jupyter/base-notebook` base image. This image contains a single user notebook server for use with [JupyterHub](https://github.com/jupyterhub/jupyterhub). In particular, it is meant to be used with the -[DockerSpawner](https://github.com/jupyterhub/dockerspawner/blob/master/dockerspawner/dockerspawner.py) +[DockerSpawner](https://github.com/jupyterhub/dockerspawner/blob/HEAD/dockerspawner/dockerspawner.py) class to launch user notebook servers within docker containers. The only thing this image accomplishes is pinning the jupyterhub version on top of base-notebook. @@ -31,6 +31,6 @@ This particular image runs as the `jovyan` user, with home directory at `/home/j ## Note on persistence -This home directory, `/home/jovyan`, is *not* persistent by default, +This home directory, `/home/jovyan`, is _not_ persistent by default, so some configuration is required unless the directory is to be used with temporary or demonstration JupyterHub deployments. 
diff --git a/singleuser/hooks/build b/singleuser/hooks/build deleted file mode 100644 index a0a77621..00000000 --- a/singleuser/hooks/build +++ /dev/null @@ -1,11 +0,0 @@ -#!/bin/bash -set -ex - -stable=0.9 - -for V in master $stable; do - docker build --build-arg JUPYTERHUB_VERSION=$V -t $DOCKER_REPO:$V . -done - -echo "tagging $IMAGE_NAME" -docker tag $DOCKER_REPO:$stable $IMAGE_NAME diff --git a/singleuser/hooks/post_push b/singleuser/hooks/post_push deleted file mode 100644 index 95c3cb01..00000000 --- a/singleuser/hooks/post_push +++ /dev/null @@ -1,31 +0,0 @@ -#!/bin/bash -set -ex - -stable=0.9 -for V in master $stable; do - docker push $DOCKER_REPO:$V -done - -function get_hub_version() { - rm -f hub_version - V=$1 - docker run --rm -v $PWD:/version -u $(id -u) -i $DOCKER_REPO:$V sh -c 'jupyterhub --version > /version/hub_version' - hub_xyz=$(cat hub_version) - split=( ${hub_xyz//./ } ) - hub_xy="${split[0]}.${split[1]}" - # add .dev on hub_xy so it's 1.0.dev - if [[ ! -z "${split[3]:-}" ]]; then - hub_xy="${hub_xy}.${split[3]}" - fi -} -# tag e.g. 0.8.1 with 0.8 -get_hub_version $stable -docker tag $DOCKER_REPO:$stable $DOCKER_REPO:$hub_xyz -docker push $DOCKER_REPO:$hub_xyz - -# tag e.g. 
0.9 with master -get_hub_version master -docker tag $DOCKER_REPO:master $DOCKER_REPO:$hub_xy -docker push $DOCKER_REPO:$hub_xy -docker tag $DOCKER_REPO:master $DOCKER_REPO:$hub_xyz -docker push $DOCKER_REPO:$hub_xyz diff --git a/singleuser/install_jupyterhub b/singleuser/install_jupyterhub index b4ea2d42..cf20bff2 100644 --- a/singleuser/install_jupyterhub +++ b/singleuser/install_jupyterhub @@ -3,19 +3,22 @@ import os from subprocess import check_call import sys -V = os.environ['JUPYTERHUB_VERSION'] +version = os.environ['JUPYTERHUB_VERSION'] pip_install = [ - sys.executable, '-m', 'pip', 'install', '--no-cache', '--upgrade', - '--upgrade-strategy', 'only-if-needed', + sys.executable, + '-m', + 'pip', + 'install', + '--no-cache', + '--upgrade', + '--upgrade-strategy', + 'only-if-needed', ] -if V == 'master': - req = 'https://github.com/jupyterhub/jupyterhub/archive/master.tar.gz' +if version.startswith("git:"): + ref = version.partition(":")[-1] + req = f"https://github.com/jupyterhub/jupyterhub/archive/{ref}.tar.gz" else: - version_info = [ int(part) for part in V.split('.') ] - version_info[-1] += 1 - upper_bound = '.'.join(map(str, version_info)) - vs = '>=%s,<%s' % (V, upper_bound) - req = 'jupyterhub%s' % vs + req = f"jupyterhub=={version}" check_call(pip_install + [req]) diff --git a/tools/tasks.py b/tools/tasks.py deleted file mode 100644 index dbbc89f4..00000000 --- a/tools/tasks.py +++ /dev/null @@ -1,240 +0,0 @@ -#!/usr/bin/env python3 -""" -invoke script for releasing jupyterhub - -usage: - - invoke release 1.2.3 [--upload] - -This does: - -- clone into /tmp/jupyterhub-repo -- patches version.py with release version -- creates tag (push if uploading) -- makes a virtualenv with python3.4 (PYTHON_EXE env to override) -- builds an sdist (optionally uploads) -- patches version.py with post-release version (X.Y+1.Z.dev) (push if uploading) -- unpacks sdist to /tmp/jupyterhub-release -- builds bdist_wheel from sdist (optional upload) - -""" -# derived from 
PyZMQ release/tasks.py (used under BSD) -# Copyright (c) Jupyter Developers -# Distributed under the terms of the Modified BSD License. -import glob -import os -import pipes -import shutil -from contextlib import contextmanager -from distutils.version import LooseVersion as V - -from invoke import run as invoke_run -from invoke import task - -pjoin = os.path.join -here = os.path.dirname(__file__) - -repo = "git@github.com:jupyter/jupyterhub" -pkg = repo.rsplit('/', 1)[-1] - -py_exe = os.environ.get('PYTHON_EXE', 'python3.4') - -tmp = "/tmp" -env_root = os.path.join(tmp, 'envs') -repo_root = pjoin(tmp, '%s-repo' % pkg) -sdist_root = pjoin(tmp, '%s-release' % pkg) - - -def run(cmd, **kwargs): - """wrapper around invoke.run that accepts a Popen list""" - if isinstance(cmd, list): - cmd = " ".join(pipes.quote(s) for s in cmd) - kwargs.setdefault('echo', True) - return invoke_run(cmd, **kwargs) - - -@contextmanager -def cd(path): - """Context manager for temporary CWD""" - cwd = os.getcwd() - os.chdir(path) - try: - yield - finally: - os.chdir(cwd) - - -@task -def clone_repo(reset=False): - """Clone the repo""" - if os.path.exists(repo_root) and reset: - shutil.rmtree(repo_root) - if os.path.exists(repo_root): - with cd(repo_root): - run("git pull") - else: - run("git clone %s %s" % (repo, repo_root)) - - -@task -def patch_version(vs, path=pjoin(here, '..')): - """Patch zmq/sugar/version.py for the current release""" - v = parse_vs(vs) - version_py = pjoin(path, 'jupyterhub', 'version.py') - print("patching %s with %s" % (version_py, vs)) - # read version.py, minus version parts - with open(version_py) as f: - pre_lines = [] - post_lines = [] - for line in f: - pre_lines.append(line) - if line.startswith("version_info"): - break - for line in f: - if line.startswith(')'): - post_lines.append(line) - break - for line in f: - post_lines.append(line) - - # write new version.py - with open(version_py, 'w') as f: - for line in pre_lines: - f.write(line) - for part in v: - 
f.write(' %r,\n' % part) - for line in post_lines: - f.write(line) - - # verify result - ns = {} - with open(version_py) as f: - exec(f.read(), {}, ns) - assert ns['__version__'] == vs, "%r != %r" % (ns['__version__'], vs) - - -@task -def tag(vs, push=False): - """Make the tagged release commit""" - patch_version(vs, repo_root) - with cd(repo_root): - run('git commit -a -m "release {}"'.format(vs)) - run('git tag -a -m "release {0}" {0}'.format(vs)) - if push: - run('git push') - run('git push --tags') - - -@task -def untag(vs, push=False): - """Make the post-tag 'back to dev' commit""" - v2 = parse_vs(vs) - v2.append('dev') - v2[1] += 1 - v2[2] = 0 - vs2 = unparse_vs(v2) - patch_version(vs2, repo_root) - with cd(repo_root): - run('git commit -a -m "back to dev"') - if push: - run('git push') - - -def make_env(*packages): - """Make a virtualenv - - Assumes `which python` has the `virtualenv` package - """ - if not os.path.exists(env_root): - os.makedirs(env_root) - - env = os.path.join(env_root, os.path.basename(py_exe)) - py = pjoin(env, 'bin', 'python') - # new env - if not os.path.exists(py): - run( - 'python -m virtualenv {} -p {}'.format( - pipes.quote(env), pipes.quote(py_exe) - ) - ) - py = pjoin(env, 'bin', 'python') - run([py, '-V']) - install(py, 'pip', 'setuptools') - if packages: - install(py, *packages) - return py - - -def build_sdist(py): - """Build sdists - - Returns the path to the tarball - """ - with cd(repo_root): - cmd = [py, 'setup.py', 'sdist', '--formats=gztar'] - run(cmd) - - return glob.glob(pjoin(repo_root, 'dist', '*.tar.gz'))[0] - - -@task -def sdist(vs, upload=False): - clone_repo() - tag(vs, push=upload) - py = make_env() - tarball = build_sdist(py) - if upload: - with cd(repo_root): - install(py, 'twine') - run([py, '-m', 'twine', 'upload', 'dist/*']) - - untag(vs, push=upload) - return untar(tarball) - - -def install(py, *packages): - run([py, '-m', 'pip', 'install', '--upgrade'] + list(packages)) - - -def parse_vs(vs): - """version 
string to list""" - return V(vs).version - - -def unparse_vs(tup): - """version list to string""" - return '.'.join(map(str, tup)) - - -def untar(tarball): - """extract sdist, returning path to unpacked package directory""" - if os.path.exists(sdist_root): - shutil.rmtree(sdist_root) - os.makedirs(sdist_root) - with cd(sdist_root): - run(['tar', '-xzf', tarball]) - - return glob.glob(pjoin(sdist_root, '*'))[0] - - -def bdist(): - """build a wheel, optionally uploading it""" - py = make_env('wheel') - run([py, 'setup.py', 'bdist_wheel']) - - -@task -def release(vs, upload=False): - """Release the package""" - # start from scrach with clone and envs - clone_repo(reset=True) - if os.path.exists(env_root): - shutil.rmtree(env_root) - - path = sdist(vs, upload=upload) - print("Working in %r" % path) - with cd(path): - bdist() - if upload: - py = make_env('twine') - run([py, '-m', 'twine', 'upload', 'dist/*'])