Mirror of https://github.com/jupyterhub/jupyterhub.git, synced 2025-10-14 13:33:00 +00:00
Compare commits
50 Commits
69bb34b943, 728fbc68e0, 0dad9a3f39, 41f291c0c9, 9a5b11d5e1, b47159b31e, bbe377b70a, 374a3a7b36, 32c493e5ab, edfd363758,
d72a5ca3e4, 3a6309a570, 588407200f, 5cc36a6809, 5733eb76c2, d9719e3538, 7c91fbea93, 5076745085, 39eea2f053, 998f5d7b6c,
fc6cd33ce0, b0b8e2d058, 6bfa402bfa, b51a0bba92, 2d3f962a1d, 625242136a, f92560fed0, 8249ef69f0, c63605425f, 5b57900c0b,
d0afdabd4c, 618746fa00, e7bc6c2ba9, e9f86cd602, 6e8517f795, 5fa540bea1, 99f597887c, 352526c36a, cbbed04eed, b2756fb18c,
37b88029e4, 4b7413184e, 41ef0da180, a4a8b3fa2c, 02e5984f34, b91c5a489c, c47c3b2f9e, eaa1353dcd, b9a3b0a66a, 929b805fae
.github/workflows/test.yml (new file, vendored, 206 lines)
@@ -0,0 +1,206 @@

# This is a GitHub workflow defining a set of jobs with a set of steps.
# ref: https://docs.github.com/en/free-pro-team@latest/actions/reference/workflow-syntax-for-github-actions
#
name: Run tests

# Trigger the workflow on all PRs, but only on pushed tags or commits to the
# main/master branch, to avoid triggering on PRs developed in a GitHub fork's
# dedicated branch.
on:
  pull_request:
  push:

defaults:
  run:
    # Declare that bash should be used by default in this workflow's "run" steps.
    #
    # NOTE: bash will by default run with:
    #   --noprofile: Ignore ~/.profile etc.
    #   --norc: Ignore ~/.bashrc etc.
    #   -e: Exit directly on errors
    #   -o pipefail: Don't mask errors from a command piped into another command
    shell: bash

env:
  # UTF-8 content may be interpreted as ascii and cause errors without this.
  LANG: C.UTF-8

jobs:
  # Run "pre-commit run --all-files"
  pre-commit:
    runs-on: ubuntu-20.04
    timeout-minutes: 2

    steps:
      - uses: actions/checkout@v2
      - uses: actions/setup-python@v2
        with:
          python-version: 3.8

      # ref: https://github.com/pre-commit/action
      - uses: pre-commit/action@v2.0.0
      - name: Help message if pre-commit fails
        if: ${{ failure() }}
        run: |
          echo "You can install pre-commit hooks to automatically run formatting"
          echo "on each commit with:"
          echo "    pre-commit install"
          echo "or you can run by hand on staged files with"
          echo "    pre-commit run"
          echo "or after-the-fact on already committed files with"
          echo "    pre-commit run --all-files"

  # Run "pytest jupyterhub/tests" in various configurations
  pytest:
    runs-on: ubuntu-20.04
    timeout-minutes: 10

    strategy:
      # Keep running even if one variation of the job fails
      fail-fast: false
      matrix:
        # We run this job multiple times with the different parameterizations
        # specified below; these parameters have no meaning on their own and
        # only gain meaning through how the job steps use them.
        #
        # subdomain:
        #   Tests everything when JupyterHub is configured to add routes for
        #   users with dedicated subdomains like user1.jupyter.example.com
        #   rather than jupyter.example.com/user/user1.
        #
        # db: [mysql/postgres]
        #   Tests everything when JupyterHub works against a dedicated mysql or
        #   postgresql server.
        #
        # jupyter_server:
        #   Tests everything when the user instances are started with
        #   jupyter_server instead of notebook.
        #
        # main_dependencies:
        #   Tests everything when we use the latest available dependencies
        #   from: traitlets.
        #
        # NOTE: Since only the value of these parameters is presented in the
        #       GitHub UI when the workflow runs, we avoid using true/false as
        #       values by instead duplicating the name to signal true.
        include:
          - python: "3.6"
            subdomain: subdomain
          - python: "3.7"
            db: mysql
          - python: "3.8"
            db: postgres
          - python: "3.8"
            jupyter_server: jupyter_server
          - python: "3.9"
            main_dependencies: main_dependencies

    steps:
      # NOTE: In GitHub workflows, environment variables are set by writing
      #       assignment statements to a file. They will be set in the following
      #       steps as if one had used `export MY_ENV=my-value`.
      - name: Configure environment variables
        run: |
          if [ "${{ matrix.subdomain }}" != "" ]; then
              echo "JUPYTERHUB_TEST_SUBDOMAIN_HOST=http://localhost.jovyan.org:8000" >> $GITHUB_ENV
          fi
          if [ "${{ matrix.db }}" == "mysql" ]; then
              echo "MYSQL_HOST=127.0.0.1" >> $GITHUB_ENV
              echo "JUPYTERHUB_TEST_DB_URL=mysql+mysqlconnector://root@127.0.0.1:3306/jupyterhub" >> $GITHUB_ENV
          fi
          if [ "${{ matrix.db }}" == "postgres" ]; then
              echo "PGHOST=127.0.0.1" >> $GITHUB_ENV
              echo "PGUSER=test_user" >> $GITHUB_ENV
              echo "PGPASSWORD=hub[test/:?" >> $GITHUB_ENV
              echo "JUPYTERHUB_TEST_DB_URL=postgresql://test_user:hub%5Btest%2F%3A%3F@127.0.0.1:5432/jupyterhub" >> $GITHUB_ENV
          fi
          if [ "${{ matrix.jupyter_server }}" != "" ]; then
              echo "JUPYTERHUB_SINGLEUSER_APP=jupyterhub.tests.mockserverapp.MockServerApp" >> $GITHUB_ENV
          fi
      - uses: actions/checkout@v2

      # NOTE: actions/setup-node@v1 makes use of a cache within the GitHub base
      #       environment and sets up in a fraction of a second.
      - name: Install Node v14
        uses: actions/setup-node@v1
        with:
          node-version: "14"
      - name: Install Node dependencies
        run: |
          npm install
          npm install -g configurable-http-proxy
          npm list

      # NOTE: actions/setup-python@v2 makes use of a cache within the GitHub base
      #       environment and sets up in a fraction of a second.
      - name: Install Python ${{ matrix.python }}
        uses: actions/setup-python@v2
        with:
          python-version: ${{ matrix.python }}
      - name: Install Python dependencies
        run: |
          pip install --upgrade pip
          pip install --upgrade . -r dev-requirements.txt

          if [ "${{ matrix.main_dependencies }}" != "" ]; then
              pip install git+https://github.com/ipython/traitlets#egg=traitlets --force
          fi
          if [ "${{ matrix.jupyter_server }}" != "" ]; then
              pip uninstall notebook --yes
              pip install jupyter_server
          fi
          if [ "${{ matrix.db }}" == "mysql" ]; then
              pip install mysql-connector-python
          fi
          if [ "${{ matrix.db }}" == "postgres" ]; then
              pip install psycopg2-binary
          fi

          pip freeze

      # NOTE: If you need to debug this DB setup step, consider the following.
      #
      # 1. mysql/postgresql are database servers we start as docker containers,
      #    and we use clients named mysql/psql.
      #
      # 2. When we start a database server we need to pass environment variables
      #    explicitly as part of the `docker run` command. These environment
      #    variables are named differently from the similarly named environment
      #    variables used by the clients.
      #
      #    - mysql server ref: https://hub.docker.com/_/mysql/
      #    - mysql client ref: https://dev.mysql.com/doc/refman/5.7/en/environment-variables.html
      #    - postgres server ref: https://hub.docker.com/_/postgres/
      #    - psql client ref: https://www.postgresql.org/docs/9.5/libpq-envars.html
      #
      # 3. When we connect, the clients should use 127.0.0.1 rather than the
      #    default way of connecting, which otherwise leads to errors like the
      #    one below for both mysql and postgresql unless we set
      #    MYSQL_HOST/PGHOST to 127.0.0.1.
      #
      #    - ERROR 2002 (HY000): Can't connect to local MySQL server through socket '/var/run/mysqld/mysqld.sock' (2)
      #
      - name: Start a database server (${{ matrix.db }})
        if: ${{ matrix.db }}
        run: |
          if [ "${{ matrix.db }}" == "mysql" ]; then
              sudo apt-get update
              sudo apt-get install -y mysql-client
              DB=mysql bash ci/docker-db.sh
              DB=mysql bash ci/init-db.sh
          fi
          if [ "${{ matrix.db }}" == "postgres" ]; then
              sudo apt-get update
              sudo apt-get install -y postgresql-client
              DB=postgres bash ci/docker-db.sh
              DB=postgres bash ci/init-db.sh
          fi

      - name: Run pytest
        # FIXME: --color=yes explicitly set because:
        #        https://github.com/actions/runner/issues/241
        run: |
          pytest -v --maxfail=2 --color=yes --cov=jupyterhub jupyterhub/tests
      - name: Submit codecov report
        run: |
          codecov
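The matrix parameters only gain meaning through the steps that read them, and the database variations hinge on the `JUPYTERHUB_TEST_DB_URL` variable exported in the "Configure environment variables" step. A minimal sketch of how a test session might pick up that variable (the helper is illustrative, not the test suite's actual fixture code):

```python
import os


def test_db_url(default="sqlite:///:memory:"):
    """Return the database URL for the test session.

    JUPYTERHUB_TEST_DB_URL is exported by the workflow for the mysql/postgres
    jobs; when it is unset, tests can fall back to a local SQLite database.
    (Illustrative helper, not the repository's actual code.)
    """
    return os.environ.get("JUPYTERHUB_TEST_DB_URL", default)


print(test_db_url())
```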
.travis.yml (deleted, 120 lines)
@@ -1,120 +0,0 @@

dist: bionic
language: python
cache:
  - pip
env:
  global:
    - MYSQL_HOST=127.0.0.1
    - MYSQL_TCP_PORT=13306

# request additional services for the jobs to access
services:
  - postgresql
  - docker

# install dependencies for running pytest (but not linting)
before_install:
  - set -e
  - nvm install 6; nvm use 6
  - npm install
  - npm install -g configurable-http-proxy
  - |
    # setup database
    if [[ $JUPYTERHUB_TEST_DB_URL == mysql* ]]; then
      unset MYSQL_UNIX_PORT
      DB=mysql bash ci/docker-db.sh
      DB=mysql bash ci/init-db.sh
      # FIXME: mysql-connector-python 8.0.16 incorrectly decodes bytes to str
      # ref: https://bugs.mysql.com/bug.php?id=94944
      pip install 'mysql-connector-python==8.0.11'
    elif [[ $JUPYTERHUB_TEST_DB_URL == postgresql* ]]; then
      psql -c "CREATE USER $PGUSER WITH PASSWORD '$PGPASSWORD';" -U postgres
      DB=postgres bash ci/init-db.sh
      pip install psycopg2-binary
    fi

# install general dependencies
install:
  - pip install --upgrade pip
  - pip install --upgrade --pre -r dev-requirements.txt .
  - |
    if [[ "$MASTER_DEPENDENCIES" == "True" ]]; then
      pip install git+https://github.com/ipython/traitlets#egg=traitlets --force
    fi
  - |
    if [[ "$TEST" == "jupyter_server" ]]; then
      pip uninstall notebook --yes
      pip install jupyter_server
    fi
  - pip freeze

# run tests
script:
  - pytest -v --maxfail=2 --cov=jupyterhub jupyterhub/tests

# collect test coverage information
after_success:
  - codecov

# list the jobs
jobs:
  include:
    - name: autoformatting check
      python: 3.6
      # NOTE: It does not suffice to override to: null, [], or [""]. Travis will
      #       fall back to the default if we do.
      before_install: echo "Do nothing before install."
      script:
        - pre-commit run --all-files
      after_success: echo "Do nothing after success."
      after_failure:
        - |
          echo "You can install pre-commit hooks to automatically run formatting"
          echo "on each commit with:"
          echo "    pre-commit install"
          echo "or you can run by hand on staged files with"
          echo "    pre-commit run"
          echo "or after-the-fact on already committed files with"
          echo "    pre-commit run --all-files"
    # When we run pytest, we want to run it with python>=3.5 as well as with
    # various configurations. We increment the python version at the same time
    # as we test new configurations in order to reduce the number of test jobs.
    - name: python:3.5 + dist:xenial
      python: 3.5
      dist: xenial
    - name: python:3.6 + subdomain
      python: 3.6
      env: JUPYTERHUB_TEST_SUBDOMAIN_HOST=http://localhost.jovyan.org:8000
    - name: python:3.7 + mysql
      python: 3.7
      env:
        - JUPYTERHUB_TEST_DB_URL=mysql+mysqlconnector://root@127.0.0.1:$MYSQL_TCP_PORT/jupyterhub
    - name: python:3.8 + postgresql
      python: 3.8
      env:
        - PGUSER=jupyterhub
        - PGPASSWORD=hub[test/:?
        # The password in the url below is url-encoded with: urllib.parse.quote($PGPASSWORD, safe='')
        - JUPYTERHUB_TEST_DB_URL=postgresql://jupyterhub:hub%5Btest%2F%3A%3F@127.0.0.1/jupyterhub
    - name: python:3.8 + master dependencies
      python: 3.8
      env:
        - PGUSER=jupyterhub
        - PGPASSWORD=hub[test/:?
        # The password in the url below is url-encoded with: urllib.parse.quote($PGPASSWORD, safe='')
        - JUPYTERHUB_TEST_DB_URL=postgresql://jupyterhub:hub%5Btest%2F%3A%3F@127.0.0.1/jupyterhub
        - MASTER_DEPENDENCIES=True
    - name: python:3.8 + jupyter_server
      python: 3.8
      env:
        - TEST=jupyter_server
        - JUPYTERHUB_SINGLEUSER_APP=jupyterhub.tests.mockserverapp.MockServerApp

    - name: python:nightly
      python: nightly
  allow_failures:
    - name: python:nightly
    # https://github.com/jupyterhub/jupyterhub/issues/3141
    # The latest traitlets is close to release so it should not fail
    # - name: python:3.8 + master dependencies
  fast_finish: true
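The `hub[test/:?` test password cannot be embedded in a connection URL as-is; the `urllib.parse.quote($PGPASSWORD, safe='')` comment above is the recipe for the encoded value that appears in `JUPYTERHUB_TEST_DB_URL`. For example:

```python
from urllib.parse import quote

pg_password = "hub[test/:?"
encoded = quote(pg_password, safe="")
print(encoded)  # hub%5Btest%2F%3A%3F, the value embedded in JUPYTERHUB_TEST_DB_URL
print("postgresql://jupyterhub:%s@127.0.0.1/jupyterhub" % encoded)
```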
@@ -13,7 +13,7 @@
 [](https://pypi.python.org/pypi/jupyterhub)
 [](https://www.npmjs.com/package/jupyterhub)
 [](https://jupyterhub.readthedocs.org/en/latest/)
-[](https://travis-ci.org/jupyterhub/jupyterhub)
+[](https://travis-ci.com/jupyterhub/jupyterhub)
 [](https://hub.docker.com/r/jupyterhub/jupyterhub/tags)
 [](https://circleci.com/gh/jupyterhub/jupyterhub)<!-- CircleCI Token: b5b65862eb2617b9a8d39e79340b0a6b816da8cc -->
 [](https://codecov.io/gh/jupyterhub/jupyterhub)
ci/docker-db.sh
@@ -1,36 +1,55 @@
 #!/usr/bin/env bash
-# source this file to setup postgres and mysql
-# for local testing (as similar as possible to docker)
+# The goal of this script is to start a database server as a docker container.
+#
+# Required environment variables:
+# - DB: The database server to start, either "postgres" or "mysql".
+#
+# - PGUSER/PGPASSWORD: For the creation of a postgresql user with associated
+#   password.
 
 set -eu
 
-export MYSQL_HOST=127.0.0.1
-export MYSQL_TCP_PORT=${MYSQL_TCP_PORT:-13306}
-export PGHOST=127.0.0.1
-NAME="hub-test-$DB"
-DOCKER_RUN="docker run -d --name $NAME"
-
-docker rm -f "$NAME" 2>/dev/null || true
-
-case "$DB" in
-"mysql")
-    RUN_ARGS="-e MYSQL_ALLOW_EMPTY_PASSWORD=1 -p $MYSQL_TCP_PORT:3306 mysql:5.7"
-    CHECK="mysql --host $MYSQL_HOST --port $MYSQL_TCP_PORT --user root -e \q"
-    ;;
-"postgres")
-    RUN_ARGS="-p 5432:5432 postgres:9.5"
-    CHECK="psql --user postgres -c \q"
-    ;;
-*)
+# Stop and remove any existing database container
+DOCKER_CONTAINER="hub-test-$DB"
+docker rm -f "$DOCKER_CONTAINER" 2>/dev/null || true
+
+# Prepare environment variables to startup and await readiness of either a mysql
+# or postgresql server.
+if [[ "$DB" == "mysql" ]]; then
+    # Environment variables can influence both the mysql server in the docker
+    # container and the mysql client.
+    #
+    # ref server: https://hub.docker.com/_/mysql/
+    # ref client: https://dev.mysql.com/doc/refman/5.7/en/setting-environment-variables.html
+    #
+    DOCKER_RUN_ARGS="-p 3306:3306 --env MYSQL_ALLOW_EMPTY_PASSWORD=1 mysql:5.7"
+    READINESS_CHECK="mysql --user root --execute \q"
+elif [[ "$DB" == "postgres" ]]; then
+    # Environment variables can influence both the postgresql server in the
+    # docker container and the postgresql client (psql).
+    #
+    # ref server: https://hub.docker.com/_/postgres/
+    # ref client: https://www.postgresql.org/docs/9.5/libpq-envars.html
+    #
+    # POSTGRES_USER / POSTGRES_PASSWORD will create a user on startup of the
+    # postgres server, but PGUSER and PGPASSWORD are the environment variables
+    # used by the postgresql client psql, so we configure the user based on how
+    # we want to connect.
+    #
+    DOCKER_RUN_ARGS="-p 5432:5432 --env "POSTGRES_USER=${PGUSER}" --env "POSTGRES_PASSWORD=${PGPASSWORD}" postgres:9.5"
+    READINESS_CHECK="psql --command \q"
+else
     echo '$DB must be mysql or postgres'
     exit 1
-esac
+fi
 
-$DOCKER_RUN $RUN_ARGS
+# Start the database server
+docker run --detach --name "$DOCKER_CONTAINER" $DOCKER_RUN_ARGS
 
+# Wait for the database server to start
 echo -n "waiting for $DB "
 for i in {1..60}; do
-    if $CHECK; then
+    if $READINESS_CHECK; then
         echo 'done'
         break
     else
@@ -38,22 +57,4 @@ for i in {1..60}; do
         sleep 1
     fi
 done
-$CHECK
+$READINESS_CHECK
-
-case "$DB" in
-"mysql")
-    ;;
-"postgres")
-    # create the user
-    psql --user postgres -c "CREATE USER $PGUSER WITH PASSWORD '$PGPASSWORD';"
-    ;;
-*)
-esac
-
-echo -e "
-Set these environment variables:
-
-    export MYSQL_HOST=127.0.0.1
-    export MYSQL_TCP_PORT=$MYSQL_TCP_PORT
-    export PGHOST=127.0.0.1
-"
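The readiness loop above simply retries a cheap client command once per second until the container answers. Outside these shell scripts the same idea can be expressed in Python; a rough sketch under the assumption that a plain TCP connect is an acceptable readiness signal (host and port are illustrative):

```python
import socket
import time


def wait_for_db(host="127.0.0.1", port=5432, attempts=60):
    """Poll until the database container accepts TCP connections."""
    for _ in range(attempts):
        try:
            with socket.create_connection((host, port), timeout=1):
                return
        except OSError:
            time.sleep(1)
    raise TimeoutError(f"database at {host}:{port} never became ready")
```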
ci/init-db.sh
@@ -1,27 +1,26 @@
 #!/usr/bin/env bash
-# initialize jupyterhub databases for testing
+# The goal of this script is to initialize a running database server with clean
+# databases for use during tests.
+#
+# Required environment variables:
+# - DB: The database server to start, either "postgres" or "mysql".
 
 set -eu
 
-MYSQL="mysql --user root --host $MYSQL_HOST --port $MYSQL_TCP_PORT -e "
-PSQL="psql --user postgres -c "
-
-case "$DB" in
-"mysql")
-    EXTRA_CREATE='CHARACTER SET utf8 COLLATE utf8_general_ci'
-    SQL="$MYSQL"
-    ;;
-"postgres")
-    SQL="$PSQL"
-    ;;
-*)
+# Prepare env vars SQL_CLIENT and EXTRA_CREATE_DATABASE_ARGS
+if [[ "$DB" == "mysql" ]]; then
+    SQL_CLIENT="mysql --user root --execute "
+    EXTRA_CREATE_DATABASE_ARGS='CHARACTER SET utf8 COLLATE utf8_general_ci'
+elif [[ "$DB" == "postgres" ]]; then
+    SQL_CLIENT="psql --command "
+else
     echo '$DB must be mysql or postgres'
     exit 1
-esac
+fi
 
+# Configure a set of databases in the database server for upgrade tests
 set -x
 
 for SUFFIX in '' _upgrade_072 _upgrade_081 _upgrade_094; do
-    $SQL "DROP DATABASE jupyterhub${SUFFIX};" 2>/dev/null || true
-    $SQL "CREATE DATABASE jupyterhub${SUFFIX} ${EXTRA_CREATE:-};"
+    $SQL_CLIENT "DROP DATABASE jupyterhub${SUFFIX};" 2>/dev/null || true
+    $SQL_CLIENT "CREATE DATABASE jupyterhub${SUFFIX} ${EXTRA_CREATE_DATABASE_ARGS:-};"
 done
File diff suppressed because one or more lines are too long
@@ -21,7 +21,7 @@ Here is a quick breakdown of these three tools:
 * **The Jupyter Notebook** is a document specification (the `.ipynb`) file that interweaves
   narrative text with code cells and their outputs. It is also a graphical interface
   that allows users to edit these documents. There are also several other graphical interfaces
-  that allow users to edit the `.ipynb` format (nteract, Jupyer Lab, Google Colab, Kaggle, etc).
+  that allow users to edit the `.ipynb` format (nteract, Jupyter Lab, Google Colab, Kaggle, etc).
 * **JupyterLab** is a flexible and extendible user interface for interactive computing. It
   has several extensions that are tailored for using Jupyter Notebooks, as well as extensions
   for other parts of the data science stack.
@@ -5,7 +5,7 @@ that interacts with the Hub's REST API. A Service may perform a specific
 or action or task. For example, shutting down individuals' single user
 notebook servers that have been idle for some time is a good example of
 a task that could be automated by a Service. Let's look at how the
-[cull_idle_servers][] script can be used as a Service.
+[jupyterhub_idle_culler][] script can be used as a Service.
 
 ## Real-world example to cull idle servers
 
@@ -15,11 +15,11 @@ document will:
 - explain some basic information about API tokens
 - clarify that API tokens can be used to authenticate to
   single-user servers as of [version 0.8.0](../changelog)
-- show how the [cull_idle_servers][] script can be:
+- show how the [jupyterhub_idle_culler][] script can be:
   - used in a Hub-managed service
   - run as a standalone script
 
-Both examples for `cull_idle_servers` will communicate tasks to the
+Both examples for `jupyterhub_idle_culler` will communicate tasks to the
 Hub via the REST API.
 
 ## API Token basics
@@ -78,17 +78,23 @@ single-user servers, and only cookies can be used for authentication.
 0.8 supports using JupyterHub API tokens to authenticate to single-user
 servers.
 
-## Configure `cull-idle` to run as a Hub-Managed Service
+## Configure the idle culler to run as a Hub-Managed Service
 
+Install the idle culler:
+
+```
+pip install jupyterhub-idle-culler
+```
+
 In `jupyterhub_config.py`, add the following dictionary for the
-`cull-idle` Service to the `c.JupyterHub.services` list:
+`idle-culler` Service to the `c.JupyterHub.services` list:
 
 ```python
 c.JupyterHub.services = [
     {
-        'name': 'cull-idle',
+        'name': 'idle-culler',
         'admin': True,
-        'command': [sys.executable, 'cull_idle_servers.py', '--timeout=3600'],
+        'command': [sys.executable, '-m', 'jupyterhub_idle_culler', '--timeout=3600'],
     }
 ]
 ```
@@ -101,21 +107,21 @@ where:
 
 ## Run `cull-idle` manually as a standalone script
 
-Now you can run your script, i.e. `cull_idle_servers`, by providing it
+Now you can run your script by providing it
 the API token and it will authenticate through the REST API to
 interact with it.
 
-This will run `cull-idle` manually. `cull-idle` can be run as a standalone
+This will run the idle culler service manually. It can be run as a standalone
 script anywhere with access to the Hub, and will periodically check for idle
 servers and shut them down via the Hub's REST API. In order to shutdown the
 servers, the token given to cull-idle must have admin privileges.
 
 Generate an API token and store it in the `JUPYTERHUB_API_TOKEN` environment
-variable. Run `cull_idle_servers.py` manually.
+variable. Run `jupyterhub_idle_culler` manually.
 
 ```bash
 export JUPYTERHUB_API_TOKEN='token'
-python3 cull_idle_servers.py [--timeout=900] [--url=http://127.0.0.1:8081/hub/api]
+python -m jupyterhub_idle_culler [--timeout=900] [--url=http://127.0.0.1:8081/hub/api]
 ```
 
-[cull_idle_servers]: https://github.com/jupyterhub/jupyterhub/blob/master/examples/cull-idle/cull_idle_servers.py
+[jupyterhub_idle_culler]: https://github.com/jupyterhub/jupyterhub-idle-culler
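Both the Hub-managed service and the standalone invocation authenticate to the Hub's REST API with the token from `JUPYTERHUB_API_TOKEN`. A rough sketch of the kind of request involved (illustrative code, not the culler's implementation):

```python
import os

import requests

api_url = "http://127.0.0.1:8081/hub/api"
headers = {"Authorization": "token " + os.environ["JUPYTERHUB_API_TOKEN"]}

# list users and their last activity, the information an idle culler needs
# in order to decide which single-user servers to shut down
r = requests.get(api_url + "/users", headers=headers)
r.raise_for_status()
for user in r.json():
    print(user["name"], user.get("last_activity"))
```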
@@ -5,8 +5,8 @@
 version_info = (
     1,
     2,
-    0,
-    "b1",  # release (b1, rc1, or "" for final or dev)
+    2,
+    "",  # release (b1, rc1, or "" for final or dev)
     # "dev",  # dev or nothing for beta/rc/stable releases
 )
@@ -215,7 +215,8 @@ class OAuthAuthorizeHandler(OAuthHandler, BaseHandler):
             # it's the user's own server
             oauth_client.identifier in own_oauth_client_ids
             # or it's in the global no-confirm list
-            or oauth_client.identifier in self.settings.get('oauth_no_confirm', set())
+            or oauth_client.identifier
+            in self.settings.get('oauth_no_confirm_list', set())
         ):
             return False
         # default: require confirmation
@@ -274,9 +275,26 @@ class OAuthAuthorizeHandler(OAuthHandler, BaseHandler):
         uri, http_method, body, headers = self.extract_oauth_params()
         referer = self.request.headers.get('Referer', 'no referer')
         full_url = self.request.full_url()
-        if referer != full_url:
+        # trim protocol, which cannot be trusted with multiple layers of proxies anyway
+        # Referer is set by browser, but full_url can be modified by proxy layers to appear as http
+        # when it is actually https
+        referer_proto, _, stripped_referer = referer.partition("://")
+        referer_proto = referer_proto.lower()
+        req_proto, _, stripped_full_url = full_url.partition("://")
+        req_proto = req_proto.lower()
+        if referer_proto != req_proto:
+            self.log.warning("Protocol mismatch: %s != %s", referer, full_url)
+            if req_proto == "https":
+                # insecure origin to secure target is not allowed
+                raise web.HTTPError(
+                    403, "Not allowing authorization form submitted from insecure page"
+                )
+        if stripped_referer != stripped_full_url:
             # OAuth post must be made to the URL it came from
-            self.log.error("OAuth POST from %s != %s", referer, full_url)
+            self.log.error("Original OAuth POST from %s != %s", referer, full_url)
+            self.log.error(
+                "Stripped OAuth POST from %s != %s", stripped_referer, stripped_full_url
+            )
             raise web.HTTPError(
                 403, "Authorization form must be sent from authorization page"
             )
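A small worked example of the new Referer check with illustrative URLs: the scheme is ignored (so a TLS-terminating proxy that rewrites https to http no longer breaks the form), the rest of the URL must still match, and an http referer posting to an https target is rejected outright.

```python
referer = "https://hub.example.com/hub/api/oauth2/authorize?client_id=abc"
full_url = "http://hub.example.com/hub/api/oauth2/authorize?client_id=abc"

referer_proto, _, stripped_referer = referer.partition("://")
req_proto, _, stripped_full_url = full_url.partition("://")

# schemes differ, but the stripped URLs match, so this POST is still accepted
assert referer_proto.lower() != req_proto.lower()
assert stripped_referer == stripped_full_url
```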
@@ -77,6 +77,7 @@ from .user import UserDict
 from .oauth.provider import make_provider
 from ._data import DATA_FILES_PATH
 from .log import CoroutineLogFormatter, log_request
+from .pagination import Pagination
 from .proxy import Proxy, ConfigurableHTTPProxy
 from .traitlets import URLPrefix, Command, EntryPointType, Callable
 from .utils import (
@@ -279,7 +280,7 @@ class JupyterHub(Application):
 
     @default('classes')
     def _load_classes(self):
-        classes = [Spawner, Authenticator, CryptKeeper]
+        classes = [Spawner, Authenticator, CryptKeeper, Pagination]
         for name, trait in self.traits(config=True).items():
             # load entry point groups into configurable class list
             # so that they show up in config files, etc.
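Appending `Pagination` to the `classes` list is what makes its configurable traits appear in generated config files and help output. A sketch of what that exposes, using traitlets introspection (the defaults come from the Pagination rewrite further down in this compare):

```python
from jupyterhub.pagination import Pagination

# every class in `classes` contributes its config=True traits to the
# generated config file; for Pagination that is:
for name, trait in sorted(Pagination.class_traits(config=True).items()):
    print(f"c.Pagination.{name} = {trait.default_value!r}")
# c.Pagination.default_per_page = 100
# c.Pagination.max_per_page = 250
```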
@@ -634,6 +634,12 @@ class BaseHandler(RequestHandler):
                 next_url,
             )
 
+        # this is where we know if next_url is coming from ?next= param or we are using a default url
+        if next_url:
+            next_url_from_param = True
+        else:
+            next_url_from_param = False
+
         if not next_url:
             # custom default URL, usually passed because user landed on that page but was not logged in
             if default:
@@ -659,6 +665,9 @@ class BaseHandler(RequestHandler):
             else:
                 next_url = url_path_join(self.hub.base_url, 'home')
 
-        next_url = self.append_query_parameters(next_url, exclude=['next'])
+        if not next_url_from_param:
+            # when a request made with ?next=... assume all the params have already been encoded
+            # otherwise, preserve params from the current request across the redirect
+            next_url = self.append_query_parameters(next_url, exclude=['next'])
         return next_url
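The effect is that query parameters of the current request survive the redirect only when no explicit `?next=` was given. A tiny illustration using tornado's `url_concat` with made-up URLs:

```python
from tornado.httputil import url_concat

# an explicit ?next= target is used as-is (its own params are already encoded)
print(url_concat("/hub/login", {"next": "/hub/admin?left=1&right=2"}))
# /hub/login?next=%2Fhub%2Fadmin%3Fleft%3D1%26right%3D2

# with no ?next=, parameters of the current request are re-appended
print(url_concat("/hub/spawn", {"a": "5"}))
# /hub/spawn?a=5
```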
@@ -40,11 +40,15 @@ class RootHandler(BaseHandler):
     def get(self):
         user = self.current_user
         if self.default_url:
-            url = self.default_url
+            # As set in jupyterhub_config.py
+            if callable(self.default_url):
+                url = self.default_url(self)
+            else:
+                url = self.default_url
         elif user:
             url = self.get_next_url(user)
         else:
-            url = self.settings['login_url']
+            url = url_concat(self.settings["login_url"], dict(next=self.request.uri))
         self.redirect(url)
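With this change `default_url` may be a callable that receives the handler. A hedged sketch of what that allows in `jupyterhub_config.py`, assuming the `JupyterHub.default_url` trait accepts a callable in this version (the routing rule itself is illustrative):

```python
# jupyterhub_config.py
def default_url(handler):
    """Send admins to the admin panel, everyone else to their home page."""
    user = handler.current_user
    if user and user.admin:
        return "/hub/admin"
    return "/hub/home"

c.JupyterHub.default_url = default_url
```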
@@ -453,7 +457,7 @@ class AdminHandler(BaseHandler):
     @web.authenticated
     @admin_only
     async def get(self):
-        page, per_page, offset = Pagination.get_page_args(self)
+        page, per_page, offset = Pagination(config=self.config).get_page_args(self)
 
         available = {'name', 'admin', 'running', 'last_activity'}
         default_sort = ['admin', 'name']
@@ -511,7 +515,11 @@ class AdminHandler(BaseHandler):
 
         total = self.db.query(orm.User.id).count()
         pagination = Pagination(
-            url=self.request.uri, total=total, page=page, per_page=per_page,
+            url=self.request.uri,
+            total=total,
+            page=page,
+            per_page=per_page,
+            config=self.config,
         )
 
         auth_state = await self.current_user.get_auth_state()
@@ -1,69 +1,94 @@
 """Basic class to manage pagination utils."""
 # Copyright (c) Jupyter Development Team.
 # Distributed under the terms of the Modified BSD License.
+from traitlets import Bool
+from traitlets import default
+from traitlets import Integer
+from traitlets import observe
+from traitlets import Unicode
+from traitlets import validate
+from traitlets.config import Configurable
 
 
-class Pagination:
-
-    _page_name = 'page'
-    _per_page_name = 'per_page'
-    _default_page = 1
-    _default_per_page = 100
-    _max_per_page = 250
-
-    def __init__(self, *args, **kwargs):
-        """Potential parameters.
-        **url**: URL in request
-        **page**: current page in use
-        **per_page**: number of records to display in the page. By default 100
-        **total**: total records considered while paginating
-        """
-        self.page = kwargs.get(self._page_name, 1)
-
-        if self.per_page > self._max_per_page:
-            self.per_page = self._max_per_page
-
-        self.total = int(kwargs.get('total', 0))
-        self.url = kwargs.get('url') or self.get_url()
-        self.init_values()
-
-    def init_values(self):
-        self._cached = {}
-        self.skip = (self.page - 1) * self.per_page
-        pages = divmod(self.total, self.per_page)
-        self.total_pages = pages[0] + 1 if pages[1] else pages[0]
-
-        self.has_prev = self.page > 1
-        self.has_next = self.page < self.total_pages
-
-    @classmethod
+class Pagination(Configurable):
+
+    # configurable options
+    default_per_page = Integer(
+        100,
+        config=True,
+        help="Default number of entries per page for paginated results.",
+    )
+
+    max_per_page = Integer(
+        250,
+        config=True,
+        help="Maximum number of entries per page for paginated results.",
+    )
+
+    # state variables
+    url = Unicode("")
+    page = Integer(1)
+    per_page = Integer(1, min=1)
+
+    @default("per_page")
+    def _default_per_page(self):
+        return self.default_per_page
+
+    @validate("per_page")
+    def _limit_per_page(self, proposal):
+        if self.max_per_page and proposal.value > self.max_per_page:
+            return self.max_per_page
+        if proposal.value <= 1:
+            return 1
+        return proposal.value
+
+    @observe("max_per_page")
+    def _apply_max(self, change):
+        if change.new:
+            self.per_page = min(change.new, self.per_page)
+
+    total = Integer(0)
+
+    total_pages = Integer(0)
+
+    @default("total_pages")
+    def _calculate_total_pages(self):
+        total_pages = self.total // self.per_page
+        if self.total % self.per_page:
+            # there's a remainder, add 1
+            total_pages += 1
+        return total_pages
+
+    @observe("per_page", "total")
+    def _update_total_pages(self, change):
+        """Update total_pages when per_page or total is changed"""
+        self.total_pages = self._calculate_total_pages()
+
+    separator = Unicode("...")
+
     def get_page_args(self, handler):
         """
         This method gets the arguments used in the webpage to configurate the pagination
         In case of no arguments, it uses the default values from this class
 
-        It returns:
-          - self.page: The page requested for paginating or the default value (1)
-          - self.per_page: The number of items to return in this page. By default 100 and no more than 250
-          - self.per_page * (self.page - 1): The offset to consider when managing pagination via the ORM
+        Returns:
+          - page: The page requested for paginating or the default value (1)
+          - per_page: The number of items to return in this page. No more than max_per_page
+          - offset: The offset to consider when managing pagination via the ORM
         """
-        self.page = handler.get_argument(self._page_name, self._default_page)
-        self.per_page = handler.get_argument(
-            self._per_page_name, self._default_per_page
-        )
+        page = handler.get_argument("page", 1)
+        per_page = handler.get_argument("per_page", self.default_per_page)
         try:
-            self.per_page = int(self.per_page)
-            if self.per_page > self._max_per_page:
-                self.per_page = self._max_per_page
-        except:
+            self.per_page = int(per_page)
+        except Exception:
             self.per_page = self._default_per_page
 
         try:
-            self.page = int(self.page)
+            self.page = int(page)
             if self.page < 1:
-                self.page = self._default_page
+                self.page = 1
         except:
-            self.page = self._default_page
+            self.page = 1
 
         return self.page, self.per_page, self.per_page * (self.page - 1)
@@ -91,38 +116,44 @@ class Pagination:
         (in case the current page + 5 does not overflow the total lenght of pages) and the first one for reference.
         """
 
-        self.separator_character = '...'
-        default_pages_to_render = 7
-        after_page = 5
-        before_end = 2
-
-        # Add 1 to self.total_pages since our default page is 1 and not 0
-        total_pages = self.total_pages + 1
+        before_page = 2
+        after_page = 2
+        window_size = before_page + after_page + 1
+
+        # Add 1 to total_pages since our starting page is 1 and not 0
+        last_page = self.total_pages
 
         pages = []
 
-        if total_pages > default_pages_to_render:
-            if self.page > 1:
-                pages.extend([1, '...'])
-
-            if total_pages < self.page + after_page:
-                pages.extend(list(range(self.page, total_pages)))
-            else:
-                if total_pages >= self.page + after_page + before_end:
-                    pages.extend(list(range(self.page, self.page + after_page)))
-                    pages.append('...')
-                    pages.extend(list(range(total_pages - before_end, total_pages)))
-                else:
-                    pages.extend(list(range(self.page, self.page + after_page)))
-                    if self.page + after_page < total_pages:
-                        # show only last page when the after_page window left space to show it
-                        pages.append('...')
-                        pages.extend(list(range(total_pages - 1, total_pages)))
+        # will default window + start, end fit without truncation?
+        if self.total_pages > window_size + 2:
+            if self.page - before_page > 1:
+                # before_page will not reach page 1
+                pages.append(1)
+            if self.page - before_page > 2:
+                # before_page will not reach page 2, need separator
+                pages.append(self.separator)
+
+            pages.extend(range(max(1, self.page - before_page), self.page))
+            # we now have up to but not including self.page
+
+            if self.page + after_page + 1 >= last_page:
+                # after_page gets us to the end
+                pages.extend(range(self.page, last_page + 1))
+            else:
+                # add full after_page entries
+                pages.extend(range(self.page, self.page + after_page + 1))
+                # add separator *if* this doesn't get to last page - 1
+                if self.page + after_page < last_page - 1:
+                    pages.append(self.separator)
+                pages.append(last_page)
 
             return pages
 
         else:
-            return list(range(1, total_pages))
+            # everything will fit, nothing to think about
+            # always return at least one page
+            return list(range(1, last_page + 1)) or [1]
@@ -155,9 +186,11 @@ class Pagination:
                         page=page
                     )
                 )
-            elif page == self.separator_character:
+            elif page == self.separator:
                 links.append(
-                    '<li class="disabled"><span> <span aria-hidden="true">...</span></span></li>'
+                    '<li class="disabled"><span> <span aria-hidden="true">{separator}</span></span></li>'.format(
+                        separator=self.separator
+                    )
                 )
             else:
                 links.append(
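Because `Pagination` is now a `Configurable` (and registered in the application's `classes` list above), its limits can be tuned from `jupyterhub_config.py`; for example:

```python
# jupyterhub_config.py, illustrative values
c.Pagination.default_per_page = 50  # rows per page on the admin page when ?per_page= is absent
c.Pagination.max_per_page = 200     # hard upper bound applied to any ?per_page= request
```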
@@ -43,6 +43,7 @@ from . import utils
 from .metrics import CHECK_ROUTES_DURATION_SECONDS
 from .metrics import PROXY_POLL_DURATION_SECONDS
 from .objects import Server
+from .utils import exponential_backoff
 from .utils import make_ssl_context
 from .utils import url_path_join
 from jupyterhub.traitlets import Command
@@ -496,6 +497,19 @@ class ConfigurableHTTPProxy(Proxy):
 
             if not psutil.pid_exists(pid):
                 raise ProcessLookupError
+
+            try:
+                process = psutil.Process(pid)
+                if self.command and self.command[0]:
+                    process_cmd = process.cmdline()
+                    if process_cmd and not any(
+                        self.command[0] in clause for clause in process_cmd
+                    ):
+                        raise ProcessLookupError
+            except (psutil.AccessDenied, psutil.NoSuchProcess):
+                # If there is a process at the proxy's PID but we don't have permissions to see it,
+                # then it is unlikely to actually be the proxy.
+                raise ProcessLookupError
         else:
             os.kill(pid, 0)
 
@@ -691,8 +705,17 @@ class ConfigurableHTTPProxy(Proxy):
         parent = psutil.Process(pid)
         children = parent.children(recursive=True)
         for child in children:
-            child.kill()
-        psutil.wait_procs(children, timeout=5)
+            child.terminate()
+        gone, alive = psutil.wait_procs(children, timeout=5)
+        for p in alive:
+            p.kill()
+        # Clear the shell, too, if it still exists.
+        try:
+            parent.terminate()
+            parent.wait(timeout=5)
+            parent.kill()
+        except psutil.NoSuchProcess:
+            pass
 
     def _terminate(self):
         """Terminate our process"""
@@ -768,9 +791,35 @@ class ConfigurableHTTPProxy(Proxy):
             method=method,
             headers={'Authorization': 'token {}'.format(self.auth_token)},
             body=body,
+            connect_timeout=3,  # default: 20s
+            request_timeout=10,  # default: 20s
         )
 
-        async with self.semaphore:
-            result = await client.fetch(req)
+        async def _wait_for_api_request():
+            try:
+                async with self.semaphore:
+                    return await client.fetch(req)
+            except HTTPError as e:
+                # Retry on potentially transient errors in CHP, typically
+                # numbered 500 and up. Note that CHP isn't able to emit 429
+                # errors.
+                if e.code >= 500:
+                    self.log.warning(
+                        "api_request to the proxy failed with status code {}, retrying...".format(
+                            e.code
+                        )
+                    )
+                    return False  # a falsy return value make exponential_backoff retry
+                else:
+                    self.log.error("api_request to proxy failed: {0}".format(e))
+                    # An unhandled error here will help the hub invoke cleanup logic
+                    raise
+
+        result = await exponential_backoff(
+            _wait_for_api_request,
+            'Repeated api_request to proxy path "{}" failed.'.format(path),
+            timeout=30,
+        )
         return result
 
     async def add_route(self, routespec, target, data):
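The new retry path relies on `exponential_backoff` from `jupyterhub.utils`, which keeps calling the coroutine while it returns a falsy value and raises once its timeout expires. A minimal standalone sketch of that contract (the flaky coroutine is made up for illustration):

```python
import asyncio

from jupyterhub.utils import exponential_backoff

attempts = 0


async def flaky_operation():
    """Return something truthy on success, False to request another attempt."""
    global attempts
    attempts += 1
    if attempts < 3:
        return False  # falsy: exponential_backoff waits, then calls us again
    return {"ok": True}


async def main():
    result = await exponential_backoff(
        flaky_operation,
        "flaky_operation never succeeded",
        timeout=30,
    )
    print(result, "after", attempts, "attempts")


asyncio.run(main())
```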
@@ -31,7 +31,7 @@ async def test_root_no_auth(app):
     url = ujoin(public_host(app), app.hub.base_url)
     r = await async_requests.get(url)
     r.raise_for_status()
-    assert r.url == ujoin(url, 'login')
+    assert r.url == url_concat(ujoin(url, 'login'), dict(next=app.hub.base_url))
 
 
 async def test_root_auth(app):
@@ -616,9 +616,16 @@ async def test_login_strip(app):
         (False, '//other.domain', '', None),
         (False, '///other.domain/triple', '', None),
         (False, '\\\\other.domain/backslashes', '', None),
-        # params are handled correctly
-        (True, '/hub/admin', 'hub/admin?left=1&right=2', [('left', 1), ('right', 2)]),
-        (False, '/hub/admin', 'hub/admin?left=1&right=2', [('left', 1), ('right', 2)]),
+        # params are handled correctly (ignored if ?next= specified)
+        (
+            True,
+            '/hub/admin?left=1&right=2',
+            'hub/admin?left=1&right=2',
+            {"left": "abc"},
+        ),
+        (False, '/hub/admin', 'hub/admin', [('left', 1), ('right', 2)]),
+        (True, '', '', {"keep": "yes"}),
+        (False, '', '', {"keep": "yes"}),
     ],
 )
 async def test_login_redirect(app, running, next_url, location, params):
@@ -627,10 +634,15 @@ async def test_login_redirect(app, running, next_url, location, params):
     if location:
         location = ujoin(app.base_url, location)
     elif running:
+        # location not specified,
         location = user.url
+        if params:
+            location = url_concat(location, params)
     else:
         # use default url
         location = ujoin(app.base_url, 'hub/spawn')
+        if params:
+            location = url_concat(location, params)
 
     url = 'login'
     if params:
@@ -649,7 +661,73 @@ async def test_login_redirect(app, running, next_url, location, params):
     r = await get_page(url, app, cookies=cookies, allow_redirects=False)
     r.raise_for_status()
     assert r.status_code == 302
-    assert location == r.headers['Location']
+    assert r.headers["Location"] == location
+
+
+@pytest.mark.parametrize(
+    'location, next, extra_params',
+    [
+        (
+            "{base_url}hub/spawn?a=5",
+            None,
+            {"a": "5"},
+        ),  # no ?next= given, preserve params
+        ("/x", "/x", {"a": "5"}),  # ?next=given, params ignored
+        (
+            "/x?b=10",
+            "/x?b=10",
+            {"a": "5"},
+        ),  # ?next=given with params, additional params ignored
+    ],
+)
+async def test_next_url(app, user, location, next, extra_params):
+    params = {}
+    if extra_params:
+        params.update(extra_params)
+    if next:
+        params["next"] = next
+    url = url_concat("/", params)
+    cookies = await app.login_user("monster")
+
+    # location can be a string template
+    location = location.format(base_url=app.base_url)
+
+    r = await get_page(url, app, cookies=cookies, allow_redirects=False)
+    r.raise_for_status()
+    assert r.status_code == 302
+    assert r.headers["Location"] == location
+
+
+async def test_next_url_params_sequence(app, user):
+    """Test each step of / -> login -> spawn
+
+    and whether they preserve url params
+    """
+    params = {"xyz": "5"}
+    # first request: root page, with params, not logged in
+    r = await get_page("/?xyz=5", app, allow_redirects=False)
+    r.raise_for_status()
+    location = r.headers["Location"]
+
+    # next page: login
+    cookies = await app.login_user(user.name)
+    assert location == url_concat(
+        ujoin(app.base_url, "/hub/login"), {"next": ujoin(app.base_url, "/hub/?xyz=5")}
+    )
+    r = await async_requests.get(
+        public_host(app) + location, cookies=cookies, allow_redirects=False
+    )
+    r.raise_for_status()
+    location = r.headers["Location"]
+
+    # after login, redirect back
+    assert location == ujoin(app.base_url, "/hub/?xyz=5")
+    r = await async_requests.get(
+        public_host(app) + location, cookies=cookies, allow_redirects=False
+    )
+    r.raise_for_status()
+    location = r.headers["Location"]
+    assert location == ujoin(app.base_url, "/hub/spawn?xyz=5")
 
 
 async def test_auto_login(app, request):
@@ -663,14 +741,18 @@ async def test_auto_login(app, request):
     )
     # no auto_login: end up at /hub/login
     r = await async_requests.get(base_url)
-    assert r.url == public_url(app, path='hub/login')
+    assert r.url == url_concat(
+        public_url(app, path="hub/login"), {"next": app.hub.base_url}
+    )
     # enable auto_login: redirect from /hub/login to /hub/dummy
     authenticator = Authenticator(auto_login=True)
     authenticator.login_url = lambda base_url: ujoin(base_url, 'dummy')
 
     with mock.patch.dict(app.tornado_settings, {'authenticator': authenticator}):
         r = await async_requests.get(base_url)
-        assert r.url == public_url(app, path='hub/dummy')
+        assert r.url == url_concat(
+            public_url(app, path="hub/dummy"), {"next": app.hub.base_url}
+        )
 
 
 async def test_auto_login_logout(app):
jupyterhub/tests/test_pagination.py (new file, 45 lines)
@@ -0,0 +1,45 @@

"""tests for pagination"""
from pytest import mark
from pytest import raises
from traitlets.config import Config

from jupyterhub.pagination import Pagination


def test_per_page_bounds():
    cfg = Config()
    cfg.Pagination.max_per_page = 10
    p = Pagination(config=cfg, per_page=20, total=100)
    assert p.per_page == 10
    with raises(Exception):
        p.per_page = 0


@mark.parametrize(
    "page, per_page, total, expected",
    [
        (1, 10, 99, [1, 2, 3, "...", 10]),
        (2, 10, 99, [1, 2, 3, 4, "...", 10]),
        (3, 10, 99, [1, 2, 3, 4, 5, "...", 10]),
        (4, 10, 99, [1, 2, 3, 4, 5, 6, "...", 10]),
        (5, 10, 99, [1, "...", 3, 4, 5, 6, 7, "...", 10]),
        (6, 10, 99, [1, "...", 4, 5, 6, 7, 8, "...", 10]),
        (7, 10, 99, [1, "...", 5, 6, 7, 8, 9, 10]),
        (8, 10, 99, [1, "...", 6, 7, 8, 9, 10]),
        (9, 10, 99, [1, "...", 7, 8, 9, 10]),
        (1, 20, 99, [1, 2, 3, 4, 5]),
        (1, 10, 0, [1]),
        (1, 10, 1, [1]),
        (1, 10, 10, [1]),
        (1, 10, 11, [1, 2]),
        (1, 10, 50, [1, 2, 3, 4, 5]),
        (1, 10, 60, [1, 2, 3, 4, 5, 6]),
        (1, 10, 70, [1, 2, 3, 4, 5, 6, 7]),
        (1, 10, 80, [1, 2, 3, "...", 8]),
    ],
)
def test_window(page, per_page, total, expected):
    cfg = Config()
    cfg.Pagination
    pagination = Pagination(page=page, per_page=per_page, total=total)
    assert pagination.calculate_pages_window() == expected
@@ -5,7 +5,7 @@ entrypoints
 jinja2
 jupyter_telemetry>=0.1.0
 oauthlib>=3.0
-pamela
+pamela; sys_platform != 'win32'
 prometheus_client>=0.0.21
 psutil>=5.6.5; sys_platform == 'win32'
 python-dateutil
@@ -61,7 +61,7 @@
           id="login_submit"
           type="submit"
           class='btn btn-jupyter'
-          value='Sign In'
+          value='Sign in'
           tabindex="3"
         />
         <div class="feedback-widget hidden">