Merge remote-tracking branch 'origin/main' into oauth_config

This commit is contained in:
Min RK
2022-08-02 13:53:51 +02:00
47 changed files with 2625 additions and 2596 deletions

15
.github/dependabot.yml vendored Normal file
View File

@@ -0,0 +1,15 @@
# dependabot.yml reference: https://docs.github.com/en/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file
#
# Notes:
# - Status and logs from dependabot are provided at
#   https://github.com/jupyterhub/jupyterhub/network/updates.
#
# `version: 2` is the only currently supported config-file format version.
version: 2
updates:
  # Maintain dependencies in our GitHub Workflows
  # (for the github-actions ecosystem, directory "/" means dependabot scans
  # the workflow files under .github/workflows/ in the repository root)
  - package-ecosystem: github-actions
    directory: "/"
    schedule:
      # Check for action updates once a week at 05:00 UTC
      # (weekly checks default to Monday per the dependabot reference above)
      interval: weekly
      time: "05:00"
      timezone: "Etc/UTC"

View File

@@ -32,12 +32,12 @@ jobs:
build-release:
runs-on: ubuntu-20.04
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: 3.8
python-version: "3.9"
- uses: actions/setup-node@v1
- uses: actions/setup-node@v3
with:
node-version: "14"
@@ -63,7 +63,7 @@ jobs:
./ci/check_installed_data.py
# ref: https://github.com/actions/upload-artifact#readme
- uses: actions/upload-artifact@v2
- uses: actions/upload-artifact@v3
with:
name: jupyterhub-${{ github.sha }}
path: "dist/*"
@@ -98,16 +98,16 @@ jobs:
echo "REGISTRY=localhost:5000/" >> $GITHUB_ENV
fi
- uses: actions/checkout@v2
- uses: actions/checkout@v3
# Setup docker to build for multiple platforms, see:
# https://github.com/docker/build-push-action/tree/v2.4.0#usage
# https://github.com/docker/build-push-action/blob/v2.4.0/docs/advanced/multi-platform.md
- name: Set up QEMU (for docker buildx)
uses: docker/setup-qemu-action@25f0500ff22e406f7191a2a8ba8cda16901ca018 # associated tag: v1.0.2
uses: docker/setup-qemu-action@8b122486cedac8393e77aa9734c3528886e4a1a8 # associated tag: v2.0.0
- name: Set up Docker Buildx (for multi-arch builds)
uses: docker/setup-buildx-action@2a4b53665e15ce7d7049afb11ff1f70ff1610609 # associated tag: v1.1.2
uses: docker/setup-buildx-action@dc7b9719a96d48369863986a06765841d7ea23f6 # associated tag: v2.0.0
with:
# Allows pushing to registry on localhost:5000
driver-opts: network=host
@@ -145,7 +145,7 @@ jobs:
branchRegex: ^\w[\w-.]*$
- name: Build and push jupyterhub
uses: docker/build-push-action@e1b7f96249f2e4c8e4ac1519b9608c0d48944a1f
uses: docker/build-push-action@1cb9d22b932e4832bb29793b7777ec860fc1cde0
with:
context: .
platforms: linux/amd64,linux/arm64
@@ -166,7 +166,7 @@ jobs:
branchRegex: ^\w[\w-.]*$
- name: Build and push jupyterhub-onbuild
uses: docker/build-push-action@e1b7f96249f2e4c8e4ac1519b9608c0d48944a1f
uses: docker/build-push-action@1cb9d22b932e4832bb29793b7777ec860fc1cde0
with:
build-args: |
BASE_IMAGE=${{ fromJson(steps.jupyterhubtags.outputs.tags)[0] }}
@@ -187,7 +187,7 @@ jobs:
branchRegex: ^\w[\w-.]*$
- name: Build and push jupyterhub-demo
uses: docker/build-push-action@e1b7f96249f2e4c8e4ac1519b9608c0d48944a1f
uses: docker/build-push-action@1cb9d22b932e4832bb29793b7777ec860fc1cde0
with:
build-args: |
BASE_IMAGE=${{ fromJson(steps.onbuildtags.outputs.tags)[0] }}
@@ -211,7 +211,7 @@ jobs:
branchRegex: ^\w[\w-.]*$
- name: Build and push jupyterhub/singleuser
uses: docker/build-push-action@e1b7f96249f2e4c8e4ac1519b9608c0d48944a1f
uses: docker/build-push-action@1cb9d22b932e4832bb29793b7777ec860fc1cde0
with:
build-args: |
JUPYTERHUB_VERSION=${{ github.ref_type == 'tag' && github.ref_name || format('git:{0}', github.sha) }}

View File

@@ -15,15 +15,13 @@ on:
- "docs/**"
- "jupyterhub/_version.py"
- "jupyterhub/scopes.py"
- ".github/workflows/*"
- "!.github/workflows/test-docs.yml"
- ".github/workflows/test-docs.yml"
push:
paths:
- "docs/**"
- "jupyterhub/_version.py"
- "jupyterhub/scopes.py"
- ".github/workflows/*"
- "!.github/workflows/test-docs.yml"
- ".github/workflows/test-docs.yml"
branches-ignore:
- "dependabot/**"
- "pre-commit-ci-update-config"
@@ -40,18 +38,18 @@ jobs:
validate-rest-api-definition:
runs-on: ubuntu-20.04
steps:
- uses: actions/checkout@v2
- uses: actions/checkout@v3
- name: Validate REST API definition
uses: char0n/swagger-editor-validate@182d1a5d26ff5c2f4f452c43bd55e2c7d8064003
uses: char0n/swagger-editor-validate@v1.3.1
with:
definition-file: docs/source/_static/rest-api.yml
test-docs:
runs-on: ubuntu-20.04
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: "3.9"

View File

@@ -19,6 +19,9 @@ on:
- "**"
workflow_dispatch:
permissions:
contents: read
jobs:
# The ./jsx folder contains React based source code files that are to compile
# to share/jupyterhub/static/js/admin-react.js. The ./jsx folder includes
@@ -29,8 +32,8 @@ jobs:
timeout-minutes: 5
steps:
- uses: actions/checkout@v2
- uses: actions/setup-node@v1
- uses: actions/checkout@v3
- uses: actions/setup-node@v3
with:
node-version: "14"

View File

@@ -30,6 +30,9 @@ env:
LANG: C.UTF-8
PYTEST_ADDOPTS: "--verbose --color=yes"
permissions:
contents: read
jobs:
# Run "pytest jupyterhub/tests" in various configurations
pytest:
@@ -53,9 +56,9 @@ jobs:
# Tests everything when JupyterHub works against a dedicated mysql or
# postgresql server.
#
# nbclassic:
# legacy_notebook:
# Tests everything when the user instances are started with
# notebook instead of jupyter_server.
# the legacy notebook server instead of jupyter_server.
#
# ssl:
# Tests everything using internal SSL connections instead of
@@ -69,20 +72,24 @@ jobs:
# GitHub UI when the workflow run, we avoid using true/false as
# values by instead duplicating the name to signal true.
include:
- python: "3.6"
- python: "3.7"
oldest_dependencies: oldest_dependencies
nbclassic: nbclassic
- python: "3.6"
subdomain: subdomain
- python: "3.7"
db: mysql
- python: "3.7"
ssl: ssl
legacy_notebook: legacy_notebook
- python: "3.8"
db: postgres
- python: "3.8"
nbclassic: nbclassic
legacy_notebook: legacy_notebook
- python: "3.9"
db: mysql
- python: "3.10"
db: postgres
- python: "3.10"
subdomain: subdomain
- python: "3.10"
ssl: ssl
# can't test 3.11.0-beta.4 until a greenlet release
# greenlet is a dependency of sqlalchemy on linux
# see https://github.com/gevent/gevent/issues/1867
# - python: "3.11.0-beta.4"
- python: "3.10"
main_dependencies: main_dependencies
steps:
@@ -110,11 +117,11 @@ jobs:
if [ "${{ matrix.jupyter_server }}" != "" ]; then
echo "JUPYTERHUB_SINGLEUSER_APP=jupyterhub.tests.mockserverapp.MockServerApp" >> $GITHUB_ENV
fi
- uses: actions/checkout@v2
# NOTE: actions/setup-node@v1 make use of a cache within the GitHub base
- uses: actions/checkout@v3
# NOTE: actions/setup-node@v3 makes use of a cache within the GitHub base
# environment and setup in a fraction of a second.
- name: Install Node v14
uses: actions/setup-node@v1
uses: actions/setup-node@v3
with:
node-version: "14"
- name: Install Javascript dependencies
@@ -123,12 +130,12 @@ jobs:
npm install -g configurable-http-proxy yarn
npm list
# NOTE: actions/setup-python@v2 make use of a cache within the GitHub base
# NOTE: actions/setup-python@v4 makes use of a cache within the GitHub base
# environment and setup in a fraction of a second.
- name: Install Python ${{ matrix.python }}
uses: actions/setup-python@v2
uses: actions/setup-python@v4
with:
python-version: ${{ matrix.python }}
python-version: "${{ matrix.python }}"
- name: Install Python dependencies
run: |
pip install --upgrade pip
@@ -145,9 +152,9 @@ jobs:
if [ "${{ matrix.main_dependencies }}" != "" ]; then
pip install git+https://github.com/ipython/traitlets#egg=traitlets --force
fi
if [ "${{ matrix.nbclassic }}" != "" ]; then
if [ "${{ matrix.legacy_notebook }}" != "" ]; then
pip uninstall jupyter_server --yes
pip install notebook
pip install 'notebook<7'
fi
if [ "${{ matrix.db }}" == "mysql" ]; then
pip install mysql-connector-python
@@ -211,7 +218,7 @@ jobs:
timeout-minutes: 20
steps:
- uses: actions/checkout@v2
- uses: actions/checkout@v3
- name: build images
run: |

View File

@@ -11,7 +11,7 @@
repos:
# Autoformat: Python code, syntax patterns are modernized
- repo: https://github.com/asottile/pyupgrade
rev: v2.32.1
rev: v2.37.3
hooks:
- id: pyupgrade
args:
@@ -25,19 +25,19 @@ repos:
# Autoformat: Python code
- repo: https://github.com/psf/black
rev: 22.3.0
rev: 22.6.0
hooks:
- id: black
# Autoformat: markdown, yaml, javascript (see the file .prettierignore)
- repo: https://github.com/pre-commit/mirrors-prettier
rev: v2.6.2
rev: v2.7.1
hooks:
- id: prettier
# Autoformat and linting, misc. details
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.2.0
rev: v4.3.0
hooks:
- id: end-of-file-fixer
exclude: share/jupyterhub/static/js/admin-react.js
@@ -47,6 +47,6 @@ repos:
# Linting: Python code (see the file .flake8)
- repo: https://github.com/PyCQA/flake8
rev: "4.0.1"
rev: "5.0.2"
hooks:
- id: flake8

View File

@@ -21,7 +21,7 @@
# your jupyterhub_config.py will be added automatically
# from your docker directory.
ARG BASE_IMAGE=ubuntu:focal-20200729
ARG BASE_IMAGE=ubuntu:22.04
FROM $BASE_IMAGE AS builder
USER root

View File

@@ -9,10 +9,13 @@ cryptography
html5lib # needed for beautifulsoup
jupyterlab >=3
mock
# nbclassic provides the '/tree/' handler, which we use in tests
# it is a transitive dependency via jupyterlab,
# but depend on it directly
nbclassic
pre-commit
pytest>=3.3
pytest-asyncio; python_version < "3.7"
pytest-asyncio>=0.17; python_version >= "3.7"
pytest-asyncio>=0.17
pytest-cov
requests-mock
tbump

View File

@@ -6,7 +6,7 @@ info:
description: The REST API for JupyterHub
license:
name: BSD-3-Clause
version: 2.3.0.dev
version: 2.4.0.dev
servers:
- url: /hub/api
security:

View File

@@ -183,12 +183,6 @@ itself, ``jupyterhub_config.py``, as a binary string:
c.JupyterHub.cookie_secret = bytes.fromhex('64 CHAR HEX STRING')
.. important::
If the cookie secret value changes for the Hub, all single-user notebook
servers must also be restarted.
.. _cookies:
Cookies used by JupyterHub authentication

View File

@@ -2,6 +2,8 @@
# 1. start/stop servers, and
# 2. access the server API
c = get_config() # noqa
c.JupyterHub.load_roles = [
{
"name": "launcher",

View File

@@ -28,17 +28,7 @@
}
},
"dependencies": {
"@babel/core": "^7.12.3",
"@babel/preset-env": "^7.12.11",
"@babel/preset-react": "^7.12.10",
"@testing-library/jest-dom": "^5.15.1",
"@testing-library/react": "^12.1.2",
"@testing-library/user-event": "^13.5.0",
"babel-loader": "^8.2.1",
"bootstrap": "^4.5.3",
"css-loader": "^5.0.1",
"eslint-plugin-unused-imports": "^1.1.1",
"file-loader": "^6.2.0",
"history": "^5.0.0",
"lodash.debounce": "^4.0.8",
"prop-types": "^15.7.2",
@@ -51,24 +41,35 @@
"react-redux": "^7.2.2",
"react-router": "^5.2.0",
"react-router-dom": "^5.2.0",
"recompose": "^0.30.0",
"recompose": "npm:react-recompose@^0.31.2",
"redux": "^4.0.5",
"regenerator-runtime": "^0.13.9",
"style-loader": "^2.0.0",
"webpack": "^5.6.0",
"webpack-cli": "^3.3.4",
"webpack-dev-server": "^3.11.0"
"regenerator-runtime": "^0.13.9"
},
"devDependencies": {
"@babel/core": "^7.12.3",
"@babel/preset-env": "^7.12.11",
"@babel/preset-react": "^7.12.10",
"@testing-library/jest-dom": "^5.15.1",
"@testing-library/react": "^12.1.2",
"@testing-library/user-event": "^13.5.0",
"@webpack-cli/serve": "^1.7.0",
"@wojtekmaj/enzyme-adapter-react-17": "^0.6.5",
"babel-jest": "^26.6.3",
"babel-loader": "^8.2.1",
"css-loader": "^5.0.1",
"enzyme": "^3.11.0",
"eslint": "^7.18.0",
"eslint-plugin-prettier": "^3.3.1",
"eslint-plugin-react": "^7.22.0",
"eslint-plugin-unused-imports": "^1.1.1",
"file-loader": "^6.2.0",
"identity-obj-proxy": "^3.0.0",
"jest": "^26.6.3",
"prettier": "^2.2.1",
"sinon": "^13.0.1"
"sinon": "^13.0.1",
"style-loader": "^2.0.0",
"webpack": "^5.6.0",
"webpack-cli": "^4.10.0",
"webpack-dev-server": "^4.9.3"
}
}

View File

@@ -60,7 +60,10 @@ const AddUser = (props) => {
placeholder="usernames separated by line"
data-testid="user-textarea"
onBlur={(e) => {
let split_users = e.target.value.split("\n");
let split_users = e.target.value
.split("\n")
.map((u) => u.trim())
.filter((u) => u.length > 0);
setUsers(split_users);
}}
></textarea>
@@ -88,17 +91,7 @@ const AddUser = (props) => {
data-testid="submit"
className="btn btn-primary"
onClick={() => {
let filtered_users = users.filter(
(e) =>
e.length > 2 &&
/[!@#$%^&*(),.?":{}|<>]/g.test(e) == false
);
if (filtered_users.length < users.length) {
setUsers(filtered_users);
failRegexEvent();
}
addUsers(filtered_users, admin)
addUsers(users, admin)
.then((data) =>
data.status < 300
? updateUsers(0, limit)

View File

@@ -70,12 +70,12 @@ test("Removes users when they fail Regex", async () => {
let textarea = screen.getByTestId("user-textarea");
let submit = screen.getByTestId("submit");
fireEvent.blur(textarea, { target: { value: "foo\nbar\n!!*&*" } });
fireEvent.blur(textarea, { target: { value: "foo \n bar\na@b.co\n \n\n" } });
await act(async () => {
fireEvent.click(submit);
});
expect(callbackSpy).toHaveBeenCalledWith(["foo", "bar"], false);
expect(callbackSpy).toHaveBeenCalledWith(["foo", "bar", "a@b.co"], false);
});
test("Correctly submits admin", async () => {

View File

@@ -59,7 +59,7 @@ const CreateGroup = (props) => {
value={groupName}
placeholder="group name..."
onChange={(e) => {
setGroupName(e.target.value);
setGroupName(e.target.value.trim());
}}
></input>
</div>

View File

@@ -30,7 +30,7 @@ const AccessServerButton = ({ url }) => (
);
const ServerDashboard = (props) => {
let base_url = window.base_url;
let base_url = window.base_url || "/";
// sort methods
var usernameDesc = (e) => e.sort((a, b) => (a.name > b.name ? 1 : -1)),
usernameAsc = (e) => e.sort((a, b) => (a.name < b.name ? 1 : -1)),
@@ -201,6 +201,25 @@ const ServerDashboard = (props) => {
};
const ServerRowTable = ({ data }) => {
const sortedData = Object.keys(data)
.sort()
.reduce(function (result, key) {
let value = data[key];
switch (key) {
case "last_activity":
case "created":
case "started":
// format timestamps
value = value ? timeSince(value) : value;
break;
}
if (Array.isArray(value)) {
// cast arrays (e.g. roles, groups) to string
value = value.sort().join(", ");
}
result[key] = value;
return result;
}, {});
return (
<ReactObjectTableViewer
className="table-striped table-bordered"
@@ -214,7 +233,7 @@ const ServerDashboard = (props) => {
valueStyle={{
padding: "4px",
}}
data={data}
data={sortedData}
/>
);
};
@@ -251,11 +270,7 @@ const ServerDashboard = (props) => {
<td data-testid="user-row-admin">{user.admin ? "admin" : ""}</td>
<td data-testid="user-row-server">
{server.name ? (
<p className="text-secondary">{server.name}</p>
) : (
<p style={{ color: "lightgrey" }}>[MAIN]</p>
)}
</td>
<td data-testid="user-row-last-activity">
{server.last_activity ? timeSince(server.last_activity) : "Never"}
@@ -277,7 +292,7 @@ const ServerDashboard = (props) => {
/>
<a
href={`${base_url}spawn/${user.name}${
server.name && "/" + server.name
server.name ? "/" + server.name : ""
}`}
>
<button

View File

@@ -98,6 +98,18 @@ test("Renders correctly the status of a single-user server", async () => {
expect(stop).toBeVisible();
});
test("Renders spawn page link", async () => {
let callbackSpy = mockAsync();
await act(async () => {
render(serverDashboardJsx(callbackSpy));
});
let link = screen.getByText("Spawn Page").closest("a");
let url = new URL(link.href);
expect(url.pathname).toEqual("/spawn/bar");
});
test("Invokes the startServer event on button click", async () => {
let callbackSpy = mockAsync();

View File

@@ -1,5 +1,5 @@
export const jhapiRequest = (endpoint, method, data) => {
let base_url = window.base_url,
let base_url = window.base_url || "/",
api_url = `${base_url}hub/api`;
return fetch(api_url + endpoint, {
method: method,

View File

@@ -1,6 +1,5 @@
const webpack = require("webpack");
const path = require("path");
const express = require("express");
module.exports = {
entry: path.resolve(__dirname, "src", "App.jsx"),
@@ -34,16 +33,19 @@ module.exports = {
},
plugins: [new webpack.HotModuleReplacementPlugin()],
devServer: {
contentBase: path.resolve(__dirname, "build"),
static: {
directory: path.resolve(__dirname, "build"),
},
port: 9000,
before: (app, server) => {
onBeforeSetupMiddleware: (devServer) => {
const app = devServer.app;
var user_data = JSON.parse(
'[{"kind":"user","name":"foo","admin":true,"groups":[],"server":"/user/foo/","pending":null,"created":"2020-12-07T18:46:27.112695Z","last_activity":"2020-12-07T21:00:33.336354Z","servers":{"":{"name":"","last_activity":"2020-12-07T20:58:02.437408Z","started":"2020-12-07T20:58:01.508266Z","pending":null,"ready":true,"state":{"pid":28085},"url":"/user/foo/","user_options":{},"progress_url":"/hub/api/users/foo/server/progress"}}},{"kind":"user","name":"bar","admin":false,"groups":[],"server":null,"pending":null,"created":"2020-12-07T18:46:27.115528Z","last_activity":"2020-12-07T20:43:51.013613Z","servers":{}}]'
);
var group_data = JSON.parse(
'[{"kind":"group","name":"testgroup","users":[]}, {"kind":"group","name":"testgroup2","users":["foo", "bar"]}]'
);
app.use(express.json());
// get user_data
app.get("/hub/api/users", (req, res) => {

File diff suppressed because it is too large Load Diff

View File

@@ -2,7 +2,7 @@
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
# version_info updated by running `tbump`
version_info = (2, 3, 0, "", "dev")
version_info = (2, 4, 0, "", "dev")
# pep 440 version: no dot before beta/rc, but before .dev
# 0.1.0rc1

View File

@@ -26,7 +26,7 @@ class ProxyAPIHandler(APIHandler):
else:
routes = {}
end = offset + limit
for i, key in sorted(all_routes.keys()):
for i, key in enumerate(sorted(all_routes.keys())):
if i < offset:
continue
elif i >= end:

View File

@@ -11,6 +11,7 @@ import re
import secrets
import signal
import socket
import ssl
import sys
import time
from concurrent.futures import ThreadPoolExecutor
@@ -23,15 +24,6 @@ from urllib.parse import unquote, urlparse, urlunparse
if sys.version_info[:2] < (3, 3):
raise ValueError("Python < 3.3 not supported: %s" % sys.version)
# For compatibility with python versions 3.6 or earlier.
# asyncio.Task.all_tasks() is fully moved to asyncio.all_tasks() starting with 3.9. Also applies to current_task.
try:
asyncio_all_tasks = asyncio.all_tasks
asyncio_current_task = asyncio.current_task
except AttributeError as e:
asyncio_all_tasks = asyncio.Task.all_tasks
asyncio_current_task = asyncio.Task.current_task
import tornado.httpserver
import tornado.options
from dateutil.parser import parse as parse_date
@@ -711,11 +703,14 @@ class JupyterHub(Application):
""",
).tag(config=True)
def _subdomain_host_changed(self, name, old, new):
@validate("subdomain_host")
def _validate_subdomain_host(self, proposal):
new = proposal.value
if new and '://' not in new:
# host should include '://'
# if not specified, assume https: You have to be really explicit about HTTP!
self.subdomain_host = 'https://' + new
new = 'https://' + new
return new
domain = Unicode(help="domain name, e.g. 'example.com' (excludes protocol, port)")
@@ -3069,7 +3064,7 @@ class JupyterHub(Application):
self.internal_ssl_key,
self.internal_ssl_cert,
cafile=self.internal_ssl_ca,
check_hostname=False,
purpose=ssl.Purpose.CLIENT_AUTH,
)
# start the webserver
@@ -3246,11 +3241,7 @@ class JupyterHub(Application):
self._atexit_ran = True
self._init_asyncio_patch()
# run the cleanup step (in a new loop, because the interrupted one is unclean)
asyncio.set_event_loop(asyncio.new_event_loop())
IOLoop.clear_current()
loop = IOLoop()
loop.make_current()
loop.run_sync(self.cleanup)
asyncio.run(self.cleanup())
async def shutdown_cancel_tasks(self, sig=None):
"""Cancel all other tasks of the event loop and initiate cleanup"""
@@ -3261,7 +3252,7 @@ class JupyterHub(Application):
await self.cleanup()
tasks = [t for t in asyncio_all_tasks() if t is not asyncio_current_task()]
tasks = [t for t in asyncio.all_tasks() if t is not asyncio.current_task()]
if tasks:
self.log.debug("Cancelling pending tasks")
@@ -3274,7 +3265,7 @@ class JupyterHub(Application):
except StopAsyncIteration as e:
self.log.error("Caught StopAsyncIteration Exception", exc_info=True)
tasks = [t for t in asyncio_all_tasks()]
tasks = [t for t in asyncio.all_tasks()]
for t in tasks:
self.log.debug("Task status: %s", t)
asyncio.get_event_loop().stop()
@@ -3310,16 +3301,19 @@ class JupyterHub(Application):
def launch_instance(cls, argv=None):
self = cls.instance()
self._init_asyncio_patch()
loop = IOLoop.current()
task = asyncio.ensure_future(self.launch_instance_async(argv))
loop = IOLoop(make_current=False)
try:
loop.run_sync(self.launch_instance_async, argv)
except Exception:
loop.close()
raise
try:
loop.start()
except KeyboardInterrupt:
print("\nInterrupted")
finally:
if task.done():
# re-raise exceptions in launch_instance_async
task.result()
loop.stop()
loop.close()

View File

@@ -832,7 +832,7 @@ class LocalAuthenticator(Authenticator):
raise ValueError("I don't know how to create users on OS X")
elif which('pw'):
# Probably BSD
return ['pw', 'useradd', '-m']
return ['pw', 'useradd', '-m', '-n']
else:
# This appears to be the Linux non-interactive adduser command:
return ['adduser', '-q', '--gecos', '""', '--disabled-password']

View File

@@ -23,12 +23,24 @@ import signal
import time
from functools import wraps
from subprocess import Popen
from urllib.parse import quote
from urllib.parse import quote, urlparse
from weakref import WeakKeyDictionary
from tornado.httpclient import AsyncHTTPClient, HTTPError, HTTPRequest
from tornado.ioloop import PeriodicCallback
from traitlets import Any, Bool, Dict, Instance, Integer, Unicode, default, observe
from traitlets import (
Any,
Bool,
CaselessStrEnum,
Dict,
Instance,
Integer,
TraitError,
Unicode,
default,
observe,
validate,
)
from traitlets.config import LoggingConfigurable
from jupyterhub.traitlets import Command
@@ -111,7 +123,8 @@ class Proxy(LoggingConfigurable):
)
extra_routes = Dict(
{},
key_trait=Unicode(),
value_trait=Unicode(),
config=True,
help="""
Additional routes to be maintained in the proxy.
@@ -130,6 +143,51 @@ class Proxy(LoggingConfigurable):
""",
)
@validate("extra_routes")
def _validate_extra_routes(self, proposal):
extra_routes = {}
# check routespecs for leading/trailing slashes
for routespec, target in proposal.value.items():
if not isinstance(routespec, str):
raise TraitError(
f"Proxy.extra_routes keys must be str, got {routespec!r}"
)
if not isinstance(target, str):
raise TraitError(
f"Proxy.extra_routes values must be str, got {target!r}"
)
if not routespec.endswith("/"):
# trailing / is unambiguous, so we can add it
self.log.warning(
f"Adding missing trailing '/' to c.Proxy.extra_routes {routespec} -> {routespec}/"
)
routespec += "/"
if self.app.subdomain_host:
# subdomain routing must _not_ start with /
if routespec.startswith("/"):
raise ValueError(
f"Proxy.extra_routes missing host component in {routespec} (must not have leading '/') when using `JupyterHub.subdomain_host = {self.app.subdomain_host!r}`"
)
else:
# no subdomains, must start with /
# this is ambiguous with host routing, so raise instead of warn
if not routespec.startswith("/"):
raise ValueError(
f"Proxy.extra_routes routespec {routespec} missing leading '/'."
)
# validate target URL?
target_url = urlparse(target.lower())
if target_url.scheme not in {"http", "https"} or not target_url.netloc:
raise ValueError(
f"Proxy.extra_routes target {routespec}={target!r} doesn't look like a URL (should have http[s]://...)"
)
extra_routes[routespec] = target
return extra_routes
def start(self):
"""Start the proxy.
@@ -466,7 +524,21 @@ class ConfigurableHTTPProxy(Proxy):
def _concurrency_changed(self, change):
self.semaphore = asyncio.BoundedSemaphore(change.new)
# https://github.com/jupyterhub/configurable-http-proxy/blob/4.5.1/bin/configurable-http-proxy#L92
log_level = CaselessStrEnum(
["debug", "info", "warn", "error"],
"info",
help="Proxy log level",
config=True,
)
debug = Bool(False, help="Add debug-level logging to the Proxy.", config=True)
@observe('debug')
def _debug_changed(self, change):
if change.new:
self.log_level = "debug"
auth_token = Unicode(
help="""The Proxy auth token
@@ -662,11 +734,11 @@ class ConfigurableHTTPProxy(Proxy):
str(api_server.port),
'--error-target',
url_path_join(self.hub.url, 'error'),
'--log-level',
self.log_level,
]
if self.app.subdomain_host:
cmd.append('--host-routing')
if self.debug:
cmd.extend(['--log-level', 'debug'])
if self.ssl_key:
cmd.extend(['--ssl-key', self.ssl_key])
if self.ssl_cert:

View File

@@ -551,7 +551,7 @@ def expand_scopes(scopes, owner=None, oauth_client=None):
else:
warnings.warn(
f"Not expanding !{filter} filter without target {filter} in {scope}",
stacklevel=2,
stacklevel=3,
)
if 'self' in expanded_scopes:
@@ -561,7 +561,7 @@ def expand_scopes(scopes, owner=None, oauth_client=None):
else:
warnings.warn(
f"Not expanding 'self' scope for owner {owner} which is not a User",
stacklevel=2,
stacklevel=3,
)
# reduce to discard overlapping scopes

View File

@@ -23,6 +23,7 @@ If you are using OAuth, you will also need to register an oauth callback handler
A tornado implementation is provided in :class:`HubOAuthCallbackHandler`.
"""
import asyncio
import base64
import hashlib
import json
@@ -34,14 +35,26 @@ import string
import time
import uuid
import warnings
from functools import partial
from http import HTTPStatus
from unittest import mock
from urllib.parse import urlencode
import requests
from tornado.httpclient import AsyncHTTPClient, HTTPRequest
from tornado.httputil import url_concat
from tornado.log import app_log
from tornado.web import HTTPError, RequestHandler
from traitlets import Dict, Instance, Integer, Set, Unicode, default, observe, validate
from traitlets import (
Any,
Dict,
Instance,
Integer,
Set,
Unicode,
default,
observe,
validate,
)
from traitlets.config import SingletonConfigurable
from ..scopes import _intersect_expanded_scopes
@@ -362,7 +375,47 @@ class HubAuth(SingletonConfigurable):
return {f'access:services!service={service_name}'}
return set()
def _check_hub_authorization(self, url, api_token, cache_key=None, use_cache=True):
_pool = Any(help="Thread pool for running async methods in the background")
@default("_pool")
def _new_pool(self):
# start a single ThreadPool in the background
from concurrent.futures import ThreadPoolExecutor
pool = ThreadPoolExecutor(1)
# create an event loop in the thread
pool.submit(self._setup_asyncio_thread).result()
return pool
def _setup_asyncio_thread(self):
"""Create asyncio loop
To be called from the background thread,
so that any thread-local state is setup correctly
"""
self._thread_loop = asyncio.new_event_loop()
def _synchronize(self, async_f, *args, **kwargs):
"""Call an async method in our background thread"""
future = self._pool.submit(
lambda: self._thread_loop.run_until_complete(async_f(*args, **kwargs))
)
return future.result()
def _call_coroutine(self, sync, async_f, *args, **kwargs):
"""Call an async coroutine function, either blocking or returning an awaitable
if not sync: calls function directly, returning awaitable
else: Block on a call in our background thread, return actual result
"""
if not sync:
return async_f(*args, **kwargs)
else:
return self._synchronize(async_f, *args, **kwargs)
async def _check_hub_authorization(
self, url, api_token, cache_key=None, use_cache=True
):
"""Identify a user with the Hub
Args:
@@ -385,7 +438,7 @@ class HubAuth(SingletonConfigurable):
except KeyError:
app_log.debug("HubAuth cache miss: %s", cache_key)
data = self._api_request(
data = await self._api_request(
'GET',
url,
headers={"Authorization": "token " + api_token},
@@ -400,18 +453,26 @@ class HubAuth(SingletonConfigurable):
self.cache[cache_key] = data
return data
def _api_request(self, method, url, **kwargs):
async def _api_request(self, method, url, **kwargs):
"""Make an API request"""
allow_403 = kwargs.pop('allow_403', False)
headers = kwargs.setdefault('headers', {})
headers.setdefault('Authorization', 'token %s' % self.api_token)
if "cert" not in kwargs and self.certfile and self.keyfile:
kwargs["cert"] = (self.certfile, self.keyfile)
headers.setdefault('Authorization', f'token {self.api_token}')
# translate requests args to tornado's
if self.certfile:
kwargs["client_cert"] = self.certfile
if self.keyfile:
kwargs["client_key"] = self.keyfile
if self.client_ca:
kwargs["verify"] = self.client_ca
kwargs["ca_certs"] = self.client_ca
req = HTTPRequest(
url,
method=method,
**kwargs,
)
try:
r = requests.request(method, url, **kwargs)
except requests.ConnectionError as e:
r = await AsyncHTTPClient().fetch(req, raise_error=False)
except Exception as e:
app_log.error("Error connecting to %s: %s", self.api_url, e)
msg = "Failed to connect to Hub API at %r." % self.api_url
msg += (
@@ -426,35 +487,46 @@ class HubAuth(SingletonConfigurable):
raise HTTPError(500, msg)
data = None
if r.status_code == 403 and allow_403:
try:
status = HTTPStatus(r.code)
except ValueError:
app_log.error(
f"Unknown error checking authorization with JupyterHub: {r.code}"
)
app_log.error(r.body.decode("utf8", "replace"))
response_text = r.body.decode("utf8", "replace")
if status.value == 403 and allow_403:
pass
elif r.status_code == 403:
elif status.value == 403:
app_log.error(
"I don't have permission to check authorization with JupyterHub, my auth token may have expired: [%i] %s",
r.status_code,
r.reason,
status.value,
status.description,
)
app_log.error(r.text)
app_log.error(response_text)
raise HTTPError(
500, "Permission failure checking authorization, I may need a new token"
)
elif r.status_code >= 500:
elif status.value >= 500:
app_log.error(
"Upstream failure verifying auth token: [%i] %s",
r.status_code,
r.reason,
status.value,
status.description,
)
app_log.error(r.text)
app_log.error(response_text)
raise HTTPError(502, "Failed to check authorization (upstream problem)")
elif r.status_code >= 400:
elif status.value >= 400:
app_log.warning(
"Failed to check authorization: [%i] %s", r.status_code, r.reason
"Failed to check authorization: [%i] %s",
status.value,
status.description,
)
app_log.warning(r.text)
app_log.warning(response_text)
msg = "Failed to check authorization"
# pass on error from oauth failure
try:
response = r.json()
response = json.loads(response_text)
# prefer more specific 'error_description', fallback to 'error'
description = response.get(
"error_description", response.get("error", "Unknown error")
@@ -465,7 +537,7 @@ class HubAuth(SingletonConfigurable):
msg += ": " + description
raise HTTPError(500, msg)
else:
data = r.json()
data = json.loads(response_text)
return data
@@ -475,19 +547,25 @@ class HubAuth(SingletonConfigurable):
"Identifying users by shared cookie is removed in JupyterHub 2.0. Use OAuth tokens."
)
def user_for_token(self, token, use_cache=True, session_id=''):
def user_for_token(self, token, use_cache=True, session_id='', *, sync=True):
"""Ask the Hub to identify the user for a given token.
.. versionadded:: 2.4
async support via `sync` argument.
Args:
token (str): the token
use_cache (bool): Specify use_cache=False to skip cached cookie values (default: True)
sync (bool): whether to block for the result or return an awaitable
Returns:
user_model (dict): The user model, if a user is identified, None if authentication fails.
The 'name' field contains the user's name.
"""
return self._check_hub_authorization(
return self._call_coroutine(
sync,
self._check_hub_authorization,
url=url_path_join(
self.api_url,
"user",
@@ -532,7 +610,7 @@ class HubAuth(SingletonConfigurable):
"""Base class doesn't store tokens in cookies"""
return None
def _get_user_cookie(self, handler):
async def _get_user_cookie(self, handler):
"""Get the user model from a cookie"""
# overridden in HubOAuth to store the access token after oauth
return None
@@ -544,20 +622,26 @@ class HubAuth(SingletonConfigurable):
"""
return handler.get_cookie('jupyterhub-session-id', '')
def get_user(self, handler):
def get_user(self, handler, *, sync=True):
"""Get the Hub user for a given tornado handler.
Checks cookie with the Hub to identify the current user.
.. versionadded:: 2.4
async support via `sync` argument.
Args:
handler (tornado.web.RequestHandler): the current request handler
sync (bool): whether to block for the result or return an awaitable
Returns:
user_model (dict): The user model, if a user is identified, None if authentication fails.
The 'name' field contains the user's name.
"""
return self._call_coroutine(sync, self._get_user, handler)
async def _get_user(self, handler):
# only allow this to be called once per handler
# avoids issues if an error is raised,
# since this may be called again when trying to render the error page
@@ -572,13 +656,15 @@ class HubAuth(SingletonConfigurable):
# is token-authenticated (CORS-related)
token = self.get_token(handler, in_cookie=False)
if token:
user_model = self.user_for_token(token, session_id=session_id)
user_model = await self.user_for_token(
token, session_id=session_id, sync=False
)
if user_model:
handler._token_authenticated = True
# no token, check cookie
if user_model is None:
user_model = self._get_user_cookie(handler)
user_model = await self._get_user_cookie(handler)
# cache result
handler._cached_hub_user = user_model
@@ -638,11 +724,13 @@ class HubOAuth(HubAuth):
token = token.decode('ascii', 'replace')
return token
def _get_user_cookie(self, handler):
async def _get_user_cookie(self, handler):
token = self._get_token_cookie(handler)
session_id = self.get_session_id(handler)
if token:
user_model = self.user_for_token(token, session_id=session_id)
user_model = await self.user_for_token(
token, session_id=session_id, sync=False
)
if user_model is None:
app_log.warning("Token stored in cookie may have expired")
handler.clear_cookie(self.cookie_name)
@@ -697,7 +785,7 @@ class HubOAuth(HubAuth):
def _token_url(self):
return url_path_join(self.api_url, 'oauth2/token')
def token_for_code(self, code):
def token_for_code(self, code, *, sync=True):
"""Get token for OAuth temporary code
This is the last step of OAuth login.
@@ -708,6 +796,9 @@ class HubOAuth(HubAuth):
Returns:
token (str): JupyterHub API Token
"""
return self._call_coroutine(sync, self._token_for_code, code)
async def _token_for_code(self, code):
# GitHub specifies a POST request yet requires URL parameters
params = dict(
client_id=self.oauth_client_id,
@@ -717,10 +808,10 @@ class HubOAuth(HubAuth):
redirect_uri=self.oauth_redirect_uri,
)
token_reply = self._api_request(
token_reply = await self._api_request(
'POST',
self.oauth_token_url,
data=urlencode(params).encode('utf8'),
body=urlencode(params).encode('utf8'),
headers={'Content-Type': 'application/x-www-form-urlencoded'},
)
@@ -1125,10 +1216,12 @@ class HubOAuthCallbackHandler(HubOAuthenticated, RequestHandler):
app_log.warning("oauth state %r != %r", arg_state, cookie_state)
raise HTTPError(403, "oauth state does not match. Try logging in again.")
next_url = self.hub_auth.get_next_url(cookie_state)
# TODO: make async (in a Thread?)
token = self.hub_auth.token_for_code(code)
token = await self.hub_auth.token_for_code(code, sync=False)
session_id = self.hub_auth.get_session_id(self)
user_model = self.hub_auth.user_for_token(token, session_id=session_id)
user_model = await self.hub_auth.user_for_token(
token, session_id=session_id, sync=False
)
if user_model is None:
raise HTTPError(500, "oauth callback failed to identify a user")
app_log.info("Logged-in user %s", user_model)

View File

@@ -14,6 +14,7 @@ import logging
import os
import random
import secrets
import ssl
import sys
import warnings
from datetime import timezone
@@ -635,14 +636,42 @@ class SingleUserNotebookAppMixin(Configurable):
if default_url:
self.config[self.__class__.__name__].default_url = default_url
self._log_app_versions()
# call our init_ioloop very early
# jupyter-server calls it too late, notebook doesn't define it yet
# only called in jupyter-server >= 1.9
self.init_ioloop()
super().initialize(argv)
self.patch_templates()
def init_ioloop(self):
"""init_ioloop added in jupyter-server 1.9"""
# avoid deprecated access to current event loop
if getattr(self, "io_loop", None) is None:
try:
asyncio.get_running_loop()
except RuntimeError:
# not running, make our own loop
self.io_loop = ioloop.IOLoop(make_current=False)
else:
# running, use IOLoop.current
self.io_loop = ioloop.IOLoop.current()
# Make our event loop the 'current' event loop.
# FIXME: this shouldn't be necessary, but it is.
# notebookapp (<=6.4, at least), and
# jupyter-server (<=1.17.0, at least) still need the 'current' event loop to be defined
with warnings.catch_warnings():
warnings.simplefilter("ignore")
self.io_loop.make_current()
def init_httpserver(self):
self.io_loop.run_sync(super().init_httpserver)
def start(self):
self.log.info("Starting jupyterhub-singleuser server version %s", __version__)
# start by hitting Hub to check version
ioloop.IOLoop.current().run_sync(self.check_hub_version)
ioloop.IOLoop.current().add_callback(self.keep_activity_updated)
self.io_loop.run_sync(self.check_hub_version)
self.io_loop.add_callback(self.keep_activity_updated)
super().start()
def init_hub_auth(self):
@@ -749,10 +778,12 @@ class SingleUserNotebookAppMixin(Configurable):
if 'jinja2_env' in settings:
# default jinja env (should we do this on jupyter-server, or only notebook?)
jinja_envs.append(settings['jinja2_env'])
if 'notebook_jinja2_env' in settings:
# when running with jupyter-server, classic notebook (nbclassic server extension)
for ext_name in ("notebook", "nbclassic"):
env_name = f"{ext_name}_jinja2_env"
if env_name in settings:
# when running with jupyter-server, classic notebook (nbclassic server extension or notebook v7)
# gets its own jinja env, which needs the same patch
jinja_envs.append(settings['notebook_jinja2_env'])
jinja_envs.append(settings[env_name])
# patch jinja env loading to get modified template, only for base page.html
def get_page(name):

View File

@@ -54,28 +54,6 @@ from .utils import add_user
_db = None
def _pytest_collection_modifyitems(items):
"""This function is automatically run by pytest passing all collected test
functions.
We use it to add asyncio marker to all async tests and assert we don't use
test functions that are async generators which wouldn't make sense.
It is no longer required with pytest-asyncio >= 0.17
"""
for item in items:
if inspect.iscoroutinefunction(item.obj):
item.add_marker('asyncio')
assert not inspect.isasyncgenfunction(item.obj)
if sys.version_info < (3, 7):
# apply pytest-asyncio's 'auto' mode on Python 3.6.
# 'auto' mode is new in pytest-asyncio 0.17,
# which requires Python 3.7.
pytest_collection_modifyitems = _pytest_collection_modifyitems
@fixture(scope='module')
def ssl_tmpdir(tmpdir_factory):
return tmpdir_factory.mktemp('ssl')
@@ -154,16 +132,13 @@ def event_loop(request):
@fixture(scope='module')
def io_loop(event_loop, request):
async def io_loop(event_loop, request):
"""Same as pytest-tornado.io_loop, but re-scoped to module-level"""
ioloop.IOLoop.configure(AsyncIOMainLoop)
io_loop = AsyncIOMainLoop()
io_loop.make_current()
assert asyncio.get_event_loop() is event_loop
assert io_loop.asyncio_loop is event_loop
def _close():
io_loop.clear_current()
io_loop.close(all_fds=True)
request.addfinalizer(_close)

View File

@@ -411,14 +411,10 @@ class StubSingleUserSpawner(MockSpawner):
print(args, env)
def _run():
asyncio.set_event_loop(asyncio.new_event_loop())
io_loop = IOLoop()
io_loop.make_current()
io_loop.add_callback(lambda: evt.set())
with mock.patch.dict(os.environ, env):
app = self._app = MockSingleUserServer()
app.initialize(args)
app.io_loop.add_callback(lambda: evt.set())
assert app.hub_auth.oauth_client_id
assert app.hub_auth.api_token
assert app.hub_auth.oauth_scopes

View File

@@ -15,6 +15,7 @@ Handlers and their purpose include:
import json
import os
import pprint
import ssl
import sys
from urllib.parse import urlparse
@@ -111,7 +112,9 @@ def main():
ca = os.environ.get('JUPYTERHUB_SSL_CLIENT_CA') or ''
if key and cert and ca:
ssl_context = make_ssl_context(key, cert, cafile=ca, check_hostname=False)
ssl_context = make_ssl_context(
key, cert, cafile=ca, purpose=ssl.Purpose.CLIENT_AUTH
)
server = httpserver.HTTPServer(app, ssl_options=ssl_context)
server.listen(url.port, url.hostname)

View File

@@ -47,7 +47,11 @@ def main():
ca = os.environ.get('JUPYTERHUB_SSL_CLIENT_CA') or ''
if key and cert and ca:
ssl_context = make_ssl_context(key, cert, cafile=ca, check_hostname=False)
import ssl
ssl_context = make_ssl_context(
key, cert, cafile=ca, purpose=ssl.Purpose.CLIENT_AUTH
)
assert url.scheme == "https"
server = httpserver.HTTPServer(app, ssl_options=ssl_context)

View File

@@ -1416,6 +1416,17 @@ async def test_get_proxy(app):
assert list(reply.keys()) == [app.hub.routespec]
@mark.parametrize("offset", (0, 1))
async def test_get_proxy_pagination(app, offset):
r = await api_request(
app, f'proxy?offset={offset}', headers={"Accept": PAGINATION_MEDIA_TYPE}
)
r.raise_for_status()
reply = r.json()
assert set(reply) == {"items", "_pagination"}
assert list(reply["items"].keys()) == [app.hub.routespec][offset:]
async def test_cookie(app):
db = app.db
name = 'patience'

View File

@@ -6,6 +6,7 @@ from subprocess import Popen
from urllib.parse import quote, urlparse
import pytest
from traitlets import TraitError
from traitlets.config import Config
from ..utils import url_path_join as ujoin
@@ -193,23 +194,96 @@ async def test_check_routes(app, username, disable_check_routes):
assert before == after
async def test_extra_routes(app):
@pytest.mark.parametrize(
"routespec",
[
'/has%20space/foo/',
'/missing-trailing/slash',
'/has/@/',
'/has/' + quote('üñîçø∂é'),
'host.name/path/',
'other.host/path/no/slash',
],
)
async def test_extra_routes(app, routespec):
proxy = app.proxy
# When using host_routing, it's up to the admin to
# provide routespecs that have a domain in them.
# We don't explicitly validate that here.
if app.subdomain_host:
route_spec = 'example.com/test-extra-routes/'
else:
route_spec = '/test-extra-routes/'
if app.subdomain_host and routespec.startswith("/"):
routespec = 'example.com/' + routespec
elif not app.subdomain_host and not routespec.startswith("/"):
pytest.skip("requires subdomains")
validated_routespec = routespec
if not routespec.endswith("/"):
validated_routespec = routespec + "/"
target = 'http://localhost:9999/test'
proxy.extra_routes = {route_spec: target}
proxy.extra_routes = {routespec: target}
await proxy.check_routes(app.users, app._service_map)
routes = await app.proxy.get_all_routes()
assert route_spec in routes
assert routes[route_spec]['target'] == target
print(routes)
assert validated_routespec in routes
assert routes[validated_routespec]['target'] == target
assert routes[validated_routespec]['data']['extra']
@pytest.mark.parametrize(
"needs_subdomain, routespec, expected",
[
(False, "/prefix/", "/prefix/"),
(False, "/prefix", "/prefix/"),
(False, "prefix/", ValueError),
(True, "/prefix/", ValueError),
(True, "example.com/prefix/", "example.com/prefix/"),
(True, "example.com/prefix", "example.com/prefix/"),
(False, 100, TraitError),
],
)
def test_extra_routes_validate_routespec(
request, app, needs_subdomain, routespec, expected
):
save_host = app.subdomain_host
request.addfinalizer(lambda: setattr(app, "subdomain_host", save_host))
if needs_subdomain:
app.subdomain_host = "localhost.jovyan.org"
else:
app.subdomain_host = ""
proxy = app.proxy
extra_routes = {routespec: "https://127.0.0.1"}
if isinstance(expected, type) and issubclass(expected, BaseException):
with pytest.raises(expected):
proxy.extra_routes = extra_routes
return
proxy.extra_routes = extra_routes
assert list(proxy.extra_routes) == [expected]
@pytest.mark.parametrize(
"target, expected",
[
("http://host", "http://host"),
("https://host", "https://host"),
("/missing-host", ValueError),
("://missing-scheme", ValueError),
(100, TraitError),
],
)
def test_extra_routes_validate_target(app, target, expected):
proxy = app.proxy
routespec = "/prefix/"
if app.subdomain_host:
routespec = f"host.tld{routespec}"
extra_routes = {routespec: target}
if isinstance(expected, type) and issubclass(expected, BaseException):
with pytest.raises(expected):
proxy.extra_routes = extra_routes
return
proxy.extra_routes = extra_routes
assert list(proxy.extra_routes.values()) == [expected]
@pytest.mark.parametrize(

View File

@@ -1,7 +1,7 @@
"""Tests for jupyterhub.singleuser"""
import os
import sys
from contextlib import contextmanager
from contextlib import contextmanager, nullcontext
from subprocess import CalledProcessError, check_output
from unittest import mock
from urllib.parse import urlencode, urlparse
@@ -17,12 +17,6 @@ from .mocking import StubSingleUserSpawner, public_url
from .utils import AsyncSession, async_requests, get_page
@contextmanager
def nullcontext():
"""Python 3.7+ contextlib.nullcontext, backport for 3.6"""
yield
@pytest.mark.parametrize(
"access_scopes, server_name, expect_success",
[
@@ -195,10 +189,22 @@ def test_singleuser_app_class(JUPYTERHUB_SINGLEUSER_APP):
import jupyter_server # noqa
except ImportError:
have_server = False
expect_error = "jupyter_server" in JUPYTERHUB_SINGLEUSER_APP
else:
have_server = True
expect_error = False
try:
import notebook.notebookapp # noqa
except ImportError:
have_notebook = False
else:
have_notebook = True
if JUPYTERHUB_SINGLEUSER_APP.startswith("notebook."):
expect_error = not have_notebook
elif JUPYTERHUB_SINGLEUSER_APP.startswith("jupyter_server."):
expect_error = not have_server
else:
# not specified, will try both
expect_error = not (have_server or have_notebook)
if expect_error:
ctx = pytest.raises(CalledProcessError)

View File

@@ -3,7 +3,14 @@ Traitlets that are used in JupyterHub
"""
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
import entrypoints
import sys
# See compatibility note on `group` keyword in https://docs.python.org/3/library/importlib.metadata.html#entry-points
if sys.version_info < (3, 10):
from importlib_metadata import entry_points
else:
from importlib.metadata import entry_points
from traitlets import Integer, List, TraitError, TraitType, Type, Undefined, Unicode
@@ -125,11 +132,7 @@ class EntryPointType(Type):
chunks = [self._original_help]
chunks.append("Currently installed: ")
for key, entry_point in self.load_entry_points().items():
chunks.append(
" - {}: {}.{}".format(
key, entry_point.module_name, entry_point.object_name
)
)
chunks.append(f" - {key}: {entry_point.module}.{entry_point.attr}")
return '\n'.join(chunks)
@help.setter
@@ -137,11 +140,14 @@ class EntryPointType(Type):
self._original_help = value
def load_entry_points(self):
"""Load my entry point group"""
# load the group
group = entrypoints.get_group_named(self.entry_point_group)
# make it case-insensitive
return {key.lower(): value for key, value in group.items()}
"""Load my entry point group
Returns a dict whose keys are lowercase entrypoint names
"""
return {
entry_point.name.lower(): entry_point
for entry_point in entry_points(group=self.entry_point_group)
}
def validate(self, obj, value):
if isinstance(value, str):

View File

@@ -1,9 +1,11 @@
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
import json
import string
import warnings
from collections import defaultdict
from datetime import datetime, timedelta
from functools import lru_cache
from urllib.parse import quote, urlparse
from sqlalchemy import inspect
@@ -53,6 +55,42 @@ Common causes of this timeout, and debugging tips:
to a number of seconds that is enough for servers to become responsive.
"""
# set of chars that are safe in dns labels
# (allow '.' because we don't mind multiple levels of subdomains)
_dns_safe = set(string.ascii_letters + string.digits + '-.')
# don't escape % because it's the escape char and we handle it separately
_dns_needs_replace = _dns_safe | {"%"}
@lru_cache()
def _dns_quote(name):
"""Escape a name for use in a dns label
this is _NOT_ fully domain-safe, but works often enough for realistic usernames.
Fully safe would be full IDNA encoding,
PLUS escaping non-IDNA-legal ascii,
PLUS some encoding of boundary conditions
"""
# escape name for subdomain label
label = quote(name, safe="").lower()
# some characters are not handled by quote,
# because they are legal in URLs but not domains,
# specifically _ and ~ (starting in 3.7).
# Escape these in the same way (%{hex_codepoint}).
unique_chars = set(label)
for c in unique_chars:
if c not in _dns_needs_replace:
label = label.replace(c, f"%{ord(c):x}")
# underscore is our escape char -
# it's not officially legal in hostnames,
# but is valid in _domain_ names (?),
# and always works in practice.
# FIXME: We should consider switching to proper IDNA encoding
# for 3.0.
label = label.replace("%", "_")
return label
class UserDict(dict):
"""Like defaultdict, but for users
@@ -520,10 +558,8 @@ class User:
@property
def domain(self):
"""Get the domain for my server."""
# use underscore as escape char for domains
return (
quote(self.name).replace('%', '_').lower() + '.' + self.settings['domain']
)
return _dns_quote(self.name) + '.' + self.settings['domain']
@property
def host(self):

View File

@@ -27,14 +27,26 @@ from tornado import gen, ioloop, web
from tornado.httpclient import AsyncHTTPClient, HTTPError
from tornado.log import app_log
# For compatibility with python versions 3.6 or earlier.
# asyncio.Task.all_tasks() is fully moved to asyncio.all_tasks() starting with 3.9. Also applies to current_task.
try:
asyncio_all_tasks = asyncio.all_tasks
asyncio_current_task = asyncio.current_task
except AttributeError as e:
asyncio_all_tasks = asyncio.Task.all_tasks
asyncio_current_task = asyncio.Task.current_task
# Deprecated aliases: no longer needed now that we require 3.7
def asyncio_all_tasks(loop=None):
warnings.warn(
"jupyterhub.utils.asyncio_all_tasks is deprecated in JupyterHub 2.4."
" Use asyncio.all_tasks().",
DeprecationWarning,
stacklevel=2,
)
return asyncio.all_tasks(loop=loop)
def asyncio_current_task(loop=None):
warnings.warn(
"jupyterhub.utils.asyncio_current_task is deprecated in JupyterHub 2.4."
" Use asyncio.current_task().",
DeprecationWarning,
stacklevel=2,
)
return asyncio.current_task(loop=loop)
def random_port():
@@ -82,13 +94,51 @@ def can_connect(ip, port):
return True
def make_ssl_context(keyfile, certfile, cafile=None, verify=True, check_hostname=True):
"""Setup context for starting an https server or making requests over ssl."""
def make_ssl_context(
keyfile,
certfile,
cafile=None,
verify=None,
check_hostname=None,
purpose=ssl.Purpose.SERVER_AUTH,
):
"""Setup context for starting an https server or making requests over ssl.
Used for verifying internal ssl connections.
Certificates are always verified in both directions.
Hostnames are checked for client sockets.
Client sockets are created with `purpose=ssl.Purpose.SERVER_AUTH` (default),
Server sockets are created with `purpose=ssl.Purpose.CLIENT_AUTH`.
"""
if not keyfile or not certfile:
return None
if verify is not None:
purpose = ssl.Purpose.SERVER_AUTH if verify else ssl.Purpose.CLIENT_AUTH
warnings.warn(
f"make_ssl_context(verify={verify}) is deprecated in jupyterhub 2.4."
f" Use make_ssl_context(purpose={purpose!s}).",
DeprecationWarning,
stacklevel=2,
)
if check_hostname is not None:
purpose = ssl.Purpose.SERVER_AUTH if check_hostname else ssl.Purpose.CLIENT_AUTH
warnings.warn(
f"make_ssl_context(check_hostname={check_hostname}) is deprecated in jupyterhub 2.4."
f" Use make_ssl_context(purpose={purpose!s}).",
DeprecationWarning,
stacklevel=2,
)
ssl_context = ssl.create_default_context(purpose, cafile=cafile)
# always verify
ssl_context.verify_mode = ssl.CERT_REQUIRED
if purpose == ssl.Purpose.SERVER_AUTH:
# SERVER_AUTH is authenticating servers (i.e. for a client)
ssl_context.check_hostname = True
ssl_context.load_default_certs()
ssl_context.load_cert_chain(certfile, keyfile)
ssl_context.check_hostname = check_hostname
return ssl_context

View File

@@ -22,7 +22,7 @@
"bootstrap": "^3.4.1",
"font-awesome": "^4.7.0",
"jquery": "^3.5.1",
"moment": "^2.29.2",
"moment": "^2.29.4",
"requirejs": "^2.3.6"
}
}

View File

@@ -6,7 +6,6 @@ skip-string-normalization = true
# target-version should be all supported versions, see
# https://github.com/psf/black/issues/751#issuecomment-473066811
target_version = [
"py36",
"py37",
"py38",
"py39",
@@ -18,7 +17,7 @@ target_version = [
github_url = "https://github.com/jupyterhub/jupyterhub"
[tool.tbump.version]
current = "2.3.0.dev"
current = "2.4.0.dev"
# Example of a semver regexp.
# Make sure this matches current_version before

View File

@@ -1,7 +1,7 @@
alembic>=1.4
async_generator>=1.9
certipy>=0.1.2
entrypoints
importlib_metadata>=3.6; python_version < '3.10'
jinja2>=2.11.0
jupyter_telemetry>=0.1.0
oauthlib>=3.0

View File

@@ -14,12 +14,6 @@ from setuptools.command.bdist_egg import bdist_egg
from setuptools.command.build_py import build_py
from setuptools.command.sdist import sdist
v = sys.version_info
if v[:2] < (3, 6):
error = "ERROR: JupyterHub requires Python version 3.6 or above."
print(error, file=sys.stderr)
sys.exit(1)
shell = False
if os.name in ('nt', 'dos'):
shell = True
@@ -91,7 +85,7 @@ setup_args = dict(
license="BSD",
platforms="Linux, Mac OS X",
keywords=['Interactive', 'Interpreter', 'Shell', 'Web'],
python_requires=">=3.6",
python_requires=">=3.7",
entry_points={
'jupyterhub.authenticators': [
'default = jupyterhub.auth:PAMAuthenticator',

View File

@@ -6,7 +6,7 @@
window.api_page_limit = parseInt("{{ api_page_limit|safe }}")
window.base_url = "{{ base_url|safe }}"
</script>
<script src="static/js/admin-react.js"></script>
<script src={{ static_url("js/admin-react.js") }}></script>
</div>
{% endblock %}

View File

@@ -1,5 +1,5 @@
{% extends "page.html" %}
{% if announcement_home %}
{% if announcement_home is string %}
{% set announcement = announcement_home %}
{% endif %}

View File

@@ -1,5 +1,5 @@
{% extends "page.html" %}
{% if announcement_login %}
{% if announcement_login is string %}
{% set announcement = announcement_login %}
{% endif %}
@@ -47,6 +47,7 @@
type="text"
autocapitalize="off"
autocorrect="off"
autocomplete="username"
class="form-control"
name="username"
val="{{username}}"
@@ -57,6 +58,7 @@
<input
type="password"
class="form-control"
autocomplete="current-password"
name="password"
id="password_input"
tabindex="2"

View File

@@ -1,5 +1,5 @@
{% extends "page.html" %}
{% if announcement_logout %}
{% if announcement_logout is string %}
{% set announcement = announcement_logout %}
{% endif %}

View File

@@ -1,5 +1,5 @@
{% extends "page.html" %}
{% if announcement_spawn %}
{% if announcement_spawn is string %}
{% set announcement = announcement_spawn %}
{% endif %}