Mirror of https://github.com/jupyter/docker-stacks.git, synced 2025-10-07 10:04:03 +00:00
Merge branch 'main' into asalikhov/ubuntu_jammy
.github/actions/create-dev-env/action.yml (vendored, 2 changes)

@@ -10,7 +10,7 @@ inputs:
 runs:
   using: composite
   steps:
-    # actions/setup-python doesn't suport Linux aarch64 runners
+    # actions/setup-python doesn't support Linux aarch64 runners
     # See: https://github.com/actions/setup-python/issues/108
     # python3 is manually preinstalled in the aarch64 VM self-hosted runner
     - name: Set Up Python 🐍
.github/workflows/sphinx.yml (vendored, 4 changes)

@@ -11,7 +11,6 @@ on:
       - "docs/**"
       - "README.md"
       - ".readthedocs.yaml"
-      - "requirements-docs.txt"
   push:
     branches:
       - main
@@ -21,7 +20,6 @@ on:
       - "docs/**"
       - "README.md"
       - ".readthedocs.yaml"
-      - "requirements-docs.txt"
   workflow_dispatch:

 jobs:
@@ -44,7 +42,7 @@ jobs:
       - name: Install Doc Dependencies 📦
         run: |
           pip install --upgrade pip
-          pip install --upgrade -r requirements-docs.txt
+          pip install --upgrade -r docs/requirements.txt

       - name: Build Documentation 📖
         run: make docs
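The workflow's doc build can be reproduced outside CI; a minimal sketch using only commands already shown in this diff (assumes a repo checkout with the relocated docs/requirements.txt):

```bash
# Mirror the "Install Doc Dependencies" and "Build Documentation" steps locally
pip install --upgrade pip
pip install --upgrade -r docs/requirements.txt
make docs
```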
.pre-commit-config.yaml

@@ -14,7 +14,7 @@
 repos:
   # Autoupdate: Python code
   - repo: https://github.com/asottile/pyupgrade
-    rev: v2.34.0
+    rev: v2.37.2
     hooks:
       - id: pyupgrade
         args: [--py39-plus]
@@ -35,7 +35,7 @@ repos:

   # Check python code static typing
   - repo: https://github.com/pre-commit/mirrors-mypy
-    rev: v0.961
+    rev: v0.971
     hooks:
       - id: mypy
         args: [--config, ./mypy.ini]
@@ -74,7 +74,7 @@ repos:

   # Lint: YAML
   - repo: https://github.com/adrienverge/yamllint.git
-    rev: v1.26.3
+    rev: v1.27.1
     hooks:
       - id: yamllint
         args: ["-d {extends: relaxed, rules: {line-length: disable}}", "-s"]
@@ -102,21 +102,21 @@ repos:

   # Lint: Markdown
   - repo: https://github.com/igorshubovych/markdownlint-cli
-    rev: v0.31.1
+    rev: v0.32.1
     hooks:
       - id: markdownlint
         args: ["--fix"]

   # Strip output from Jupyter notebooks
   - repo: https://github.com/kynan/nbstripout
-    rev: 0.5.0
+    rev: 0.6.0
     hooks:
       - id: nbstripout

   # nbQA provides tools from the Python ecosystem like
   # pyupgrade, isort, black, and flake8, adjusted for notebooks.
   - repo: https://github.com/nbQA-dev/nbQA
-    rev: 1.3.1
+    rev: 1.4.0
     hooks:
       - id: nbqa-pyupgrade
         args: [--py39-plus]
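Version bumps like these are typically produced by pre-commit itself rather than edited by hand; a sketch of regenerating them, assuming pre-commit is installed:

```bash
# Rewrite each hook's `rev` to the latest upstream tag
pre-commit autoupdate

# Re-run every hook against the whole tree to surface any new findings
pre-commit run --all-files
```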
.readthedocs.yaml

@@ -27,4 +27,4 @@ sphinx:
 # Optionally declare the Python requirements required to build your docs
 python:
   install:
-    - requirements: requirements-docs.txt
+    - requirements: docs/requirements.txt
Makefile (3 changes)

@@ -67,10 +67,9 @@ linkcheck-docs: ## check broken links


-hook/%: WIKI_PATH?=wiki
 hook/%: ## run post-build hooks for an image
 	python3 -m tagging.tag_image --short-image-name "$(notdir $@)" --owner "$(OWNER)" && \
-	python3 -m tagging.write_manifest --short-image-name "$(notdir $@)" --owner "$(OWNER)" --wiki-path "$(WIKI_PATH)"
+	python3 -m tagging.write_manifest --short-image-name "$(notdir $@)" --hist-line-dir /tmp/hist_lines/ --manifest-dir /tmp/manifests/ --owner "$(OWNER)"
 hook-all: $(foreach I, $(ALL_IMAGES), hook/$(I)) ## run post-build hooks for all images
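A sketch of exercising the updated target (the image name is illustrative; OWNER overrides the Makefile default):

```bash
# Run post-build hooks for a single image
make hook/base-notebook OWNER=jupyter

# Or for every image via the aggregate target
make hook-all OWNER=jupyter
```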
README.md

@@ -52,7 +52,7 @@ It then starts an _ephemeral_ container running a Jupyter Server and exposes the
 docker run -it --rm -p 10000:8888 -v "${PWD}":/home/jovyan/work jupyter/datascience-notebook:807999a41207
 ```

-The use of the `-v` flag in the command mounts the current working directory on the host (`{PWD}` in the example command) as `/home/jovyan/work` in the container.
+The use of the `-v` flag in the command mounts the current working directory on the host (`${PWD}` in the example command) as `/home/jovyan/work` in the container.
 The server logs appear in the terminal.

 Visiting `http://<hostname>:10000/?token=<token>` in a browser loads JupyterLab.
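For illustration, the same run with an explicit host directory in place of `${PWD}` (the `/home/alice/notebooks` path is hypothetical):

```bash
docker run -it --rm -p 10000:8888 \
    -v "/home/alice/notebooks":/home/jovyan/work \
    jupyter/datascience-notebook:807999a41207
```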
docs/conf.py

@@ -63,7 +63,7 @@ html_theme = "alabaster"
 # so a file named "default.css" will overwrite the builtin "default.css".
 html_static_path = ["_static"]

-# File above was generated using sphinx 4.3.2 with this command:
+# File above was generated using sphinx 4.5.0 with this command:
 # sphinx-quickstart --project "docker-stacks" --author "Project Jupyter" -v "latest" -r "latest" -l en --no-sep --no-makefile --no-batchfile
 # These are custom options for this project

@@ -73,6 +73,7 @@ html_logo = "_static/jupyter-logo.svg"
 html_theme_options = {
     "path_to_docs": "docs",
     "repository_url": "https://github.com/jupyter/docker-stacks",
+    "repository_branch": "main",
     "use_edit_page_button": True,
     "use_issues_button": True,
     "use_repository_button": True,
docs/requirements.txt

@@ -1,7 +1,7 @@
 # ReadTheDocs environment contains old package versions preinstalled
 # So, to ensure we have modern packages, we pin minimum versions of the packages we need
-docutils>=0.17
-myst-parser>=0.16.1
-sphinx>=4.4.0
-sphinx-book-theme>=0.3.2
+docutils>=0.17.1
+myst-parser>=0.18.0
+sphinx>=4.5.0
+sphinx-book-theme>=0.3.3
 sphinx-copybutton>=0.5.0
docs/using/specifics.md

@@ -46,14 +46,13 @@ You can build a `pyspark-notebook` image (and also the downstream `all-spark-not

 - `spark_version`: The Spark version to install (`3.3.0`).
 - `hadoop_version`: The Hadoop version (`3.2`).
-- `scala_version`: The Scala version (`2.13`).
+- `scala_version`: The Scala version (`2.13`, optional).
 - `spark_checksum`: The package checksum (`BFE4540...`).
 - `openjdk_version`: The version of the OpenJDK (JRE headless) distribution (`17`).
   - This version needs to match the version supported by the Spark distribution used above.
   - See [Spark Overview](https://spark.apache.org/docs/latest/#downloading) and [Ubuntu packages](https://packages.ubuntu.com/search?keywords=openjdk).

-- Starting with _Spark >= 3.2_ the distribution file contains Scala version, hence building older Spark will not work.
-- Building older version requires modification to the Dockerfile or using it's older version of the Dockerfile.
+- Starting with _Spark >= 3.2_ the distribution file might contain Scala version.

 For example here is how to build a `pyspark-notebook` image with Spark `3.2.0`, Hadoop `3.2` and OpenJDK `11`.
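The example build command itself falls outside this hunk; a plausible sketch of it under the new optional `scala_version`, with the image tag illustrative and the checksum left as a placeholder rather than a real value:

```bash
# scala_version is omitted, so the plain hadoop tarball is downloaded;
# spark_checksum must match that exact artifact
docker build --rm --force-rm \
    -t my-pyspark-notebook ./pyspark-notebook \
    --build-arg spark_version=3.2.0 \
    --build-arg hadoop_version=3.2 \
    --build-arg spark_checksum=<sha512-of-spark-3.2.0-bin-hadoop3.2.tgz> \
    --build-arg openjdk_version=11
```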
pyspark-notebook/Dockerfile

@@ -17,8 +17,8 @@ USER root
 # (ARGS are in lower case to distinguish them from ENV)
 ARG spark_version="3.3.0"
 ARG hadoop_version="3"
-ARG scala_version="2.13"
-ARG spark_checksum="4c09dac70e22bf1d5b7b2cabc1dd92aba13237f52a5b682c67982266fc7a0f5e0f964edff9bc76adbd8cb444eb1a00fdc59516147f99e4e2ce068420ff4881f0"
+ARG scala_version
+ARG spark_checksum="1e8234d0c1d2ab4462d6b0dfe5b54f2851dcd883378e0ed756140e10adfb5be4123961b521140f580e364c239872ea5a9f813a20b73c69cb6d4e95da2575c29c"
 ARG openjdk_version="17"

 ENV APACHE_SPARK_VERSION="${spark_version}" \
@@ -32,22 +32,29 @@ RUN apt-get update --yes && \

 # Spark installation
-WORKDIR /tmp
-RUN wget -q "https://archive.apache.org/dist/spark/spark-${APACHE_SPARK_VERSION}/spark-${APACHE_SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}-scala${scala_version}.tgz" && \
-    echo "${spark_checksum} *spark-${APACHE_SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}-scala${scala_version}.tgz" | sha512sum -c - && \
-    tar xzf "spark-${APACHE_SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}-scala${scala_version}.tgz" -C /usr/local --owner root --group root --no-same-owner && \
-    rm "spark-${APACHE_SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}-scala${scala_version}.tgz"
+WORKDIR /usr/local
+RUN if [ -z "${scala_version}" ]; then \
+        wget -qO "spark.tgz" "https://archive.apache.org/dist/spark/spark-${APACHE_SPARK_VERSION}/spark-${APACHE_SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz"; \
+    else \
+        wget -qO "spark.tgz" "https://archive.apache.org/dist/spark/spark-${APACHE_SPARK_VERSION}/spark-${APACHE_SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}-scala${scala_version}.tgz"; \
+    fi && \
+    echo "${spark_checksum} *spark.tgz" | sha512sum -c - && \
+    tar xzf "spark.tgz" -C /usr/local --owner root --group root --no-same-owner && \
+    rm "spark.tgz"

 # Configure Spark
 ENV SPARK_HOME=/usr/local/spark
 ENV SPARK_OPTS="--driver-java-options=-Xms1024M --driver-java-options=-Xmx4096M --driver-java-options=-Dlog4j.logLevel=info" \
     PATH="${PATH}:${SPARK_HOME}/bin"

-RUN ln -s "spark-${APACHE_SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}-scala${scala_version}" spark && \
-    # Add a link in the before_notebook hook in order to source automatically PYTHONPATH
+RUN if [ -z "${scala_version}" ]; then \
+        ln -s "spark-${APACHE_SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}" "${SPARK_HOME}"; \
+    else \
+        ln -s "spark-${APACHE_SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}-scala${scala_version}" "${SPARK_HOME}"; \
+    fi && \
+    # Add a link in the before_notebook hook in order to source automatically PYTHONPATH && \
     mkdir -p /usr/local/bin/before-notebook.d && \
     ln -s "${SPARK_HOME}/sbin/spark-config.sh" /usr/local/bin/before-notebook.d/spark-config.sh

 # Configure IPython system-wide
 COPY ipython_kernel_config.py "/etc/ipython/"
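Under the new conditionals, the one Dockerfile covers both tarball layouts; a hedged sketch of the two invocations (image tags are illustrative, checksum elided as a placeholder):

```bash
# Default: scala_version is unset, so the plain hadoop tarball is fetched,
# which the default spark_checksum is expected to match
docker build -t my-pyspark ./pyspark-notebook

# Explicit Scala: the -scala${scala_version} tarball is fetched instead,
# so the checksum must be overridden to match that artifact
docker build -t my-pyspark-scala ./pyspark-notebook \
    --build-arg scala_version=2.13 \
    --build-arg spark_checksum=<sha512-of-the-scala2.13-tarball>
```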
@@ -46,7 +46,7 @@ def test_unsigned_ssl(
     # properly while the server is booting up. An SSL handshake error seems to
     # abort the retry logic. Forcing a long sleep for the moment until I have
     # time to dig more.
-    time.sleep(5)
+    time.sleep(1)
     resp = http_client.get(f"https://localhost:{host_port}", verify=False)
     resp.raise_for_status()
     assert "login_submit" in resp.text
@@ -91,9 +91,9 @@ def test_nb_user_change(container: TrackedContainer) -> None:
         command=["start.sh", "bash", "-c", "sleep infinity"],
     )

-    # Give the chown time to complete. Use sleep, not wait, because the
-    # container sleeps forever.
-    time.sleep(10)
+    # Give the chown time to complete.
+    # Use sleep, not wait, because the container sleeps forever.
+    time.sleep(1)
     LOGGER.info(f"Checking if the user is changed to {nb_user} by the start script ...")
     output = running_container.logs().decode("utf-8")
     assert "ERROR" not in output
@@ -38,7 +38,8 @@ Example:
 """

 import logging
-from typing import Callable, Iterable
+from collections.abc import Iterable
+from typing import Callable

 import pytest  # type: ignore
@@ -50,7 +50,7 @@ def test_start_notebook(
         ports={"8888/tcp": host_port},
     )
     # sleeping some time to let the server start
-    time.sleep(3)
+    time.sleep(1)
     logs = running_container.logs().decode("utf-8")
     LOGGER.debug(logs)
     # checking that the expected command is launched