Merge branch 'main' into mathbunnyru-patch-1

Author: Ayaz Salikhov (committed by GitHub)
Date: 2024-11-07 15:15:31 +00:00
15 changed files with 50 additions and 25 deletions

.gitignore

@@ -106,8 +106,10 @@ ipython_config.py
 #pdm.lock
 # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
 # in version control.
-# https://pdm.fming.dev/#use-with-ide
+# https://pdm.fming.dev/latest/usage/project/#working-with-version-control
 .pdm.toml
+.pdm-python
+.pdm-build/
 # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
 __pypackages__/

@@ -14,7 +14,7 @@
 repos:
   # Autoupdate: Python code
   - repo: https://github.com/asottile/pyupgrade
-    rev: v3.17.0
+    rev: v3.19.0
     hooks:
       - id: pyupgrade
         args: [--py39-plus]
@@ -28,14 +28,14 @@ repos:
   # Autoformat: Python code
   - repo: https://github.com/psf/black
-    rev: 24.8.0
+    rev: 24.10.0
     hooks:
       - id: black
         args: [--target-version=py39]
 
   # Check python code static typing
   - repo: https://github.com/pre-commit/mirrors-mypy
-    rev: v1.11.2
+    rev: v1.13.0
     hooks:
       - id: mypy
         args: [--config, ./mypy.ini]
@@ -69,7 +69,7 @@ repos:
   # `pre-commit sample-config` default hooks
   - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v4.6.0
+    rev: v5.0.0
     hooks:
       - id: check-added-large-files
       - id: end-of-file-fixer
@@ -78,7 +78,7 @@ repos:
   # Lint: Dockerfile
   - repo: https://github.com/hadolint/hadolint
-    rev: v2.13.0-beta
+    rev: v2.13.1-beta
     hooks:
       - id: hadolint-docker
         entry: hadolint/hadolint:v2.12.1-beta hadolint
@@ -86,7 +86,7 @@ repos:
   # Lint: Dockerfile
   # We're linting .dockerfile files as well
   - repo: https://github.com/hadolint/hadolint
-    rev: v2.13.0-beta
+    rev: v2.13.1-beta
     hooks:
       - id: hadolint-docker
         name: Lint *.dockerfile Dockerfiles
@@ -124,14 +124,14 @@ repos:
   # Lint: Markdown
   - repo: https://github.com/igorshubovych/markdownlint-cli
-    rev: v0.41.0
+    rev: v0.42.0
     hooks:
       - id: markdownlint
         args: ["--fix"]
 
   # Strip output from Jupyter notebooks
   - repo: https://github.com/kynan/nbstripout
-    rev: 0.7.1
+    rev: 0.8.0
     hooks:
       - id: nbstripout
@@ -149,7 +149,7 @@ repos:
   # Run black on python code blocks in documentation files.
   - repo: https://github.com/adamchainz/blacken-docs
-    rev: 1.18.0
+    rev: 1.19.1
     hooks:
       - id: blacken-docs
         # --skip-errors is added to allow us to have python syntax highlighting even if

@@ -8,7 +8,14 @@ All image manifests can be found in [the wiki](https://github.com/jupyter/docker
 Affected: all images.
 
 - **Breaking:** Switch to `mamba` v2 ([#2147](https://github.com/jupyter/docker-stacks/pull/2147)).
-  More information about changes made: <https://mamba.readthedocs.io/en/latest/developer_zone/changes-2.0.html>
+  More information about changes made: <https://mamba.readthedocs.io/en/latest/developer_zone/changes-2.0.html>.
+
+## 2024-11-06
+
+Affected: all images except `docker-stacks-foundation`.
+
+- **Non-breaking** Install `jupyterhub-base` and `nodejs` packages instead of `jupyterhub` package ([#2171](https://github.com/jupyter/docker-stacks/pull/2171)).
 
 ## 2024-10-23

@@ -1,7 +1,6 @@
 # Jupyter Docker Stacks
 
-[![GitHub actions badge](https://github.com/jupyter/docker-stacks/actions/workflows/docker.yml/badge.svg)
-](https://github.com/jupyter/docker-stacks/actions/workflows/docker.yml?query=branch%3Amain "Docker images build status")
+[![GitHub actions badge](https://github.com/jupyter/docker-stacks/actions/workflows/docker.yml/badge.svg)](https://github.com/jupyter/docker-stacks/actions/workflows/docker.yml?query=branch%3Amain "Docker images build status")
 [![Read the Docs badge](https://img.shields.io/readthedocs/jupyter-docker-stacks.svg)](https://jupyter-docker-stacks.readthedocs.io/en/latest/ "Documentation build status")
 [![pre-commit.ci status](https://results.pre-commit.ci/badge/github/jupyter/docker-stacks/main.svg)](https://results.pre-commit.ci/latest/github/jupyter/docker-stacks/main "pre-commit.ci build status")
 [![Discourse badge](https://img.shields.io/discourse/users.svg?color=%23f37626&server=https%3A%2F%2Fdiscourse.jupyter.org)](https://discourse.jupyter.org/ "Jupyter Discourse Forum")

@@ -67,6 +67,7 @@ linkcheck_ignore = [
     r"http://127\.0\.0\.1:.*",  # various examples
     r"https://mybinder\.org/v2/gh/.*",  # lots of 500 errors
     r"https://packages\.ubuntu\.com/search\?keywords=openjdk",  # frequent read timeouts
+    r"https://anaconda\.org\/conda-forge",  # frequent read timeouts
 ]
 
 linkcheck_allowed_redirects = {
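For context on the new `linkcheck_ignore` entry, here is a minimal sketch of how the pattern covers the flaky URL; it assumes Sphinx applies these patterns with `re.match` semantics (a prefix match), which is why no trailing anchor is needed.

```python
# Minimal sketch (assumption: Sphinx linkcheck applies ignore patterns via re.match).
import re

pattern = re.compile(r"https://anaconda\.org\/conda-forge")
print(bool(pattern.match("https://anaconda.org/conda-forge")))         # True
print(bool(pattern.match("https://anaconda.org/conda-forge/python")))  # True — prefix match also covers subpages
```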

@@ -7,7 +7,7 @@ Each runner is recommended to have at least _2 cores_ and _30 GB_ of disk space.
 Add a new runner:
 
 - To use [Oracle OCI](https://www.oracle.com/cloud-0/), create a compute instance `VM.Standard.A1.Flex`.
-- To use [Google Cloud](https://cloud.google.com), use [this instruction](https://cloud.google.com/compute/docs/instances/create-arm-vm-instance#armpublicimage).
+- To use [Google Cloud](https://cloud.google.com), use [this instruction](https://cloud.google.com/compute/docs/instances/create-start-instance).
 
 Configure your runner:

@@ -1,6 +1,6 @@
 FROM quay.io/jupyter/base-notebook
 
-RUN mamba install --yes 'jupyterhub==4.0.1' && \
+RUN mamba install --yes 'jupyterhub-base==4.0.1' && \
     mamba clean --all -f -y && \
     fix-permissions "${CONDA_DIR}" && \
     fix-permissions "/home/${NB_USER}"

@@ -54,7 +54,9 @@ It contains:
 - Everything in `jupyter/docker-stacks-foundation`
 - Minimally functional Server (e.g., no LaTeX support for saving notebooks as PDFs)
-- `notebook`, `jupyterhub`, and `jupyterlab` packages
+- `notebook`, `jupyterhub-base`, and `jupyterlab` packages
+  Note: we're also installing `nodejs` as it has historically been installed indirectly as a dependency of `jupyterhub` package, which was used before.
+  See more at: <https://github.com/jupyter/docker-stacks/pull/2171>
 - A `start-notebook.py` script as the default command
 - A `start-singleuser.py` script useful for launching containers in JupyterHub
 - Options for a self-signed HTTPS certificate
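A quick, hedged way to confirm the package set this list describes from inside a running `base-notebook` container (sketch only; the `jupyterhub-base` conda package is assumed to still ship the `jupyterhub` Python distribution, and exact versions will vary):

```python
# Report the versions of the packages listed above, if they are installed.
from importlib.metadata import PackageNotFoundError, version

for pkg in ["notebook", "jupyterlab", "nbclassic", "jupyterhub"]:
    try:
        print(pkg, version(pkg))
    except PackageNotFoundError:
        print(pkg, "not installed")
```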

@@ -49,7 +49,8 @@ You can build a `pyspark-notebook` image with a different `Spark` version by ove
   - This version needs to match the version supported by the Spark distribution used above.
   - See [Spark Overview](https://spark.apache.org/docs/latest/#downloading) and [Ubuntu packages](https://packages.ubuntu.com/search?keywords=openjdk).
 - `spark_version` (optional): The Spark version to install, for example `3.5.0`.
-  If not specified (this is the default), latest stable Spark will be installed.
+  If not specified (this is the default), latest Spark will be installed.
+  Note: to support Python 3.12, we currently install Spark v4 preview versions: <https://github.com/jupyter/docker-stacks/pull/2072#issuecomment-2414123851>.
 - `hadoop_version`: The Hadoop version (`3` by default).
   Note, that _Spark < 3.3_ require to specify `major.minor` Hadoop version (i.e. `3.2`).
 - `scala_version` (optional): The Scala version, for example `2.13` (not specified by default).
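As a sketch of how the build arguments documented above can be overridden (driven from Python here only for consistency with the other sketches; the image directory path and the `3.5.3` version are illustrative assumptions, not values from this commit):

```python
# Hypothetical build invocation overriding spark_version and hadoop_version.
import subprocess

subprocess.run(
    [
        "docker", "build",
        "--build-arg", "spark_version=3.5.3",  # omit this arg to get the latest Spark, per the note above
        "--build-arg", "hadoop_version=3",
        "--tag", "my-pyspark-notebook",
        "./images/pyspark-notebook",  # assumed path inside a docker-stacks checkout
    ],
    check=True,
)
```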

@@ -39,10 +39,20 @@ USER ${NB_UID}
 # files across image layers when the permissions change
 WORKDIR /tmp
 RUN mamba install --yes \
-    'jupyterhub' \
+    'jupyterhub-base' \
     'jupyterlab' \
     'nbclassic' \
-    'notebook' && \
+    # nodejs has historically been installed indirectly as a dependency.
+    # When it was no longer getting installed indirectly,
+    # we started installing it explicitly to avoid introducing a breaking change
+    # for users building on top of these images.
+    # See: https://github.com/jupyter/docker-stacks/pull/2171
+    'nodejs' \
+    # Sometimes, when the new version of `jupyterlab` is released, latest `notebook` might not support it for some time
+    # Old versions of `notebook` (<v7) didn't have a restriction on the `jupyterlab` version, and old `notebook` is getting installed
+    # That's why we have to pin the minimum notebook version
+    # More info: https://github.com/jupyter/docker-stacks/pull/2167
+    'notebook>=7.2.2' && \
     jupyter server --generate-config && \
     mamba clean --all -f -y && \
     npm cache clean --force && \
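To illustrate the reasoning in the comment above about pinning `notebook>=7.2.2`, here is a small sketch using the `packaging` library (PEP 440 semantics, used only as an approximation of the conda version constraint):

```python
# Sketch: which candidate versions satisfy the 'notebook>=7.2.2' constraint.
from packaging.specifiers import SpecifierSet
from packaging.version import Version

spec = SpecifierSet(">=7.2.2")
for candidate in ["6.5.7", "7.2.1", "7.2.2", "7.3.1"]:
    print(candidate, Version(candidate) in spec)  # old notebook v6 releases no longer qualify
```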

@@ -24,7 +24,7 @@ RUN apt-get update --yes && \
     ca-certificates-java && \
     apt-get clean && rm -rf /var/lib/apt/lists/*
 
-# If spark_version is not set, latest stable Spark will be installed
+# If spark_version is not set, latest Spark will be installed
 ARG spark_version
 ARG hadoop_version="3"
 # If scala_version is not set, Spark without Scala will be installed

@@ -29,11 +29,11 @@ def get_all_refs(url: str) -> list[str]:
 def get_latest_spark_version() -> str:
     """
-    Returns the last stable version of Spark using spark archive
+    Returns the last version of Spark using spark archive
     """
     LOGGER.info("Downloading Spark versions information")
     all_refs = get_all_refs("https://archive.apache.org/dist/spark/")
-    stable_versions = [
+    versions = [
         ref.removeprefix("spark-").removesuffix("/")
         for ref in all_refs
         if ref.startswith("spark-") and "incubating" not in ref
@@ -49,7 +49,7 @@ def get_latest_spark_version() -> str:
         patch, _, preview = arr[2].partition("-")
         return (major, minor, int(patch), preview)
 
-    latest_version = max(stable_versions, key=lambda ver: version_array(ver))
+    latest_version = max(versions, key=lambda ver: version_array(ver))
     LOGGER.info(f"Latest version: {latest_version}")
     return latest_version
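The rename from `stable_versions` to `versions` (together with the docstring change) means preview releases are now eligible when picking the latest Spark. A standalone sketch of the sort key, with the helper name and exact parsing assumed from the visible lines rather than copied from the script:

```python
# Assumed shape of the version-sorting key; a preview suffix sorts after the bare release of the same patch.
def version_array(ver: str) -> tuple[int, int, int, str]:
    major, minor, rest = ver.split(".", maxsplit=2)
    patch, _, preview = rest.partition("-")  # "4.0.0-preview2" -> patch "0", preview "preview2"
    return (int(major), int(minor), int(patch), preview)


print(max(["3.5.0", "3.5.3", "4.0.0-preview2"], key=version_array))  # 4.0.0-preview2
```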

@@ -13,7 +13,7 @@ SHELL ["/bin/bash", "-o", "pipefail", "-c"]
 # Install PyTorch with pip (https://pytorch.org/get-started/locally/)
 # hadolint ignore=DL3013
-RUN pip install --no-cache-dir --extra-index-url=https://pypi.nvidia.com --index-url 'https://download.pytorch.org/whl/cu121' \
+RUN pip install --no-cache-dir --extra-index-url=https://pypi.nvidia.com --index-url 'https://download.pytorch.org/whl/cu124' \
    'torch' \
    'torchaudio' \
    'torchvision' && \
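A hedged runtime check for the switch to the `cu124` index: inside a container built from this image, the installed wheel should report the CUDA version it was built against (requires `torch` to be importable; GPU availability additionally needs a visible NVIDIA device):

```python
# Report the PyTorch build and its CUDA toolkit version; availability depends on the host GPU/driver.
import torch

print(torch.__version__)          # e.g. a +cu124 build after this change
print(torch.version.cuda)         # CUDA version the wheel targets
print(torch.cuda.is_available())  # True only when a GPU and driver are visible to the container
```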

@@ -14,7 +14,7 @@ SHELL ["/bin/bash", "-o", "pipefail", "-c"]
 # Install TensorFlow, CUDA and cuDNN with pip
 RUN pip install --no-cache-dir \
     "jupyter-server-proxy" \
-    "tensorflow[and-cuda]" && \
+    "tensorflow[and-cuda]<=2.17.1" && \
     fix-permissions "${CONDA_DIR}" && \
     fix-permissions "/home/${NB_USER}"

@@ -74,8 +74,11 @@ EXCLUDED_PACKAGES = [
     "grpcio-status",
     "grpcio",
     "hdf5",
+    "jupyterhub-base",
     "jupyterlab-git",
     "mamba",
+    "nodejs",
+    "notebook[version='>",
     "openssl",
     "pandas[version='>",
     "protobuf",