Merge branch 'master' into asalikhov/rename_cloud_to_hub

Ayaz Salikhov
2021-05-07 01:44:12 +03:00
61 changed files with 1857 additions and 1462 deletions

View File

@@ -1,3 +1,4 @@
<!-- markdownlint-disable MD041 -->
Hi! Thanks for using the Jupyter Docker Stacks.
Please review the following guidance about how to ask questions, contribute changes, or report bugs in the Docker images maintained here.
@@ -20,7 +21,7 @@ Example: `docker run -it --rm -p 8889:8888 jupyter/all-spark-notebook:latest`
Example:
1. Visit http://localhost:8888
1. Visit <http://localhost:8888>
2. Start an R notebook
3. ...

View File

@@ -20,6 +20,8 @@ jobs:
build:
name: Build Docker Images
runs-on: ubuntu-latest
permissions:
contents: write
if: >
!contains(github.event.head_commit.message, 'ci skip') &&
!contains(github.event.pull_request.title, 'ci skip')
@@ -40,9 +42,7 @@ jobs:
- name: Install Dev Dependencies
run: |
python -m pip install --upgrade pip
make -C main dev-env hadolint-install
- name: Lint Dockerfiles
run: make -C main hadolint-all
make -C main dev-env
- name: Run pre-commit hooks
run: make -C main pre-commit-all
- name: Build Docker Images
@@ -52,9 +52,6 @@ jobs:
BUILDKIT_PROGRESS: plain
- name: Run Post-Build Hooks
run: make -C main hook-all
env:
COMMIT_MSG: "${{github.event.head_commit.message}}"
WIKI_PATH: ../wiki
- name: Login to Docker Hub
if: github.ref == 'refs/heads/master'
run: >
@@ -66,7 +63,7 @@ jobs:
- name: Push Wiki to GitHub
if: github.ref == 'refs/heads/master'
# Pass GITHUB_REPOSITORY directly to avoid conflict with GitHub Actions built-in env var
run: make -C main git-commit GITHUB_REPOSITORY='${{ github.repository }}.wiki'
run: make -C main git-commit GITHUB_REPOSITORY='${{github.repository}}.wiki'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}}
LOCAL_PATH: ../wiki
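
With the dedicated hadolint install and lint steps removed, Dockerfile linting now runs through pre-commit. A minimal sketch of reproducing the same checks locally, assuming a checkout at the repository root (so no `-C main`) and the Makefile targets shown in this commit:

```bash
# Reproduce the CI lint and build steps locally (sketch).
python -m pip install --upgrade pip
make dev-env              # install Python development dependencies, including pre-commit
make pre-commit-all       # run every configured hook (hadolint included) on all files
make build/base-notebook  # build one stack the same way the workflow does
```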

View File

@@ -16,6 +16,8 @@ jobs:
build:
name: Build Sphinx Documentation
runs-on: ubuntu-latest
permissions:
contents: write
if: >
!contains(github.event.head_commit.message , 'ci skip') &&
!contains(github.event.pull_request.title, 'ci skip')
@@ -42,6 +44,6 @@ jobs:
if: github.ref == 'refs/heads/master'
run: make git-commit
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GITHUB_REPOSITORY: ${{ github.repository }}
GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}}
GITHUB_REPOSITORY: ${{github.repository}}
LOCAL_PATH: ./docs/locale/en

.markdownlint.yaml Normal file
View File

@@ -0,0 +1,7 @@
# Default state for all rules
default: true
# MD013/line-length - Line length
MD013:
# Number of characters
line_length: 1000
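
The new `.markdownlint.yaml` keeps all default rules but relaxes MD013 so long lines are not flagged. A sketch of checking the documentation against this configuration with `markdownlint-cli` (the same tool driven by the pre-commit hook added below); the glob is illustrative:

```bash
# Lint all Markdown files with the project configuration (sketch).
# markdownlint-cli picks up .markdownlint.yaml from the working directory;
# --fix applies the same automatic fixes as the pre-commit hook.
markdownlint --fix "**/*.md"
```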

View File

@@ -5,6 +5,13 @@ repos:
hooks:
- id: check-yaml
files: .*\.(yaml|yml)$
- repo: https://github.com/hadolint/hadolint.git
rev: v2.3.0
hooks:
- id: hadolint-docker
# FIXME: remove after https://github.com/hadolint/hadolint/issues/628 is resolved
entry: hadolint/hadolint:v2.3.0 hadolint
exclude: Dockerfile.ppc64le|Dockerfile.ppc64le.patch
- repo: https://github.com/adrienverge/yamllint.git
rev: v1.26.1
hooks:
@@ -24,3 +31,8 @@ repos:
rev: v1.5.6
hooks:
- id: autopep8
- repo: https://github.com/igorshubovych/markdownlint-cli
rev: v0.27.1
hooks:
- id: markdownlint
args: ['--fix']
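
Once pre-commit is installed, the newly added hooks can also be exercised individually; a sketch, noting that the `hadolint-docker` hook needs a running Docker daemon:

```bash
# Run only the hooks added in this commit against the whole tree (sketch).
pre-commit run hadolint-docker --all-files   # lints Dockerfiles via the hadolint Docker image
pre-commit run markdownlint --all-files      # lints Markdown and applies --fix per the hook args
```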

View File

@@ -1 +1,3 @@
# Project `jupyter/docker-stacks` Code of Conduct
Please see the [Project Jupyter Code of Conduct](https://github.com/jupyter/governance/blob/master/conduct/code_of_conduct.md).

View File

@@ -1,3 +1,5 @@
<!-- markdownlint-disable MD041 -->
Thanks for contributing! Please see the
__Contributor Guide__ section in [the documentation](https://jupyter-docker-stacks.readthedocs.io) for
information about how to contribute

View File

@@ -39,7 +39,7 @@ The Jupyter Development Team is the set of all contributors to the Jupyter proje
This includes all of the Jupyter subprojects.
The core team that coordinates development on GitHub can be found here:
https://github.com/jupyter/.
<https://github.com/jupyter/>.
## Our Copyright Policy

View File

@@ -23,10 +23,6 @@ endif
ALL_IMAGES:=$(ALL_STACKS)
# Dockerfile Linter
HADOLINT="${HOME}/hadolint"
HADOLINT_VERSION="v2.1.0"
# Enable BuildKit for Docker build
export DOCKER_BUILDKIT:=1
@@ -98,15 +94,10 @@ git-commit: ## commit outstanding git changes and push to remote
git commit -m "[ci skip] Automated publish for $(GITHUB_SHA)" || exit 0
@cd $(LOCAL_PATH) && git push -u publisher master
hook/%: export COMMIT_MSG?=$(shell git log -1 --pretty=%B)
hook/%: export GITHUB_SHA?=$(shell git rev-parse HEAD)
hook/%: export WIKI_PATH?=../wiki
hook/%: WIKI_PATH?=../wiki
hook/%: ## run post-build hooks for an image
BUILD_TIMESTAMP="$$(date -u +%FT%TZ)" \
DOCKER_REPO="$(OWNER)/$(notdir $@)" \
IMAGE_NAME="$(OWNER)/$(notdir $@):latest" \
IMAGE_SHORT_NAME="$(notdir $@)" \
$(SHELL) $(notdir $@)/hooks/run_hook
python3 -m tagging.tag_image --short-image-name "$(notdir $@)" --owner "$(OWNER)" && \
python3 -m tagging.create_manifests --short-image-name "$(notdir $@)" --owner "$(OWNER)" --wiki-path "$(WIKI_PATH)"
hook-all: $(foreach I,$(ALL_IMAGES),hook/$(I) ) ## run post-build hooks for all images
@@ -124,23 +115,6 @@ img-rm-dang: ## remove dangling images (tagged None)
@echo "Removing dangling images ..."
-docker rmi --force $(shell docker images -f "dangling=true" -q) 2> /dev/null
hadolint/%: ARGS?=
hadolint/%: ## lint the dockerfile(s) for a stack
@echo "Linting Dockerfiles in $(notdir $@)..."
@git ls-files --exclude='Dockerfile*' --ignored $(notdir $@) | grep -v ppc64 | xargs -L 1 $(HADOLINT) $(ARGS)
@echo "Linting done!"
hadolint-all: $(foreach I,$(ALL_IMAGES),hadolint/$(I) ) ## lint all stacks
hadolint-build-test-all: $(foreach I,$(ALL_IMAGES),hadolint/$(I) arch_patch/$(I) build/$(I) test/$(I) ) ## lint, build and test all stacks
hadolint-install: ## install hadolint
@echo "Installing hadolint at $(HADOLINT) ..."
@curl -sL -o $(HADOLINT) "https://github.com/hadolint/hadolint/releases/download/$(HADOLINT_VERSION)/hadolint-$(shell uname -s)-$(shell uname -m)"
@chmod 700 $(HADOLINT)
@echo "Installation done!"
@$(HADOLINT) --version
pre-commit-all: ## run pre-commit hook on all files
@pre-commit run --all-files
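
The `hook/%` target now delegates tagging and manifest generation to the Python `tagging` package instead of per-image shell hooks. A sketch of driving it for a single image, assuming `jupyter/base-notebook:latest` has already been built and `../wiki` is a checkout of the project wiki as in the workflow:

```bash
# Post-build hook for one image (sketch).
make hook/base-notebook WIKI_PATH=../wiki

# Equivalent direct invocation of the modules called by the target:
python3 -m tagging.tag_image --short-image-name base-notebook --owner jupyter
python3 -m tagging.create_manifests --short-image-name base-notebook --owner jupyter --wiki-path ../wiki
```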

View File

@@ -1,10 +1,10 @@
# Jupyter Docker Stacks
[![Discourse badge](https://img.shields.io/discourse/https/discourse.jupyter.org/users.svg?color=%23f37626)](https://discourse.jupyter.org/c/questions "Jupyter Discourse Q&A")
[![Read the Docs badge](https://img.shields.io/readthedocs/jupyter-docker-stacks.svg)](https://jupyter-docker-stacks.readthedocs.io/en/latest/ "Documentation build status")
[![DockerHub badge](https://images.microbadger.com/badges/version/jupyter/base-notebook.svg)](https://microbadger.com/images/jupyter/base-notebook "Recent tag/version of jupyter/base-notebook")
[![Binder badge](https://mybinder.org/badge_logo.svg)](https://mybinder.org/v2/gh/jupyter/docker-stacks/master?filepath=README.ipynb "Launch a jupyter/base-notebook container on mybinder.org")
# Jupyter Docker Stacks
Jupyter Docker Stacks are a set of ready-to-run [Docker images](https://hub.docker.com/u/jupyter)
containing Jupyter applications and interactive computing tools.

View File

@@ -1,10 +1,10 @@
# Jupyter Notebook Python, Scala, R, Spark Stack
[![docker pulls](https://img.shields.io/docker/pulls/jupyter/all-spark-notebook.svg)](https://hub.docker.com/r/jupyter/all-spark-notebook/)
[![docker stars](https://img.shields.io/docker/stars/jupyter/all-spark-notebook.svg)](https://hub.docker.com/r/jupyter/all-spark-notebook/)
[![image metadata](https://images.microbadger.com/badges/image/jupyter/all-spark-notebook.svg)](https://microbadger.com/images/jupyter/all-spark-notebook "jupyter/all-spark-notebook image metadata")
# Jupyter Notebook Python, Scala, R, Spark Stack
GitHub Actions in the https://github.com/jupyter/docker-stacks project builds and pushes this image
GitHub Actions in the <https://github.com/jupyter/docker-stacks> project builds and pushes this image
to Docker Hub.
Please visit the project documentation site for help using and contributing to this image and

View File

@@ -1,61 +0,0 @@
#!/bin/bash
set -e
# Apply tags
GIT_SHA_TAG=${GITHUB_SHA:0:12}
docker tag $IMAGE_NAME "$DOCKER_REPO:$GIT_SHA_TAG"
# Update index
INDEX_ROW="|\`${BUILD_TIMESTAMP}\`|\`jupyter/${IMAGE_SHORT_NAME}:${GIT_SHA_TAG}\`|[Git diff](https://github.com/jupyter/docker-stacks/commit/${GITHUB_SHA})<br />[Dockerfile](https://github.com/jupyter/docker-stacks/blob/${GITHUB_SHA}/${IMAGE_SHORT_NAME}/Dockerfile)<br />[Build manifest](./${IMAGE_SHORT_NAME}-${GIT_SHA_TAG})|"
sed "/|-|/a ${INDEX_ROW}" -i "${WIKI_PATH}/Home.md"
# Build manifest
MANIFEST_FILE="${WIKI_PATH}/manifests/${IMAGE_SHORT_NAME}-${GIT_SHA_TAG}.md"
mkdir -p $(dirname "$MANIFEST_FILE")
cat << EOF > "$MANIFEST_FILE"
* Build datetime: ${BUILD_TIMESTAMP}
* Docker image: ${DOCKER_REPO}:${GIT_SHA_TAG}
* Docker image size: $(docker images ${IMAGE_NAME} --format "{{.Size}}")
* Git commit SHA: [${GITHUB_SHA}](https://github.com/jupyter/docker-stacks/commit/${GITHUB_SHA})
* Git commit message:
\`\`\`
${COMMIT_MSG}
\`\`\`
## Apache Spark
\`\`\`
$(docker run --rm ${IMAGE_NAME} bash -c '$SPARK_HOME/bin/spark-submit --version' 2>&1)
\`\`\`
## Python Packages
\`\`\`
$(docker run --rm ${IMAGE_NAME} python --version)
\`\`\`
\`\`\`
$(docker run --rm ${IMAGE_NAME} conda info)
\`\`\`
\`\`\`
$(docker run --rm ${IMAGE_NAME} conda list)
\`\`\`
## R Packages
\`\`\`
$(docker run --rm ${IMAGE_NAME} R --version)
\`\`\`
\`\`\`
$(docker run --rm ${IMAGE_NAME} R --silent -e 'installed.packages(.Library)[, c(1,3)]')
\`\`\`
## Apt Packages
\`\`\`
$(docker run --rm ${IMAGE_NAME} apt list --installed)
\`\`\`
EOF

View File

@@ -4,7 +4,7 @@
# Ubuntu 20.04 (focal)
# https://hub.docker.com/_/ubuntu/?tab=tags&name=focal
# OS/ARCH: linux/amd64
ARG ROOT_CONTAINER=ubuntu:focal-20210401@sha256:5403064f94b617f7975a19ba4d1a1299fd584397f6ee4393d0e16744ed11aab1
ARG ROOT_CONTAINER=ubuntu:focal-20210416@sha256:86ac87f73641c920fb42cc9612d4fb57b5626b56ea2a19b894d0673fd5b4f2e9
ARG BASE_CONTAINER=$ROOT_CONTAINER
FROM $BASE_CONTAINER
@@ -24,7 +24,7 @@ USER root
# (ARGS are in lower case to distinguish them from ENV)
# Check https://github.com/conda-forge/miniforge/releases
# Conda version
ARG conda_version="4.10.0"
ARG conda_version="4.10.1"
# Miniforge installer patch version
ARG miniforge_patch_number="0"
# Miniforge installer architecture
@@ -39,22 +39,21 @@ ARG miniforge_version="${conda_version}-${miniforge_patch_number}"
# Miniforge installer
ARG miniforge_installer="${miniforge_python}-${miniforge_version}-Linux-${miniforge_arch}.sh"
# Miniforge checksum
ARG miniforge_checksum="c56cc2da96043688c6bdb521d825de27754de0a342d5228ba3155cd94532ff75"
ARG miniforge_checksum="d4065b376f81b83cfef0c7316f97bb83337e4ae27eb988828363a578226e3a62"
# Install all OS dependencies for notebook server that starts but lacks all
# features (e.g., download as all possible file formats)
ENV DEBIAN_FRONTEND noninteractive
RUN apt-get -q update \
&& apt-get install -yq --no-install-recommends \
RUN apt-get -q update && \
apt-get install -yq --no-install-recommends \
wget \
ca-certificates \
sudo \
locales \
fonts-liberation \
run-one \
&& apt-get clean && rm -rf /var/lib/apt/lists/*
RUN echo "en_US.UTF-8 UTF-8" > /etc/locale.gen && \
run-one && \
apt-get clean && rm -rf /var/lib/apt/lists/* && \
echo "en_US.UTF-8 UTF-8" > /etc/locale.gen && \
locale-gen
# Configure environment
@@ -133,7 +132,7 @@ RUN wget --quiet "https://github.com/conda-forge/miniforge/releases/download/${m
# files across image layers when the permissions change
RUN conda install --quiet --yes \
'notebook=6.3.0' \
'jupyterhub=1.3.0' \
'jupyterhub=1.4.0' \
'jupyterlab=3.0.14' && \
conda clean --all -f -y && \
npm cache clean --force && \
@@ -159,9 +158,8 @@ USER root
# Prepare upgrade to JupyterLab V3.0 #1205
RUN sed -re "s/c.NotebookApp/c.ServerApp/g" \
/etc/jupyter/jupyter_notebook_config.py > /etc/jupyter/jupyter_server_config.py
RUN fix-permissions /etc/jupyter/
/etc/jupyter/jupyter_notebook_config.py > /etc/jupyter/jupyter_server_config.py && \
fix-permissions /etc/jupyter/
# Switch back to jovyan to avoid accidental container runs as root
USER $NB_UID
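
Because the conda and Miniforge versions are build `ARG`s, the bump above can be reproduced or overridden locally without editing the Dockerfile. A sketch, assuming the repository root as the working directory; the checksum must match whichever installer release is chosen:

```bash
# Rebuild base-notebook with the pinned Miniforge/conda release (sketch).
docker build \
    --build-arg conda_version="4.10.1" \
    --build-arg miniforge_checksum="d4065b376f81b83cfef0c7316f97bb83337e4ae27eb988828363a578226e3a62" \
    -t jupyter/base-notebook:conda-4.10.1 \
    ./base-notebook
```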

View File

@@ -1,10 +1,10 @@
# Base Jupyter Notebook Stack
[![docker pulls](https://img.shields.io/docker/pulls/jupyter/base-notebook.svg)](https://hub.docker.com/r/jupyter/base-notebook/)
[![docker stars](https://img.shields.io/docker/stars/jupyter/base-notebook.svg)](https://hub.docker.com/r/jupyter/base-notebook/)
[![image metadata](https://images.microbadger.com/badges/image/jupyter/base-notebook.svg)](https://microbadger.com/images/jupyter/base-notebook "jupyter/base-notebook image metadata")
# Base Jupyter Notebook Stack
GitHub Actions in the https://github.com/jupyter/docker-stacks project builds and pushes this image
GitHub Actions in the <https://github.com/jupyter/docker-stacks> project builds and pushes this image
to Docker Hub.
Please visit the project documentation site for help using and contributing to this image and

View File

@@ -1,53 +0,0 @@
#!/bin/bash
set -e
# Apply tags
GIT_SHA_TAG=${GITHUB_SHA:0:12}
docker tag $IMAGE_NAME "$DOCKER_REPO:$GIT_SHA_TAG"
PY_VERSION_TAG="python-$(docker run --rm ${IMAGE_NAME} python --version 2>&1 | awk '{print $2}')"
docker tag $IMAGE_NAME "$DOCKER_REPO:$PY_VERSION_TAG"
NB_VERSION_TAG="notebook-$(docker run --rm -a STDOUT ${IMAGE_NAME} jupyter-notebook --version | tr -d '\r')"
docker tag $IMAGE_NAME "$DOCKER_REPO:${NB_VERSION_TAG%% }"
LAB_VERSION_TAG="lab-$(docker run --rm -a STDOUT ${IMAGE_NAME} jupyter-lab --version | tr -d '\r')"
docker tag $IMAGE_NAME "$DOCKER_REPO:${LAB_VERSION_TAG%%\r}"
HUB_VERSION_TAG="hub-$(docker run --rm -a STDOUT ${IMAGE_NAME} jupyterhub --version | tr -d '\r')"
docker tag $IMAGE_NAME "$DOCKER_REPO:${HUB_VERSION_TAG%%\r}"
# Update index
INDEX_FILE="${WIKI_PATH}/Home.md"
INDEX_ROW="|\`${BUILD_TIMESTAMP}\`|\`jupyter/${IMAGE_SHORT_NAME}:${GIT_SHA_TAG}\`<br />\`jupyter/${IMAGE_SHORT_NAME}:${PY_VERSION_TAG}\`<br />\`jupyter/${IMAGE_SHORT_NAME}:${NB_VERSION_TAG}\`<br />\`jupyter/${IMAGE_SHORT_NAME}:${LAB_VERSION_TAG}\`<br />\`jupyter/${IMAGE_SHORT_NAME}:${HUB_VERSION_TAG}\`|[Git diff](https://github.com/jupyter/docker-stacks/commit/${GITHUB_SHA})<br />[Dockerfile](https://github.com/jupyter/docker-stacks/blob/${GITHUB_SHA}/${IMAGE_SHORT_NAME}/Dockerfile)<br />[Build manifest](./${IMAGE_SHORT_NAME}-${GIT_SHA_TAG})|"
sed "/|-|/a ${INDEX_ROW}" -i "$INDEX_FILE"
# Build manifest
MANIFEST_FILE="${WIKI_PATH}/manifests/${IMAGE_SHORT_NAME}-${GIT_SHA_TAG}.md"
mkdir -p $(dirname "$MANIFEST_FILE")
cat << EOF > "$MANIFEST_FILE"
* Build datetime: ${BUILD_TIMESTAMP}
* Docker image: ${DOCKER_REPO}:${GIT_SHA_TAG}
* Docker image size: $(docker images ${IMAGE_NAME} --format "{{.Size}}")
* Git commit SHA: [${GITHUB_SHA}](https://github.com/jupyter/docker-stacks/commit/${GITHUB_SHA})
* Git commit message:
\`\`\`
${COMMIT_MSG}
\`\`\`
## Python Packages
\`\`\`
$(docker run --rm ${IMAGE_NAME} python --version)
\`\`\`
\`\`\`
$(docker run --rm ${IMAGE_NAME} conda info)
\`\`\`
\`\`\`
$(docker run --rm ${IMAGE_NAME} conda list)
\`\`\`
## Apt Packages
\`\`\`
$(docker run --rm ${IMAGE_NAME} apt list --installed)
\`\`\`
EOF

View File

@@ -0,0 +1,19 @@
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
import logging
from packaging import version
LOGGER = logging.getLogger(__name__)
def test_python_version(container, python_next_version="3.10"):
"""Check that python version is lower than the next version"""
LOGGER.info(f"Checking that python version is lower than {python_next_version}")
c = container.run(tty=True, command=["start.sh"])
cmd = c.exec_run("python --version")
output = cmd.output.decode("utf-8")
actual_python_version = version.parse(output.split()[1])
assert actual_python_version < version.parse(
python_next_version
), f"Python version shall be lower than {python_next_version}"

View File

@@ -8,6 +8,7 @@ FROM $BASE_CONTAINER
LABEL maintainer="Jupyter Project <jupyter@googlegroups.com>"
ENV TAG="aec555e49be6"
WORKDIR $HOME
COPY binder/README.ipynb .
# Fix permissions on README.ipynb as root

View File

@@ -5,10 +5,6 @@ FROM $BASE_CONTAINER
LABEL maintainer="Jupyter Project <jupyter@googlegroups.com>"
# Set when building on Travis so that certain long-running build steps can
# be skipped to shorten build time.
ARG TEST_ONLY_BUILD
# Fix DL4006
SHELL ["/bin/bash", "-o", "pipefail", "-c"]
@@ -43,8 +39,8 @@ RUN mkdir "/opt/julia-${JULIA_VERSION}" && \
wget -q https://julialang-s3.julialang.org/bin/linux/x64/$(echo "${JULIA_VERSION}" | cut -d. -f 1,2)"/julia-${JULIA_VERSION}-linux-x86_64.tar.gz" && \
echo "${julia_checksum} *julia-${JULIA_VERSION}-linux-x86_64.tar.gz" | sha256sum -c - && \
tar xzf "julia-${JULIA_VERSION}-linux-x86_64.tar.gz" -C "/opt/julia-${JULIA_VERSION}" --strip-components=1 && \
rm "/tmp/julia-${JULIA_VERSION}-linux-x86_64.tar.gz"
RUN ln -fs /opt/julia-*/bin/julia /usr/local/bin/julia
rm "/tmp/julia-${JULIA_VERSION}-linux-x86_64.tar.gz" && \
ln -fs /opt/julia-*/bin/julia /usr/local/bin/julia
# Show Julia where conda libraries are \
RUN mkdir /etc/julia && \
@@ -79,16 +75,13 @@ RUN conda install --quiet --yes \
fix-permissions "${CONDA_DIR}" && \
fix-permissions "/home/${NB_USER}"
# Add Julia packages. Only add HDF5 if this is not a test-only build since
# it takes roughly half the entire build time of all of the images on Travis
# to add this one package and often causes Travis to timeout.
#
# Add Julia packages.
# Install IJulia as jovyan and then move the kernelspec out
# to the system share location. Avoids problems with runtime UID change not
# taking effect properly on the .local folder in the jovyan home dir.
RUN julia -e 'import Pkg; Pkg.update()' && \
(test $TEST_ONLY_BUILD || julia -e 'import Pkg; Pkg.add("HDF5")') && \
julia -e "using Pkg; pkg\"add IJulia\"; pkg\"precompile\"" && \
julia -e 'import Pkg; Pkg.add("HDF5")' && \
julia -e 'using Pkg; pkg"add IJulia"; pkg"precompile"' && \
# move kernelspec out of home \
mv "${HOME}/.local/share/jupyter/kernels/julia"* "${CONDA_DIR}/share/jupyter/kernels/" && \
chmod -R go+rx "${CONDA_DIR}/share/jupyter" && \

View File

@@ -1,10 +1,10 @@
# Jupyter Notebook Data Science Stack
[![docker pulls](https://img.shields.io/docker/pulls/jupyter/datascience-notebook.svg)](https://hub.docker.com/r/jupyter/datascience-notebook/)
[![docker stars](https://img.shields.io/docker/stars/jupyter/datascience-notebook.svg)](https://hub.docker.com/r/jupyter/datascience-notebook/)
[![image metadata](https://images.microbadger.com/badges/image/jupyter/datascience-notebook.svg)](https://microbadger.com/images/jupyter/datascience-notebook "jupyter/datascience-notebook image metadata")
# Jupyter Notebook Data Science Stack
GitHub Actions in the https://github.com/jupyter/docker-stacks project builds and pushes this image
GitHub Actions in the <https://github.com/jupyter/docker-stacks> project builds and pushes this image
to Docker Hub.
Please visit the project documentation site for help using and contributing to this image and

View File

@@ -1,78 +0,0 @@
#!/bin/bash
set -e
# Apply tags
GIT_SHA_TAG=${GITHUB_SHA:0:12}
docker tag $IMAGE_NAME "$DOCKER_REPO:$GIT_SHA_TAG"
PY_VERSION_TAG="python-$(docker run --rm ${IMAGE_NAME} python --version 2>&1 | awk '{print $2}')"
docker tag $IMAGE_NAME "$DOCKER_REPO:$PY_VERSION_TAG"
R_VERSION_TAG="r-$(docker run --rm -a STDOUT ${IMAGE_NAME} R --version | sed -n 1p | awk '{print $3}')"
docker tag $IMAGE_NAME "$DOCKER_REPO:${R_VERSION_TAG%%\r}"
JULIA_VERSION_TAG="julia-$(docker run --rm -a STDOUT ${IMAGE_NAME} julia --version | awk '{print $3}')"
docker tag $IMAGE_NAME "$DOCKER_REPO:${JULIA_VERSION_TAG%%\r}"
NB_VERSION_TAG="notebook-$(docker run --rm -a STDOUT ${IMAGE_NAME} jupyter-notebook --version | tr -d '\r')"
docker tag $IMAGE_NAME "$DOCKER_REPO:${NB_VERSION_TAG%% }"
LAB_VERSION_TAG="lab-$(docker run --rm -a STDOUT ${IMAGE_NAME} jupyter-lab --version | tr -d '\r')"
docker tag $IMAGE_NAME "$DOCKER_REPO:${LAB_VERSION_TAG%%\r}"
HUB_VERSION_TAG="hub-$(docker run --rm -a STDOUT ${IMAGE_NAME} jupyterhub --version | tr -d '\r')"
docker tag $IMAGE_NAME "$DOCKER_REPO:${HUB_VERSION_TAG%%\r}"
# Update index
INDEX_ROW="|\`${BUILD_TIMESTAMP}\`|\`jupyter/${IMAGE_SHORT_NAME}:${GIT_SHA_TAG}\`<br />\`jupyter/${IMAGE_SHORT_NAME}:${PY_VERSION_TAG}\`<br />\`jupyter/${IMAGE_SHORT_NAME}:${JULIA_VERSION_TAG}\`<br />\`jupyter/${IMAGE_SHORT_NAME}:${R_VERSION_TAG}\`<br />\`jupyter/${IMAGE_SHORT_NAME}:${NB_VERSION_TAG}\`<br />\`jupyter/${IMAGE_SHORT_NAME}:${LAB_VERSION_TAG}\`<br />\`jupyter/${IMAGE_SHORT_NAME}:${HUB_VERSION_TAG}\`|[Git diff](https://github.com/jupyter/docker-stacks/commit/${GITHUB_SHA})<br />[Dockerfile](https://github.com/jupyter/docker-stacks/blob/${GITHUB_SHA}/${IMAGE_SHORT_NAME}/Dockerfile)<br />[Build manifest](./${IMAGE_SHORT_NAME}-${GIT_SHA_TAG})|"
sed "/|-|/a ${INDEX_ROW}" -i "${WIKI_PATH}/Home.md"
# Build manifest
MANIFEST_FILE="${WIKI_PATH}/manifests/${IMAGE_SHORT_NAME}-${GIT_SHA_TAG}.md"
mkdir -p $(dirname "$MANIFEST_FILE")
cat << EOF > "$MANIFEST_FILE"
* Build datetime: ${BUILD_TIMESTAMP}
* Docker image: ${DOCKER_REPO}:${GIT_SHA_TAG}
* Docker image size: $(docker images ${IMAGE_NAME} --format "{{.Size}}")
* Git commit SHA: [${GITHUB_SHA}](https://github.com/jupyter/docker-stacks/commit/${GITHUB_SHA})
* Git commit message:
\`\`\`
${COMMIT_MSG}
\`\`\`
## Julia Packages
\`\`\`
$(docker run --rm ${IMAGE_NAME} julia -E 'using InteractiveUtils; versioninfo()')
\`\`\`
\`\`\`
$(docker run --rm ${IMAGE_NAME} julia -E 'import Pkg; Pkg.status()')
\`\`\`
## Python Packages
\`\`\`
$(docker run --rm ${IMAGE_NAME} python --version)
\`\`\`
\`\`\`
$(docker run --rm ${IMAGE_NAME} conda info)
\`\`\`
\`\`\`
$(docker run --rm ${IMAGE_NAME} conda list)
\`\`\`
## R Packages
\`\`\`
$(docker run --rm ${IMAGE_NAME} R --version)
\`\`\`
\`\`\`
$(docker run --rm ${IMAGE_NAME} R --silent -e 'installed.packages(.Library)[, c(1,3)]')
\`\`\`
## Apt Packages
\`\`\`
$(docker run --rm ${IMAGE_NAME} apt list --installed)
\`\`\`
EOF

View File

@@ -40,9 +40,11 @@ If there's agreement that the feature belongs in one or more of the core stacks:
2. Please build the image locally before submitting a pull request. Building the image locally
shortens the debugging cycle by taking some load off GitHub Actions, which graciously provide
free build services for open source projects like this one. If you use `make`, call:
```bash
make build/somestack-notebook
```
3. [Submit a pull request](https://github.com/PointCloudLibrary/pcl/wiki/A-step-by-step-guide-on-preparing-and-submitting-a-pull-request)
(PR) with your changes.
4. Watch for GitHub to report a build success or failure for your PR on GitHub.

View File

@@ -1,12 +1,12 @@
# Lint
In order to enforce some rules, **linters** are used in this project.
Linters can be run either during the **development phase** (by the developer) or during the **integration phase** (by Travis).
Linters can be run either during the **development phase** (by the developer) or during the **integration phase** (by GitHub Actions).
To integrate and enforce this process in the project lifecycle we are using **git hooks** through [pre-commit][pre-commit].
## Pre-commit hook
### Installation
### Pre-commit hook installation
pre-commit is a Python package that needs to be installed.
This can be achieved by using the generic task used to install all Python development dependencies.
@@ -21,7 +21,7 @@ $ pip install pre-commit
Then the git hooks scripts configured for the project in `.pre-commit-config.yaml` need to be installed in the local git repository.
```sh
$ make pre-commit-install
make pre-commit-install
```
### Run
@@ -29,65 +29,20 @@ $ make pre-commit-install
Now pre-commit (and so the configured hooks) will run automatically on `git commit` for each changed file.
However, it is also possible to trigger it against all files.
- Note: The hadolint pre-commit hook runs inside Docker, so Docker should be running when executing this command.
```sh
$ make pre-commit-all
make pre-commit-all
```
## Image Lint
To comply with [Docker best practices][dbp], we are using the [Hadolint][hadolint] tool to analyse each `Dockerfile`.
### Installation
There is a specific `make` target to install the linter.
By default `hadolint` will be installed in `${HOME}/hadolint`.
```bash
$ make hadolint-install
# Installing hadolint at /Users/romain/hadolint ...
# Installation done!
# Haskell Dockerfile Linter v1.17.6-0-gc918759
```
### Linting
#### Per Stack
The linter can be run per stack.
```bash
$ make hadolint/scipy-notebook
# Linting Dockerfiles in scipy-notebook...
# scipy-notebook/Dockerfile:4 DL3006 Always tag the version of an image explicitly
# scipy-notebook/Dockerfile:11 DL3008 Pin versions in apt get install. Instead of `apt-get install <package>` use `apt-get install <package>=<version>`
# scipy-notebook/Dockerfile:18 SC2086 Double quote to prevent globbing and word splitting.
# scipy-notebook/Dockerfile:68 SC2086 Double quote to prevent globbing and word splitting.
# scipy-notebook/Dockerfile:68 DL3003 Use WORKDIR to switch to a directory
# scipy-notebook/Dockerfile:79 SC2086 Double quote to prevent globbing and word splitting.
# make: *** [lint/scipy-notebook] Error 1
```
Optionally you can pass arguments to the hadolint.
```bash
# Use a different export format
$ make hadolint/scipy-notebook ARGS="--format codeclimate"
```
#### All the Stacks
The linter can be run against all the stacks.
```bash
$ make hadolint-all
```
### Ignoring Rules
Sometimes it is necessary to ignore [some rules][rules].
The following rules are ignored by default and sor for all images in the `.hadolint.yaml` file.
The following rules are ignored by default for all images in the `.hadolint.yaml` file.
- [`DL3006`][DL3006]: We use a specific policy to manage image tags.
- `base-notebook` `FROM` clause is fixed but based on an argument (`ARG`).
@@ -99,7 +54,6 @@ For other rules, the preferred way to do it is to flag ignored rules in the `Doc
> It is also possible to ignore rules by using a special comment directly above the Dockerfile instruction you want to make an exception for. Ignore rule comments look like `# hadolint ignore=DL3001,SC1081`. For example:
```dockerfile
FROM ubuntu
# hadolint ignore=DL3003,SC1035

View File

@@ -16,9 +16,11 @@ Please follow the process below to update a package version:
3. Please build the image locally before submitting a pull request. Building the image locally
shortens the debugging cycle by taking some load off GitHub Actions, which graciously provide
free build services for open source projects like this one. If you use `make`, call:
```bash
make build/somestack-notebook
```
4. [Submit a pull request](https://github.com/PointCloudLibrary/pcl/wiki/A-step-by-step-guide-on-preparing-and-submitting-a-pull-request)
(PR) with your changes.
5. Watch for GitHub to report a build success or failure for your PR on GitHub.

View File

@@ -20,7 +20,7 @@ your own path using alternative services and build tools.
First, install [cookiecutter](https://github.com/audreyr/cookiecutter) using pip or conda:
```bash
pip install cookiecutter # or conda install cookiecutter
pip install cookiecutter # or conda install cookiecutter
```
Run the cookiecutter command pointing to the
@@ -34,7 +34,7 @@ cookiecutter https://github.com/jupyter/cookiecutter-docker-stacks.git
Enter a name for your new stack image. This will serve as both the git repository name and the part
of the Docker image name after the slash.
```
```lang-none
stack_name [my-jupyter-stack]:
```
@@ -42,26 +42,26 @@ Enter the user or organization name under which this stack will reside on Docker
must have access to manage this Docker Hub organization to push images here and set up automated
builds.
```
```lang-none
stack_org [my-project]:
```
Select an image from the jupyter/docker-stacks project that will serve as the base for your new
image.
```
```lang-none
stack_base_image [jupyter/base-notebook]:
```
Enter a longer description of the stack for your README.
```
```lang-none
stack_description [my-jupyter-stack is a community maintained Jupyter Docker Stack image]:
```
Initialize your project as a Git repository and push it to GitHub.
```
```bash
cd <stack_name you chose>
git init

View File

@@ -23,10 +23,12 @@ Please follow the process below to add new tests:
2. If your test should run against a single image, add your test code to one of the modules in
`some-notebook/test/` or create a new module.
3. Build one or more images you intend to test and run the tests locally. If you use `make`, call:
```bash
make build/somestack-notebook
make test/somestack-notebook
```
4. [Submit a pull request](https://github.com/PointCloudLibrary/pcl/wiki/A-step-by-step-guide-on-preparing-and-submitting-a-pull-request)
(PR) with your changes.
5. Watch for GitHub to report a build success or failure for your PR on GitHub.

View File

@@ -3,5 +3,5 @@
We are delighted when members of the Jupyter community want to help translate these documentation pages to other languages. If you're interested, please visit the links below to join our team on [Transifex](https://transifex.com) and to start creating, reviewing, and updating translations of the Jupyter Docker Stacks documentation.
1. Follow the steps documented on the [Getting Started as a Translator](https://docs.transifex.com/getting-started-1/translators) page.
2. Look for *jupyter-docker-stacks* when prompted to choose a translation team. Alternatively, visit https://www.transifex.com/project-jupyter/jupyter-docker-stacks-1 after creating your account and request to join the project.
2. Look for *jupyter-docker-stacks* when prompted to choose a translation team. Alternatively, visit <https://www.transifex.com/project-jupyter/jupyter-docker-stacks-1> after creating your account and request to join the project.
3. See [Translating with the Web Editor](https://docs.transifex.com/translation/translating-with-the-web-editor) in the Transifex documentation.

File diff suppressed because it is too large

View File

@@ -9,40 +9,40 @@ msgid ""
msgstr ""
"Project-Id-Version: docker-stacks latest\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2020-08-08 19:06+0000\n"
"POT-Creation-Date: 2021-05-06 17:59+0000\n"
"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
"Language-Team: LANGUAGE <LL@li.org>\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=utf-8\n"
"Content-Transfer-Encoding: 8bit\n"
"Generated-By: Babel 2.8.0\n"
"Generated-By: Babel 2.9.1\n"
#: ../../maintaining/tasks.md:1 4b12bcf1c691475db62e872bbdca68fa
#: ../../maintaining/tasks.md:1 ad19168330d6498dbc60354eb93625b1
msgid "# Maintainer Playbook"
msgstr ""
#: ../../maintaining/tasks.md:3 f688b20624524b93a14add6065bee956
#: ../../maintaining/tasks.md:3 dbb6bb0d38fa401eac3fc22f525ae7c5
msgid "## Merging Pull Requests"
msgstr ""
# 0a04eb6c0525428984f07f3c249f5d73
#: ../../maintaining/tasks.md:5 5b916e113fc746d6bf5e1097c7dc928b
#: ../../maintaining/tasks.md:5 2d3ae30d2ce84cc49438c46d407d39b5
msgid ""
"To build new images on Docker Cloud and publish them to the Docker Hub "
"registry, do the following:"
msgstr ""
#: ../../maintaining/tasks.md:7 fd849160eef9483f8205d9c6967c475e
#: ../../maintaining/tasks.md:7 3e4b7078b531456184bff92ea9c53c6a
msgid "Make sure GitHub Actions status checks pas for the PR."
msgstr ""
# db74ca47dfde4e558a20aff52176347a
#: ../../maintaining/tasks.md:8 80c6da1230554f92be8abdef70aa01ff
#: ../../maintaining/tasks.md:8 2bd9edcd7de744fdb899eee3b64b8305
msgid "Merge the PR."
msgstr ""
#: ../../maintaining/tasks.md:9 2e1ced11629f4ec2a259101ac85735c7
#: ../../maintaining/tasks.md:9 6dabd1e3aa614425b46cd6247f1b5471
msgid ""
"Monitor the Docker Cloud build status for each of the stacks, starting "
"with [jupyter/base-"
@@ -55,14 +55,14 @@ msgid ""
msgstr ""
# 9149cb3c2bdc48ffa9109b3b6ddcf446
#: ../../maintaining/tasks.md:15 7ddbf4630fa3436e88d0ec901d7268cc
#: ../../maintaining/tasks.md:15 b4f9af7e681b4de78de3c8abd0a1b32a
msgid ""
"Manually click the retry button next to any build that fails to resume "
"that build and any dependent builds."
msgstr ""
# d204ada7a14b46338be9b7afb0bc95b0
#: ../../maintaining/tasks.md:17 daa76f01193e48b79b1de4e0d6ec8f57
#: ../../maintaining/tasks.md:17 ac63e778f8b8451ea7b80c57393624cf
msgid ""
"Try to avoid merging another PR to master until all outstanding builds "
"complete. There's no way at present to propagate the git SHA to build "
@@ -70,11 +70,11 @@ msgid ""
" of master HEAD."
msgstr ""
#: ../../maintaining/tasks.md:21 37458f5852104d58b3f9a886f2c3cb83
#: ../../maintaining/tasks.md:21 4c41bc804cdd4b9d883bcad323ceb012
msgid "## Updating the Ubuntu Base Image"
msgstr ""
#: ../../maintaining/tasks.md:23 db19a5192fab4cccbe86f6cc73ab778f
#: ../../maintaining/tasks.md:23 e5cfb2e5299640f68f989bc6e61f2412
msgid ""
"When there's a security fix in the Ubuntu base image or after some time "
"passes, it's a good idea to update the pinned SHA in the [jupyter/base-"
@@ -84,18 +84,18 @@ msgid ""
"layer will rebuild."
msgstr ""
#: ../../maintaining/tasks.md:29 4ced832a1c9844c4a23c7486670a40fc
#: ../../maintaining/tasks.md:29 047c97bc89a54e3eac5bc569a6fa4ac3
msgid "## Adding a New Core Image to Docker Cloud"
msgstr ""
# 201f0506bbb24b47b79a5db01db86557
#: ../../maintaining/tasks.md:31 c9553182c042418ab3aba261d6c19d3f
#: ../../maintaining/tasks.md:31 cb951324ed474427adc4b962981a6f31
msgid ""
"When there's a new stack definition, do the following before merging the "
"PR with the new stack:"
msgstr ""
#: ../../maintaining/tasks.md:33 fabe64e7a1af46a7910b3cffeef5411c
#: ../../maintaining/tasks.md:33 78271967cc34407186d2cfed5e318f53
msgid ""
"Ensure the PR includes an update to the stack overview diagram [in the "
"documentation](https://github.com/jupyter/docker-"
@@ -104,49 +104,49 @@ msgid ""
"used to create it."
msgstr ""
#: ../../maintaining/tasks.md:36 cb36e81de2574d7eb3a3447dd9c40e68
#: ../../maintaining/tasks.md:36 da5b292a5bf6464aaae531bcbe8a1cd6
msgid ""
"Ensure the PR updates the Makefile which is used to build the stacks in "
"order on GitHub Actions."
msgstr ""
#: ../../maintaining/tasks.md:37 a1db6dcafa40462c834b4eb78fb9f8f7
#: ../../maintaining/tasks.md:37 4ce152df9c3c4b339708db81c1258530
msgid ""
"Create a new repository in the `jupyter` org on Docker Cloud named after "
"the stack folder in the git repo."
msgstr ""
#: ../../maintaining/tasks.md:39 1c547a028fbb4bd4b84066c28f5598d3
#: ../../maintaining/tasks.md:39 8614fd2d2e104045a6a5c2fe5d045c7e
msgid "Grant the `stacks` team permission to write to the repo."
msgstr ""
#: ../../maintaining/tasks.md:40 70b1bbb3c6d34473b9c613903831c122
#: ../../maintaining/tasks.md:40 2f11729e6c634856abb4cf95a4fa33ac
msgid "Click _Builds_ and then _Configure Automated Builds_ for the repository."
msgstr ""
#: ../../maintaining/tasks.md:41 bfad3b5b351541c88f89dfdf0a4ce909
#: ../../maintaining/tasks.md:41 b7b0c174d4fb463ebfe73b73671eefaa
msgid "Select `jupyter/docker-stacks` as the source repository."
msgstr ""
#: ../../maintaining/tasks.md:42 b15d3cffa47d4b58ab2a15bdff0c4c76
#: ../../maintaining/tasks.md:42 e9cf26204cc14df690df13a77593c3ce
msgid ""
"Choose _Build on Docker Cloud's infrastructure using a Small node_ unless"
" you have reason to believe a bigger host is required."
msgstr ""
#: ../../maintaining/tasks.md:44 0a64807f1c46424f8bffdaae134d9af9
#: ../../maintaining/tasks.md:44 9c61a743f600465b8336a98603491e72
msgid ""
"Update the _Build Context_ in the default build rule to be `/<name-of-"
"the-stack>`."
msgstr ""
#: ../../maintaining/tasks.md:45 e372a3e606e74279a767f664773b4685
#: ../../maintaining/tasks.md:45 ff192d1c1c694c8e97f3e0c6925a6783
msgid ""
"Toggle _Autobuild_ to disabled unless the stack is a new root stack "
"(e.g., like `jupyter/base-notebook`)."
msgstr ""
#: ../../maintaining/tasks.md:47 5dab6662eb524abb9878e0fe7c7b5e03
#: ../../maintaining/tasks.md:47 e75f59814b104faeaafb251cc7146f12
msgid ""
"If the new stack depends on the build of another stack in the hierarchy: "
"1. Hit _Save_ and then click _Configure Automated Builds_. 2. At the very"
@@ -156,16 +156,16 @@ msgid ""
"_NEXT_BUILD_TRIGGERS_ environment variable comma separated list"
msgstr ""
#: ../../maintaining/tasks.md:53 4886079321d54cf5959fc3e409140166
#: ../../maintaining/tasks.md:53 3a80f2569b32489587ccdf7610767b26
msgid "of URLs, creating that environment variable if it does not already exist."
msgstr ""
#: ../../maintaining/tasks.md:54 ../../maintaining/tasks.md:59
#: 7e6dec502b5841db915faf2aa3a8eb41 b2ee46f5748e4ac9a2348e81ab605a7b
#: 2de5c406ef4349c18c973df7b19f78f1 5919e952a1154f00954b3678d377b297
msgid "Hit _Save_."
msgstr ""
#: ../../maintaining/tasks.md:55 8f2588957e2944389fcdba97ff7a5929
#: ../../maintaining/tasks.md:55 8c117daa53944f25bf6d883da4e67a93
msgid ""
"If the new stack should trigger other dependent builds: 1. Add an "
"environment variable named _NEXT_BUILD_TRIGGERS_. 2. Copy the build "
@@ -173,152 +173,146 @@ msgid ""
"comma"
msgstr ""
#: ../../maintaining/tasks.md:58 6f987b32b0eb4bfb9632499dd9547cf8
#: ../../maintaining/tasks.md:58 3ec98660bc7540db801f2b8266c96c38
msgid "separated list of URLs."
msgstr ""
#: ../../maintaining/tasks.md:60 c9f9d54989724b62ba46c690970ba576
#: ../../maintaining/tasks.md:60 0fcc2931913f4083b3cd496078c7bc17
msgid ""
"Adjust other _NEXT_BUILD_TRIGGERS_ values as needed so that the build "
"order matches that in the stack hierarchy diagram."
msgstr ""
#: ../../maintaining/tasks.md:63 8ff283d700cd4a9dad9c10a6789cc5f9
#: ../../maintaining/tasks.md:63 fb8dcdb139ff40eba071710a6f9f5131
msgid "## Adding a New Maintainer Account"
msgstr ""
# e3bd3ced73994d9fad596784e1469cfc
#: ../../maintaining/tasks.md:65 c0c7ecda9e6344ab9821beb463b849dc
msgid "Visit https://cloud.docker.com/app/jupyter/team/stacks/users"
#: ../../maintaining/tasks.md:65 2cf5874541b940c880a8ef7f1dfbba5e
msgid "Visit <https://cloud.docker.com/app/jupyter/team/stacks/users>"
msgstr ""
# 51b166c70ba743e0b4d335b3471da69a
#: ../../maintaining/tasks.md:66 85e68de60e4a4426b250f5dfb96f30c9
#: ../../maintaining/tasks.md:66 9c0071a784d249fd84292b516fee3e38
msgid "Add the maintainer's Docker Cloud username."
msgstr ""
# 300f5dbd933f4ee6b5a550efd35f1c52
#: ../../maintaining/tasks.md:67 eff9933085bc46ac81926d7e85e2b3f8
#: ../../maintaining/tasks.md:67 1ce0530370004b87b2ec4167e17bb448
msgid ""
"Visit https://github.com/orgs/jupyter/teams/docker-image-"
"maintainers/members"
"Visit <https://github.com/orgs/jupyter/teams/docker-image-"
"maintainers/members>"
msgstr ""
# e26ad8ffb6de489988e076e64b6a1415
#: ../../maintaining/tasks.md:68 46d01c10e2c44664aee95e5360edd579
#: ../../maintaining/tasks.md:68 7eab41699dad475bbe14a110cd749cbc
msgid "Add the maintainer's GitHub username."
msgstr ""
#: ../../maintaining/tasks.md:70 0278ae3e3f354df2accf4716aa4a619c
#: ../../maintaining/tasks.md:70 af3096f3fb9c4ea2a15c47cbc801a238
msgid "## Pushing a Build Manually"
msgstr ""
# 050b5c7a3d9d46bcbe26d54e8585ddd8
#: ../../maintaining/tasks.md:72 b0260a73403c425da436c59fe8e78e09
#: ../../maintaining/tasks.md:72 4fec9558dd3b495097690e5b7a86ad5b
msgid ""
"If automated builds on Docker Cloud have got you down, do the following "
"to push a build manually:"
msgstr ""
# ead6a3a714ae4a8a9df5585c18260c16
#: ../../maintaining/tasks.md:74 b1d36953ee174c3dbc04a153ace6d862
#: ../../maintaining/tasks.md:74 ec3f7086ed6d4f658f0a338fc86626c0
msgid "Clone this repository."
msgstr ""
# 48e1d6954f414fe080d7b4afd9e0c391
#: ../../maintaining/tasks.md:75 bd98e36df92b4a83b9fb5298fbabf0bf
#: ../../maintaining/tasks.md:75 f45c2a3ed2ff49d6a10561cbe7499031
msgid "Check out the git SHA you want to build and publish."
msgstr ""
#: ../../maintaining/tasks.md:76 4b4af44a60f54f5db093942092362794
#: ../../maintaining/tasks.md:76 e026e03fc4ea4849a12e9b492a8f90e9
msgid "`docker login` with your Docker Hub/Cloud credentials."
msgstr ""
#: ../../maintaining/tasks.md:77 a60704a1dee24afeac8298732c88133c
#: ../../maintaining/tasks.md:77 e3dbd3cb6e1d4f07986e77f0f64e9046
msgid "Run `make retry/release-all`."
msgstr ""
#: ../../maintaining/tasks.md:79 1d7f037ade384524be9cf5a2da2e1732
#: ../../maintaining/tasks.md:79 b08d6c1c16a04f33b19ec7d7a14a365b
msgid "## Enabling a New Doc Language Translation"
msgstr ""
# 5aafef10dc75417785a79aba203175e5
#: ../../maintaining/tasks.md:81 9b565e5ad1ae4df9a168536dac18f7e2
#: ../../maintaining/tasks.md:81 a9b89fccdc9b4571ae46f2beff8d4110
msgid "First enable translation on Transifex:"
msgstr ""
# c1a249c0d0cd4e9192ed7814dfde6e34
#: ../../maintaining/tasks.md:83 c39df2ae52ef49f8b4ff9d184abe8331
#: ../../maintaining/tasks.md:83 f040c322e0a34f5fb472a01004a0a0da
msgid ""
"Visit https://www.transifex.com/project-jupyter/jupyter-docker-"
"stacks-1/languages/"
"Visit <https://www.transifex.com/project-jupyter/jupyter-docker-"
"stacks-1/languages/>."
msgstr ""
#: ../../maintaining/tasks.md:84 ec2ee93b70ce405ab43f4da197674467
#: ../../maintaining/tasks.md:84 e1acfc287d4245928ee6f2035caea008
msgid "Click _Edit Languages_ in the top right."
msgstr ""
# 7efe7d98a98b47bd82d697673d277cbd
#: ../../maintaining/tasks.md:85 8c9578ec646e410bb0d4891bb4f5f71e
#: ../../maintaining/tasks.md:85 e2a2f55e12274a0481c89eef1eca7c78
msgid "Select the language from the dropdown."
msgstr ""
#: ../../maintaining/tasks.md:86 5699c36519a34f6b9f2a0c76cb2844a5
#: ../../maintaining/tasks.md:86 1569eca4aab14c73a4bae9e81804e357
msgid "Click _Apply_."
msgstr ""
# 1e3868ee7dae469f9921516dd7973766
#: ../../maintaining/tasks.md:88 d7108eada14f448497e580d6401b7593
#: ../../maintaining/tasks.md:88 4ee65f6fad4d49af9ca48ab39f5b8ed0
msgid "Then setup a subproject on ReadTheDocs for the language:"
msgstr ""
# fffa155a75674f0dbe746a15eb3be492
#: ../../maintaining/tasks.md:90 12b36373698643b695784e7b0a68d585
msgid "Visit https://readthedocs.org/dashboard/import/manual/"
#: ../../maintaining/tasks.md:90 05d54b068d7a43498cef8f19e91fb5bd
msgid "Visit <https://readthedocs.org/dashboard/import/manual/>."
msgstr ""
#: ../../maintaining/tasks.md:91 b488eae9bb924b3588d9d14e758ee7b6
#: ../../maintaining/tasks.md:91 952a60e68900475d951c3d0f9819453d
msgid "Enter _jupyter-docker-stacks-language_abbreviation_ for the project name."
msgstr ""
# 2869b2f7a89c428f903e3695dd511e9a
#: ../../maintaining/tasks.md:92 9ebca91beff846648bdc758fc90afec1
msgid "Enter https://github.com/jupyter/docker-stacks for the URL."
#: ../../maintaining/tasks.md:92 1b8fc983f4624c25aa3e939382e0f0ff
msgid "Enter <https://github.com/jupyter/docker-stacks> for the URL."
msgstr ""
#: ../../maintaining/tasks.md:93 2d3b518fd69640169e17d44018a7a8e4
#: ../../maintaining/tasks.md:93 4dbf7d9037454fef92448e133a5b8835
msgid "Check _Edit advanced project options_."
msgstr ""
#: ../../maintaining/tasks.md:94 75e92ae1b3b0418f8256a68e974ae9e9
#: ../../maintaining/tasks.md:94 994b0124ca044fe8920a0a60ea84e8ce
msgid "Click _Next_."
msgstr ""
#: ../../maintaining/tasks.md:95 f577fc280c154e37824461b6d8f39528
#: ../../maintaining/tasks.md:95 b8531de9f2e840fc854a6ee47851bb2a
msgid "Select the _Language_ from the dropdown on the next screen."
msgstr ""
#: ../../maintaining/tasks.md:96 810b06550ec44599a81792006973d3af
#: ../../maintaining/tasks.md:96 00409e848b7d4fc9b6361d0f41108b7e
msgid "Click _Finish_."
msgstr ""
# 529f3729d2474287adec0ff895100248
#: ../../maintaining/tasks.md:98 08613ea94d7c4b7c99cc0c3e775670bc
#: ../../maintaining/tasks.md:98 00b02bf0db74413486914fb890bc0765
msgid "Finally link the new language subproject to the top level doc project:"
msgstr ""
# 024aaf54695141839eaa5537b4087a81
#: ../../maintaining/tasks.md:100 2a798b799c2649769bdbc0f202c49e50
#: ../../maintaining/tasks.md:100 d87a1ad28ffa484aa2313904a7be7141
msgid ""
"Visit https://readthedocs.org/dashboard/jupyter-docker-"
"stacks/translations/"
"Visit <https://readthedocs.org/dashboard/jupyter-docker-"
"stacks/translations/>."
msgstr ""
#: ../../maintaining/tasks.md:101 3c1717d4974c4e4a98dbbbcf947e93ec
#: ../../maintaining/tasks.md:101 f3bf47695d7f4248bbbd58fb270e164a
msgid "Select the subproject you created from the _Project_ dropdown."
msgstr ""
#: ../../maintaining/tasks.md:102 91e9666e3b1348e7830a077ca601add8
#: ../../maintaining/tasks.md:102 abe3142a0d9945c2adaa250c60acb805
msgid "Click _Add_."
msgstr ""
@@ -634,3 +628,33 @@ msgstr ""
#~ msgid "Click *Add*."
#~ msgstr ""
# e3bd3ced73994d9fad596784e1469cfc
#~ msgid "Visit https://cloud.docker.com/app/jupyter/team/stacks/users"
#~ msgstr ""
# 300f5dbd933f4ee6b5a550efd35f1c52
#~ msgid ""
#~ "Visit https://github.com/orgs/jupyter/teams/docker-"
#~ "image-maintainers/members"
#~ msgstr ""
# c1a249c0d0cd4e9192ed7814dfde6e34
#~ msgid ""
#~ "Visit https://www.transifex.com/project-jupyter"
#~ "/jupyter-docker-stacks-1/languages/"
#~ msgstr ""
# fffa155a75674f0dbe746a15eb3be492
#~ msgid "Visit https://readthedocs.org/dashboard/import/manual/"
#~ msgstr ""
# 2869b2f7a89c428f903e3695dd511e9a
#~ msgid "Enter https://github.com/jupyter/docker-stacks for the URL."
#~ msgstr ""
# 024aaf54695141839eaa5537b4087a81
#~ msgid ""
#~ "Visit https://readthedocs.org/dashboard/jupyter-docker-"
#~ "stacks/translations/"
#~ msgstr ""

File diff suppressed because it is too large

View File

@@ -62,9 +62,9 @@ When there's a new stack definition, do the following before merging the PR with
## Adding a New Maintainer Account
1. Visit https://hub.docker.com/app/jupyter/team/stacks/users
1. Visit <https://hub.docker.com/app/jupyter/team/stacks/users>
2. Add the maintainer's Docker Hub username.
3. Visit https://github.com/orgs/jupyter/teams/docker-image-maintainers/members
3. Visit <https://github.com/orgs/jupyter/teams/docker-image-maintainers/members>
4. Add the maintainer's GitHub username.
## Pushing a Build Manually
@@ -80,16 +80,16 @@ If automated builds on Docker Hub have got you down, do the following to push a
First enable translation on Transifex:
1. Visit https://www.transifex.com/project-jupyter/jupyter-docker-stacks-1/languages/
1. Visit <https://www.transifex.com/project-jupyter/jupyter-docker-stacks-1/languages/>.
2. Click _Edit Languages_ in the top right.
3. Select the language from the dropdown.
4. Click _Apply_.
Then setup a subproject on ReadTheDocs for the language:
1. Visit https://readthedocs.org/dashboard/import/manual/
1. Visit <https://readthedocs.org/dashboard/import/manual/>.
2. Enter _jupyter-docker-stacks-language_abbreviation_ for the project name.
3. Enter https://github.com/jupyter/docker-stacks for the URL.
3. Enter <https://github.com/jupyter/docker-stacks> for the URL.
4. Check _Edit advanced project options_.
5. Click _Next_.
6. Select the _Language_ from the dropdown on the next screen.
@@ -97,6 +97,6 @@ Then setup a subproject on ReadTheDocs for the language:
Finally link the new language subproject to the top level doc project:
1. Visit https://readthedocs.org/dashboard/jupyter-docker-stacks/translations/
1. Visit <https://readthedocs.org/dashboard/jupyter-docker-stacks/translations/>.
2. Select the subproject you created from the _Project_ dropdown.
3. Click _Add_.

View File

@@ -166,11 +166,13 @@ ENTRYPOINT ["jupyter", "lab", "--ip=0.0.0.0", "--allow-root"]
```
And build the image as:
```bash
docker build -t jupyter/scipy-dasklabextension:latest .
```
Once built, run using the command:
```bash
docker run -it --rm -p 8888:8888 -p 8787:8787 jupyter/scipy-dasklabextension:latest
```
@@ -273,6 +275,7 @@ ARG BASE_CONTAINER=ubuntu:focal-20200423@sha256:238e696992ba9913d24cfc3727034985
```
For Ubuntu 18.04 (bionic) and earlier, you may also need a workaround for a mandb bug, which was fixed in mandb >= 2.8.6.1:
```dockerfile
# https://git.savannah.gnu.org/cgit/man-db.git/commit/?id=8197d7824f814c5d4b992b4c8730b5b0f7ec589a
# http://launchpadlibrarian.net/435841763/man-db_2.8.5-2_2.8.6-1.diff.gz

View File

@@ -13,8 +13,8 @@ You can launch a local Docker container from the Jupyter Docker Stacks using the
**Example 1** This command pulls the `jupyter/scipy-notebook` image tagged `2c80cf3537ca` from Docker Hub if it is not already present on the local host. It then starts a container running a Jupyter Notebook server and exposes the server on host port 8888. The server logs appear in the terminal and include a URL to the notebook server.
```
docker run -p 8888:8888 jupyter/scipy-notebook:2c80cf3537ca
```bash
$ docker run -p 8888:8888 jupyter/scipy-notebook:2c80cf3537ca
Executing the command: jupyter notebook
[I 15:33:00.567 NotebookApp] Writing notebook server cookie secret to /home/jovyan/.local/share/jupyter/runtime/notebook_cookie_secret
@@ -35,27 +35,27 @@ Executing the command: jupyter notebook
Pressing `Ctrl-C` shuts down the notebook server but leaves the container intact on disk for later restart or permanent deletion using commands like the following:
```
```bash
# list containers
docker ps -a
$ docker ps -a
CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES
d67fe77f1a84 jupyter/base-notebook "tini -- start-noteb…" 44 seconds ago Exited (0) 39 seconds ago cocky_mirzakhani
# start the stopped container
docker start -a d67fe77f1a84
$ docker start -a d67fe77f1a84
Executing the command: jupyter notebook
[W 16:45:02.020 NotebookApp] WARNING: The notebook server is listening on all IP addresses and not using encryption. This is not recommended.
...
# remove the stopped container
docker rm d67fe77f1a84
$ docker rm d67fe77f1a84
d67fe77f1a84
```
**Example 2** This command pulls the `jupyter/r-notebook` image tagged `e5c5a7d3e52d` from Docker Hub if it is not already present on the local host. It then starts a container running a Jupyter Notebook server and exposes the server on host port 10000. The server logs appear in the terminal and include a URL to the notebook server, but with the internal container port (8888) instead of the correct host port (10000).
```
docker run --rm -p 10000:8888 -v "$PWD":/home/jovyan/work jupyter/r-notebook:e5c5a7d3e52d
```bash
$ docker run --rm -p 10000:8888 -v "$PWD":/home/jovyan/work jupyter/r-notebook:e5c5a7d3e52d
Executing the command: jupyter notebook
[I 19:31:09.573 NotebookApp] Writing notebook server cookie secret to /home/jovyan/.local/share/jupyter/runtime/notebook_cookie_secret
@@ -78,29 +78,29 @@ Pressing `Ctrl-C` shuts down the notebook server and immediately destroys the Do
**Example 3** This command pulls the `jupyter/all-spark-notebook` image currently tagged `latest` from Docker Hub if an image tagged `latest` is not already present on the local host. It then starts a container named `notebook` running a JupyterLab server and exposes the server on a randomly selected port.
```
```bash
docker run -d -P --name notebook jupyter/all-spark-notebook
```
The assigned port and notebook server token are visible using other Docker commands.
```
```bash
# get the random host port assigned to the container port 8888
docker port notebook 8888
$ docker port notebook 8888
0.0.0.0:32769
# get the notebook token from the logs
docker logs --tail 3 notebook
$ docker logs --tail 3 notebook
Copy/paste this URL into your browser when you connect for the first time,
to login with a token:
http://localhost:8888/?token=15914ca95f495075c0aa7d0e060f1a78b6d94f70ea373b00
```
Together, the URL to visit on the host machine to access the server in this case is http://localhost:32769?token=15914ca95f495075c0aa7d0e060f1a78b6d94f70ea373b00.
Together, the URL to visit on the host machine to access the server in this case is <http://localhost:32769?token=15914ca95f495075c0aa7d0e060f1a78b6d94f70ea373b00>.
The container runs in the background until stopped and/or removed by additional Docker commands.
```
```bash
# stop the container
docker stop notebook
notebook

View File

@@ -12,7 +12,7 @@ This page provides details about features specific to one or more images.
You can build a `pyspark-notebook` image (and also the downstream `all-spark-notebook` image) with a different version of Spark by overriding the default value of the following arguments at build time.
* Spark distribution is defined by the combination of the Spark version and the Hadoop version and verified by the package checksum; see [Download Apache Spark](https://spark.apache.org/downloads.html) for more information. At this time the build will only work with the set of versions available on the Apache Spark download page, so it will not work with the archived versions.
* Spark distribution is defined by the combination of the Spark version and the Hadoop version and verified by the package checksum; see [Download Apache Spark](https://spark.apache.org/downloads.html) and the [archive repo](https://archive.apache.org/dist/spark/) for more information.
* `spark_version`: The Spark version to install (`3.0.0`).
* `hadoop_version`: The Hadoop version (`3.2`).
* `spark_checksum`: The package checksum (`BFE4540...`).
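
A sketch of overriding these build arguments, assuming the image is built from the `pyspark-notebook` directory of the repository; the version numbers are the documented defaults and the checksum shown is a truncated placeholder that must be replaced with the real value for the chosen distribution:

```bash
# Build pyspark-notebook against a specific Spark/Hadoop combination (sketch).
# spark_checksum below is a truncated placeholder; substitute the real checksum.
docker build \
    --build-arg spark_version="3.0.0" \
    --build-arg hadoop_version="3.2" \
    --build-arg spark_checksum="BFE4540..." \
    -t jupyter/pyspark-notebook:spark-3.0.0 \
    ./pyspark-notebook
```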
@@ -52,7 +52,7 @@ The `jupyter/pyspark-notebook` and `jupyter/all-spark-notebook` images support t
Spark **local mode** is useful for experimentation on small data when you do not have a Spark cluster available.
##### In Python
##### Local Mode in Python
In a Python notebook.
@@ -69,7 +69,7 @@ rdd.sum()
# 5050
```
##### In R
##### Local Mode in R
In an R notebook with [SparkR][sparkr].
@@ -107,7 +107,7 @@ sdf_len(sc, 100, repartition = 1) %>%
# 5050
```
##### In Scala
##### Local Mode in Scala
Spylon kernel instantiates a `SparkContext` for you in variable `sc` after you configure Spark
options in a `%%init_spark` magic cell.
@@ -136,11 +136,11 @@ Connection to Spark Cluster on **[Standalone Mode](https://spark.apache.org/docs
your Spark workers. (This is a [Spark networking
requirement](http://spark.apache.org/docs/latest/cluster-overview.html#components).)
* NOTE: When using `--net=host`, you must also use the flags `--pid=host -e
TINI_SUBREAPER=true`. See https://github.com/jupyter/docker-stacks/issues/64 for details.
TINI_SUBREAPER=true`. See <https://github.com/jupyter/docker-stacks/issues/64> for details.
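
A sketch of a worker-reachable notebook container started with the flags from the note above; the image name is one of the two Spark images this page covers:

```bash
# Host networking for Spark standalone mode (sketch).
# --pid=host and TINI_SUBREAPER=true are required alongside --net=host.
docker run -it --rm \
    --net=host --pid=host -e TINI_SUBREAPER=true \
    jupyter/pyspark-notebook
```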
**Note**: In the following examples we use the Spark master URL `spark://master:7077`, which should be replaced by the URL of your Spark master.
##### In Python
##### Standalone Mode in Python
The **same Python version** needs to be used on the notebook (where the driver is located) and on the Spark workers.
The Python version used on the driver and worker side can be adjusted by setting the environment variables `PYSPARK_PYTHON` and/or `PYSPARK_DRIVER_PYTHON`; see [Spark Configuration][spark-conf] for more information.
@@ -158,7 +158,7 @@ rdd.sum()
# 5050
```
##### In R
##### Standalone Mode in R
In an R notebook with [SparkR][sparkr].
@@ -195,7 +195,7 @@ sdf_len(sc, 100, repartition = 1) %>%
# 5050
```
##### In Scala
##### Standalone Mode in Scala
Spylon kernel instantiates a `SparkContext` for you in variable `sc` after you configure Spark
options in a `%%init_spark` magic cell.

View File

@@ -1,3 +1,5 @@
# Docker Compose example
This example demonstrates how to deploy docker-stack notebook containers to any Docker Machine-controlled host using Docker Compose.
## Prerequisites
@@ -32,7 +34,6 @@ To stop and remove the container:
notebook/down.sh
```
## FAQ
### How do I specify which docker-stack notebook image to deploy?
@@ -73,7 +74,6 @@ NAME=your-notebook notebook/down.sh
The `up.sh` creates a Docker volume named after the notebook container with a `-work` suffix, e.g., `my-notebook-work`.
### Can multiple notebook containers share the same notebook volume?
Yes. Set the `WORK_VOLUME` environment variable to the same value for each notebook.
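
A sketch of two containers sharing one work volume, assuming `up.sh` honours the same `NAME` and `WORK_VOLUME` variables used elsewhere in this example; the names are illustrative:

```bash
# Two notebook containers backed by the same work volume (sketch).
WORK_VOLUME=shared-work NAME=notebook-a notebook/up.sh
WORK_VOLUME=shared-work NAME=notebook-b notebook/up.sh
```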
@@ -98,7 +98,6 @@ notebook/up.sh --secure --password a_secret
Sure. If you want to secure access to publicly addressable notebook containers, you can generate a free certificate using the [Let's Encrypt](https://letsencrypt.org) service.
This example includes the `bin/letsencrypt.sh` script, which runs the `letsencrypt` client to create a full-chain certificate and private key, and stores them in a Docker volume. _Note:_ The script hard codes several `letsencrypt` options, one of which automatically agrees to the Let's Encrypt Terms of Service.
The following command will create a certificate chain and store it in a Docker volume named `mydomain-secrets`.
@@ -152,10 +151,9 @@ bin/softlayer.sh myhost
bin/sl-dns.sh myhost
```
## Troubleshooting
### Unable to connect to VirtualBox VM on Mac OS X when using Cisco VPN client.
### Unable to connect to VirtualBox VM on Mac OS X when using Cisco VPN client
The Cisco VPN client blocks access to IP addresses that it does not know about, and may block access to a new VM if it is created while the Cisco VPN client is running.

View File

@@ -1,9 +1,11 @@
# Make deploy example
This folder contains a Makefile and a set of supporting files demonstrating how to run a docker-stack notebook container on a docker-machine controlled host.
## Prerequisites
* make 3.81+
* Ubuntu users: Be aware of [make 3.81 defect 483086](https://bugs.launchpad.net/ubuntu/+source/make-dfsg/+bug/483086) which exists in 14.04 LTS but is fixed in 15.04+
* Ubuntu users: Be aware of [make 3.81 defect 483086](https://bugs.launchpad.net/ubuntu/+source/make-dfsg/+bug/483086) which exists in 14.04 LTS but is fixed in 15.04+
* docker-machine 0.5.0+
* docker 1.9.0+

View File

@@ -1,3 +1,6 @@
OpenShift example
=================
This example provides templates for deploying the Jupyter Project docker-stacks images to OpenShift.
Prerequisites
@@ -7,7 +10,7 @@ Any OpenShift 3 environment. The templates were tested with OpenShift 3.7. It is
Do be aware that the Jupyter Project docker-stacks images are very large. The OpenShift environment you are using must provide sufficient per-user quota for images and for the file system of running containers. If the quota is too small, pulling the images to a node in the OpenShift cluster will fail for lack of space when they are deployed. Even if an image can be run, when the quota is only just larger than the space the image requires, you will not be able to install many packages into the container before running out of space.
OpenShift Online, the public hosted version of OpenShift from Red Hat has a quota of only 3GB for the image and container file system. As a result, only the ``minimal-notebook`` can be started and there is little space remaining to install additional packages. Although OpenShift Online is suitable for demonstrating these templates work, what you can do in that environment will be limited due to the size of the images.
OpenShift Online, the public hosted version of OpenShift from Red Hat, has a quota of only 3GB for the image and container file system. As a result, only the `minimal-notebook` can be started, and there is little space remaining to install additional packages. Although OpenShift Online is suitable for demonstrating that these templates work, what you can do in that environment will be limited due to the size of the images.
If you want to experiment with using Jupyter Notebooks in an OpenShift environment, you should instead use [Minishift](https://www.openshift.org/minishift/). Minishift provides you the ability to run OpenShift in a virtual machine on your own local computer.
@@ -20,13 +23,9 @@ To load the templates, login to OpenShift from the command line and run:
oc create -f https://raw.githubusercontent.com/jupyter-on-openshift/docker-stacks/master/examples/openshift/templates.json
```
This should create the following templates:
This should create the `jupyter-notebook` template.
```
jupyter-notebook
```
The template can be used from the command line using the ``oc new-app`` command, or from the OpenShift web console by selecting _Add to Project_. This ``README`` is only going to explain deploying from the command line.
The template can be used from the command line using the `oc new-app` command, or from the OpenShift web console by selecting _Add to Project_. This `README` is only going to explain deploying from the command line.
Deploying a Notebook
--------------------
@@ -39,7 +38,7 @@ oc new-app --template jupyter-notebook
The output will be similar to:
```
```lang-none
--> Deploying template "jupyter/jupyter-notebook" to project jupyter
Jupyter Notebook
@@ -61,13 +60,13 @@ The output will be similar to:
Run 'oc status' to view your app.
```
When no template parameters are provided, the name of the deployed notebook will be ``notebook``. The image used will be:
When no template parameters are provided, the name of the deployed notebook will be `notebook`. The image used will be:
```
```lang-none
jupyter/minimal-notebook:latest
```
A password you can use when accessing the notebook will be auto generated and is displayed in the output from running ``oc new-app``.
A password you can use when accessing the notebook will be auto-generated and is displayed in the output from running `oc new-app`.
To see the hostname for accessing the notebook run:
@@ -77,14 +76,14 @@ oc get routes
The output will be similar to:
```
```lang-none
NAME HOST/PORT PATH SERVICES PORT TERMINATION WILDCARD
notebook notebook-jupyter.abcd.pro-us-east-1.openshiftapps.com notebook 8888-tcp edge/Redirect None
```
A secure route will be used to expose the notebook outside of the OpenShift cluster, so in this case the URL would be:
```
```lang-none
https://notebook-jupyter.abcd.pro-us-east-1.openshiftapps.com/
```
@@ -93,7 +92,7 @@ When prompted, enter the password for the notebook.
Passing Template Parameters
---------------------------
To override the name for the notebook, the image used, and the password, you can pass template parameters using the ``--param`` option.
To override the name for the notebook, the image used, and the password, you can pass template parameters using the `--param` option.
```bash
oc new-app --template jupyter-notebook \
@@ -113,12 +112,12 @@ You can deploy any of the Jupyter Project docker-stacks images.
* jupyter/pyspark-notebook
* jupyter/all-spark-notebook
If you don't care what version of the image is used, add the ``:latest`` tag at the end of the image name, otherwise use the hash corresponding to the image version you want to use.
If you don't care what version of the image is used, add the `:latest` tag at the end of the image name, otherwise use the hash corresponding to the image version you want to use.
Deleting the Notebook Instance
------------------------------
To delete the notebook instance, run ``oc delete`` using a label selector for the application name.
To delete the notebook instance, run `oc delete` using a label selector for the application name.
```bash
oc delete all,configmap --selector app=mynotebook
@@ -127,7 +126,7 @@ oc delete all,configmap --selector app=mynotebook
Enabling Jupyter Lab Interface
------------------------------
To enable the Jupyter Lab interface for a deployed notebook set the ``JUPYTER_ENABLE_LAB`` environment variable.
To enable the Jupyter Lab interface for a deployed notebook set the `JUPYTER_ENABLE_LAB` environment variable.
```bash
oc set env dc/mynotebook JUPYTER_ENABLE_LAB=true
@@ -162,11 +161,11 @@ If you want to set any custom configuration for the notebook, you can edit the c
oc edit configmap/mynotebook-cfg
```
The ``data`` field of the config map contains Python code used as the ``jupyter_notebook_config.py`` file.
The `data` field of the config map contains Python code used as the `jupyter_notebook_config.py` file.
If you are using a persistent volume, you can also create a configuration file at:
```
```lang-none
/home/jovyan/.jupyter/jupyter_notebook_config.py
```
@@ -174,7 +173,7 @@ This will be merged at the end of the configuration from the config map.
Because the configuration is Python code, ensure any indenting is correct. Any errors in the configuration file will cause the notebook to fail when starting.
If the error is in the config map, edit it again to fix it and trigged a new deployment if necessary by running:
If the error is in the config map, edit it again to fix it and trigger a new deployment if necessary by running:
```bash
oc rollout latest dc/mynotebook
@@ -213,15 +212,7 @@ oc set env dc/mynotebook JUPYTER_NOTEBOOK_PASSWORD=mypassword
This will trigger a new deployment so ensure you have downloaded any work if not using a persistent volume.
If using a persistent volume, you could instead setup a password in the file:
```
/home/jovyan/.jupyter/jupyter_notebook_config.py
```
as per guidelines in:
* https://jupyter-notebook.readthedocs.io/en/stable/public_server.html
If using a persistent volume, you could instead set up a password in the file `/home/jovyan/.jupyter/jupyter_notebook_config.py`, as per the guidelines in <https://jupyter-notebook.readthedocs.io/en/stable/public_server.html>.
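For example, a minimal sketch of such a config file, following the linked guidelines (the passphrase is a placeholder):

```python
# /home/jovyan/.jupyter/jupyter_notebook_config.py -- sketch only
from notebook.auth import passwd

c = get_config()  # injected by Jupyter when the config file is loaded
c.NotebookApp.password = passwd("replace-with-your-own-passphrase")
```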
Deploying from a Custom Image
-----------------------------

View File

@@ -1,10 +1,13 @@
Custom Jupyter Notebook images
==============================
This example provides scripts for building custom Jupyter Notebook images containing notebooks and data files, with the Python packages required by the notebooks already installed. The scripts provided work with the Source-to-Image tool, and you can create the images from the command line on your own computer. Templates are also provided to enable running builds in OpenShift, as well as deploying the resulting image to OpenShift to make it available.
The build scripts, when used with the Source-to-Image tool, provide similar capabilities to ``repo2docker``. When builds are run under OpenShift with the supplied templates, it provides similar capabilities to ``mybinder.org``, but where notebook instances are deployed in your existing OpenShift project and JupyterHub is not required.
The build scripts, when used with the Source-to-Image tool, provide similar capabilities to `repo2docker`. When builds are run under OpenShift with the supplied templates, it provides similar capabilities to `mybinder.org`, but where notebook instances are deployed in your existing OpenShift project and JupyterHub is not required.
For separate examples of using JupyterHub with OpenShift, see the project:
* https://github.com/jupyter-on-openshift/jupyterhub-quickstart
* <https://github.com/jupyter-on-openshift/jupyterhub-quickstart>
Source-to-Image Project
-----------------------
@@ -13,7 +16,7 @@ Source-to-Image (S2I) is an open source project which provides a tool for creati
Details on the S2I tool, and executable binaries for Linux, macOS and Windows, can be found on GitHub at:
* https://github.com/openshift/source-to-image
* <https://github.com/openshift/source-to-image>
The tool is standalone, and can be used on any system which provides a docker daemon for running containers. To provide an end-to-end capability to build and deploy applications in containers, support for S2I is also integrated into container platforms such as OpenShift.
@@ -31,18 +34,19 @@ s2i build \
notebook-examples
```
This example command will pull down the Git repository ``https://github.com/jupyter/notebook`` and build the image ``notebook-examples`` using the files contained in the ``docs/source/examples/Notebook`` directory of that Git repository. The base image which the files will be combined with is ``jupyter/minimal-notebook:latest``, but you can specify any of the Jupyter Project ``docker-stacks`` images as the base image.
This example command will pull down the Git repository <https://github.com/jupyter/notebook> and build the image `notebook-examples` using the files contained in the `docs/source/examples/Notebook` directory of that Git repository. The base image which the files will be combined with is `jupyter/minimal-notebook:latest`, but you can specify any of the Jupyter Project `docker-stacks` images as the base image.
The resulting image from running the command can be seen by running ``docker images``.
The resulting image can be seen by running the `docker images` command:
```
```bash
$ docker images
REPOSITORY TAG IMAGE ID CREATED SIZE
notebook-examples latest f5899ed1241d 2 minutes ago 2.59GB
```
You can now run the image.
```
```bash
$ docker run --rm -p 8888:8888 notebook-examples
Executing the command: jupyter notebook
[I 01:14:50.532 NotebookApp] Writing notebook server cookie secret to /home/jovyan/.local/share/jupyter/runtime/notebook_cookie_secret
@@ -66,15 +70,15 @@ Open your browser on the URL displayed, and you will find the notebooks from the
The S2I Builder Scripts
-----------------------
Normally when using S2I, the base image would be S2I enabled and contain the builder scripts needed to prepare the image and define how the application in the image should be run. As the Jupyter Project ``docker-stacks`` images are not S2I enabled (although they could be), in the above example the ``--scripts-url`` option has been used to specify that the example builder scripts contained in this directory of this Git repository should be used.
Normally when using S2I, the base image would be S2I enabled and contain the builder scripts needed to prepare the image and define how the application in the image should be run. As the Jupyter Project `docker-stacks` images are not S2I enabled (although they could be), in the above example the `--scripts-url` option has been used to specify that the example builder scripts contained in this directory of this Git repository should be used.
Using the ``--scripts-url`` option, the builder scripts can be hosted on any HTTP server, or you could also use builder scripts local to your computer file using an appropriate ``file://`` format URI argument to ``--scripts-url``.
Using the `--scripts-url` option, the builder scripts can be hosted on any HTTP server, or you can use builder scripts local to your computer by passing an appropriate `file://` format URI as the argument to `--scripts-url`.
The builder scripts in this directory of this repository are ``assemble`` and ``run`` and are provided as examples of what can be done. You can use the scripts as is, or create your own.
The builder scripts in this directory of this repository are `assemble` and `run` and are provided as examples of what can be done. You can use the scripts as is, or create your own.
The supplied ``assemble`` script performs a few key steps.
The supplied `assemble` script performs a few key steps.
The first steps copy files into the location they need to be when the image is run, from the directory where they are initially placed by the ``s2i`` command.
The first steps copy files into the location they need to be when the image is run, from the directory where they are initially placed by the `s2i` command.
```bash
cp -Rf /tmp/src/. /home/$NB_USER
@@ -95,7 +99,7 @@ else
fi
```
This determines whether a ``environment.yml`` or ``requirements.txt`` file exists with the files and if so, runs the appropriate package management tool to install any Python packages listed in those files.
This determines whether an `environment.yml` or `requirements.txt` file exists among the files and, if so, runs the appropriate package management tool to install any Python packages listed in those files.
This means that so long as a set of notebook files provides one of these files listing what Python packages they need, those packages will be automatically installed into the image so they are available when the image is run.
@@ -106,11 +110,11 @@ fix-permissions $CONDA_DIR
fix-permissions /home/$NB_USER
```
This fixes up permissions on any new files created by the build. This is necessary to ensure that when the image is run, you can still install additional files. This is important for when an image is run in ``sudo`` mode, or it is hosted in a more secure container platform such as Kubernetes/OpenShift where it will be run as a set user ID that isn't known in advance.
This fixes up permissions on any new files created by the build. This is necessary to ensure that when the image is run, you can still install additional files. This is important for when an image is run in `sudo` mode, or it is hosted in a more secure container platform such as Kubernetes/OpenShift where it will be run as a set user ID that isn't known in advance.
As long as you preserve the first and last set of steps, you can do whatever you want in the ``assemble`` script to install packages, create files etc. Do be aware though that S2I builds do not run as ``root`` and so you cannot install additional system packages. If you need to install additional system packages, use a ``Dockerfile`` and normal ``docker build`` to first create a new custom base image from the Jupyter Project ``docker-stacks`` images, with the extra system packages, and then use that image with the S2I build to combine your notebooks and have Python packages installed.
As long as you preserve the first and last set of steps, you can do whatever you want in the `assemble` script to install packages, create files, etc. Do be aware, though, that S2I builds do not run as `root`, so you cannot install additional system packages. If you need to install additional system packages, use a `Dockerfile` and a normal `docker build` to first create a new custom base image from the Jupyter Project `docker-stacks` images with the extra system packages, and then use that image with the S2I build to combine your notebooks and have the Python packages installed.
The ``run`` script in this directory is very simple and just runs the notebook application.
The `run` script in this directory is very simple and just runs the notebook application.
```bash
exec start-notebook.sh "$@"
@@ -132,7 +136,7 @@ jupyter-notebook-builder
jupyter-notebook-quickstart
```
The templates can be used from the OpenShift web console or command line. This ``README`` is only going to explain deploying from the command line.
The templates can be used from the OpenShift web console or command line. This `README` is only going to explain deploying from the command line.
To use the OpenShift command line to build into an image, and deploy, the set of notebooks used above, run:
@@ -145,21 +149,17 @@ oc new-app --template jupyter-notebook-quickstart \
--param NOTEBOOK_PASSWORD=mypassword
```
You can provide a password using the ``NOTEBOOK_PASSWORD`` parameter. If you don't set that parameter, a password will be generated, with it being displayed by the ``oc new-app`` command.
You can provide a password using the `NOTEBOOK_PASSWORD` parameter. If you don't set that parameter, a password will be generated, with it being displayed by the `oc new-app` command.
Once the image has been built, it will be deployed. To see the hostname for accessing the notebook, run ``oc get routes``.
Once the image has been built, it will be deployed. To see the hostname for accessing the notebook, run `oc get routes`.
```
```lang-none
NAME HOST/PORT PATH SERVICES PORT TERMINATION WILDCARD
notebook-examples notebook-examples-jupyter.abcd.pro-us-east-1.openshiftapps.com notebook-examples 8888-tcp edge/Redirect None
```
As the deployment will use a secure connection, the URL for accessing the notebook in this case would be:
As the deployment will use a secure connection, the URL for accessing the notebook in this case would be <https://notebook-examples-jupyter.abcd.pro-us-east-1.openshiftapps.com>.
```
https://notebook-examples-jupyter.abcd.pro-us-east-1.openshiftapps.com
```
If you only want to build an image but not deploy it, you can use the `jupyter-notebook-builder` template. You can then deploy it using the `jupyter-notebook` template provided with the [openshift](../openshift) examples directory.
If you only want to build an image but not deploy it, you can use the ``jupyter-notebook-builder`` template. You can then deploy it using the ``jupyter-notebook`` template provided with the [openshift](../openshift) examples directory.
See the ``openshift`` examples directory for further information on customizing configuration for a Jupyter Notebook deployment and deleting a deployment.
See the `openshift` examples directory for further information on customizing configuration for a Jupyter Notebook deployment and deleting a deployment.

View File

@@ -1,10 +1,10 @@
# Minimal Jupyter Notebook Stack
[![docker pulls](https://img.shields.io/docker/pulls/jupyter/minimal-notebook.svg)](https://hub.docker.com/r/jupyter/minimal-notebook/)
[![docker stars](https://img.shields.io/docker/stars/jupyter/minimal-notebook.svg)](https://hub.docker.com/r/jupyter/minimal-notebook/)
[![image metadata](https://images.microbadger.com/badges/image/jupyter/minimal-notebook.svg)](https://microbadger.com/images/jupyter/minimal-notebook "jupyter/minimal-notebook image metadata")
# Minimal Jupyter Notebook Stack
GitHub Actions in the https://github.com/jupyter/docker-stacks project builds and pushes this image
GitHub Actions in the <https://github.com/jupyter/docker-stacks> project builds and pushes this image
to Docker Hub.
Please visit the project documentation site for help using and contributing to this image and

View File

@@ -1,45 +0,0 @@
#!/bin/bash
set -e
# Apply tags
GIT_SHA_TAG=${GITHUB_SHA:0:12}
docker tag $IMAGE_NAME "$DOCKER_REPO:$GIT_SHA_TAG"
# Update index
INDEX_ROW="|\`${BUILD_TIMESTAMP}\`|\`jupyter/${IMAGE_SHORT_NAME}:${GIT_SHA_TAG}\`|[Git diff](https://github.com/jupyter/docker-stacks/commit/${GITHUB_SHA})<br />[Dockerfile](https://github.com/jupyter/docker-stacks/blob/${GITHUB_SHA}/${IMAGE_SHORT_NAME}/Dockerfile)<br />[Build manifest](./${IMAGE_SHORT_NAME}-${GIT_SHA_TAG})|"
sed "/|-|/a ${INDEX_ROW}" -i "${WIKI_PATH}/Home.md"
# Build manifest
MANIFEST_FILE="${WIKI_PATH}/manifests/${IMAGE_SHORT_NAME}-${GIT_SHA_TAG}.md"
mkdir -p $(dirname "$MANIFEST_FILE")
cat << EOF > "$MANIFEST_FILE"
* Build datetime: ${BUILD_TIMESTAMP}
* Docker image: ${DOCKER_REPO}:${GIT_SHA_TAG}
* Docker image size: $(docker images ${IMAGE_NAME} --format "{{.Size}}")
* Git commit SHA: [${GITHUB_SHA}](https://github.com/jupyter/docker-stacks/commit/${GITHUB_SHA})
* Git commit message:
\`\`\`
${COMMIT_MSG}
\`\`\`
## Python Packages
\`\`\`
$(docker run --rm ${IMAGE_NAME} python --version)
\`\`\`
\`\`\`
$(docker run --rm ${IMAGE_NAME} conda info)
\`\`\`
\`\`\`
$(docker run --rm ${IMAGE_NAME} conda list)
\`\`\`
## Apt Packages
\`\`\`
$(docker run --rm ${IMAGE_NAME} apt list --installed)
\`\`\`
EOF

View File

@@ -29,10 +29,7 @@ RUN apt-get -y update && \
# Spark installation
WORKDIR /tmp
# Using the preferred mirror to download Spark
# hadolint ignore=SC2046
RUN wget -q $(wget -qO- https://www.apache.org/dyn/closer.lua/spark/spark-${APACHE_SPARK_VERSION}/spark-${APACHE_SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz\?as_json | \
python -c "import sys, json; content=json.load(sys.stdin); print(content['preferred']+content['path_info'])") && \
RUN wget -q "https://archive.apache.org/dist/spark/spark-${APACHE_SPARK_VERSION}/spark-${APACHE_SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz" && \
echo "${spark_checksum} *spark-${APACHE_SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz" | sha512sum -c - && \
tar xzf "spark-${APACHE_SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz" -C /usr/local --owner root --group root --no-same-owner && \
rm "spark-${APACHE_SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz"
@@ -59,7 +56,7 @@ USER $NB_UID
# Install pyarrow
RUN conda install --quiet --yes --satisfied-skip-solve \
'pyarrow=3.0.*' && \
'pyarrow=4.0.*' && \
conda clean --all -f -y && \
fix-permissions "${CONDA_DIR}" && \
fix-permissions "/home/${NB_USER}"

View File

@@ -1,10 +1,10 @@
# Jupyter Notebook Python, Spark Stack
[![docker pulls](https://img.shields.io/docker/pulls/jupyter/pyspark-notebook.svg)](https://hub.docker.com/r/jupyter/pyspark-notebook/)
[![docker stars](https://img.shields.io/docker/stars/jupyter/pyspark-notebook.svg)](https://hub.docker.com/r/jupyter/pyspark-notebook/)
[![image metadata](https://images.microbadger.com/badges/image/jupyter/pyspark-notebook.svg)](https://microbadger.com/images/jupyter/pyspark-notebook "jupyter/pyspark-notebook image metadata")
# Jupyter Notebook Python, Spark Stack
GitHub Actions in the https://github.com/jupyter/docker-stacks project builds and pushes this image
GitHub Actions in the <https://github.com/jupyter/docker-stacks> project builds and pushes this image
to Docker Hub.
Please visit the project documentation site for help using and contributing to this image and

View File

@@ -1,51 +0,0 @@
#!/bin/bash
set -e
# Apply tags
GIT_SHA_TAG=${GITHUB_SHA:0:12}
docker tag $IMAGE_NAME "$DOCKER_REPO:$GIT_SHA_TAG"
# Update index
INDEX_ROW="|\`${BUILD_TIMESTAMP}\`|\`jupyter/${IMAGE_SHORT_NAME}:${GIT_SHA_TAG}\`|[Git diff](https://github.com/jupyter/docker-stacks/commit/${GITHUB_SHA})<br />[Dockerfile](https://github.com/jupyter/docker-stacks/blob/${GITHUB_SHA}/${IMAGE_SHORT_NAME}/Dockerfile)<br />[Build manifest](./${IMAGE_SHORT_NAME}-${GIT_SHA_TAG})|"
sed "/|-|/a ${INDEX_ROW}" -i "${WIKI_PATH}/Home.md"
# Build manifest
MANIFEST_FILE="${WIKI_PATH}/manifests/${IMAGE_SHORT_NAME}-${GIT_SHA_TAG}.md"
mkdir -p $(dirname "$MANIFEST_FILE")
cat << EOF > "$MANIFEST_FILE"
* Build datetime: ${BUILD_TIMESTAMP}
* Docker image: ${DOCKER_REPO}:${GIT_SHA_TAG}
* Docker image size: $(docker images ${IMAGE_NAME} --format "{{.Size}}")
* Git commit SHA: [${GITHUB_SHA}](https://github.com/jupyter/docker-stacks/commit/${GITHUB_SHA})
* Git commit message:
\`\`\`
${COMMIT_MSG}
\`\`\`
## Apache Spark
\`\`\`
$(docker run --rm ${IMAGE_NAME} bash -c '$SPARK_HOME/bin/spark-submit --version' 2>&1)
\`\`\`
## Python Packages
\`\`\`
$(docker run --rm ${IMAGE_NAME} python --version)
\`\`\`
\`\`\`
$(docker run --rm ${IMAGE_NAME} conda info)
\`\`\`
\`\`\`
$(docker run --rm ${IMAGE_NAME} conda list)
\`\`\`
## Apt Packages
\`\`\`
$(docker run --rm ${IMAGE_NAME} apt list --installed)
\`\`\`
EOF

View File

@@ -1,10 +1,10 @@
# Jupyter Notebook R Stack
[![docker pulls](https://img.shields.io/docker/pulls/jupyter/r-notebook.svg)](https://hub.docker.com/r/jupyter/r-notebook/)
[![docker stars](https://img.shields.io/docker/stars/jupyter/r-notebook.svg)](https://hub.docker.com/r/jupyter/r-notebook/)
[![image metadata](https://images.microbadger.com/badges/image/jupyter/r-notebook.svg)](https://microbadger.com/images/jupyter/r-notebook "jupyter/r-notebook image metadata")
# Jupyter Notebook R Stack
GitHub Actions in the https://github.com/jupyter/docker-stacks project builds and pushes this image
GitHub Actions in the <https://github.com/jupyter/docker-stacks> project builds and pushes this image
to Docker Hub.
Please visit the project documentation site for help using and contributing to this image and

View File

@@ -1,55 +0,0 @@
#!/bin/bash
set -e
# Apply tags
GIT_SHA_TAG=${GITHUB_SHA:0:12}
docker tag $IMAGE_NAME "$DOCKER_REPO:$GIT_SHA_TAG"
# Update index
INDEX_ROW="|\`${BUILD_TIMESTAMP}\`|\`jupyter/${IMAGE_SHORT_NAME}:${GIT_SHA_TAG}\`|[Git diff](https://github.com/jupyter/docker-stacks/commit/${GITHUB_SHA})<br />[Dockerfile](https://github.com/jupyter/docker-stacks/blob/${GITHUB_SHA}/${IMAGE_SHORT_NAME}/Dockerfile)<br />[Build manifest](./${IMAGE_SHORT_NAME}-${GIT_SHA_TAG})|"
sed "/|-|/a ${INDEX_ROW}" -i "${WIKI_PATH}/Home.md"
# Build manifest
MANIFEST_FILE="${WIKI_PATH}/manifests/${IMAGE_SHORT_NAME}-${GIT_SHA_TAG}.md"
mkdir -p $(dirname "$MANIFEST_FILE")
cat << EOF > "$MANIFEST_FILE"
* Build datetime: ${BUILD_TIMESTAMP}
* Docker image: ${DOCKER_REPO}:${GIT_SHA_TAG}
* Docker image size: $(docker images ${IMAGE_NAME} --format "{{.Size}}")
* Git commit SHA: [${GITHUB_SHA}](https://github.com/jupyter/docker-stacks/commit/${GITHUB_SHA})
* Git commit message:
\`\`\`
${COMMIT_MSG}
\`\`\`
## R Packages
\`\`\`
$(docker run --rm ${IMAGE_NAME} R --version)
\`\`\`
\`\`\`
$(docker run --rm ${IMAGE_NAME} R --silent -e 'installed.packages(.Library)[, c(1,3)]')
\`\`\`
## Python Packages
\`\`\`
$(docker run --rm ${IMAGE_NAME} python --version)
\`\`\`
\`\`\`
$(docker run --rm ${IMAGE_NAME} conda info)
\`\`\`
\`\`\`
$(docker run --rm ${IMAGE_NAME} conda list)
\`\`\`
## Apt Packages
\`\`\`
$(docker run --rm ${IMAGE_NAME} apt list --installed)
\`\`\`
EOF

View File

@@ -1,4 +1,5 @@
docker
plumbum
pre-commit
pytest
recommonmark

View File

@@ -24,7 +24,7 @@ RUN conda install --quiet --yes \
'cython=0.29.*' \
'dask=2021.4.*' \
'dill=0.3.*' \
'h5py=3.1.*' \
'h5py=3.2.*' \
'ipywidgets=7.6.*' \
'ipympl=0.7.*'\
'matplotlib-base=3.4.*' \
@@ -40,7 +40,7 @@ RUN conda install --quiet --yes \
'seaborn=0.11.*' \
'sqlalchemy=1.4.*' \
'statsmodels=0.12.*' \
'sympy=1.7.*' \
'sympy=1.8.*' \
'vincent=0.4.*' \
'widgetsnbextension=3.5.*'\
'xlrd=2.0.*' && \

View File

@@ -1,10 +1,10 @@
# Jupyter Notebook Scientific Python Stack
[![docker pulls](https://img.shields.io/docker/pulls/jupyter/scipy-notebook.svg)](https://hub.docker.com/r/jupyter/scipy-notebook/)
[![docker stars](https://img.shields.io/docker/stars/jupyter/scipy-notebook.svg)](https://hub.docker.com/r/jupyter/scipy-notebook/)
[![image metadata](https://images.microbadger.com/badges/image/jupyter/scipy-notebook.svg)](https://microbadger.com/images/jupyter/scipy-notebook "jupyter/scipy-notebook image metadata")
# Jupyter Notebook Scientific Python Stack
GitHub Actions in the https://github.com/jupyter/docker-stacks project builds and pushes this image
GitHub Actions in the <https://github.com/jupyter/docker-stacks> project builds and pushes this image
to Docker Hub.
Please visit the project documentation site for help using and contributing to this image and

View File

@@ -1,45 +0,0 @@
#!/bin/bash
set -e
# Apply tags
GIT_SHA_TAG=${GITHUB_SHA:0:12}
docker tag $IMAGE_NAME "$DOCKER_REPO:$GIT_SHA_TAG"
# Update index
INDEX_ROW="|\`${BUILD_TIMESTAMP}\`|\`jupyter/${IMAGE_SHORT_NAME}:${GIT_SHA_TAG}\`|[Git diff](https://github.com/jupyter/docker-stacks/commit/${GITHUB_SHA})<br />[Dockerfile](https://github.com/jupyter/docker-stacks/blob/${GITHUB_SHA}/${IMAGE_SHORT_NAME}/Dockerfile)<br />[Build manifest](./${IMAGE_SHORT_NAME}-${GIT_SHA_TAG})|"
sed "/|-|/a ${INDEX_ROW}" -i "${WIKI_PATH}/Home.md"
# Build manifest
MANIFEST_FILE="${WIKI_PATH}/manifests/${IMAGE_SHORT_NAME}-${GIT_SHA_TAG}.md"
mkdir -p $(dirname "$MANIFEST_FILE")
cat << EOF > "$MANIFEST_FILE"
* Build datetime: ${BUILD_TIMESTAMP}
* Docker image: ${DOCKER_REPO}:${GIT_SHA_TAG}
* Docker image size: $(docker images ${IMAGE_NAME} --format "{{.Size}}")
* Git commit SHA: [${GITHUB_SHA}](https://github.com/jupyter/docker-stacks/commit/${GITHUB_SHA})
* Git commit message:
\`\`\`
${COMMIT_MSG}
\`\`\`
## Python Packages
\`\`\`
$(docker run --rm ${IMAGE_NAME} python --version)
\`\`\`
\`\`\`
$(docker run --rm ${IMAGE_NAME} conda info)
\`\`\`
\`\`\`
$(docker run --rm ${IMAGE_NAME} conda list)
\`\`\`
## Apt Packages
\`\`\`
$(docker run --rm ${IMAGE_NAME} apt list --installed)
\`\`\`
EOF

115
tagging/README.md Normal file
View File

@@ -0,0 +1,115 @@
# Docker stacks tagging and manifest creation
The main purpose of the source code in this folder is to properly tag all the images and to update [build manifests](https://github.com/jupyter/docker-stacks/wiki).
These two processes are closely related, so the source code is widely reused.
A basic example of a tag is a `python` version tag.
For example, an image `jupyter/base-notebook` with `python 3.8.8` will have the tag `jupyter/base-notebook:python-3.8.8`.
This tag, along with all the other tags, is pushed to Docker Hub.
A manifest is a description of some important part of the image, written in `markdown`.
For example, we dump all the `conda` packages, including their versions.
## Main principles
- All the images are located in a hierarchical tree. More info on [image relationships](../docs/using/selecting.md#image-relationships).
- We have `tagger` and `manifest` classes, which can be run inside docker containers to obtain tags and build manifest pieces.
- These classes are inherited from the parent image by all of its child images.
- Because manifests and tags might change from parent to child, `taggers` and `manifests` are reevaluated on each image, so the values themselves are not inherited.
- To tag an image and create a manifest, run `make hook/base-notebook` (or another image of your choice); a rough sketch of what this target does is shown below.
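Under the hood, the hook boils down to the two entry points defined in this folder. A minimal sketch, assuming the repository root is the working directory and that a wiki checkout exists at an illustrative `../wiki` path:

```python
# Approximate equivalent of `make hook/base-notebook` (sketch, not the real recipe).
from tagging.tag_image import tag_image
from tagging.create_manifests import create_manifests

tag_image(short_image_name="base-notebook", owner="jupyter")
create_manifests(short_image_name="base-notebook", owner="jupyter",
                 wiki_path="../wiki")  # wiki_path is illustrative
```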
## Source code description
In this section, we briefly describe the source code in this folder and give examples of how to use it.
### DockerRunner
`DockerRunner` is a helper class to easily run a docker container and execute commands inside this container:
```python
from .docker_runner import DockerRunner
with DockerRunner("ubuntu:bionic") as container:
    DockerRunner.run_simple_command(container, cmd="env", print_result=True)
```
### GitHelper
`GitHelper` methods are run in the current `git` repo and return information about the last commit hash and commit message:
```python
from .git_helper import GitHelper
print("Git hash:", GitHelper.commit_hash())
print("Git message:", GitHelper.commit_message())
```
A prefix of the commit hash (namely, the first 12 characters) is used as an image tag, which makes it easy to inherit from a fixed version of a docker image.
### Tagger
`Tagger` is a class which can be run inside a docker container to calculate a tag for an image.
All the taggers are inherited from `TaggerInterface`:
```python
class TaggerInterface:
"""Common interface for all taggers"""
@staticmethod
def tag_value(container) -> str:
raise NotImplementedError
```
So, the `tag_value(container)` method takes a docker container as input and returns a tag.
`SHATagger` example:
```python
class SHATagger(TaggerInterface):
    @staticmethod
    def tag_value(container):
        return GitHelper.commit_hash_tag()
```
- `taggers.py` contains all the taggers.
- `tag_image.py` is a python executable which is used to tag the image.
### Manifest
`ManifestHeader` is a build manifest header.
It contains information about `Build datetime`, `Docker image size` and `Git commit` info.
All the other manifest classes are inherited from `ManifestInterface`:
```python
class ManifestInterface:
"""Common interface for all manifests"""
@staticmethod
def markdown_piece(container) -> str:
raise NotImplementedError
```
- The `markdown_piece(container)` method returns a piece of markdown to be used as part of the build manifest.
`AptPackagesManifest` example:
```python
class AptPackagesManifest(ManifestInterface):
    @staticmethod
    def markdown_piece(container) -> str:
        return "\n".join([
            "## Apt Packages",
            "",
            quoted_output(container, "apt list --installed")
        ])
```
- `quoted_output` simply runs the command inside the container using `DockerRunner.run_simple_command` and wraps the output in a triple-backtick fence to produce a valid piece of markdown.
- `manifests.py` contains all the manifests.
- `create_manifests.py` is a python executable which is used to create the build manifest for an image.
### Images Hierarchy
The dependencies between images, and which taggers and manifests each image makes use of, are defined in `images_hierarchy.py`.
`get_taggers_and_manifests.py` defines a helper function to get the taggers and manifests for a specific image.
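For example, a small sketch of how the helper resolves a leaf image, using the hierarchy defined below (assuming the repository root is on the import path):

```python
from tagging.get_taggers_and_manifests import get_taggers_and_manifests

# r-notebook inherits from minimal-notebook -> base-notebook, so the
# base-notebook taggers come first and RVersionTagger is appended last.
taggers, manifests = get_taggers_and_manifests("r-notebook")
print([t.__name__ for t in taggers])    # ['SHATagger', ..., 'RVersionTagger']
print([m.__name__ for m in manifests])  # [..., 'RPackagesManifest']
```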

0
tagging/__init__.py Normal file
View File

91
tagging/create_manifests.py Executable file
View File

@@ -0,0 +1,91 @@
#!/usr/bin/env python3
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
import argparse
import datetime
import logging
import os
from typing import List
from .docker_runner import DockerRunner
from .get_taggers_and_manifests import get_taggers_and_manifests
from .git_helper import GitHelper
from .manifests import ManifestHeader, ManifestInterface
logger = logging.getLogger(__name__)
BUILD_TIMESTAMP = datetime.datetime.utcnow().isoformat()[:-7] + "Z"
MARKDOWN_LINE_BREAK = "<br />"
def append_build_history_line(short_image_name: str, owner: str, wiki_path: str, all_tags: List[str]) -> None:
    logger.info("Appending build history line")

    date_column = f"`{BUILD_TIMESTAMP}`"
    image_column = MARKDOWN_LINE_BREAK.join(
        f"`{owner}/{short_image_name}:{tag_value}`" for tag_value in all_tags
    )
    commit_hash = GitHelper.commit_hash()
    commit_hash_tag = GitHelper.commit_hash_tag()
    links_column = MARKDOWN_LINE_BREAK.join([
        f"[Git diff](https://github.com/jupyter/docker-stacks/commit/{commit_hash})",
        f"[Dockerfile](https://github.com/jupyter/docker-stacks/blob/{commit_hash}/{short_image_name}/Dockerfile)",
        f"[Build manifest](./{short_image_name}-{commit_hash_tag})"
    ])
    build_history_line = "|".join([date_column, image_column, links_column]) + "|"

    home_wiki_file = os.path.join(wiki_path, "Home.md")
    with open(home_wiki_file, "r") as f:
        file = f.read()
    TABLE_BEGINNING = "|-|-|-|\n"
    file = file.replace(TABLE_BEGINNING, TABLE_BEGINNING + build_history_line + "\n")
    with open(home_wiki_file, "w") as f:
        f.write(file)


def create_manifest_file(
    short_image_name: str,
    owner: str,
    wiki_path: str,
    manifests: List[ManifestInterface],
    container
) -> None:
    manifest_names = [manifest.__name__ for manifest in manifests]
    logger.info(f"Using manifests: {manifest_names}")

    commit_hash_tag = GitHelper.commit_hash_tag()
    manifest_file = os.path.join(wiki_path, f"manifests/{short_image_name}-{commit_hash_tag}.md")

    markdown_pieces = [ManifestHeader.create_header(short_image_name, owner, BUILD_TIMESTAMP)] + \
        [manifest.markdown_piece(container) for manifest in manifests]
    markdown_content = "\n\n".join(markdown_pieces) + "\n"

    with open(manifest_file, "w") as f:
        f.write(markdown_content)


def create_manifests(short_image_name: str, owner: str, wiki_path: str) -> None:
    logger.info(f"Creating manifests for image: {short_image_name}")
    taggers, manifests = get_taggers_and_manifests(short_image_name)

    image = f"{owner}/{short_image_name}:latest"
    with DockerRunner(image) as container:
        all_tags = [tagger.tag_value(container) for tagger in taggers]
        append_build_history_line(short_image_name, owner, wiki_path, all_tags)
        create_manifest_file(short_image_name, owner, wiki_path, manifests, container)


if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)

    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument("--short-image-name", required=True, help="Short image name to apply tags for")
    arg_parser.add_argument("--owner", required=True, help="Owner of the image")
    arg_parser.add_argument("--wiki-path", required=True, help="Path to the wiki pages")
    args = arg_parser.parse_args()

    logger.info(f"Current build timestamp: {BUILD_TIMESTAMP}")
    create_manifests(args.short_image_name, args.owner, args.wiki_path)

39
tagging/docker_runner.py Normal file
View File

@@ -0,0 +1,39 @@
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
import docker
import logging
logger = logging.getLogger(__name__)
class DockerRunner:
    def __init__(self, image_name: str, docker_client=docker.from_env(), command: str = "sleep infinity"):
        self.container = None
        self.image_name = image_name
        self.command = command
        self.docker_client = docker_client

    def __enter__(self):
        logger.info(f"Creating container for image {self.image_name} ...")
        self.container = self.docker_client.containers.run(
            image=self.image_name, command=self.command, detach=True,
        )
        logger.info(f"Container {self.container.name} created")
        return self.container

    def __exit__(self, exc_type, exc_value, traceback):
        logger.info(f"Removing container {self.container.name} ...")
        if self.container:
            self.container.remove(force=True)
            logger.info(f"Container {self.container.name} removed")

    @staticmethod
    def run_simple_command(container, cmd: str, print_result: bool = True):
        logger.info(f"Running cmd: '{cmd}' on container: {container}")
        out = container.exec_run(cmd)
        assert out.exit_code == 0, f"Command: {cmd} failed"
        result = out.output.decode("utf-8").rstrip()
        if print_result:
            logger.info(f"Command result: {result}")
        return result

View File

@@ -0,0 +1,19 @@
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
from typing import List, Tuple
from .images_hierarchy import ALL_IMAGES
from .manifests import ManifestInterface
from .taggers import TaggerInterface
def get_taggers_and_manifests(short_image_name: str) -> Tuple[List[TaggerInterface], List[ManifestInterface]]:
    taggers: List[TaggerInterface] = []
    manifests: List[ManifestInterface] = []
    while short_image_name is not None:
        image_description = ALL_IMAGES[short_image_name]

        taggers = image_description.taggers + taggers
        manifests = image_description.manifests + manifests

        short_image_name = image_description.parent_image
    return taggers, manifests

23
tagging/git_helper.py Executable file
View File

@@ -0,0 +1,23 @@
#!/usr/bin/env python3
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
from plumbum.cmd import git
class GitHelper:
    @staticmethod
    def commit_hash() -> str:
        return git["rev-parse", "HEAD"]().strip()

    @staticmethod
    def commit_hash_tag() -> str:
        return GitHelper.commit_hash()[:12]

    @staticmethod
    def commit_message() -> str:
        return git["log", -1, "--pretty=%B"]().strip()


if __name__ == "__main__":
    print("Git hash:", GitHelper.commit_hash())
    print("Git message:", GitHelper.commit_message())

View File

@@ -0,0 +1,65 @@
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
from dataclasses import dataclass, field
from typing import Optional, List
from .taggers import TaggerInterface, \
    SHATagger, \
    UbuntuVersionTagger, PythonVersionTagger, \
    JupyterNotebookVersionTagger, JupyterLabVersionTagger, JupyterHubVersionTagger, \
    RVersionTagger, TensorflowVersionTagger, JuliaVersionTagger, \
    SparkVersionTagger, HadoopVersionTagger, JavaVersionTagger
from .manifests import ManifestInterface, \
    CondaEnvironmentManifest, AptPackagesManifest, \
    RPackagesManifest, JuliaPackagesManifest, SparkInfoManifest


@dataclass
class ImageDescription:
    parent_image: Optional[str]
    taggers: List[TaggerInterface] = field(default_factory=list)
    manifests: List[ManifestInterface] = field(default_factory=list)


ALL_IMAGES = {
    "base-notebook": ImageDescription(
        parent_image=None,
        taggers=[
            SHATagger,
            UbuntuVersionTagger, PythonVersionTagger,
            JupyterNotebookVersionTagger, JupyterLabVersionTagger, JupyterHubVersionTagger
        ],
        manifests=[
            CondaEnvironmentManifest, AptPackagesManifest
        ]
    ),
    "minimal-notebook": ImageDescription(
        parent_image="base-notebook"
    ),
    "scipy-notebook": ImageDescription(
        parent_image="minimal-notebook"
    ),
    "r-notebook": ImageDescription(
        parent_image="minimal-notebook",
        taggers=[RVersionTagger],
        manifests=[RPackagesManifest]
    ),
    "tensorflow-notebook": ImageDescription(
        parent_image="scipy-notebook",
        taggers=[TensorflowVersionTagger]
    ),
    "datascience-notebook": ImageDescription(
        parent_image="scipy-notebook",
        taggers=[RVersionTagger, JuliaVersionTagger],
        manifests=[RPackagesManifest, JuliaPackagesManifest]
    ),
    "pyspark-notebook": ImageDescription(
        parent_image="scipy-notebook",
        taggers=[SparkVersionTagger, HadoopVersionTagger, JavaVersionTagger],
        manifests=[SparkInfoManifest]
    ),
    "all-spark-notebook": ImageDescription(
        parent_image="pyspark-notebook",
        taggers=[RVersionTagger],
        manifests=[RPackagesManifest]
    )
}

108
tagging/manifests.py Normal file
View File

@@ -0,0 +1,108 @@
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
import logging
from plumbum.cmd import docker
from .docker_runner import DockerRunner
from .git_helper import GitHelper
logger = logging.getLogger(__name__)
def quoted_output(container, cmd: str) -> str:
    return "\n".join([
        "```",
        DockerRunner.run_simple_command(container, cmd, print_result=False),
        "```"
    ])


class ManifestHeader:
    """ManifestHeader doesn't fall under common interface and we run it separately"""
    @staticmethod
    def create_header(short_image_name: str, owner: str, build_timestamp: str) -> str:
        commit_hash = GitHelper.commit_hash()
        commit_hash_tag = GitHelper.commit_hash_tag()
        commit_message = GitHelper.commit_message()
        image_size = docker["images", f"{owner}/{short_image_name}:latest", "--format", "{{.Size}}"]().rstrip()

        return "\n".join([
            f"# Build manifest for image: {short_image_name}:{commit_hash_tag}",
            "",
            "## Build Info",
            "",
            f"* Build datetime: {build_timestamp}",
            f"* Docker image: {owner}/{short_image_name}:{commit_hash_tag}",
            f"* Docker image size: {image_size}",
            f"* Git commit SHA: [{commit_hash}](https://github.com/jupyter/docker-stacks/commit/{commit_hash})",
            "* Git commit message:",
            "```",
            f"{commit_message}",
            "```"
        ])


class ManifestInterface:
    """Common interface for all manifests"""
    @staticmethod
    def markdown_piece(container) -> str:
        raise NotImplementedError


class CondaEnvironmentManifest(ManifestInterface):
    @staticmethod
    def markdown_piece(container) -> str:
        return "\n".join([
            "## Python Packages",
            "",
            quoted_output(container, "python --version"),
            "",
            quoted_output(container, "conda info"),
            "",
            quoted_output(container, "conda list")
        ])


class AptPackagesManifest(ManifestInterface):
    @staticmethod
    def markdown_piece(container) -> str:
        return "\n".join([
            "## Apt Packages",
            "",
            quoted_output(container, "apt list --installed")
        ])


class RPackagesManifest(ManifestInterface):
    @staticmethod
    def markdown_piece(container) -> str:
        return "\n".join([
            "## R Packages",
            "",
            quoted_output(container, "R --version"),
            "",
            quoted_output(container, "R --silent -e 'installed.packages(.Library)[, c(1,3)]'")
        ])


class JuliaPackagesManifest(ManifestInterface):
    @staticmethod
    def markdown_piece(container) -> str:
        return "\n".join([
            "## Julia Packages",
            "",
            quoted_output(container, "julia -E 'using InteractiveUtils; versioninfo()'"),
            "",
            quoted_output(container, "julia -E 'import Pkg; Pkg.status()'")
        ])


class SparkInfoManifest(ManifestInterface):
    @staticmethod
    def markdown_piece(container) -> str:
        return "\n".join([
            "## Apache Spark",
            "",
            quoted_output(container, "/usr/local/spark/bin/spark-submit --version"),
        ])

36
tagging/tag_image.py Executable file
View File

@@ -0,0 +1,36 @@
#!/usr/bin/env python3
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
import argparse
import logging
from plumbum.cmd import docker
from .docker_runner import DockerRunner
from .get_taggers_and_manifests import get_taggers_and_manifests
logger = logging.getLogger(__name__)
def tag_image(short_image_name: str, owner: str) -> None:
    logger.info(f"Tagging image: {short_image_name}")
    taggers, _ = get_taggers_and_manifests(short_image_name)

    image = f"{owner}/{short_image_name}:latest"
    with DockerRunner(image) as container:
        for tagger in taggers:
            tagger_name = tagger.__name__
            tag_value = tagger.tag_value(container)
            logger.info(f"Applying tag tagger_name: {tagger_name} tag_value: {tag_value}")
            docker["tag", image, f"{owner}/{short_image_name}:{tag_value}"]()


if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)

    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument("--short-image-name", required=True, help="Short image name to apply tags for")
    arg_parser.add_argument("--owner", required=True, help="Owner of the image")
    args = arg_parser.parse_args()

    tag_image(args.short_image_name, args.owner)

118
tagging/taggers.py Normal file
View File

@@ -0,0 +1,118 @@
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
import logging
from .git_helper import GitHelper
from .docker_runner import DockerRunner
logger = logging.getLogger(__name__)
def _get_program_version(container, program: str) -> str:
    return DockerRunner.run_simple_command(container, cmd=f"{program} --version")


def _get_env_variable(container, variable: str) -> str:
    env = DockerRunner.run_simple_command(
        container,
        cmd="env",
        print_result=False
    ).split()
    for env_entry in env:
        if env_entry.startswith(variable):
            return env_entry[len(variable) + 1:]
    raise KeyError(variable)


def _get_pip_package_version(container, package: str) -> str:
    VERSION_PREFIX = "Version: "
    package_info = DockerRunner.run_simple_command(
        container,
        cmd=f"pip show {package}",
        print_result=False
    )
    version_line = package_info.split("\n")[1]
    assert version_line.startswith(VERSION_PREFIX)
    return version_line[len(VERSION_PREFIX):]


class TaggerInterface:
    """Common interface for all taggers"""
    @staticmethod
    def tag_value(container) -> str:
        raise NotImplementedError


class SHATagger(TaggerInterface):
    @staticmethod
    def tag_value(container) -> str:
        return GitHelper.commit_hash_tag()


class UbuntuVersionTagger(TaggerInterface):
    @staticmethod
    def tag_value(container) -> str:
        os_release = DockerRunner.run_simple_command(container, "cat /etc/os-release").split("\n")
        for line in os_release:
            if line.startswith("VERSION_ID"):
                return "ubuntu-" + line.split("=")[1].strip('"')


class PythonVersionTagger(TaggerInterface):
    @staticmethod
    def tag_value(container) -> str:
        return "python-" + _get_program_version(container, "python").split()[1]


class JupyterNotebookVersionTagger(TaggerInterface):
    @staticmethod
    def tag_value(container) -> str:
        return "notebook-" + _get_program_version(container, "jupyter-notebook")


class JupyterLabVersionTagger(TaggerInterface):
    @staticmethod
    def tag_value(container) -> str:
        return "lab-" + _get_program_version(container, "jupyter-lab")


class JupyterHubVersionTagger(TaggerInterface):
    @staticmethod
    def tag_value(container) -> str:
        return "hub-" + _get_program_version(container, "jupyterhub")


class RVersionTagger(TaggerInterface):
    @staticmethod
    def tag_value(container) -> str:
        return "r-" + _get_program_version(container, "R").split()[2]


class TensorflowVersionTagger(TaggerInterface):
    @staticmethod
    def tag_value(container) -> str:
        return "tensorflow-" + _get_pip_package_version(container, "tensorflow")


class JuliaVersionTagger(TaggerInterface):
    @staticmethod
    def tag_value(container) -> str:
        return "julia-" + _get_program_version(container, "julia").split()[2]


class SparkVersionTagger(TaggerInterface):
    @staticmethod
    def tag_value(container) -> str:
        return "spark-" + _get_env_variable(container, "APACHE_SPARK_VERSION")


class HadoopVersionTagger(TaggerInterface):
    @staticmethod
    def tag_value(container) -> str:
        return "hadoop-" + _get_env_variable(container, "HADOOP_VERSION")


class JavaVersionTagger(TaggerInterface):
    @staticmethod
    def tag_value(container) -> str:
        return "java-" + _get_program_version(container, "java").split()[1]

View File

@@ -6,7 +6,8 @@ FROM $BASE_CONTAINER
LABEL maintainer="Jupyter Project <jupyter@googlegroups.com>"
# Install Tensorflow
RUN pip install --quiet --no-cache-dir \
'tensorflow==2.4.1' && \
RUN mamba install --quiet --yes \
'tensorflow=2.4.1' && \
conda clean --all -f -y && \
fix-permissions "${CONDA_DIR}" && \
fix-permissions "/home/${NB_USER}"

View File

@@ -1,10 +1,10 @@
# Jupyter Notebook Deep Learning Stack
[![docker pulls](https://img.shields.io/docker/pulls/jupyter/tensorflow-notebook.svg)](https://hub.docker.com/r/jupyter/tensorflow-notebook/)
[![docker stars](https://img.shields.io/docker/stars/jupyter/tensorflow-notebook.svg)](https://hub.docker.com/r/jupyter/tensorflow-notebook/)
[![image metadata](https://images.microbadger.com/badges/image/jupyter/tensorflow-notebook.svg)](https://microbadger.com/images/jupyter/tensorflow-notebook "jupyter/tensorflow-notebook image metadata")
# Jupyter Notebook Deep Learning Stack
GitHub Actions in the https://github.com/jupyter/docker-stacks project builds and pushes this image
GitHub Actions in the <https://github.com/jupyter/docker-stacks> project builds and pushes this image
to Docker Hub.
Please visit the project documentation site for help using and contributing to this image and

View File

@@ -1,45 +0,0 @@
#!/bin/bash
set -e
# Apply tags
GIT_SHA_TAG=${GITHUB_SHA:0:12}
docker tag $IMAGE_NAME "$DOCKER_REPO:$GIT_SHA_TAG"
# Update index
INDEX_ROW="|\`${BUILD_TIMESTAMP}\`|\`jupyter/${IMAGE_SHORT_NAME}:${GIT_SHA_TAG}\`|[Git diff](https://github.com/jupyter/docker-stacks/commit/${GITHUB_SHA})<br />[Dockerfile](https://github.com/jupyter/docker-stacks/blob/${GITHUB_SHA}/${IMAGE_SHORT_NAME}/Dockerfile)<br />[Build manifest](./${IMAGE_SHORT_NAME}-${GIT_SHA_TAG})|"
sed "/|-|/a ${INDEX_ROW}" -i "${WIKI_PATH}/Home.md"
# Build manifest
MANIFEST_FILE="${WIKI_PATH}/manifests/${IMAGE_SHORT_NAME}-${GIT_SHA_TAG}.md"
mkdir -p $(dirname "$MANIFEST_FILE")
cat << EOF > "$MANIFEST_FILE"
* Build datetime: ${BUILD_TIMESTAMP}
* Docker image: ${DOCKER_REPO}:${GIT_SHA_TAG}
* Docker image size: $(docker images ${IMAGE_NAME} --format "{{.Size}}")
* Git commit SHA: [${GITHUB_SHA}](https://github.com/jupyter/docker-stacks/commit/${GITHUB_SHA})
* Git commit message:
\`\`\`
${COMMIT_MSG}
\`\`\`
## Python Packages
\`\`\`
$(docker run --rm ${IMAGE_NAME} python --version)
\`\`\`
\`\`\`
$(docker run --rm ${IMAGE_NAME} conda info)
\`\`\`
\`\`\`
$(docker run --rm ${IMAGE_NAME} conda list)
\`\`\`
## Apt Packages
\`\`\`
$(docker run --rm ${IMAGE_NAME} apt list --installed)
\`\`\`
EOF