Merge branch 'master' into asalikhov/fix_markdown

Author: Ayaz Salikhov
Date:   2021-05-06 19:59:49 +03:00
15 changed files with 728 additions and 745 deletions


@@ -42,9 +42,7 @@ jobs:
       - name: Install Dev Dependencies
         run: |
           python -m pip install --upgrade pip
-          make -C main dev-env hadolint-install
-      - name: Lint Dockerfiles
-        run: make -C main hadolint-all
+          make -C main dev-env
       - name: Run pre-commit hooks
         run: make -C main pre-commit-all
       - name: Build Docker Images


@@ -5,6 +5,13 @@ repos:
     hooks:
       - id: check-yaml
         files: .*\.(yaml|yml)$
+  - repo: https://github.com/hadolint/hadolint.git
+    rev: v2.3.0
+    hooks:
+      - id: hadolint-docker
+        # FIXME: remove after https://github.com/hadolint/hadolint/issues/628 is resolved
+        entry: hadolint/hadolint:v2.3.0 hadolint
+        exclude: Dockerfile.ppc64le|Dockerfile.ppc64le.patch
   - repo: https://github.com/adrienverge/yamllint.git
     rev: v1.26.1
     hooks:
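With this hook in place, hadolint runs through pre-commit rather than a locally installed binary. A minimal sketch of invoking just this hook, assuming pre-commit is installed and the Docker daemon is running (the hook pulls the `hadolint/hadolint` image):

```sh
# Run only the hadolint hook against every file in the repository
pre-commit run hadolint-docker --all-files
```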


@@ -23,10 +23,6 @@ endif
 ALL_IMAGES:=$(ALL_STACKS)
 
-# Dockerfile Linter
-HADOLINT="${HOME}/hadolint"
-HADOLINT_VERSION="v2.1.0"
-
 # Enable BuildKit for Docker build
 export DOCKER_BUILDKIT:=1
@@ -119,23 +115,6 @@ img-rm-dang: ## remove dangling images (tagged None)
 	@echo "Removing dangling images ..."
 	-docker rmi --force $(shell docker images -f "dangling=true" -q) 2> /dev/null
 
-hadolint/%: ARGS?=
-hadolint/%: ## lint the dockerfile(s) for a stack
-	@echo "Linting Dockerfiles in $(notdir $@)..."
-	@git ls-files --exclude='Dockerfile*' --ignored $(notdir $@) | grep -v ppc64 | xargs -L 1 $(HADOLINT) $(ARGS)
-	@echo "Linting done!"
-
-hadolint-all: $(foreach I,$(ALL_IMAGES),hadolint/$(I) ) ## lint all stacks
-
-hadolint-build-test-all: $(foreach I,$(ALL_IMAGES),hadolint/$(I) arch_patch/$(I) build/$(I) test/$(I) ) ## lint, build and test all stacks
-
-hadolint-install: ## install hadolint
-	@echo "Installing hadolint at $(HADOLINT) ..."
-	@curl -sL -o $(HADOLINT) "https://github.com/hadolint/hadolint/releases/download/$(HADOLINT_VERSION)/hadolint-$(shell uname -s)-$(shell uname -m)"
-	@chmod 700 $(HADOLINT)
-	@echo "Installation done!"
-	@$(HADOLINT) --version
-
 pre-commit-all: ## run pre-commit hook on all files
 	@pre-commit run --all-files


@@ -4,7 +4,7 @@
 # Ubuntu 20.04 (focal)
 # https://hub.docker.com/_/ubuntu/?tab=tags&name=focal
 # OS/ARCH: linux/amd64
-ARG ROOT_CONTAINER=ubuntu:focal-20210401@sha256:5403064f94b617f7975a19ba4d1a1299fd584397f6ee4393d0e16744ed11aab1
+ARG ROOT_CONTAINER=ubuntu:focal-20210416@sha256:86ac87f73641c920fb42cc9612d4fb57b5626b56ea2a19b894d0673fd5b4f2e9
 
 ARG BASE_CONTAINER=$ROOT_CONTAINER
 FROM $BASE_CONTAINER
@@ -44,17 +44,16 @@ ARG miniforge_checksum="c56cc2da96043688c6bdb521d825de27754de0a342d5228ba3155cd9
 # Install all OS dependencies for notebook server that starts but lacks all
 # features (e.g., download as all possible file formats)
 ENV DEBIAN_FRONTEND noninteractive
-RUN apt-get -q update \
- && apt-get install -yq --no-install-recommends \
+RUN apt-get -q update && \
+    apt-get install -yq --no-install-recommends \
     wget \
     ca-certificates \
     sudo \
     locales \
     fonts-liberation \
-    run-one \
- && apt-get clean && rm -rf /var/lib/apt/lists/*
-
-RUN echo "en_US.UTF-8 UTF-8" > /etc/locale.gen && \
+    run-one && \
+    apt-get clean && rm -rf /var/lib/apt/lists/* && \
+    echo "en_US.UTF-8 UTF-8" > /etc/locale.gen && \
     locale-gen
 
 # Configure environment
@@ -133,7 +132,7 @@ RUN wget --quiet "https://github.com/conda-forge/miniforge/releases/download/${m
 # files across image layers when the permissions change
 RUN conda install --quiet --yes \
     'notebook=6.3.0' \
-    'jupyterhub=1.3.0' \
+    'jupyterhub=1.4.0' \
     'jupyterlab=3.0.14' && \
     conda clean --all -f -y && \
     npm cache clean --force && \
@@ -159,9 +158,8 @@ USER root
 # Prepare upgrade to JupyterLab V3.0 #1205
 RUN sed -re "s/c.NotebookApp/c.ServerApp/g" \
-    /etc/jupyter/jupyter_notebook_config.py > /etc/jupyter/jupyter_server_config.py
-
-RUN fix-permissions /etc/jupyter/
+    /etc/jupyter/jupyter_notebook_config.py > /etc/jupyter/jupyter_server_config.py && \
+    fix-permissions /etc/jupyter/
 
 # Switch back to jovyan to avoid accidental container runs as root
 USER $NB_UID


@@ -0,0 +1,19 @@
+# Copyright (c) Jupyter Development Team.
+# Distributed under the terms of the Modified BSD License.
+import logging
+
+from packaging import version
+
+LOGGER = logging.getLogger(__name__)
+
+
+def test_python_version(container, python_next_version="3.9"):
+    """Check that python version is lower than the next version"""
+    LOGGER.info(f"Checking that python version is lower than {python_next_version}")
+    c = container.run(tty=True, command=["start.sh"])
+    cmd = c.exec_run("python --version")
+    output = cmd.output.decode("utf-8")
+    actual_python_version = version.parse(output.split()[1])
+    assert actual_python_version < version.parse(
+        python_next_version
+    ), f"Python version shall be lower than {python_next_version}"


@@ -8,6 +8,7 @@ FROM $BASE_CONTAINER
 LABEL maintainer="Jupyter Project <jupyter@googlegroups.com>"
 
 ENV TAG="aec555e49be6"
+WORKDIR $HOME
 
 COPY binder/README.ipynb .
 # Fix permissions on README.ipynb as root


@@ -39,8 +39,8 @@ RUN mkdir "/opt/julia-${JULIA_VERSION}" && \
     wget -q https://julialang-s3.julialang.org/bin/linux/x64/$(echo "${JULIA_VERSION}" | cut -d. -f 1,2)"/julia-${JULIA_VERSION}-linux-x86_64.tar.gz" && \
     echo "${julia_checksum} *julia-${JULIA_VERSION}-linux-x86_64.tar.gz" | sha256sum -c - && \
     tar xzf "julia-${JULIA_VERSION}-linux-x86_64.tar.gz" -C "/opt/julia-${JULIA_VERSION}" --strip-components=1 && \
-    rm "/tmp/julia-${JULIA_VERSION}-linux-x86_64.tar.gz"
-RUN ln -fs /opt/julia-*/bin/julia /usr/local/bin/julia
+    rm "/tmp/julia-${JULIA_VERSION}-linux-x86_64.tar.gz" && \
+    ln -fs /opt/julia-*/bin/julia /usr/local/bin/julia
 
 # Show Julia where conda libraries are \
 RUN mkdir /etc/julia && \


@@ -29,6 +29,8 @@ make pre-commit-install
 Now pre-commit (and so configured hooks) will run automatically on `git commit` on each changed file.
 However it is also possible to trigger it against all files.
 
+- Note: Hadolint pre-commit uses docker to run, so docker should be running while running this command.
+
 ```sh
 make pre-commit-all
 ```
@@ -37,57 +39,10 @@ make pre-commit-all
 To comply with [Docker best practices][dbp], we are using the [Hadolint][hadolint] tool to analyse each `Dockerfile` .
 
-### Hadolint installation
-
-There is a specific `make` target to install the linter.
-By default `hadolint` will be installed in `${HOME}/hadolint`.
-
-```bash
-$ make hadolint-install
-# Installing hadolint at /Users/romain/hadolint ...
-# Installation done!
-# Haskell Dockerfile Linter v1.17.6-0-gc918759
-```
-
-### Linting
-
-#### Per Stack
-
-The linter can be run per stack.
-
-```bash
-$ make hadolint/scipy-notebook
-# Linting Dockerfiles in scipy-notebook...
-# scipy-notebook/Dockerfile:4 DL3006 Always tag the version of an image explicitly
-# scipy-notebook/Dockerfile:11 DL3008 Pin versions in apt get install. Instead of `apt-get install <package>` use `apt-get install <package>=<version>`
-# scipy-notebook/Dockerfile:18 SC2086 Double quote to prevent globbing and word splitting.
-# scipy-notebook/Dockerfile:68 SC2086 Double quote to prevent globbing and word splitting.
-# scipy-notebook/Dockerfile:68 DL3003 Use WORKDIR to switch to a directory
-# scipy-notebook/Dockerfile:79 SC2086 Double quote to prevent globbing and word splitting.
-# make: *** [lint/scipy-notebook] Error 1
-```
-
-Optionally you can pass arguments to the hadolint.
-
-```bash
-# Use a different export format
-$ make hadolint/scipy-notebook ARGS="--format codeclimate"
-```
-
-#### All the Stacks
-
-The linter can be run against all the stacks.
-
-```bash
-make hadolint-all
-```
-
 ### Ignoring Rules
 
 Sometimes it is necessary to ignore [some rules][rules].
-The following rules are ignored by default and sor for all images in the `.hadolint.yaml` file.
+The following rules are ignored by default for all images in the `.hadolint.yaml` file.
 
 - [`DL3006`][DL3006]: We use a specific policy to manage image tags.
   - `base-notebook` `FROM` clause is fixed but based on an argument (`ARG`).
@@ -99,7 +54,6 @@ For other rules, the preferred way to do it is to flag ignored rules in the `Dockerfile`.
 > It is also possible to ignore rules by using a special comment directly above the Dockerfile instruction you want to make an exception for. Ignore rule comments look like `# hadolint ignore=DL3001,SC1081`. For example:
 
 ```dockerfile
 FROM ubuntu
 # hadolint ignore=DL3003,SC1035

File diff suppressed because it is too large

File diff suppressed because it is too large


@@ -12,7 +12,7 @@ This page provides details about features specific to one or more images.
 You can build a `pyspark-notebook` image (and also the downstream `all-spark-notebook` image) with a different version of Spark by overriding the default value of the following arguments at build time.
 
-* Spark distribution is defined by the combination of the Spark and the Hadoop version and verified by the package checksum, see [Download Apache Spark](https://spark.apache.org/downloads.html) for more information. At this time the build will only work with the set of versions available on the Apache Spark download page, so it will not work with the archived versions.
+* Spark distribution is defined by the combination of the Spark and the Hadoop version and verified by the package checksum, see [Download Apache Spark](https://spark.apache.org/downloads.html) and the [archive repo](https://archive.apache.org/dist/spark/) for more information.
   * `spark_version`: The Spark version to install (`3.0.0`).
   * `hadoop_version`: The Hadoop version (`3.2`).
   * `spark_checksum`: The package checksum (`BFE4540...`).
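The new column above documents three build arguments; a hedged sketch of overriding them follows (the image tag, version numbers, and checksum are illustrative placeholders, not verified values):

```sh
# Build pyspark-notebook against a different Spark/Hadoop combination.
# Versions and checksum are placeholders -- take real values from the
# Apache Spark download/archive pages.
docker build ./pyspark-notebook \
    -t pyspark-notebook:spark-custom \
    --build-arg spark_version=3.1.1 \
    --build-arg hadoop_version=3.2 \
    --build-arg spark_checksum=<sha512 of the tarball>
```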


@@ -29,10 +29,7 @@ RUN apt-get -y update && \
 # Spark installation
 WORKDIR /tmp
-# Using the preferred mirror to download Spark
-# hadolint ignore=SC2046
-RUN wget -q $(wget -qO- https://www.apache.org/dyn/closer.lua/spark/spark-${APACHE_SPARK_VERSION}/spark-${APACHE_SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz\?as_json | \
-    python -c "import sys, json; content=json.load(sys.stdin); print(content['preferred']+content['path_info'])") && \
+RUN wget -q "https://archive.apache.org/dist/spark/spark-${APACHE_SPARK_VERSION}/spark-${APACHE_SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz" && \
     echo "${spark_checksum} *spark-${APACHE_SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz" | sha512sum -c - && \
     tar xzf "spark-${APACHE_SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz" -C /usr/local --owner root --group root --no-same-owner && \
     rm "spark-${APACHE_SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz"
@@ -59,7 +56,7 @@ USER $NB_UID
 # Install pyarrow
 RUN conda install --quiet --yes --satisfied-skip-solve \
-    'pyarrow=3.0.*' && \
+    'pyarrow=4.0.*' && \
     conda clean --all -f -y && \
     fix-permissions "${CONDA_DIR}" && \
     fix-permissions "/home/${NB_USER}"


@@ -24,7 +24,7 @@ RUN conda install --quiet --yes \
     'cython=0.29.*' \
     'dask=2021.4.*' \
     'dill=0.3.*' \
-    'h5py=3.1.*' \
+    'h5py=3.2.*' \
     'ipywidgets=7.6.*' \
     'ipympl=0.7.*'\
     'matplotlib-base=3.4.*' \
@@ -40,7 +40,7 @@ RUN conda install --quiet --yes \
     'seaborn=0.11.*' \
     'sqlalchemy=1.4.*' \
     'statsmodels=0.12.*' \
-    'sympy=1.7.*' \
+    'sympy=1.8.*' \
     'vincent=0.4.*' \
     'widgetsnbextension=3.5.*'\
     'xlrd=2.0.*' && \


@@ -30,7 +30,7 @@ def append_build_history_line(short_image_name: str, owner: str, wiki_path: str,
     commit_hash_tag = GitHelper.commit_hash_tag()
     links_column = MARKDOWN_LINE_BREAK.join([
         f"[Git diff](https://github.com/jupyter/docker-stacks/commit/{commit_hash})",
-        f"[Dockerfile](https://github.com/jupyter/docker-stacks/blob/{commit_hash}/{short_image_name}/Dockerfile)"
+        f"[Dockerfile](https://github.com/jupyter/docker-stacks/blob/{commit_hash}/{short_image_name}/Dockerfile)",
         f"[Build manifest](./{short_image_name}-{commit_hash_tag})"
     ])
     build_history_line = "|".join([date_column, image_column, links_column]) + "|"
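The trailing comma added above is the actual fix: without it, Python implicitly concatenates the two adjacent f-string literals, so the Dockerfile and build-manifest links were fused into one list element and never separated by MARKDOWN_LINE_BREAK. A minimal demonstration of the pitfall:

```sh
# Adjacent string literals concatenate implicitly in Python;
# a missing comma silently merges two list elements into one.
python3 -c 'print(["a" "b", "c"])'
# prints: ['ab', 'c']
```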


@@ -7,8 +7,7 @@ LABEL maintainer="Jupyter Project <jupyter@googlegroups.com>"
 # Install Tensorflow
 RUN mamba install --quiet --yes \
-    'tensorflow=2.4.1' \
-    && \
+    'tensorflow=2.4.1' && \
     conda clean --all -f -y && \
     fix-permissions "${CONDA_DIR}" && \
     fix-permissions "/home/${NB_USER}"