Mirror of https://github.com/jupyter/docker-stacks.git (synced 2025-10-18 15:32:56 +00:00)

Commit: Merge branch 'jupyter:master' into master
.github/actions/create-dev-env/action.yml (vendored, new file, 26 lines)
@@ -0,0 +1,26 @@
name: Build environment
description: Create build environment

inputs:
  platform:
    description: Platform to be run on
    required: true
    type: string

runs:
  using: composite
  steps:
    # actions/setup-python doesn't support Linux aarch64 runners
    # See: https://github.com/actions/setup-python/issues/108
    # python3 is manually preinstalled in the aarch64 VM self-hosted runner
    - name: Set Up Python 🐍
      uses: actions/setup-python@v4
      with:
        python-version: 3.x
      if: ${{ inputs.platform == 'amd64' }}

    - name: Install Dev Dependencies 📦
      run: |
        pip install --upgrade pip
        pip install --upgrade -r requirements-dev.txt
      shell: bash
.github/actions/download-manifests/action.yml (vendored, new file, 161 lines)
@@ -0,0 +1,161 @@
name: Download manifests
description: Download all manifests and history lines

# Unfortunately, `actions/download-artifact` doesn't support wildcard downloads
# To keep this workflow fast, we manually list every manifest and history-line download
# https://github.com/actions/download-artifact/issues/6

inputs:
  histLineDir:
    description: Directory to store history lines
    required: true
    type: string
  manifestDir:
    description: Directory to store manifest files
    required: true
    type: string

runs:
  using: composite
  steps:
    - name: Download artifact 📥
      uses: actions/download-artifact@v3
      with:
        name: base-notebook-aarch64-history_line
        path: ${{ inputs.histLineDir }}
    - name: Download artifact 📥
      uses: actions/download-artifact@v3
      with:
        name: base-notebook-amd64-history_line
        path: ${{ inputs.histLineDir }}
    - name: Download artifact 📥
      uses: actions/download-artifact@v3
      with:
        name: minimal-notebook-aarch64-history_line
        path: ${{ inputs.histLineDir }}
    - name: Download artifact 📥
      uses: actions/download-artifact@v3
      with:
        name: minimal-notebook-amd64-history_line
        path: ${{ inputs.histLineDir }}
    - name: Download artifact 📥
      uses: actions/download-artifact@v3
      with:
        name: scipy-notebook-aarch64-history_line
        path: ${{ inputs.histLineDir }}
    - name: Download artifact 📥
      uses: actions/download-artifact@v3
      with:
        name: scipy-notebook-amd64-history_line
        path: ${{ inputs.histLineDir }}
    - name: Download artifact 📥
      uses: actions/download-artifact@v3
      with:
        name: r-notebook-aarch64-history_line
        path: ${{ inputs.histLineDir }}
    - name: Download artifact 📥
      uses: actions/download-artifact@v3
      with:
        name: r-notebook-amd64-history_line
        path: ${{ inputs.histLineDir }}
    - name: Download artifact 📥
      uses: actions/download-artifact@v3
      with:
        name: tensorflow-notebook-amd64-history_line
        path: ${{ inputs.histLineDir }}
    - name: Download artifact 📥
      uses: actions/download-artifact@v3
      with:
        name: datascience-notebook-amd64-history_line
        path: ${{ inputs.histLineDir }}
    - name: Download artifact 📥
      uses: actions/download-artifact@v3
      with:
        name: pyspark-notebook-aarch64-history_line
        path: ${{ inputs.histLineDir }}
    - name: Download artifact 📥
      uses: actions/download-artifact@v3
      with:
        name: pyspark-notebook-amd64-history_line
        path: ${{ inputs.histLineDir }}
    - name: Download artifact 📥
      uses: actions/download-artifact@v3
      with:
        name: all-spark-notebook-aarch64-history_line
        path: ${{ inputs.histLineDir }}
    - name: Download artifact 📥
      uses: actions/download-artifact@v3
      with:
        name: all-spark-notebook-amd64-history_line
        path: ${{ inputs.histLineDir }}

    - name: Download artifact 📥
      uses: actions/download-artifact@v3
      with:
        name: base-notebook-aarch64-manifest
        path: ${{ inputs.manifestDir }}
    - name: Download artifact 📥
      uses: actions/download-artifact@v3
      with:
        name: base-notebook-amd64-manifest
        path: ${{ inputs.manifestDir }}
    - name: Download artifact 📥
      uses: actions/download-artifact@v3
      with:
        name: minimal-notebook-aarch64-manifest
        path: ${{ inputs.manifestDir }}
    - name: Download artifact 📥
      uses: actions/download-artifact@v3
      with:
        name: minimal-notebook-amd64-manifest
        path: ${{ inputs.manifestDir }}
    - name: Download artifact 📥
      uses: actions/download-artifact@v3
      with:
        name: scipy-notebook-aarch64-manifest
        path: ${{ inputs.manifestDir }}
    - name: Download artifact 📥
      uses: actions/download-artifact@v3
      with:
        name: scipy-notebook-amd64-manifest
        path: ${{ inputs.manifestDir }}
    - name: Download artifact 📥
      uses: actions/download-artifact@v3
      with:
        name: r-notebook-aarch64-manifest
        path: ${{ inputs.manifestDir }}
    - name: Download artifact 📥
      uses: actions/download-artifact@v3
      with:
        name: r-notebook-amd64-manifest
        path: ${{ inputs.manifestDir }}
    - name: Download artifact 📥
      uses: actions/download-artifact@v3
      with:
        name: tensorflow-notebook-amd64-manifest
        path: ${{ inputs.manifestDir }}
    - name: Download artifact 📥
      uses: actions/download-artifact@v3
      with:
        name: datascience-notebook-amd64-manifest
        path: ${{ inputs.manifestDir }}
    - name: Download artifact 📥
      uses: actions/download-artifact@v3
      with:
        name: pyspark-notebook-aarch64-manifest
        path: ${{ inputs.manifestDir }}
    - name: Download artifact 📥
      uses: actions/download-artifact@v3
      with:
        name: pyspark-notebook-amd64-manifest
        path: ${{ inputs.manifestDir }}
    - name: Download artifact 📥
      uses: actions/download-artifact@v3
      with:
        name: all-spark-notebook-aarch64-manifest
        path: ${{ inputs.manifestDir }}
    - name: Download artifact 📥
      uses: actions/download-artifact@v3
      with:
        name: all-spark-notebook-amd64-manifest
        path: ${{ inputs.manifestDir }}
.github/actions/load-image/action.yml (vendored, new file, 30 lines)
@@ -0,0 +1,30 @@
name: Load Docker image
description: Download an image tar and load it into Docker

inputs:
  image:
    description: Image name
    required: true
    type: string
  platform:
    description: Image platform
    required: true
    type: string

runs:
  using: composite
  steps:
    - name: Download built image 📥
      uses: actions/download-artifact@v3
      with:
        name: ${{ inputs.image }}-${{ inputs.platform }}
        path: /tmp/
    - name: Load downloaded image to docker 📥
      run: |
        docker load --input /tmp/${{ inputs.image }}-${{ inputs.platform }}.tar
        docker image ls -a
      shell: bash
    - name: Delete the file 🗑️
      run: rm -f /tmp/${{ inputs.image }}-${{ inputs.platform }}.tar
      shell: bash
      if: always()
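The tar and artifact names here form a small contract with docker-build-test-upload.yml below. For base-notebook on amd64, the two sides look roughly like this (a sketch using names taken from this commit):

    docker save jupyter/base-notebook -o /tmp/base-notebook-amd64.tar    # producer workflow
    docker load --input /tmp/base-notebook-amd64.tar                     # this action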
.github/workflows/docker-amd64.yml (vendored, deleted, 89 lines)
@@ -1,89 +0,0 @@
name: Build and test amd64 Docker Images
# This workflow runs a lot quicker than building multi-arch images in docker.yml
# It lets us find out far quicker whether things work or not,
# when we don't expect there to be any platform-specific breaking change.

on:
  pull_request:
    paths:
      - ".github/workflows/docker-amd64.yml"

      - "all-spark-notebook/**"
      - "base-notebook/**"
      - "datascience-notebook/**"
      - "minimal-notebook/**"
      - "pyspark-notebook/**"
      - "r-notebook/**"
      - "scipy-notebook/**"
      - "tensorflow-notebook/**"

      - "tagging/**"
      - "tests/**"
      - "Makefile"
      - "requirements-dev.txt"
  push:
    branches:
      - main
      - master
    paths:
      - ".github/workflows/docker-amd64.yml"

      - "all-spark-notebook/**"
      - "base-notebook/**"
      - "datascience-notebook/**"
      - "minimal-notebook/**"
      - "pyspark-notebook/**"
      - "r-notebook/**"
      - "scipy-notebook/**"
      - "tensorflow-notebook/**"

      - "tagging/**"
      - "tests/**"
      - "Makefile"
      - "requirements-dev.txt"
  workflow_dispatch:

concurrency:
  # only cancel in-progress jobs or runs for the current workflow - matches against branch & tags
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

permissions:
  contents: read

jobs:
  build-test-amd64-images:
    name: Build and test amd64 Docker Images
    runs-on: ubuntu-latest

    steps:
      - name: Checkout Repo ⚡️
        uses: actions/checkout@v3

      - name: Set Up Python 🐍
        uses: actions/setup-python@v4
        with:
          python-version: 3.x

      - name: Install Dev Dependencies 📦
        run: |
          pip install --upgrade pip
          pip install -r requirements-dev.txt

      - name: Build Docker Images 🛠
        run: make build-all
        env:
          # Full logs for CI build
          BUILDKIT_PROGRESS: plain

      - name: Run tests ✅
        run: make test-all

      - name: Checkout Wiki Repo 📃
        uses: actions/checkout@v3
        with:
          repository: ${{github.repository}}.wiki
          path: wiki/

      - name: Create tags and manifest 🏷
        run: make hook-all
.github/workflows/docker-build-test-upload.yml (vendored, new file, 74 lines)
@@ -0,0 +1,74 @@
name: Download parent image, build new one, test it and upload to GitHub artifacts

on:
  workflow_call:
    inputs:
      parentImage:
        description: Parent image name
        required: true
        type: string
      image:
        description: Image name
        required: true
        type: string
      platform:
        description: Image platform
        required: true
        type: string
      runsOn:
        description: GitHub Actions Runner image
        required: true
        type: string

jobs:
  build-test-upload:
    runs-on: ${{ inputs.runsOn }}
    steps:
      - name: Checkout Repo ⚡️
        uses: actions/checkout@v3
      - name: Create dev environment 📦
        uses: ./.github/actions/create-dev-env
        with:
          platform: ${{ inputs.platform }}

      # Self-hosted runners share state (the whole VM) between runs
      - name: Reset docker state 🗑️
        if: ${{ inputs.platform != 'amd64' }}
        run: docker system prune --all --force
        shell: bash

      - name: Load parent built image to Docker 📥
        if: ${{ inputs.parentImage != '' }}
        uses: ./.github/actions/load-image
        with:
          image: ${{ inputs.parentImage }}
          platform: ${{ inputs.platform }}

      - name: Build image 🛠
        run: docker build --rm --force-rm --tag jupyter/${{ inputs.image }} ${{ inputs.image }}/
        env:
          DOCKER_BUILDKIT: 1
          # Full logs for CI build
          BUILDKIT_PROGRESS: plain
        shell: bash

      - name: Run tests ✅
        run: python3 -m tests.run_tests --short-image-name ${{ inputs.image }}
        shell: bash

      - name: Save image as a tar for later use 💾
        run: docker save jupyter/${{ inputs.image }} -o /tmp/${{ inputs.image }}-${{ inputs.platform }}.tar
        shell: bash
      - name: Upload image as artifact 💾
        uses: actions/upload-artifact@v3
        with:
          name: ${{ inputs.image }}-${{ inputs.platform }}
          path: /tmp/${{ inputs.image }}-${{ inputs.platform }}.tar
          retention-days: 3

      # Self-hosted runners share state (the whole VM) between runs
      - name: Cleanup artifacts 🗑️
        run: |
          rm -f /tmp/${{ inputs.image }}-${{ inputs.platform }}.tar
        shell: bash
        if: always()
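For one image on one platform, the job above amounts to the following local sequence (a sketch assuming an amd64 machine and the repository root as the working directory; base-notebook is just an example):

    DOCKER_BUILDKIT=1 docker build --rm --force-rm --tag jupyter/base-notebook base-notebook/
    python3 -m tests.run_tests --short-image-name base-notebook
    docker save jupyter/base-notebook -o /tmp/base-notebook-amd64.tar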
.github/workflows/docker-tag-manifest-push.yml (vendored, new file, 92 lines)
@@ -0,0 +1,92 @@
name: Download Docker image from GitHub artifacts, tag and push it to DockerHub

on:
  workflow_call:
    inputs:
      images:
        description: Stringified JSON object listing image names
        required: true
        type: string
      platform:
        description: Image platform
        required: true
        type: string
      runsOn:
        description: GitHub Actions Runner image
        required: true
        type: string
    secrets:
      DOCKERHUB_USERNAME:
        required: true
      DOCKERHUB_TOKEN:
        required: true

jobs:
  tag-push:
    runs-on: ${{ inputs.runsOn }}

    strategy:
      matrix:
        image: ${{ fromJson(inputs.images) }}

    steps:
      - name: Checkout Repo ⚡️
        uses: actions/checkout@v3
      - name: Create dev environment 📦
        uses: ./.github/actions/create-dev-env
        with:
          platform: ${{ inputs.platform }}

      # Self-hosted runners share state (the whole VM) between runs
      - name: Reset docker state and cleanup artifacts 🗑️
        if: ${{ inputs.platform != 'amd64' }}
        run: |
          docker system prune --all --force
          rm -rf /tmp/hist_lines/
          rm -rf /tmp/manifests/
        shell: bash

      - name: Load image to Docker 📥
        uses: ./.github/actions/load-image
        with:
          image: ${{ matrix.image }}
          platform: ${{ inputs.platform }}

      - name: Create tags 🏷
        run: |
          python3 -m tagging.tag_image --short-image-name ${{ matrix.image }}
          docker image ls -a
        shell: bash

      - name: Write manifest and build history file 🏷
        run: python3 -m tagging.write_manifest --short-image-name ${{ matrix.image }} --hist-line-dir /tmp/hist_lines/ --manifest-dir /tmp/manifests/
        shell: bash
      - name: Upload manifest file 💾
        uses: actions/upload-artifact@v3
        with:
          name: ${{ matrix.image }}-${{ inputs.platform }}-manifest
          path: /tmp/manifests/${{ inputs.platform }}-${{ matrix.image }}-*.md
          retention-days: 3
      - name: Upload build history line 💾
        uses: actions/upload-artifact@v3
        with:
          name: ${{ matrix.image }}-${{ inputs.platform }}-history_line
          path: /tmp/hist_lines/${{ inputs.platform }}-${{ matrix.image }}-*.txt
          retention-days: 3

      - name: Remove aarch64 latest tag 🗑️
        if: ${{ inputs.platform != 'amd64' }}
        run: docker rmi jupyter/${{ matrix.image }}
        shell: bash

      - name: Login to Docker Hub 🔐
        if: github.ref == 'refs/heads/master' || github.ref == 'refs/heads/main' || github.event_name == 'schedule'
        uses: docker/login-action@49ed152c8eca782a232dede0303416e8f356c37b # dependabot updates to latest release
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Push Images to Docker Hub 📤
        if: github.ref == 'refs/heads/master' || github.ref == 'refs/heads/main' || github.event_name == 'schedule'
        run: docker push --all-tags jupyter/${{ matrix.image }}
        shell: bash
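Per image, the tagging half of this job reduces to two commands (a sketch; the /tmp paths match the upload steps above, and base-notebook is just an example):

    python3 -m tagging.tag_image --short-image-name base-notebook
    python3 -m tagging.write_manifest --short-image-name base-notebook --hist-line-dir /tmp/hist_lines/ --manifest-dir /tmp/manifests/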
.github/workflows/docker-wiki-update.yml (vendored, new file, 46 lines)
@@ -0,0 +1,46 @@
name: Download manifest artifacts from GitHub, tag and push to DockerHub
# We're doing everything in one workflow on purpose
# This way we make sure we don't access wiki pages from several jobs simultaneously

on:
  workflow_call:

jobs:
  tag-push:
    runs-on: ubuntu-latest

    steps:
      - name: Checkout Repo ⚡️
        uses: actions/checkout@v3
      - name: Create dev environment 📦
        uses: ./.github/actions/create-dev-env
        with:
          platform: amd64

      - name: Download all manifests and history lines 📥
        uses: ./.github/actions/download-manifests
        with:
          histLineDir: /tmp/hist_lines/
          manifestDir: /tmp/manifests/
      - name: Display structure of downloaded files 🔍️
        run: |
          ls -R /tmp/hist_lines/
          ls -R /tmp/manifests/
        shell: bash

      - name: Checkout Wiki Repo 📃
        uses: actions/checkout@v3
        with:
          repository: ${{ github.repository }}.wiki
          path: wiki/

      - name: Update wiki page 🏷
        run: python3 -m tagging.update_wiki_page --wiki-dir wiki/ --hist-line-dir /tmp/hist_lines/ --manifest-dir /tmp/manifests/
        shell: bash

      - name: Push Wiki to GitHub 📤
        if: github.ref == 'refs/heads/master' || github.ref == 'refs/heads/main' || github.event_name == 'schedule'
        uses: stefanzweifel/git-auto-commit-action@5804e42f86b1891093b151b6c4e78e759c746c4d # dependabot updates to latest release
        with:
          commit_message: "Automated wiki publish for ${{ github.sha }}"
          repository: wiki/
.github/workflows/docker.yml (vendored, 280 lines changed)
@@ -1,4 +1,4 @@
name: Build, test, and publish Docker Images
name: Build, test and push Docker Images

on:
  schedule:
@@ -7,6 +7,17 @@ on:
  pull_request:
    paths:
      - ".github/workflows/docker.yml"
      # We use local reusable workflows to keep the architecture clean and simple
      # https://docs.github.com/en/actions/using-workflows/reusing-workflows
      - ".github/workflows/docker-build-test-upload.yml"
      - ".github/workflows/docker-tag-manifest-push.yml"
      - ".github/workflows/docker-wiki-update.yml"

      # We use local composite actions to combine multiple workflow steps within one action
      # https://docs.github.com/en/actions/creating-actions/about-custom-actions#composite-actions
      - ".github/actions/create-dev-env/action.yml"
      - ".github/actions/download-manifests/action.yml"
      - ".github/actions/load-image/action.yml"

      - "all-spark-notebook/**"
      - "base-notebook/**"
@@ -19,7 +30,6 @@ on:

      - "tagging/**"
      - "tests/**"
      - "Makefile"
      - "requirements-dev.txt"
  push:
    branches:
@@ -27,6 +37,13 @@ on:
      - master
    paths:
      - ".github/workflows/docker.yml"
      - ".github/workflows/docker-build-test-upload.yml"
      - ".github/workflows/docker-tag-manifest-push.yml"
      - ".github/workflows/docker-wiki-update.yml"

      - ".github/actions/create-dev-env/action.yml"
      - ".github/actions/download-manifests/action.yml"
      - ".github/actions/load-image/action.yml"

      - "all-spark-notebook/**"
      - "base-notebook/**"
@@ -39,84 +56,203 @@ on:

      - "tagging/**"
      - "tests/**"
      - "Makefile"
      - "requirements-dev.txt"
  workflow_dispatch:

# https://docs.github.com/en/actions/using-jobs/using-concurrency
concurrency:
  # only cancel in-progress jobs or runs for the current workflow - matches against branch & tags
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

jobs:
  build-test-publish-images:
    name: Build, test, and publish Docker Images
    if: github.event_name != 'schedule' || (github.event_name == 'schedule' && github.repository == 'jupyter/docker-stacks')
    runs-on: ubuntu-latest
  aarch64-base:
    uses: ./.github/workflows/docker-build-test-upload.yml
    with:
      parentImage: ""
      image: base-notebook
      platform: aarch64
      runsOn: ARM64

  amd64-base:
    uses: ./.github/workflows/docker-build-test-upload.yml
    with:
      parentImage: ""
      image: base-notebook
      platform: amd64
      runsOn: ubuntu-latest

  aarch64-minimal:
    needs: [aarch64-base]
    uses: ./.github/workflows/docker-build-test-upload.yml
    with:
      parentImage: base-notebook
      image: minimal-notebook
      platform: aarch64
      runsOn: ARM64

  amd64-minimal:
    needs: [amd64-base]
    uses: ./.github/workflows/docker-build-test-upload.yml
    with:
      parentImage: base-notebook
      image: minimal-notebook
      platform: amd64
      runsOn: ubuntu-latest

  aarch64-scipy:
    needs: [aarch64-minimal]
    uses: ./.github/workflows/docker-build-test-upload.yml
    with:
      parentImage: minimal-notebook
      image: scipy-notebook
      platform: aarch64
      runsOn: ARM64

  amd64-scipy:
    needs: [amd64-minimal]
    uses: ./.github/workflows/docker-build-test-upload.yml
    with:
      parentImage: minimal-notebook
      image: scipy-notebook
      platform: amd64
      runsOn: ubuntu-latest

  aarch64-r:
    needs: [aarch64-minimal]
    uses: ./.github/workflows/docker-build-test-upload.yml
    with:
      parentImage: minimal-notebook
      image: r-notebook
      platform: aarch64
      runsOn: ARM64

  amd64-r:
    needs: [amd64-minimal]
    uses: ./.github/workflows/docker-build-test-upload.yml
    with:
      parentImage: minimal-notebook
      image: r-notebook
      platform: amd64
      runsOn: ubuntu-latest

  amd64-tensorflow:
    needs: [amd64-scipy]
    uses: ./.github/workflows/docker-build-test-upload.yml
    with:
      parentImage: scipy-notebook
      image: tensorflow-notebook
      platform: amd64
      runsOn: ubuntu-latest

  amd64-datascience:
    needs: [amd64-scipy]
    uses: ./.github/workflows/docker-build-test-upload.yml
    with:
      parentImage: scipy-notebook
      image: datascience-notebook
      platform: amd64
      runsOn: ubuntu-latest

  aarch64-pyspark:
    needs: [aarch64-scipy]
    uses: ./.github/workflows/docker-build-test-upload.yml
    with:
      parentImage: scipy-notebook
      image: pyspark-notebook
      platform: aarch64
      runsOn: ARM64

  amd64-pyspark:
    needs: [amd64-scipy]
    uses: ./.github/workflows/docker-build-test-upload.yml
    with:
      parentImage: scipy-notebook
      image: pyspark-notebook
      platform: amd64
      runsOn: ubuntu-latest

  aarch64-all-spark:
    needs: [aarch64-pyspark]
    uses: ./.github/workflows/docker-build-test-upload.yml
    with:
      parentImage: pyspark-notebook
      image: all-spark-notebook
      platform: aarch64
      runsOn: ARM64

  amd64-all-spark:
    needs: [amd64-pyspark]
    uses: ./.github/workflows/docker-build-test-upload.yml
    with:
      parentImage: pyspark-notebook
      image: all-spark-notebook
      platform: amd64
      runsOn: ubuntu-latest

  aarch64-images-tag-push:
    secrets:
      DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
      DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }}
    needs:
      [
        aarch64-base,
        aarch64-minimal,
        aarch64-scipy,
        aarch64-r,
        aarch64-pyspark,
        aarch64-all-spark,
      ]
    uses: ./.github/workflows/docker-tag-manifest-push.yml
    with:
      platform: aarch64
      runsOn: ARM64
      # https://docs.github.com/en/actions/using-workflows/reusing-workflows#limitations
      # The strategy property is not supported in any job that calls a reusable workflow
      # Using the workaround: https://github.community/t/reusable-workflow-with-strategy-matrix/205676/2
      images: >-
        [
          "base-notebook",
          "minimal-notebook",
          "scipy-notebook",
          "r-notebook",
          "pyspark-notebook",
          "all-spark-notebook"
        ]

  amd64-images-tag-push:
    secrets:
      DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
      DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }}
    needs:
      [
        amd64-base,
        amd64-minimal,
        amd64-scipy,
        amd64-r,
        amd64-tensorflow,
        amd64-datascience,
        amd64-pyspark,
        amd64-all-spark,
      ]
    uses: ./.github/workflows/docker-tag-manifest-push.yml
    with:
      platform: amd64
      runsOn: ubuntu-latest
      images: >-
        [
          "base-notebook",
          "minimal-notebook",
          "scipy-notebook",
          "r-notebook",
          "tensorflow-notebook",
          "datascience-notebook",
          "pyspark-notebook",
          "all-spark-notebook"
        ]

  wiki-update:
    permissions:
      contents: write

    steps:
      - name: Maximize build space 🛠
        run: |
          sudo rm -rf /usr/share/dotnet
          sudo rm -rf /usr/local/lib/android
          sudo rm -rf /opt/ghc

      # Setup docker to build for multiple platforms, see:
      # https://github.com/docker/build-push-action/tree/master#usage
      # https://github.com/docker/build-push-action/blob/master/docs/advanced/multi-platform.md
      - name: Set up QEMU (for docker buildx) 🐳
        uses: docker/setup-qemu-action@8b122486cedac8393e77aa9734c3528886e4a1a8 # dependabot updates to latest release

      - name: Set up Docker Buildx (for multi-arch builds) 🐳
        uses: docker/setup-buildx-action@dc7b9719a96d48369863986a06765841d7ea23f6 # dependabot updates to latest release

      - name: Checkout Repo ⚡️
        uses: actions/checkout@v3

      - name: Set Up Python 🐍
        uses: actions/setup-python@v4
        with:
          python-version: 3.x

      - name: Install Dev Dependencies 📦
        run: |
          pip install --upgrade pip
          pip install -r requirements-dev.txt

      - name: Build Docker Images 🛠
        run: make build-all-multi
        env:
          # Full logs for CI build
          BUILDKIT_PROGRESS: plain

      - name: Run tests ✅
        run: make test-all

      - name: Checkout Wiki Repo 📃
        uses: actions/checkout@v3
        with:
          repository: ${{ github.repository }}.wiki
          path: wiki/

      - name: Create tags and manifest 🏷
        run: make hook-all

      - name: Push Wiki to GitHub 📤
        if: github.ref == 'refs/heads/master' || github.ref == 'refs/heads/main' || github.event_name == 'schedule'
        uses: stefanzweifel/git-auto-commit-action@49620cd3ed21ee620a48530e81dba0d139c9cb80 # dependabot updates to latest release
        with:
          commit_message: "Automated wiki publish for ${{ github.sha }}"
          repository: wiki/

      - name: Login to Docker Hub 🔐
        if: github.ref == 'refs/heads/master' || github.ref == 'refs/heads/main' || github.event_name == 'schedule'
        uses: docker/login-action@49ed152c8eca782a232dede0303416e8f356c37b # dependabot updates to latest release
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Push Images to Docker Hub 📤
        if: github.ref == 'refs/heads/master' || github.ref == 'refs/heads/main' || github.event_name == 'schedule'
        run: make push-all-multi
    needs: [aarch64-images-tag-push, amd64-images-tag-push]
    uses: ./.github/workflows/docker-wiki-update.yml
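Read together, the `needs:` chains above encode the stacks' build hierarchy (amd64 side shown; the aarch64 side is identical minus the two amd64-only images):

    base-notebook
    └── minimal-notebook
        ├── scipy-notebook
        │   ├── tensorflow-notebook   (amd64 only)
        │   ├── datascience-notebook  (amd64 only)
        │   └── pyspark-notebook
        │       └── all-spark-notebook
        └── r-notebook

Each tag-push job then fans out over its platform's images, and wiki-update runs only after both tag-push jobs complete.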
.github/workflows/sphinx.yml (vendored, 2 lines changed)
@@ -45,7 +45,7 @@ jobs:
      - name: Install Doc Dependencies 📦
        run: |
          pip install --upgrade pip
          pip install -r requirements-docs.txt
          pip install --upgrade -r requirements-docs.txt

      - name: Build Documentation 📖
        run: make docs
Makefile (80 lines changed)
@@ -7,21 +7,6 @@ SHELL:=bash
OWNER?=jupyter

# Need to list the images in build dependency order

# Images supporting the following architectures:
# - linux/amd64
# - linux/arm64
MULTI_IMAGES:= \
	base-notebook \
	minimal-notebook \
	r-notebook \
	scipy-notebook \
	pyspark-notebook \
	all-spark-notebook
# Images that can only be built on the amd64 architecture (aka. x86_64)
AMD64_ONLY_IMAGES:= \
	datascience-notebook \
	tensorflow-notebook
# All of the images
ALL_IMAGES:= \
	base-notebook \
@@ -42,7 +27,7 @@ export DOCKER_BUILDKIT:=1
help:
	@echo "jupyter/docker-stacks"
	@echo "====================="
	@echo "Replace % with a stack directory name (e.g., make build-multi/minimal-notebook)"
	@echo "Replace % with a stack directory name (e.g., make build/minimal-notebook)"
	@echo
	@grep -E '^[a-zA-Z0-9_%/-]+:.*?## .*$$' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}'

@@ -57,60 +42,6 @@ build/%: ## build the latest image for a stack using the system's architecture
	@echo "::endgroup::"
build-all: $(foreach I, $(ALL_IMAGES), build/$(I)) ## build all stacks

# Limitations on docker buildx build (using docker/buildx 0.5.1):
#
# 1. Can't --load and --push at the same time
#
# 2. Can't --load multiple platforms
#
# What does it mean to --load?
#
# - It means that the built image can be referenced by the `docker` CLI, for example
#   when using the `docker tag` or `docker push` commands.
#
# Workarounds due to limitations:
#
# 1. We always build a dedicated image using the current system architecture,
#    named OWNER/<stack>-notebook, so we can always reference that image no
#    matter what during tests etc.
#
# 2. We always also build a multi-platform image during build-multi that will be
#    inaccessible with `docker tag` and `docker push` etc, but this helps us
#    test the build on the other platform and provides cached layers for
#    later.
#
# 3. We let push-multi refer to rebuilding a multi image with `--push`.
#
#    We can rely on the cached layers from build-multi now, even though we never
#    tagged the multi image.
#
# Outcomes of the workaround:
#
# 1. We can keep using the previously defined Makefile commands that don't
#    include the `-multi` suffix, as before.
#
# 2. Assuming we have set up docker/buildx properly to build for arm64
#    architectures as well, we can build and publish such images via the
#    `-multi` suffix without needing a local registry.
#
# 3. If we get dedicated arm64 runners, we can test everything separately
#    without needing to update this Makefile, and if all tests succeed we can
#    do a publish job that creates a multi-platform image for us.
#
build-multi/%: DOCKER_BUILD_ARGS?=
build-multi/%: ## build the latest image for a stack on both amd64 and arm64
	@echo "::group::Build $(OWNER)/$(notdir $@) (system's architecture)"
	docker buildx build $(DOCKER_BUILD_ARGS) -t $(OWNER)/$(notdir $@):latest ./$(notdir $@) --build-arg OWNER=$(OWNER) --load
	@echo -n "Built image size: "
	@docker images $(OWNER)/$(notdir $@):latest --format "{{.Size}}"
	@echo "::endgroup::"

	@echo "::group::Build $(OWNER)/$(notdir $@) (amd64,arm64)"
	docker buildx build $(DOCKER_BUILD_ARGS) -t build-multi-tmp-cache/$(notdir $@):latest ./$(notdir $@) --build-arg OWNER=$(OWNER) --platform "linux/amd64,linux/arm64"
	@echo "::endgroup::"
build-all-multi: $(foreach I, $(MULTI_IMAGES), build-multi/$(I)) $(foreach I, $(AMD64_ONLY_IMAGES), build/$(I)) ## build all stacks


check-outdated/%: ## check the outdated mamba/conda packages in a stack and produce a report (experimental)
	@TEST_IMAGE="$(OWNER)/$(notdir $@)" pytest tests/base-notebook/test_outdated.py
@@ -139,7 +70,7 @@ linkcheck-docs: ## check broken links
hook/%: WIKI_PATH?=wiki
hook/%: ## run post-build hooks for an image
	python3 -m tagging.tag_image --short-image-name "$(notdir $@)" --owner "$(OWNER)" && \
	python3 -m tagging.create_manifests --short-image-name "$(notdir $@)" --owner "$(OWNER)" --wiki-path "$(WIKI_PATH)"
	python3 -m tagging.write_manifest --short-image-name "$(notdir $@)" --owner "$(OWNER)" --wiki-path "$(WIKI_PATH)"
hook-all: $(foreach I, $(ALL_IMAGES), hook/$(I)) ## run post-build hooks for all images


@@ -176,13 +107,6 @@ push/%: ## push all tags for a jupyter image
	@echo "::endgroup::"
push-all: $(foreach I, $(ALL_IMAGES), push/$(I)) ## push all tagged images

push-multi/%: DOCKER_BUILD_ARGS?=
push-multi/%: ## push all tags for a jupyter image that supports multiple architectures
	@echo "::group::Push $(OWNER)/$(notdir $@) (amd64,arm64)"
	docker buildx build $(DOCKER_BUILD_ARGS) $($(subst -,_,$(notdir $@))_EXTRA_TAG_ARGS) -t $(OWNER)/$(notdir $@):latest ./$(notdir $@) --build-arg OWNER=$(OWNER) --platform "linux/amd64,linux/arm64" --push
	@echo "::endgroup::"
push-all-multi: $(foreach I, $(MULTI_IMAGES), push-multi/$(I)) $(foreach I, $(AMD64_ONLY_IMAGES), push/$(I)) ## push all tagged images


run-shell/%: ## run bash in interactive mode in a stack
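As a sketch of how the `-multi` targets compose locally (assuming docker buildx and QEMU are configured for arm64 emulation):

    make build-multi/base-notebook   # --load a native-arch image, plus a cached amd64+arm64 build
    make test/base-notebook          # tests run against the loaded native-arch image
    make push-multi/base-notebook    # rebuild amd64+arm64 from cache and --push all tags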
README.md (14 lines changed)
@@ -115,14 +115,6 @@ This change is tracked in the issue [#1217](https://github.com/jupyter/docker-st
## CPU Architectures

All published containers support amd64 (x86_64) and aarch64, except for `datascience-notebook` and `tensorflow-notebook`, which only support amd64 for now.

### Caveats for arm64 images

- The manifests we publish in this project's wiki, as well as the image tags for
  the multi-platform images that also support arm, are all based on the amd64
  version, even though details about the installed package versions could differ
  between architectures. For the status of this, see
  [#1401](https://github.com/jupyter/docker-stacks/issues/1401).
- Only the amd64 images are actively tested currently. For the status of
  this, see [#1402](https://github.com/jupyter/docker-stacks/issues/1402).
- We publish containers for both `amd64` (`x86_64`) and `aarch64` platforms, except for `datascience-notebook` and `tensorflow-notebook`, which only support `amd64` for now
- We do not create multi-platform images
- Instead, all `arm64` images have an _aarch64-_ tag prefix, for example `jupyter/base-notebook:aarch64-python-3.10.5`
aarch64-runner/setup.sh (new executable file, 27 lines)
@@ -0,0 +1,27 @@
#!/bin/bash
set -ex

GITHUB_RUNNER_USER="runner-user"

if [ "$EUID" -ne 0 ]; then
    echo "Please run as root"
    exit 1
fi

apt-get update --yes
apt-get upgrade --yes

echo "Setting up runner-user, who will run GitHub Actions runner"
adduser --disabled-password --gecos "" ${GITHUB_RUNNER_USER}
mkdir /home/${GITHUB_RUNNER_USER}/.ssh/
cp /home/ubuntu/.ssh/authorized_keys /home/${GITHUB_RUNNER_USER}/.ssh/authorized_keys
chown ${GITHUB_RUNNER_USER}:${GITHUB_RUNNER_USER} /home/${GITHUB_RUNNER_USER}/.ssh/authorized_keys

echo "Setting up python3"
apt-get install --yes --no-install-recommends python3
curl -sS https://bootstrap.pypa.io/get-pip.py | python3

echo "Setting up docker"
apt-get install --yes --no-install-recommends docker.io
usermod -aG docker ${GITHUB_RUNNER_USER}
chmod 666 /var/run/docker.sock
@@ -27,13 +27,7 @@ RUN apt-get update --yes && \
USER ${NB_UID}

# R packages including IRKernel which gets installed globally.
RUN arch=$(uname -m) && \
    if [ "${arch}" == "aarch64" ]; then \
        # Prevent libmamba from sporadically hanging on arm64 under QEMU
        # <https://github.com/mamba-org/mamba/issues/1611>
        export G_SLICE=always-malloc; \
    fi && \
    mamba install --quiet --yes \
RUN mamba install --quiet --yes \
    'r-base' \
    'r-ggplot2' \
    'r-irkernel' \
@@ -116,12 +116,6 @@ RUN set -x && \
    rm /tmp/micromamba.tar.bz2 && \
    PYTHON_SPECIFIER="python=${PYTHON_VERSION}" && \
    if [[ "${PYTHON_VERSION}" == "default" ]]; then PYTHON_SPECIFIER="python"; fi && \
    if [ "${arch}" == "aarch64" ]; then \
        # Prevent libmamba from sporadically hanging on arm64 under QEMU
        # <https://github.com/mamba-org/mamba/issues/1611>
        # We don't use `micromamba config set` since it instead modifies ~/.condarc.
        echo "extract_threads: 1" >> "${CONDA_DIR}/.condarc"; \
    fi && \
    # Install the packages
    ./micromamba install \
        --root-prefix="${CONDA_DIR}" \
@@ -61,13 +61,7 @@ USER ${NB_UID}

# R packages including IRKernel which gets installed globally.
# r-e1071: dependency of the caret R package
RUN arch=$(uname -m) && \
    if [ "${arch}" == "aarch64" ]; then \
        # Prevent libmamba from sporadically hanging on arm64 under QEMU
        # <https://github.com/mamba-org/mamba/issues/1611>
        export G_SLICE=always-malloc; \
    fi && \
    mamba install --quiet --yes \
RUN mamba install --quiet --yes \
    'r-base' \
    'r-caret' \
    'r-crayon' \
@@ -91,6 +91,7 @@ myst_heading_anchors = 3
linkcheck_ignore = [
    r".*github\.com.*#",  # javascript based anchors
    r"https://github\.com/jupyter/docker-stacks/settings/actions/runners/new\?arch=arm64&os=linux",  # only works for users with permissions to change runners
    r"https://docs.github\.com/.*",  # 403 error
    r"http://127\.0\.0\.1:49153/.*",  # example
    r"https://mybinder\.org/v2/gh/.*",  # lots of 500 errors
@@ -5,11 +5,7 @@ We greatly appreciate pull requests that extend the automated tests that vet the
## How the Tests Work

A [GitHub Action workflow](https://github.com/jupyter/docker-stacks/blob/master/.github/workflows/docker.yml)
runs the following commands against pull requests submitted to the `jupyter/docker-stacks` repository:

1. `make build-all-multi` - which builds all the Docker images
2. `make test-all` - which tests the newly created Docker images
This `make` command builds and then tests every Docker image.
runs tests against pull requests submitted to the `jupyter/docker-stacks` repository.

We use the `pytest` module to run tests on the image.
`conftest.py` and `pytest.ini` in the `tests` folder define the environment in which tests are run.
@@ -40,8 +36,8 @@ Please follow the process below to add new tests:
   If you use `make`, call:

   ```bash
   make build/somestack-notebook
   make test/somestack-notebook
   make build/<somestack>-notebook
   make test/<somestack>-notebook
   ```

3. [Submit a pull request](https://github.com/PointCloudLibrary/pcl/wiki/A-step-by-step-guide-on-preparing-and-submitting-a-pull-request)
@@ -32,6 +32,7 @@ Table of Contents
   :caption: Maintainer Guide

   maintaining/tasks
   maintaining/aarch64-runner

.. toctree::
   :maxdepth: 2
docs/maintaining/aarch64-runner.md (new file, 25 lines)
@@ -0,0 +1,25 @@
# Self-hosted runners

For building `aarch64` images, we use VMs provided by [Oracle OCI](https://www.oracle.com/cloud/).
Currently, there are 2 self-hosted GitHub runners with _2 OCPUs_ and _12 GB_ of RAM each.

To set up a new runner:

1. Create a compute instance `VM.Standard.A1.Flex` with _2 OCPUs_ and _12 GB_ of RAM using the `Ubuntu 22.04` image.
2. Run under `root`:

   ```bash
   /bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/jupyter/docker-stacks/HEAD/aarch64-runner/setup.sh)"
   ```

   This will perform the initial runner setup and create a user `runner-user` without `sudo` capabilities.

3. Set up a new GitHub runner under `runner-user` using the [GitHub Instructions](https://github.com/jupyter/docker-stacks/settings/actions/runners/new?arch=arm64&os=linux).
   Do not run `./run.sh` yet.
4. Run under `root`:

   ```bash
   cd /home/runner-user/actions-runner/ && ./svc.sh install runner-user
   ```

5. Reboot the VM to apply all updates and start the GitHub runner.
@@ -56,13 +56,7 @@ RUN fix-permissions "/etc/ipython/"
USER ${NB_UID}

# Install pyarrow
RUN arch=$(uname -m) && \
    if [ "${arch}" == "aarch64" ]; then \
        # Prevent libmamba from sporadically hanging on arm64 under QEMU
        # <https://github.com/mamba-org/mamba/issues/1611>
        export G_SLICE=always-malloc; \
    fi && \
    mamba install --quiet --yes \
RUN mamba install --quiet --yes \
    'pyarrow' && \
    mamba clean --all -f -y && \
    fix-permissions "${CONDA_DIR}" && \
@@ -27,13 +27,7 @@ USER ${NB_UID}

# R packages including IRKernel which gets installed globally.
# r-e1071: dependency of the caret R package
RUN arch=$(uname -m) && \
    if [ "${arch}" == "aarch64" ]; then \
        # Prevent libmamba from sporadically hanging on arm64 under QEMU
        # <https://github.com/mamba-org/mamba/issues/1611>
        export G_SLICE=always-malloc; \
    fi && \
    mamba install --quiet --yes \
RUN mamba install --quiet --yes \
    'r-base' \
    'r-caret' \
    'r-crayon' \
@@ -26,13 +26,7 @@ RUN apt-get update --yes && \
USER ${NB_UID}

# Install Python 3 packages
RUN arch=$(uname -m) && \
    if [ "${arch}" == "aarch64" ]; then \
        # Prevent libmamba from sporadically hanging on arm64 under QEMU
        # <https://github.com/mamba-org/mamba/issues/1611>
        export G_SLICE=always-malloc; \
    fi && \
    mamba install --quiet --yes \
RUN mamba install --quiet --yes \
    'altair' \
    'beautifulsoup4' \
    'bokeh' \
@@ -113,7 +113,7 @@ class AptPackagesManifest(ManifestInterface):
- `quoted_output` simply runs the command inside the container using `DockerRunner.run_simple_command` and wraps the result in triple quotes to produce a valid piece of markdown.
- `manifests.py` contains all the manifests.
- `create_manifests.py` is a python executable which is used to create the build manifest for an image.
- `write_manifest.py` is a python executable which is used to create the build manifest and history line for an image.

### Images Hierarchy

tagging/get_tags_prefix.py (new file, 8 lines)
@@ -0,0 +1,8 @@
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
import platform


def get_tags_prefix() -> str:
    machine = platform.machine()
    return "" if machine == "x86_64" else f"{machine}-"
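A quick check of the helper's behavior (hypothetical local invocation from the repository root):

    python3 -c 'from tagging.get_tags_prefix import get_tags_prefix; print(repr(get_tags_prefix()))'
    # prints '' on an x86_64 machine and 'aarch64-' on an aarch64 one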
@@ -1,11 +0,0 @@
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
import os


def github_set_env(env_name: str, env_value: str) -> None:
    if not os.environ.get("GITHUB_ACTIONS") or not os.environ.get("GITHUB_ENV"):
        return

    with open(os.environ["GITHUB_ENV"], "a") as f:
        f.write(f"{env_name}={env_value}\n")
@@ -8,7 +8,7 @@ import plumbum
from tagging.docker_runner import DockerRunner
from tagging.get_taggers_and_manifests import get_taggers_and_manifests
from tagging.github_set_env import github_set_env
from tagging.get_tags_prefix import get_tags_prefix

docker = plumbum.local["docker"]

@@ -19,32 +19,26 @@ def tag_image(short_image_name: str, owner: str) -> None:
    """
    Tags <owner>/<short_image_name>:latest with the tags reported by all taggers
    for the given image.

    In a GitHub Actions environment, tags are also saved to environment variables
    in a format that makes them easy to append.
    """
    LOGGER.info(f"Tagging image: {short_image_name}")
    taggers, _ = get_taggers_and_manifests(short_image_name)

    image = f"{owner}/{short_image_name}:latest"
    tags_prefix = get_tags_prefix()

    with DockerRunner(image) as container:
        tags = []
        for tagger in taggers:
            tagger_name = tagger.__class__.__name__
            tag_value = tagger.tag_value(container)
            tags.append(tag_value)
            LOGGER.info(
                f"Applying tag, tagger_name: {tagger_name} tag_value: {tag_value}"
            )
            docker["tag", image, f"{owner}/{short_image_name}:{tag_value}"]()

        if tags:
            env_name = f'{short_image_name.replace("-", "_")}_EXTRA_TAG_ARGS'
            docker_build_tag_args = " ".join(
                [f"-t {owner}/{short_image_name}:{tag}" for tag in tags]
            )
            github_set_env(env_name, docker_build_tag_args)
            docker[
                "tag", image, f"{owner}/{short_image_name}:{tags_prefix}{tag_value}"
            ]()
    if tags_prefix != "":
        LOGGER.info(f"Adding {tags_prefix}latest tag")
        docker["tag", image, f"{owner}/{short_image_name}:{tags_prefix}latest"]()


if __name__ == "__main__":
@@ -56,7 +50,7 @@ if __name__ == "__main__":
        required=True,
        help="Short image name to apply tags for",
    )
    arg_parser.add_argument("--owner", required=True, help="Owner of the image")
    arg_parser.add_argument("--owner", default="jupyter", help="Owner of the image")
    args = arg_parser.parse_args()

    tag_image(args.short_image_name, args.owner)
tagging/update_wiki_page.py (new executable file, 57 lines)
@@ -0,0 +1,57 @@
#!/usr/bin/env python3
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
import argparse
import logging
import shutil
from pathlib import Path

LOGGER = logging.getLogger(__name__)
TABLE_BEGINNING = "|-|-|-|\n"


def update_wiki_page(wiki_dir: Path, hist_line_dir: Path, manifest_dir: Path) -> None:
    LOGGER.info("Updating wiki page")

    wiki_home_file = wiki_dir / "Home.md"
    wiki_home_content = wiki_home_file.read_text()
    build_history_line_files = sorted(hist_line_dir.rglob("*.txt"))
    build_history_lines = "\n".join(
        hist_line_file.read_text() for hist_line_file in build_history_line_files
    )
    wiki_home_content = wiki_home_content.replace(
        TABLE_BEGINNING, TABLE_BEGINNING + build_history_lines + "\n"
    )
    wiki_home_file.write_text(wiki_home_content)
    LOGGER.info("Wiki home file updated")

    for manifest_file in sorted(manifest_dir.rglob("*.md")):
        shutil.copy(manifest_file, wiki_dir / "manifests" / manifest_file.name)
        LOGGER.info(f"Manifest file added: {manifest_file.name}")


if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)

    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument(
        "--wiki-dir",
        required=True,
        type=Path,
        help="Directory for wiki repo",
    )
    arg_parser.add_argument(
        "--hist-line-dir",
        required=True,
        type=Path,
        help="Directory to save history line",
    )
    arg_parser.add_argument(
        "--manifest-dir",
        required=True,
        type=Path,
        help="Directory to save manifest file",
    )
    args = arg_parser.parse_args()

    update_wiki_page(args.wiki_dir, args.hist_line_dir, args.manifest_dir)
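Note that new history lines are inserted immediately below the `|-|-|-|` separator of the wiki's Home.md table, so the most recent builds surface at the top. For reference, docker-wiki-update.yml above drives this script as:

    python3 -m tagging.update_wiki_page --wiki-dir wiki/ --hist-line-dir /tmp/hist_lines/ --manifest-dir /tmp/manifests/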
@@ -4,26 +4,29 @@
import argparse
import datetime
import logging
import os
import platform
from pathlib import Path

from docker.models.containers import Container

from tagging.docker_runner import DockerRunner
from tagging.get_taggers_and_manifests import get_taggers_and_manifests
from tagging.get_tags_prefix import get_tags_prefix
from tagging.git_helper import GitHelper
from tagging.manifests import ManifestHeader, ManifestInterface

LOGGER = logging.getLogger(__name__)


# This is actually the manifest creation timestamp
BUILD_TIMESTAMP = datetime.datetime.utcnow().isoformat()[:-7] + "Z"
MARKDOWN_LINE_BREAK = "<br />"


def append_build_history_line(
def write_build_history_line(
    short_image_name: str,
    owner: str,
    wiki_path: str,
    hist_line_dir: Path,
    filename: str,
    all_tags: list[str],
) -> None:
    LOGGER.info("Appending build history line")
@@ -33,60 +36,67 @@ def append_build_history_line(
        f"`{owner}/{short_image_name}:{tag_value}`" for tag_value in all_tags
    )
    commit_hash = GitHelper.commit_hash()
    commit_hash_tag = GitHelper.commit_hash_tag()
    links_column = MARKDOWN_LINE_BREAK.join(
        [
            f"[Git diff](https://github.com/jupyter/docker-stacks/commit/{commit_hash})",
            f"[Dockerfile](https://github.com/jupyter/docker-stacks/blob/{commit_hash}/{short_image_name}/Dockerfile)",
            f"[Build manifest](./{short_image_name}-{commit_hash_tag})",
            f"[Build manifest](./{filename})",
        ]
    )
    build_history_line = "|".join([date_column, image_column, links_column]) + "|"

    home_wiki_file = os.path.join(wiki_path, "Home.md")
    with open(home_wiki_file) as f:
        file = f.read()
    TABLE_BEGINNING = "|-|-|-|\n"
    file = file.replace(TABLE_BEGINNING, TABLE_BEGINNING + build_history_line + "\n")
    with open(home_wiki_file, "w") as f:
        f.write(file)
    hist_line_dir.mkdir(parents=True, exist_ok=True)
    (hist_line_dir / f"{filename}.txt").write_text(build_history_line)


def create_manifest_file(
def write_manifest_file(
    short_image_name: str,
    owner: str,
    wiki_path: str,
    manifest_dir: Path,
    filename: str,
    manifests: list[ManifestInterface],
    container: Container,
) -> None:
    manifest_names = [manifest.__class__.__name__ for manifest in manifests]
    LOGGER.info(f"Using manifests: {manifest_names}")

    commit_hash_tag = GitHelper.commit_hash_tag()
    manifest_file = os.path.join(
        wiki_path,
        f"manifests/{short_image_name}-{commit_hash_tag}.md",
    )

    markdown_pieces = [
        ManifestHeader.create_header(short_image_name, owner, BUILD_TIMESTAMP)
    ] + [manifest.markdown_piece(container) for manifest in manifests]
    markdown_content = "\n\n".join(markdown_pieces) + "\n"

    with open(manifest_file, "w") as f:
        f.write(markdown_content)
    manifest_dir.mkdir(parents=True, exist_ok=True)
    (manifest_dir / f"{filename}.md").write_text(markdown_content)


def create_manifests(short_image_name: str, owner: str, wiki_path: str) -> None:
def get_file_prefix() -> str:
    machine = platform.machine()
    return "amd64" if machine == "x86_64" else "aarch64"


def write_manifest(
    short_image_name: str,
    owner: str,
    hist_line_dir: Path,
    manifest_dir: Path,
) -> None:
    LOGGER.info(f"Creating manifests for image: {short_image_name}")
    taggers, manifests = get_taggers_and_manifests(short_image_name)

    image = f"{owner}/{short_image_name}:latest"

    file_prefix = get_file_prefix()
    commit_hash_tag = GitHelper.commit_hash_tag()
    filename = f"{file_prefix}-{short_image_name}-{commit_hash_tag}"

    with DockerRunner(image) as container:
        all_tags = [tagger.tag_value(container) for tagger in taggers]
        append_build_history_line(short_image_name, owner, wiki_path, all_tags)
        create_manifest_file(short_image_name, owner, wiki_path, manifests, container)
        tags_prefix = get_tags_prefix()
        all_tags = [tags_prefix + tagger.tag_value(container) for tagger in taggers]
        write_build_history_line(
            short_image_name, owner, hist_line_dir, filename, all_tags
        )
        write_manifest_file(
            short_image_name, owner, manifest_dir, filename, manifests, container
        )


if __name__ == "__main__":
@@ -96,12 +106,25 @@ if __name__ == "__main__":
    arg_parser.add_argument(
        "--short-image-name",
        required=True,
        help="Short image name to apply tags for",
        help="Short image name to create manifests for",
    )
    arg_parser.add_argument("--owner", required=True, help="Owner of the image")
    arg_parser.add_argument("--wiki-path", required=True, help="Path to the wiki pages")
    arg_parser.add_argument(
        "--hist-line-dir",
        required=True,
        type=Path,
        help="Directory to save history line",
    )
    arg_parser.add_argument(
        "--manifest-dir",
        required=True,
        type=Path,
        help="Directory to save manifest file",
    )
    arg_parser.add_argument("--owner", default="jupyter", help="Owner of the image")
    args = arg_parser.parse_args()

    LOGGER.info(f"Current build timestamp: {BUILD_TIMESTAMP}")

    create_manifests(args.short_image_name, args.owner, args.wiki_path)
    write_manifest(
        args.short_image_name, args.owner, args.hist_line_dir, args.manifest_dir
    )
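With get_file_prefix() and the filename pattern above, a run for base-notebook on an amd64 machine writes files such as (the commit-hash tag here is hypothetical):

    /tmp/hist_lines/amd64-base-notebook-0123abcd4567.txt
    /tmp/manifests/amd64-base-notebook-0123abcd4567.md

which is exactly the `<platform>-<image>-*` wildcard that the upload steps in docker-tag-manifest-push.yml expect.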
@@ -11,13 +11,7 @@ LABEL maintainer="Jupyter Project <jupyter@googlegroups.com>"
SHELL ["/bin/bash", "-o", "pipefail", "-c"]

# Install Tensorflow
RUN arch=$(uname -m) && \
    if [ "${arch}" == "aarch64" ]; then \
        # Prevent libmamba from sporadically hanging on arm64 under QEMU
        # <https://github.com/mamba-org/mamba/issues/1611>
        export G_SLICE=always-malloc; \
    fi && \
    mamba install --quiet --yes \
RUN mamba install --quiet --yes \
    'tensorflow' && \
    mamba clean --all -f -y && \
    fix-permissions "${CONDA_DIR}" && \
@@ -72,6 +72,7 @@ EXCLUDED_PACKAGES = [
    "protobuf",
    "python",
    "r-irkernel",
    "r-sparklyr",  # TODO(asalikhov): remove this line when updated to spark 3.3
    "unixodbc",
]

@@ -8,7 +8,7 @@ import plumbum
from tests.images_hierarchy import get_test_dirs

pytest = plumbum.local["pytest"]
python3 = plumbum.local["python3"]

LOGGER = logging.getLogger(__name__)

@@ -19,7 +19,9 @@ def test_image(short_image_name: str, owner: str) -> None:
    LOGGER.info(f"Test dirs to be run: {test_dirs}")
    with plumbum.local.env(TEST_IMAGE=f"{owner}/{short_image_name}"):
        (
            pytest[
            python3[
                "-m",
                "pytest",
                "--numprocesses",
                "auto",
                "-m",
@@ -39,7 +41,7 @@ if __name__ == "__main__":
        required=True,
        help="Short image name to run test on",
    )
    arg_parser.add_argument("--owner", required=True, help="Owner of the image")
    arg_parser.add_argument("--owner", default="jupyter", help="Owner of the image")

    args = arg_parser.parse_args()
