Merge branch 'master' into asalikhov/automatic_conda_versioning

Author: Ayaz Salikhov
Date: 2021-06-29 02:28:53 +03:00
39 changed files with 504 additions and 391 deletions


@@ -1,5 +1,4 @@
 [flake8]
-ignore = W605,W503,W504,H306,H238,H301,H202
-max-line-length = 120
-per-file-ignores =
-    test/test_packages.py:E501
+max-line-length = 88
+select = C,E,F,W,B,B950
+extend-ignore = E203, E501

.github/dependabot.yml (new file)

@@ -0,0 +1,11 @@
# To get started with Dependabot version updates, you'll need to specify which
# package ecosystems to update and where the package manifests are located.
# Please see the documentation for all configuration options:
# https://help.github.com/github/administering-a-repository/configuration-options-for-dependency-updates
version: 2
updates:
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "daily"


@@ -59,14 +59,13 @@ jobs:
         run: make -C main hook-all
       - name: Push Wiki to GitHub
         if: github.ref == 'refs/heads/master' || github.ref == 'refs/heads/main'
-        # Pass GITHUB_REPOSITORY directly to avoid conflict with GitHub Actions built-in env var
-        run: make -C main git-commit GITHUB_REPOSITORY='${{github.repository}}.wiki'
-        env:
-          GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}}
-          LOCAL_PATH: ../wiki
+        uses: stefanzweifel/git-auto-commit-action@5dd17c3b53a58c1cb5eaab903826abe94765ccd6 # dependabot updates to latest release
+        with:
+          commit_message: "[ci skip] Automated publish for ${{github.sha}}"
+          repository: wiki/
       - name: Login to Docker Hub
         if: github.ref == 'refs/heads/master' || github.ref == 'refs/heads/main'
-        uses: docker/login-action@v1
+        uses: docker/login-action@f054a8b539a109f9f41c372932f1ae047eff08c9 # dependabot updates to latest release
         with:
           username: ${{secrets.DOCKERHUB_USERNAME}}
           password: ${{secrets.DOCKERHUB_TOKEN}}


@@ -42,8 +42,6 @@ jobs:
         sphinx-intl update -p ./_build/gettext -l en
       - name: Push Strings to Master
         if: github.ref == 'refs/heads/master' || github.ref == 'refs/heads/main'
-        run: make git-commit
-        env:
-          GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}}
-          GITHUB_REPOSITORY: ${{github.repository}}
-          LOCAL_PATH: ./docs/locale/en
+        uses: stefanzweifel/git-auto-commit-action@5dd17c3b53a58c1cb5eaab903826abe94765ccd6 # dependabot updates to latest release
+        with:
+          commit_message: "[ci skip] Automated publish for ${{github.sha}}"


@@ -3,47 +3,58 @@ ci:
   skip: [hadolint-docker]
 repos:
-  - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v4.0.1
+  # Autoformat: Python code
+  - repo: https://github.com/ambv/black
+    rev: 21.6b0
     hooks:
-      - id: check-yaml
-        files: .*\.(yaml|yml)$
+      - id: black
+        args: [--target-version=py39]
+  # Autoformat: YAML, JSON, Markdown, etc.
+  - repo: https://github.com/pre-commit/mirrors-prettier
+    rev: v2.3.2
+    hooks:
+      - id: prettier
+  # Lint: Dockerfile
   - repo: https://github.com/hadolint/hadolint.git
     rev: v2.5.0
     hooks:
       - id: hadolint-docker
         # FIXME: remove after https://github.com/hadolint/hadolint/issues/628 is resolved
         entry: hadolint/hadolint:v2.5.0 hadolint
+  # Lint: YAML
   - repo: https://github.com/adrienverge/yamllint.git
     rev: v1.26.1
     hooks:
       - id: yamllint
         args: ["-d {extends: relaxed, rules: {line-length: disable}}", "-s"]
         files: \.(yaml|yml)$
+  # Lint: Bash scripts
   - repo: https://github.com/openstack-dev/bashate.git
     rev: 2.0.0
     hooks:
       - id: bashate
         args: ["--ignore=E006"]
+  # Lint: Shell scripts
   - repo: https://github.com/shellcheck-py/shellcheck-py
     rev: v0.7.2.1
     hooks:
       - id: shellcheck
         args: ["-x"]
+  # Lint: Python
   - repo: https://github.com/PyCQA/flake8
     rev: 3.9.2
     hooks:
       - id: flake8
-  - repo: https://github.com/pre-commit/mirrors-autopep8
-    rev: v1.5.7
-    hooks:
-      - id: autopep8
+  # Lint: Markdown
   - repo: https://github.com/igorshubovych/markdownlint-cli
     rev: v0.27.1
     hooks:
       - id: markdownlint
         args: ["--fix"]
-  - repo: https://github.com/pre-commit/mirrors-prettier
-    rev: v2.3.1
-    hooks:
-      - id: prettier


@@ -62,22 +62,6 @@ dev-env: ## install libraries required to build docs and run tests
 docs: ## build HTML documentation
 	sphinx-build docs/ docs/_build/
 
-git-commit: LOCAL_PATH?=.
-git-commit: GITHUB_SHA?=$(shell git rev-parse HEAD)
-git-commit: GITHUB_REPOSITORY?=jupyter/docker-stacks
-git-commit: GITHUB_TOKEN?=
-git-commit: ## commit outstading git changes and push to remote
-	@git config --global user.name "GitHub Actions"
-	@git config --global user.email "actions@users.noreply.github.com"
-	@echo "Publishing outstanding changes in $(LOCAL_PATH) to $(GITHUB_REPOSITORY)"
-	@cd $(LOCAL_PATH) && \
-		git remote add publisher https://$(GITHUB_TOKEN)@github.com/$(GITHUB_REPOSITORY).git && \
-		git checkout master && \
-		git add -A -- . && \
-		git commit -m "[ci skip] Automated publish for $(GITHUB_SHA)" || exit 0
-	@cd $(LOCAL_PATH) && git push -u publisher master
-
 hook/%: WIKI_PATH?=../wiki
 hook/%: ## run post-build hooks for an image
 	python3 -m tagging.tag_image --short-image-name "$(notdir $@)" --owner "$(OWNER)" && \


@@ -22,10 +22,12 @@ def test_nbconvert(container, test_file):
     output_dir = "/tmp"
     timeout_ms = 600
     LOGGER.info(f"Test that {test_file} notebook can be executed ...")
-    command = "jupyter nbconvert --to markdown " + \
-              f"--ExecutePreprocessor.timeout={timeout_ms} " + \
-              f"--output-dir {output_dir} " + \
-              f"--execute {cont_data_dir}/{test_file}.ipynb"
+    command = (
+        "jupyter nbconvert --to markdown "
+        + f"--ExecutePreprocessor.timeout={timeout_ms} "
+        + f"--output-dir {output_dir} "
+        + f"--execute {cont_data_dir}/{test_file}.ipynb"
+    )
     c = container.run(
         volumes={host_data_dir: {"bind": cont_data_dir, "mode": "ro"}},
         tty=True,


@@ -8,7 +8,7 @@ import errno
import stat import stat
c = get_config() # noqa: F821 c = get_config() # noqa: F821
c.NotebookApp.ip = '0.0.0.0' c.NotebookApp.ip = "0.0.0.0"
c.NotebookApp.port = 8888 c.NotebookApp.port = 8888
c.NotebookApp.open_browser = False c.NotebookApp.open_browser = False
@@ -16,9 +16,9 @@ c.NotebookApp.open_browser = False
c.FileContentsManager.delete_to_trash = False c.FileContentsManager.delete_to_trash = False
# Generate a self-signed certificate # Generate a self-signed certificate
if 'GEN_CERT' in os.environ: if "GEN_CERT" in os.environ:
dir_name = jupyter_data_dir() dir_name = jupyter_data_dir()
pem_file = os.path.join(dir_name, 'notebook.pem') pem_file = os.path.join(dir_name, "notebook.pem")
try: try:
os.makedirs(dir_name) os.makedirs(dir_name)
except OSError as exc: # Python >2.5 except OSError as exc: # Python >2.5
@@ -28,28 +28,37 @@ if 'GEN_CERT' in os.environ:
raise raise
# Generate an openssl.cnf file to set the distinguished name # Generate an openssl.cnf file to set the distinguished name
cnf_file = os.path.join(os.getenv('CONDA_DIR', '/usr/lib'), 'ssl', 'openssl.cnf') cnf_file = os.path.join(os.getenv("CONDA_DIR", "/usr/lib"), "ssl", "openssl.cnf")
if not os.path.isfile(cnf_file): if not os.path.isfile(cnf_file):
with open(cnf_file, 'w') as fh: with open(cnf_file, "w") as fh:
fh.write('''\ fh.write(
"""\
[req] [req]
distinguished_name = req_distinguished_name distinguished_name = req_distinguished_name
[req_distinguished_name] [req_distinguished_name]
''') """
)
# Generate a certificate if one doesn't exist on disk # Generate a certificate if one doesn't exist on disk
subprocess.check_call(['openssl', 'req', '-new', subprocess.check_call(
'-newkey', 'rsa:2048', [
'-days', '365', "openssl",
'-nodes', '-x509', "req",
'-subj', '/C=XX/ST=XX/L=XX/O=generated/CN=generated', "-new",
'-keyout', pem_file, "-newkey=rsa:2048",
'-out', pem_file]) "-days=365",
"-nodes",
"-x509",
"-subj=/C=XX/ST=XX/L=XX/O=generated/CN=generated",
f"-keyout={pem_file}",
f"-out={pem_file}",
]
)
# Restrict access to the file # Restrict access to the file
os.chmod(pem_file, stat.S_IRUSR | stat.S_IWUSR) os.chmod(pem_file, stat.S_IRUSR | stat.S_IWUSR)
c.NotebookApp.certfile = pem_file c.NotebookApp.certfile = pem_file
# Change default umask for all subprocesses of the notebook server if set in # Change default umask for all subprocesses of the notebook server if set in
# the environment # the environment
if 'NB_UMASK' in os.environ: if "NB_UMASK" in os.environ:
os.umask(int(os.environ['NB_UMASK'], 8)) os.umask(int(os.environ["NB_UMASK"], 8))


@@ -11,59 +11,55 @@ LOGGER = logging.getLogger(__name__)
def test_cli_args(container, http_client): def test_cli_args(container, http_client):
"""Container should respect notebook server command line args """Container should respect notebook server command line args
(e.g., disabling token security)""" (e.g., disabling token security)"""
c = container.run( c = container.run(command=["start-notebook.sh", "--NotebookApp.token=''"])
command=["start-notebook.sh", "--NotebookApp.token=''"] resp = http_client.get("http://localhost:8888")
)
resp = http_client.get('http://localhost:8888')
resp.raise_for_status() resp.raise_for_status()
logs = c.logs(stdout=True).decode('utf-8') logs = c.logs(stdout=True).decode("utf-8")
LOGGER.debug(logs) LOGGER.debug(logs)
assert 'login_submit' not in resp.text assert "login_submit" not in resp.text
@pytest.mark.filterwarnings('ignore:Unverified HTTPS request') @pytest.mark.filterwarnings("ignore:Unverified HTTPS request")
def test_unsigned_ssl(container, http_client): def test_unsigned_ssl(container, http_client):
"""Container should generate a self-signed SSL certificate """Container should generate a self-signed SSL certificate
and notebook server should use it to enable HTTPS. and notebook server should use it to enable HTTPS.
""" """
container.run( container.run(environment=["GEN_CERT=yes"])
environment=['GEN_CERT=yes']
)
# NOTE: The requests.Session backing the http_client fixture does not retry # NOTE: The requests.Session backing the http_client fixture does not retry
# properly while the server is booting up. An SSL handshake error seems to # properly while the server is booting up. An SSL handshake error seems to
# abort the retry logic. Forcing a long sleep for the moment until I have # abort the retry logic. Forcing a long sleep for the moment until I have
# time to dig more. # time to dig more.
time.sleep(5) time.sleep(5)
resp = http_client.get('https://localhost:8888', verify=False) resp = http_client.get("https://localhost:8888", verify=False)
resp.raise_for_status() resp.raise_for_status()
assert 'login_submit' in resp.text assert "login_submit" in resp.text
def test_uid_change(container): def test_uid_change(container):
"""Container should change the UID of the default user.""" """Container should change the UID of the default user."""
c = container.run( c = container.run(
tty=True, tty=True,
user='root', user="root",
environment=['NB_UID=1010'], environment=["NB_UID=1010"],
command=['start.sh', 'bash', '-c', 'id && touch /opt/conda/test-file'] command=["start.sh", "bash", "-c", "id && touch /opt/conda/test-file"],
) )
# usermod is slow so give it some time # usermod is slow so give it some time
c.wait(timeout=120) c.wait(timeout=120)
assert 'uid=1010(jovyan)' in c.logs(stdout=True).decode('utf-8') assert "uid=1010(jovyan)" in c.logs(stdout=True).decode("utf-8")
def test_gid_change(container): def test_gid_change(container):
"""Container should change the GID of the default user.""" """Container should change the GID of the default user."""
c = container.run( c = container.run(
tty=True, tty=True,
user='root', user="root",
environment=['NB_GID=110'], environment=["NB_GID=110"],
command=['start.sh', 'id'] command=["start.sh", "id"],
) )
c.wait(timeout=10) c.wait(timeout=10)
logs = c.logs(stdout=True).decode('utf-8') logs = c.logs(stdout=True).decode("utf-8")
assert 'gid=110(jovyan)' in logs assert "gid=110(jovyan)" in logs
assert 'groups=110(jovyan),100(users)' in logs assert "groups=110(jovyan),100(users)" in logs
def test_nb_user_change(container): def test_nb_user_change(container):
@@ -72,11 +68,8 @@ def test_nb_user_change(container):
running_container = container.run( running_container = container.run(
tty=True, tty=True,
user="root", user="root",
environment=[ environment=[f"NB_USER={nb_user}", "CHOWN_HOME=yes"],
f"NB_USER={nb_user}", command=["start.sh", "bash", "-c", "sleep infinity"],
"CHOWN_HOME=yes"
],
command=['start.sh', 'bash', '-c', 'sleep infinity']
) )
# Give the chown time to complete. Use sleep, not wait, because the # Give the chown time to complete. Use sleep, not wait, because the
@@ -98,25 +91,27 @@ def test_nb_user_change(container):
expected_output = f"{nb_user} users" expected_output = f"{nb_user} users"
cmd = running_container.exec_run(command, workdir=f"/home/{nb_user}") cmd = running_container.exec_run(command, workdir=f"/home/{nb_user}")
output = cmd.output.decode("utf-8").strip("\n") output = cmd.output.decode("utf-8").strip("\n")
assert output == expected_output, f"Bad owner for the {nb_user} home folder {output}, expected {expected_output}" assert (
output == expected_output
), f"Bad owner for the {nb_user} home folder {output}, expected {expected_output}"
def test_chown_extra(container): def test_chown_extra(container):
"""Container should change the UID/GID of CHOWN_EXTRA.""" """Container should change the UID/GID of CHOWN_EXTRA."""
c = container.run( c = container.run(
tty=True, tty=True,
user='root', user="root",
environment=[ environment=[
'NB_UID=1010', "NB_UID=1010",
'NB_GID=101', "NB_GID=101",
'CHOWN_EXTRA=/opt/conda', "CHOWN_EXTRA=/opt/conda",
'CHOWN_EXTRA_OPTS=-R' "CHOWN_EXTRA_OPTS=-R",
], ],
command=['start.sh', 'bash', '-c', 'stat -c \'%n:%u:%g\' /opt/conda/LICENSE.txt'] command=["start.sh", "bash", "-c", "stat -c '%n:%u:%g' /opt/conda/LICENSE.txt"],
) )
# chown is slow so give it some time # chown is slow so give it some time
c.wait(timeout=120) c.wait(timeout=120)
assert '/opt/conda/LICENSE.txt:1010:101' in c.logs(stdout=True).decode('utf-8') assert "/opt/conda/LICENSE.txt:1010:101" in c.logs(stdout=True).decode("utf-8")
def test_chown_home(container): def test_chown_home(container):
@@ -124,53 +119,59 @@ def test_chown_home(container):
group to the current value of NB_UID and NB_GID.""" group to the current value of NB_UID and NB_GID."""
c = container.run( c = container.run(
tty=True, tty=True,
user='root', user="root",
environment=[ environment=["CHOWN_HOME=yes", "CHOWN_HOME_OPTS=-R"],
'CHOWN_HOME=yes', command=[
'CHOWN_HOME_OPTS=-R' "start.sh",
"bash",
"-c",
"chown root:root /home/jovyan && ls -alsh /home",
], ],
command=['start.sh', 'bash', '-c', 'chown root:root /home/jovyan && ls -alsh /home']
) )
c.wait(timeout=120) c.wait(timeout=120)
assert "Changing ownership of /home/jovyan to 1000:100 with options '-R'" in c.logs(stdout=True).decode('utf-8') assert "Changing ownership of /home/jovyan to 1000:100 with options '-R'" in c.logs(
stdout=True
).decode("utf-8")
def test_sudo(container): def test_sudo(container):
"""Container should grant passwordless sudo to the default user.""" """Container should grant passwordless sudo to the default user."""
c = container.run( c = container.run(
tty=True, tty=True,
user='root', user="root",
environment=['GRANT_SUDO=yes'], environment=["GRANT_SUDO=yes"],
command=['start.sh', 'sudo', 'id'] command=["start.sh", "sudo", "id"],
) )
rv = c.wait(timeout=10) rv = c.wait(timeout=10)
assert rv == 0 or rv["StatusCode"] == 0 assert rv == 0 or rv["StatusCode"] == 0
assert 'uid=0(root)' in c.logs(stdout=True).decode('utf-8') assert "uid=0(root)" in c.logs(stdout=True).decode("utf-8")
def test_sudo_path(container): def test_sudo_path(container):
"""Container should include /opt/conda/bin in the sudo secure_path.""" """Container should include /opt/conda/bin in the sudo secure_path."""
c = container.run( c = container.run(
tty=True, tty=True,
user='root', user="root",
environment=['GRANT_SUDO=yes'], environment=["GRANT_SUDO=yes"],
command=['start.sh', 'sudo', 'which', 'jupyter'] command=["start.sh", "sudo", "which", "jupyter"],
) )
rv = c.wait(timeout=10) rv = c.wait(timeout=10)
assert rv == 0 or rv["StatusCode"] == 0 assert rv == 0 or rv["StatusCode"] == 0
assert c.logs(stdout=True).decode('utf-8').rstrip().endswith('/opt/conda/bin/jupyter') logs = c.logs(stdout=True).decode("utf-8")
assert logs.rstrip().endswith("/opt/conda/bin/jupyter")
def test_sudo_path_without_grant(container): def test_sudo_path_without_grant(container):
"""Container should include /opt/conda/bin in the sudo secure_path.""" """Container should include /opt/conda/bin in the sudo secure_path."""
c = container.run( c = container.run(
tty=True, tty=True,
user='root', user="root",
command=['start.sh', 'which', 'jupyter'] command=["start.sh", "which", "jupyter"],
) )
rv = c.wait(timeout=10) rv = c.wait(timeout=10)
assert rv == 0 or rv["StatusCode"] == 0 assert rv == 0 or rv["StatusCode"] == 0
assert c.logs(stdout=True).decode('utf-8').rstrip().endswith('/opt/conda/bin/jupyter') logs = c.logs(stdout=True).decode("utf-8")
assert logs.rstrip().endswith("/opt/conda/bin/jupyter")
def test_group_add(container, tmpdir): def test_group_add(container, tmpdir):
@@ -178,10 +179,11 @@ def test_group_add(container, tmpdir):
group. group.
""" """
c = container.run( c = container.run(
user='1010:1010', user="1010:1010",
group_add=['users'], group_add=["users"],
command=['start.sh', 'id'] command=["start.sh", "id"],
) )
rv = c.wait(timeout=5) rv = c.wait(timeout=5)
assert rv == 0 or rv["StatusCode"] == 0 assert rv == 0 or rv["StatusCode"] == 0
assert 'uid=1010 gid=1010 groups=1010,100(users)' in c.logs(stdout=True).decode('utf-8') logs = c.logs(stdout=True).decode("utf-8")
assert "uid=1010 gid=1010 groups=1010,100(users)" in logs


@@ -24,7 +24,7 @@ def test_package_manager(container, package_manager, version_arg):
     )
     c = container.run(
         tty=True,
-        command=["start.sh", "bash", "-c", f"{package_manager} {version_arg}"]
+        command=["start.sh", "bash", "-c", f"{package_manager} {version_arg}"],
     )
     rv = c.wait(timeout=5)
     logs = c.logs(stdout=True).decode("utf-8")


@@ -10,7 +10,7 @@ def test_pandoc(container):
"""Pandoc shall be able to convert MD to HTML.""" """Pandoc shall be able to convert MD to HTML."""
c = container.run( c = container.run(
tty=True, tty=True,
command=["start.sh", "bash", "-c", 'echo "**BOLD**" | pandoc'] command=["start.sh", "bash", "-c", 'echo "**BOLD**" | pandoc'],
) )
c.wait(timeout=10) c.wait(timeout=10)
logs = c.logs(stdout=True).decode("utf-8") logs = c.logs(stdout=True).decode("utf-8")


@@ -10,7 +10,10 @@ LOGGER = logging.getLogger(__name__)
 def test_python_version(container, python_next_version="3.10"):
     """Check that python version is lower than the next version"""
     LOGGER.info(f"Checking that python version is lower than {python_next_version}")
-    c = container.run(tty=True, command=["start.sh"])
+    c = container.run(
+        tty=True,
+        command=["start.sh"],
+    )
     cmd = c.exec_run("python --version")
     output = cmd.output.decode("utf-8")
     actual_python_version = version.parse(output.split()[1])


@@ -19,7 +19,11 @@ def test_start_notebook(container, http_client, env, expected_server):
     LOGGER.info(
         f"Test that the start-notebook launches the {expected_server} server from the env {env} ..."
     )
-    c = container.run(tty=True, environment=env, command=["start-notebook.sh"])
+    c = container.run(
+        tty=True,
+        environment=env,
+        command=["start-notebook.sh"],
+    )
     resp = http_client.get("http://localhost:8888")
     logs = c.logs(stdout=True).decode("utf-8")
     LOGGER.debug(logs)
@@ -40,7 +44,10 @@ def test_tini_entrypoint(container, pid=1, command="tini"):
     https://superuser.com/questions/632979/if-i-know-the-pid-number-of-a-process-how-can-i-get-its-name
     """
     LOGGER.info(f"Test that {command} is launched as PID {pid} ...")
-    c = container.run(tty=True, command=["start.sh"])
+    c = container.run(
+        tty=True,
+        command=["start.sh"],
+    )
     # Select the PID 1 and get the corresponding command
     cmd = c.exec_run(f"ps -p {pid} -o comm=")
     output = cmd.output.decode("utf-8").strip("\n")


@@ -14,26 +14,26 @@ from requests.adapters import HTTPAdapter
 LOGGER = logging.getLogger(__name__)
 
-@pytest.fixture(scope='session')
+@pytest.fixture(scope="session")
 def http_client():
     """Requests session with retries and backoff."""
     s = requests.Session()
     retries = Retry(total=5, backoff_factor=1)
-    s.mount('http://', HTTPAdapter(max_retries=retries))
-    s.mount('https://', HTTPAdapter(max_retries=retries))
+    s.mount("http://", HTTPAdapter(max_retries=retries))
+    s.mount("https://", HTTPAdapter(max_retries=retries))
     return s
 
-@pytest.fixture(scope='session')
+@pytest.fixture(scope="session")
 def docker_client():
     """Docker client configured based on the host environment"""
     return docker.from_env()
 
-@pytest.fixture(scope='session')
+@pytest.fixture(scope="session")
 def image_name():
     """Image name to test"""
-    return os.getenv('TEST_IMAGE')
+    return os.getenv("TEST_IMAGE")
 
 class TrackedContainer:
@@ -78,7 +78,10 @@ class TrackedContainer:
         all_kwargs.update(self.kwargs)
         all_kwargs.update(kwargs)
         LOGGER.info(f"Running {self.image_name} with args {all_kwargs} ...")
-        self.container = self.docker_client.containers.run(self.image_name, **all_kwargs)
+        self.container = self.docker_client.containers.run(
+            self.image_name,
+            **all_kwargs,
+        )
         return self.container
 
     def remove(self):
@@ -87,7 +90,7 @@ class TrackedContainer:
         self.container.remove(force=True)
 
-@pytest.fixture(scope='function')
+@pytest.fixture(scope="function")
 def container(docker_client, image_name):
     """Notebook container with initial configuration appropriate for testing
     (e.g., HTTP port exposed to the host for HTTP calls).
@@ -98,9 +101,7 @@ def container(docker_client, image_name):
         docker_client,
         image_name,
         detach=True,
-        ports={
-            '8888/tcp': 8888
-        }
+        ports={"8888/tcp": 8888},
     )
     yield container
     container.remove()


@@ -10,7 +10,7 @@ def test_julia(container):
LOGGER.info("Test that julia is correctly installed ...") LOGGER.info("Test that julia is correctly installed ...")
running_container = container.run( running_container = container.run(
tty=True, tty=True,
command=["start.sh", "bash", "-c", "sleep infinity"] command=["start.sh", "bash", "-c", "sleep infinity"],
) )
command = "julia --version" command = "julia --version"
cmd = running_container.exec_run(command) cmd = running_container.exec_run(command)


@@ -25,40 +25,37 @@
# If your documentation needs a minimal Sphinx version, state it here. # If your documentation needs a minimal Sphinx version, state it here.
# #
needs_sphinx = '2.1' needs_sphinx = "2.1"
# Add any Sphinx extension module names here, as strings. They can be # Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones. # ones.
extensions = [ extensions = ["myst_parser", "sphinx_copybutton"]
'myst_parser',
'sphinx_copybutton'
]
# Add any paths that contain templates here, relative to this directory. # Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates'] templates_path = ["_templates"]
source_suffix = { source_suffix = {
'.rst': 'restructuredtext', ".rst": "restructuredtext",
'.md': 'markdown', ".md": "markdown",
} }
# The master toctree document. # The master toctree document.
master_doc = 'index' master_doc = "index"
# General information about the project. # General information about the project.
project = 'docker-stacks' project = "docker-stacks"
copyright = '2018- Project Jupyter' copyright = "2018- Project Jupyter"
author = 'Project Jupyter' author = "Project Jupyter"
# The version info for the project you're documenting, acts as replacement for # The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the # |version| and |release|, also used in various other places throughout the
# built documents. # built documents.
# #
# The short X.Y version. # The short X.Y version.
version = 'latest' version = "latest"
# The full version, including alpha/beta/rc tags. # The full version, including alpha/beta/rc tags.
release = 'latest' release = "latest"
# The language for content autogenerated by Sphinx. Refer to documentation # The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages. # for a list of supported languages.
@@ -70,10 +67,10 @@ language = None
# List of patterns, relative to source directory, that match files and # List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files. # directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path # This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]
# The name of the Pygments (syntax highlighting) style to use. # The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx' pygments_style = "sphinx"
# If true, `todo` and `todoList` produce output, else they produce nothing. # If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False todo_include_todos = False
@@ -84,27 +81,27 @@ todo_include_todos = False
# The theme to use for HTML and HTML Help pages. See the documentation for # The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes. # a list of builtin themes.
# #
html_theme = 'alabaster' html_theme = "alabaster"
# Theme options are theme-specific and customize the look and feel of a theme # Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the # further. For a list of options available for each theme, see the
# documentation. # documentation.
# #
html_theme_options = { html_theme_options = {
'description': "Jupyter Docker Stacks", "description": "Jupyter Docker Stacks",
'fixed_sidebar': False, "fixed_sidebar": False,
'show_relbars': True, "show_relbars": True,
'github_user': 'jupyter', "github_user": "jupyter",
'github_repo': 'docker-stacks', "github_repo": "docker-stacks",
'github_type': 'star', "github_type": "star",
'logo': 'jupyter-logo.svg', "logo": "jupyter-logo.svg",
'logo_text_align': 'left' "logo_text_align": "left",
} }
# Add any paths that contain custom static files (such as style sheets) here, # Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files, # relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css". # so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static'] html_static_path = ["_static"]
# Custom sidebar templates, must be a dictionary that maps document names # Custom sidebar templates, must be a dictionary that maps document names
# to template names. # to template names.
@@ -115,18 +112,18 @@ html_static_path = ['_static']
# 'searchbox.html']``. # 'searchbox.html']``.
# #
html_sidebars = { html_sidebars = {
'**': [ "**": [
'about.html', "about.html",
'navigation.html', "navigation.html",
'relations.html', "relations.html",
'searchbox.html', "searchbox.html",
] ]
} }
# -- Options for HTMLHelp output ------------------------------------------ # -- Options for HTMLHelp output ------------------------------------------
# Output file base name for HTML help builder. # Output file base name for HTML help builder.
htmlhelp_basename = 'docker-stacksdoc' htmlhelp_basename = "docker-stacksdoc"
# -- Options for LaTeX output --------------------------------------------- # -- Options for LaTeX output ---------------------------------------------
@@ -135,15 +132,12 @@ latex_elements = {
# The paper size ('letterpaper' or 'a4paper'). # The paper size ('letterpaper' or 'a4paper').
# #
# 'papersize': 'letterpaper', # 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt'). # The font size ('10pt', '11pt' or '12pt').
# #
# 'pointsize': '10pt', # 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble. # Additional stuff for the LaTeX preamble.
# #
# 'preamble': '', # 'preamble': '',
# Latex figure (float) alignment # Latex figure (float) alignment
# #
# 'figure_align': 'htbp', # 'figure_align': 'htbp',
@@ -153,8 +147,13 @@ latex_elements = {
# (source start file, target name, title, # (source start file, target name, title,
# author, documentclass [howto, manual, or own class]). # author, documentclass [howto, manual, or own class]).
latex_documents = [ latex_documents = [
(master_doc, 'docker-stacks.tex', 'docker-stacks Documentation', (
'Project Jupyter', 'manual'), master_doc,
"docker-stacks.tex",
"docker-stacks Documentation",
"Project Jupyter",
"manual",
),
] ]
@@ -162,10 +161,7 @@ latex_documents = [
# One entry per manual page. List of tuples # One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section). # (source start file, name, description, authors, manual section).
man_pages = [ man_pages = [(master_doc, "docker-stacks", "docker-stacks Documentation", [author], 1)]
(master_doc, 'docker-stacks', 'docker-stacks Documentation',
[author], 1)
]
# -- Options for Texinfo output ------------------------------------------- # -- Options for Texinfo output -------------------------------------------
@@ -174,9 +170,15 @@ man_pages = [
# (source start file, target name, title, author, # (source start file, target name, title, author,
# dir menu entry, description, category) # dir menu entry, description, category)
texinfo_documents = [ texinfo_documents = [
(master_doc, 'docker-stacks', 'docker-stacks Documentation', (
author, 'docker-stacks', 'One line description of project.', master_doc,
'Miscellaneous'), "docker-stacks",
"docker-stacks Documentation",
author,
"docker-stacks",
"One line description of project.",
"Miscellaneous",
),
] ]
# -- Extension configuration ---------------------------------------------- # -- Extension configuration ----------------------------------------------
@@ -187,4 +189,4 @@ linkcheck_anchors = False
# -- Translation ---------------------------------------------------------- # -- Translation ----------------------------------------------------------
gettext_uuid = True gettext_uuid = True
locale_dirs = ['locale/'] locale_dirs = ["locale/"]


@@ -11,7 +11,7 @@ def test_inkscape(container):
LOGGER.info("Test that inkscape is working by printing its version ...") LOGGER.info("Test that inkscape is working by printing its version ...")
c = container.run( c = container.run(
tty=True, tty=True,
command=["start.sh", "bash", "-c", "inkscape --version"] command=["start.sh", "bash", "-c", "inkscape --version"],
) )
c.wait(timeout=10) c.wait(timeout=10)
logs = c.logs(stdout=True).decode("utf-8") logs = c.logs(stdout=True).decode("utf-8")


@@ -24,7 +24,9 @@ def test_nbconvert(container, test_file, output_format):
     host_data_dir = os.path.join(THIS_DIR, "data")
     cont_data_dir = "/home/jovyan/data"
     output_dir = "/tmp"
-    LOGGER.info(f"Test that the example notebook {test_file} can be converted to {output_format} ...")
+    LOGGER.info(
+        f"Test that the example notebook {test_file} can be converted to {output_format} ..."
+    )
     command = f"jupyter nbconvert {cont_data_dir}/{test_file}.ipynb --output-dir {output_dir} --to {output_format}"
     c = container.run(
         volumes={host_data_dir: {"bind": cont_data_dir, "mode": "ro"}},


@@ -10,21 +10,9 @@ def test_spark_shell(container):
"""Checking if Spark (spark-shell) is running properly""" """Checking if Spark (spark-shell) is running properly"""
c = container.run( c = container.run(
tty=True, tty=True,
command=['start.sh', 'bash', '-c', 'spark-shell <<< "1+1"'] command=["start.sh", "bash", "-c", 'spark-shell <<< "1+1"'],
) )
c.wait(timeout=60) c.wait(timeout=60)
logs = c.logs(stdout=True).decode('utf-8') logs = c.logs(stdout=True).decode("utf-8")
LOGGER.debug(logs) LOGGER.debug(logs)
assert 'res0: Int = 2' in logs, "spark-shell does not work" assert "res0: Int = 2" in logs, "spark-shell does not work"
def test_pyspark(container):
"""PySpark should be in the Python path"""
c = container.run(
tty=True,
command=['start.sh', 'python', '-c', 'import pyspark']
)
rv = c.wait(timeout=30)
logs = c.logs(stdout=True).decode('utf-8')
LOGGER.debug(logs)
assert rv == 0 or rv["StatusCode"] == 0, "pyspark not in PYTHONPATH"


@@ -0,0 +1,4 @@
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
import pyspark # noqa: F401


@@ -14,8 +14,11 @@ s = 1 + np.sin(2 * np.pi * t)
 fig, ax = plt.subplots()
 ax.plot(t, s)
-ax.set(xlabel='time (s)', ylabel='voltage (mV)',
-       title='About as simple as it gets, folks')
+ax.set(
+    xlabel="time (s)",
+    ylabel="voltage (mV)",
+    title="About as simple as it gets, folks",
+)
 ax.grid()
 # Note that the test can be run headless by checking if an image is produced
 file_path = os.path.join("/tmp", "test.png")


@@ -3,16 +3,22 @@ import matplotlib
 import matplotlib.pyplot as plt
 import os
 
-matplotlib.rcParams['pgf.texsystem'] = 'pdflatex'
-matplotlib.rcParams.update({'font.family': 'serif', 'font.size': 18,
-                            'axes.labelsize': 20, 'axes.titlesize': 24,
-                            'figure.titlesize': 28})
-matplotlib.rcParams['text.usetex'] = True
+matplotlib.rcParams["pgf.texsystem"] = "pdflatex"
+matplotlib.rcParams.update(
+    {
+        "font.family": "serif",
+        "font.size": 18,
+        "axes.labelsize": 20,
+        "axes.titlesize": 24,
+        "figure.titlesize": 28,
+    }
+)
+matplotlib.rcParams["text.usetex"] = True
 
 fig, ax = plt.subplots(1, 1)
 x = [1, 2]
 y = [1, 2]
-ax.plot(x, y, label='a label')
+ax.plot(x, y, label="a label")
 ax.legend(fontsize=15)
 file_path = os.path.join("/tmp", "test_fonts.png")


@@ -27,7 +27,7 @@ def test_check_extension(container, extension):
LOGGER.info(f"Checking the extension: {extension} ...") LOGGER.info(f"Checking the extension: {extension} ...")
c = container.run( c = container.run(
tty=True, tty=True,
command=["start.sh", "jupyter", "labextension", "check", extension] command=["start.sh", "jupyter", "labextension", "check", extension],
) )
rv = c.wait(timeout=10) rv = c.wait(timeout=10)
logs = c.logs(stdout=True).decode("utf-8") logs = c.logs(stdout=True).decode("utf-8")


@@ -13,9 +13,17 @@ THIS_DIR = os.path.dirname(os.path.realpath(__file__))
 @pytest.mark.parametrize(
     "test_file,expected_file,description",
     [
-        ("matplotlib_1.py", "test.png", "Test that matplotlib is able to plot a graph and write it as an image ..."),
-        ("matplotlib_fonts_1.py", "test_fonts.png", "Test cm-super latex labels in matplotlib ...")
-    ]
+        (
+            "matplotlib_1.py",
+            "test.png",
+            "Test that matplotlib is able to plot a graph and write it as an image ...",
+        ),
+        (
+            "matplotlib_fonts_1.py",
+            "test_fonts.png",
+            "Test cm-super latex labels in matplotlib ...",
+        ),
+    ],
 )
 def test_matplotlib(container, test_file, expected_file, description):
     """Various tests performed on matplotlib


@@ -1,32 +0,0 @@
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
import logging
import pytest
LOGGER = logging.getLogger(__name__)
@pytest.mark.parametrize(
"name,command_list",
[
(
"Sum series",
[
"import pandas as pd",
"import numpy as np",
"np.random.seed(0)",
"print(pd.Series(np.random.randint(0, 7, size=10)).sum())"
]
),
],
)
def test_pandas(container, name, command_list):
"""Basic pandas tests"""
LOGGER.info(f"Testing pandas: {name} ...")
command = ';'.join(command_list)
c = container.run(tty=True, command=["start.sh", "python", "-c", command])
rv = c.wait(timeout=30)
logs = c.logs(stdout=True).decode("utf-8")
LOGGER.debug(logs)
assert rv == 0 or rv["StatusCode"] == 0, f"Command {command} failed"


@@ -0,0 +1,9 @@
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
import numpy as np
import pandas as pd
np.random.seed(0)
print(pd.Series(np.random.randint(0, 7, size=10)).sum())


@@ -19,7 +19,12 @@ BUILD_TIMESTAMP = datetime.datetime.utcnow().isoformat()[:-7] + "Z"
MARKDOWN_LINE_BREAK = "<br />" MARKDOWN_LINE_BREAK = "<br />"
def append_build_history_line(short_image_name: str, owner: str, wiki_path: str, all_tags: List[str]) -> None: def append_build_history_line(
short_image_name: str,
owner: str,
wiki_path: str,
all_tags: List[str],
) -> None:
logger.info("Appending build history line") logger.info("Appending build history line")
date_column = f"`{BUILD_TIMESTAMP}`" date_column = f"`{BUILD_TIMESTAMP}`"
@@ -28,11 +33,13 @@ def append_build_history_line(short_image_name: str, owner: str, wiki_path: str,
) )
commit_hash = GitHelper.commit_hash() commit_hash = GitHelper.commit_hash()
commit_hash_tag = GitHelper.commit_hash_tag() commit_hash_tag = GitHelper.commit_hash_tag()
links_column = MARKDOWN_LINE_BREAK.join([ links_column = MARKDOWN_LINE_BREAK.join(
[
f"[Git diff](https://github.com/jupyter/docker-stacks/commit/{commit_hash})", f"[Git diff](https://github.com/jupyter/docker-stacks/commit/{commit_hash})",
f"[Dockerfile](https://github.com/jupyter/docker-stacks/blob/{commit_hash}/{short_image_name}/Dockerfile)", f"[Dockerfile](https://github.com/jupyter/docker-stacks/blob/{commit_hash}/{short_image_name}/Dockerfile)",
f"[Build manifest](./{short_image_name}-{commit_hash_tag})" f"[Build manifest](./{short_image_name}-{commit_hash_tag})",
]) ]
)
build_history_line = "|".join([date_column, image_column, links_column]) + "|" build_history_line = "|".join([date_column, image_column, links_column]) + "|"
home_wiki_file = os.path.join(wiki_path, "Home.md") home_wiki_file = os.path.join(wiki_path, "Home.md")
@@ -49,16 +56,20 @@ def create_manifest_file(
owner: str, owner: str,
wiki_path: str, wiki_path: str,
manifests: List[ManifestInterface], manifests: List[ManifestInterface],
container container,
) -> None: ) -> None:
manifest_names = [manifest.__name__ for manifest in manifests] manifest_names = [manifest.__name__ for manifest in manifests]
logger.info(f"Using manifests: {manifest_names}") logger.info(f"Using manifests: {manifest_names}")
commit_hash_tag = GitHelper.commit_hash_tag() commit_hash_tag = GitHelper.commit_hash_tag()
manifest_file = os.path.join(wiki_path, f"manifests/{short_image_name}-{commit_hash_tag}.md") manifest_file = os.path.join(
wiki_path,
f"manifests/{short_image_name}-{commit_hash_tag}.md",
)
markdown_pieces = [ManifestHeader.create_header(short_image_name, owner, BUILD_TIMESTAMP)] + \ markdown_pieces = [
[manifest.markdown_piece(container) for manifest in manifests] ManifestHeader.create_header(short_image_name, owner, BUILD_TIMESTAMP)
] + [manifest.markdown_piece(container) for manifest in manifests]
markdown_content = "\n\n".join(markdown_pieces) + "\n" markdown_content = "\n\n".join(markdown_pieces) + "\n"
with open(manifest_file, "w") as f: with open(manifest_file, "w") as f:
@@ -81,7 +92,11 @@ if __name__ == "__main__":
logging.basicConfig(level=logging.INFO) logging.basicConfig(level=logging.INFO)
arg_parser = argparse.ArgumentParser() arg_parser = argparse.ArgumentParser()
arg_parser.add_argument("--short-image-name", required=True, help="Short image name to apply tags for") arg_parser.add_argument(
"--short-image-name",
required=True,
help="Short image name to apply tags for",
)
arg_parser.add_argument("--owner", required=True, help="Owner of the image") arg_parser.add_argument("--owner", required=True, help="Owner of the image")
arg_parser.add_argument("--wiki-path", required=True, help="Path to the wiki pages") arg_parser.add_argument("--wiki-path", required=True, help="Path to the wiki pages")
args = arg_parser.parse_args() args = arg_parser.parse_args()


@@ -8,7 +8,12 @@ logger = logging.getLogger(__name__)
 class DockerRunner:
-    def __init__(self, image_name: str, docker_client=docker.from_env(), command: str = "sleep infinity"):
+    def __init__(
+        self,
+        image_name: str,
+        docker_client=docker.from_env(),
+        command: str = "sleep infinity",
+    ):
         self.container = None
         self.image_name = image_name
         self.command = command
@@ -17,7 +22,9 @@ class DockerRunner:
     def __enter__(self):
         logger.info(f"Creating container for image {self.image_name} ...")
         self.container = self.docker_client.containers.run(
-            image=self.image_name, command=self.command, detach=True,
+            image=self.image_name,
+            command=self.command,
+            detach=True,
         )
         logger.info(f"Container {self.container.name} created")
         return self.container


@@ -6,7 +6,9 @@ from .manifests import ManifestInterface
 from .taggers import TaggerInterface
 
-def get_taggers_and_manifests(short_image_name: str) -> Tuple[List[TaggerInterface], List[ManifestInterface]]:
+def get_taggers_and_manifests(
+    short_image_name: str,
+) -> Tuple[List[TaggerInterface], List[ManifestInterface]]:
     taggers: List[TaggerInterface] = []
     manifests: List[ManifestInterface] = []
     while short_image_name is not None:


@@ -2,15 +2,29 @@
# Distributed under the terms of the Modified BSD License. # Distributed under the terms of the Modified BSD License.
from dataclasses import dataclass, field from dataclasses import dataclass, field
from typing import Optional, List from typing import Optional, List
from .taggers import TaggerInterface, \ from .taggers import (
SHATagger, \ TaggerInterface,
UbuntuVersionTagger, PythonVersionTagger, \ SHATagger,
JupyterNotebookVersionTagger, JupyterLabVersionTagger, JupyterHubVersionTagger, \ UbuntuVersionTagger,
RVersionTagger, TensorflowVersionTagger, JuliaVersionTagger, \ PythonVersionTagger,
SparkVersionTagger, HadoopVersionTagger, JavaVersionTagger JupyterNotebookVersionTagger,
from .manifests import ManifestInterface, \ JupyterLabVersionTagger,
CondaEnvironmentManifest, AptPackagesManifest, \ JupyterHubVersionTagger,
RPackagesManifest, JuliaPackagesManifest, SparkInfoManifest RVersionTagger,
TensorflowVersionTagger,
JuliaVersionTagger,
SparkVersionTagger,
HadoopVersionTagger,
JavaVersionTagger,
)
from .manifests import (
ManifestInterface,
CondaEnvironmentManifest,
AptPackagesManifest,
RPackagesManifest,
JuliaPackagesManifest,
SparkInfoManifest,
)
@dataclass @dataclass
@@ -25,41 +39,37 @@ ALL_IMAGES = {
parent_image=None, parent_image=None,
taggers=[ taggers=[
SHATagger, SHATagger,
UbuntuVersionTagger, PythonVersionTagger, UbuntuVersionTagger,
JupyterNotebookVersionTagger, JupyterLabVersionTagger, JupyterHubVersionTagger PythonVersionTagger,
JupyterNotebookVersionTagger,
JupyterLabVersionTagger,
JupyterHubVersionTagger,
], ],
manifests=[ manifests=[CondaEnvironmentManifest, AptPackagesManifest],
CondaEnvironmentManifest, AptPackagesManifest
]
),
"minimal-notebook": ImageDescription(
parent_image="base-notebook"
),
"scipy-notebook": ImageDescription(
parent_image="minimal-notebook"
), ),
"minimal-notebook": ImageDescription(parent_image="base-notebook"),
"scipy-notebook": ImageDescription(parent_image="minimal-notebook"),
"r-notebook": ImageDescription( "r-notebook": ImageDescription(
parent_image="minimal-notebook", parent_image="minimal-notebook",
taggers=[RVersionTagger], taggers=[RVersionTagger],
manifests=[RPackagesManifest] manifests=[RPackagesManifest],
), ),
"tensorflow-notebook": ImageDescription( "tensorflow-notebook": ImageDescription(
parent_image="scipy-notebook", parent_image="scipy-notebook", taggers=[TensorflowVersionTagger]
taggers=[TensorflowVersionTagger]
), ),
"datascience-notebook": ImageDescription( "datascience-notebook": ImageDescription(
parent_image="scipy-notebook", parent_image="scipy-notebook",
taggers=[RVersionTagger, JuliaVersionTagger], taggers=[RVersionTagger, JuliaVersionTagger],
manifests=[RPackagesManifest, JuliaPackagesManifest] manifests=[RPackagesManifest, JuliaPackagesManifest],
), ),
"pyspark-notebook": ImageDescription( "pyspark-notebook": ImageDescription(
parent_image="scipy-notebook", parent_image="scipy-notebook",
taggers=[SparkVersionTagger, HadoopVersionTagger, JavaVersionTagger], taggers=[SparkVersionTagger, HadoopVersionTagger, JavaVersionTagger],
manifests=[SparkInfoManifest] manifests=[SparkInfoManifest],
), ),
"all-spark-notebook": ImageDescription( "all-spark-notebook": ImageDescription(
parent_image="pyspark-notebook", parent_image="pyspark-notebook",
taggers=[RVersionTagger], taggers=[RVersionTagger],
manifests=[RPackagesManifest] manifests=[RPackagesManifest],
) ),
} }


@@ -10,24 +10,30 @@ logger = logging.getLogger(__name__)
def quoted_output(container, cmd: str) -> str: def quoted_output(container, cmd: str) -> str:
return "\n".join([ return "\n".join(
[
"```", "```",
DockerRunner.run_simple_command(container, cmd, print_result=False), DockerRunner.run_simple_command(container, cmd, print_result=False),
"```" "```",
]) ]
)
class ManifestHeader: class ManifestHeader:
"""ManifestHeader doesn't fall under common interface and we run it separately""" """ManifestHeader doesn't fall under common interface and we run it separately"""
@staticmethod @staticmethod
def create_header(short_image_name: str, owner: str, build_timestamp: str) -> str: def create_header(short_image_name: str, owner: str, build_timestamp: str) -> str:
commit_hash = GitHelper.commit_hash() commit_hash = GitHelper.commit_hash()
commit_hash_tag = GitHelper.commit_hash_tag() commit_hash_tag = GitHelper.commit_hash_tag()
commit_message = GitHelper.commit_message() commit_message = GitHelper.commit_message()
image_size = docker["images", f"{owner}/{short_image_name}:latest", "--format", "{{.Size}}"]().rstrip() image_size = docker[
"images", f"{owner}/{short_image_name}:latest", "--format", "{{.Size}}"
]().rstrip()
return "\n".join([ return "\n".join(
[
f"# Build manifest for image: {short_image_name}:{commit_hash_tag}", f"# Build manifest for image: {short_image_name}:{commit_hash_tag}",
"", "",
"## Build Info", "## Build Info",
@@ -39,12 +45,14 @@ class ManifestHeader:
"* Git commit message:", "* Git commit message:",
"```", "```",
f"{commit_message}", f"{commit_message}",
"```" "```",
]) ]
)
class ManifestInterface: class ManifestInterface:
"""Common interface for all manifests""" """Common interface for all manifests"""
@staticmethod @staticmethod
def markdown_piece(container) -> str: def markdown_piece(container) -> str:
raise NotImplementedError raise NotImplementedError
@@ -53,56 +61,72 @@ class ManifestInterface:
class CondaEnvironmentManifest(ManifestInterface): class CondaEnvironmentManifest(ManifestInterface):
@staticmethod @staticmethod
def markdown_piece(container) -> str: def markdown_piece(container) -> str:
return "\n".join([ return "\n".join(
[
"## Python Packages", "## Python Packages",
"", "",
quoted_output(container, "python --version"), quoted_output(container, "python --version"),
"", "",
quoted_output(container, "conda info"), quoted_output(container, "conda info"),
"", "",
quoted_output(container, "conda list") quoted_output(container, "conda list"),
]) ]
)
class AptPackagesManifest(ManifestInterface): class AptPackagesManifest(ManifestInterface):
@staticmethod @staticmethod
def markdown_piece(container) -> str: def markdown_piece(container) -> str:
return "\n".join([ return "\n".join(
[
"## Apt Packages", "## Apt Packages",
"", "",
quoted_output(container, "apt list --installed") quoted_output(container, "apt list --installed"),
]) ]
)
class RPackagesManifest(ManifestInterface): class RPackagesManifest(ManifestInterface):
@staticmethod @staticmethod
def markdown_piece(container) -> str: def markdown_piece(container) -> str:
return "\n".join([ return "\n".join(
[
"## R Packages", "## R Packages",
"", "",
quoted_output(container, "R --version"), quoted_output(container, "R --version"),
"", "",
quoted_output(container, "R --silent -e 'installed.packages(.Library)[, c(1,3)]'") quoted_output(
]) container,
"R --silent -e 'installed.packages(.Library)[, c(1,3)]'",
),
]
)
class JuliaPackagesManifest(ManifestInterface): class JuliaPackagesManifest(ManifestInterface):
@staticmethod @staticmethod
def markdown_piece(container) -> str: def markdown_piece(container) -> str:
return "\n".join([ return "\n".join(
[
"## Julia Packages", "## Julia Packages",
"", "",
quoted_output(container, "julia -E 'using InteractiveUtils; versioninfo()'"), quoted_output(
container,
"julia -E 'using InteractiveUtils; versioninfo()'",
),
"", "",
quoted_output(container, "julia -E 'import Pkg; Pkg.status()'") quoted_output(container, "julia -E 'import Pkg; Pkg.status()'"),
]) ]
)
class SparkInfoManifest(ManifestInterface): class SparkInfoManifest(ManifestInterface):
@staticmethod @staticmethod
def markdown_piece(container) -> str: def markdown_piece(container) -> str:
return "\n".join([ return "\n".join(
[
"## Apache Spark", "## Apache Spark",
"", "",
quoted_output(container, "/usr/local/spark/bin/spark-submit --version"), quoted_output(container, "/usr/local/spark/bin/spark-submit --version"),
]) ]
)


@@ -21,7 +21,9 @@ def tag_image(short_image_name: str, owner: str) -> None:
     for tagger in taggers:
         tagger_name = tagger.__name__
         tag_value = tagger.tag_value(container)
-        logger.info(f"Applying tag tagger_name: {tagger_name} tag_value: {tag_value}")
+        logger.info(
+            f"Applying tag tagger_name: {tagger_name} tag_value: {tag_value}"
+        )
         docker["tag", image, f"{owner}/{short_image_name}:{tag_value}"]()
@@ -29,7 +31,11 @@ if __name__ == "__main__":
     logging.basicConfig(level=logging.INFO)
     arg_parser = argparse.ArgumentParser()
-    arg_parser.add_argument("--short-image-name", required=True, help="Short image name to apply tags for")
+    arg_parser.add_argument(
+        "--short-image-name",
+        required=True,
+        help="Short image name to apply tags for",
+    )
     arg_parser.add_argument("--owner", required=True, help="Owner of the image")
     args = arg_parser.parse_args()


@@ -16,11 +16,11 @@ def _get_env_variable(container, variable: str) -> str:
env = DockerRunner.run_simple_command( env = DockerRunner.run_simple_command(
container, container,
cmd="env", cmd="env",
print_result=False print_result=False,
).split() ).split()
for env_entry in env: for env_entry in env:
if env_entry.startswith(variable): if env_entry.startswith(variable):
return env_entry[len(variable) + 1:] return env_entry[len(variable) + 1 :]
raise KeyError(variable) raise KeyError(variable)
@@ -29,15 +29,16 @@ def _get_pip_package_version(container, package: str) -> str:
package_info = DockerRunner.run_simple_command( package_info = DockerRunner.run_simple_command(
container, container,
cmd=f"pip show {package}", cmd=f"pip show {package}",
print_result=False print_result=False,
) )
version_line = package_info.split("\n")[1] version_line = package_info.split("\n")[1]
assert version_line.startswith(VERSION_PREFIX) assert version_line.startswith(VERSION_PREFIX)
return version_line[len(VERSION_PREFIX):] return version_line[len(VERSION_PREFIX) :]
class TaggerInterface: class TaggerInterface:
"""Common interface for all taggers""" """Common interface for all taggers"""
@staticmethod @staticmethod
def tag_value(container) -> str: def tag_value(container) -> str:
raise NotImplementedError raise NotImplementedError
@@ -52,7 +53,10 @@ class SHATagger(TaggerInterface):
class UbuntuVersionTagger(TaggerInterface): class UbuntuVersionTagger(TaggerInterface):
@staticmethod @staticmethod
def tag_value(container) -> str: def tag_value(container) -> str:
os_release = DockerRunner.run_simple_command(container, "cat /etc/os-release").split("\n") os_release = DockerRunner.run_simple_command(
container,
"cat /etc/os-release",
).split("\n")
for line in os_release: for line in os_release:
if line.startswith("VERSION_ID"): if line.startswith("VERSION_ID"):
return "ubuntu-" + line.split("=")[1].strip('"') return "ubuntu-" + line.split("=")[1].strip('"')


@@ -1,30 +0,0 @@
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
import logging
import pytest
LOGGER = logging.getLogger(__name__)
@pytest.mark.parametrize(
"name,command",
[
(
"Hello world",
"import tensorflow as tf;print(tf.constant('Hello, TensorFlow'))",
),
(
"Sum",
"import tensorflow as tf;print(tf.reduce_sum(tf.random.normal([1000, 1000])))",
),
],
)
def test_tensorflow(container, name, command):
"""Basic tensorflow tests"""
LOGGER.info(f"Testing tensorflow: {name} ...")
c = container.run(tty=True, command=["start.sh", "python", "-c", command])
rv = c.wait(timeout=30)
logs = c.logs(stdout=True).decode("utf-8")
LOGGER.debug(logs)
assert rv == 0 or rv["StatusCode"] == 0, f"Command {command} failed"


@@ -0,0 +1,7 @@
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
import tensorflow as tf
print(tf.constant("Hello, TensorFlow"))
print(tf.reduce_sum(tf.random.normal([1000, 1000])))

test/README.md (new file)

@@ -0,0 +1,15 @@
# Docker stacks testing
We test our images using the `pytest` module.
`conftest.py` and `pytest.ini` in the root of our repository define the environment in which tests are run.
More info on pytest can be found [here](https://docs.pytest.org/en/latest/contents.html).
There are two kinds of tests we use:
- General tests - these are located in [this](https://github.com/jupyter/docker-stacks/blob/master/test) folder
- Image-specific tests - for example, the [base-notebook/test](https://github.com/jupyter/docker-stacks/blob/master/base-notebook/test) folder
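
Most general tests follow the same pattern: they take the `container` fixture defined in `conftest.py`, run the image under test (read from the `TEST_IMAGE` environment variable) with a command, wait for it to exit, and assert on the captured logs. A minimal sketch, assuming only that fixture (the test below is illustrative and not part of the suite):

```python
def test_echo_example(container):
    """Hypothetical general test: run a trivial command in the image
    under test and check its output and exit status."""
    c = container.run(
        tty=True,
        command=["start.sh", "bash", "-c", "echo hello"],
    )
    rv = c.wait(timeout=30)
    logs = c.logs(stdout=True).decode("utf-8")
    assert rv == 0 or rv["StatusCode"] == 0
    assert "hello" in logs
```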
We also have a way to easily run arbitrary python files in a container.
This is useful for running unit tests of packages we use, so we put these files in the `{image}/test/units` folder.
An example of such a test is [unit_pandas.py](https://github.com/jupyter/docker-stacks/blob/master/scipy-notebook/test/units/unit_pandas.py).
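
As a sketch of what such a unit file can look like (this particular file does not exist in the repository; it assumes `numpy`, which is available in `scipy-notebook`):

```python
# Hypothetical scipy-notebook/test/units/unit_numpy.py
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
import numpy as np

# test_units mounts this folder into the container and runs `python <file>`,
# so the file only needs to finish with a zero exit status.
assert np.allclose(np.linalg.inv(np.eye(3)), np.eye(3))
```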


@@ -34,8 +34,7 @@ LOGGER = logging.getLogger(__name__)
class CondaPackageHelper: class CondaPackageHelper:
"""Conda package helper permitting to get information about packages """Conda package helper permitting to get information about packages"""
"""
def __init__(self, container): def __init__(self, container):
# if isinstance(container, TrackedContainer): # if isinstance(container, TrackedContainer):
@@ -51,7 +50,7 @@ class CondaPackageHelper:
LOGGER.info(f"Starting container {container.image_name} ...") LOGGER.info(f"Starting container {container.image_name} ...")
return container.run( return container.run(
tty=True, tty=True,
command=["start.sh", "bash", "-c", "sleep infinity"] command=["start.sh", "bash", "-c", "sleep infinity"],
) )
@staticmethod @staticmethod
@@ -76,7 +75,9 @@ class CondaPackageHelper:
if self.specs is None: if self.specs is None:
LOGGER.info("Grabing the list of specifications ...") LOGGER.info("Grabing the list of specifications ...")
self.specs = CondaPackageHelper._packages_from_json( self.specs = CondaPackageHelper._packages_from_json(
self._execute_command(CondaPackageHelper._conda_export_command(from_history=True)) self._execute_command(
CondaPackageHelper._conda_export_command(from_history=True)
)
) )
return self.specs return self.specs
@@ -112,9 +113,7 @@ class CondaPackageHelper:
def available_packages(self): def available_packages(self):
"""Return the available packages""" """Return the available packages"""
if self.available is None: if self.available is None:
LOGGER.info( LOGGER.info("Grabing the list of available packages (can take a while) ...")
"Grabing the list of available packages (can take a while) ..."
)
# Keeping command line output since `conda search --outdated --json` is way too long ... # Keeping command line output since `conda search --outdated --json` is way too long ...
self.available = CondaPackageHelper._extract_available( self.available = CondaPackageHelper._extract_available(
self._execute_command(["conda", "search", "--outdated"]) self._execute_command(["conda", "search", "--outdated"])
@@ -145,10 +144,11 @@ class CondaPackageHelper:
continue continue
current = min(inst_vs, key=CondaPackageHelper.semantic_cmp) current = min(inst_vs, key=CondaPackageHelper.semantic_cmp)
newest = avail_vs[-1] newest = avail_vs[-1]
if avail_vs and current != newest:
if ( if (
CondaPackageHelper.semantic_cmp(current) < avail_vs
CondaPackageHelper.semantic_cmp(newest) and current != newest
and CondaPackageHelper.semantic_cmp(current)
< CondaPackageHelper.semantic_cmp(newest)
): ):
self.comparison.append( self.comparison.append(
{"Package": pkg, "Current": current, "Newest": newest} {"Package": pkg, "Current": current, "Newest": newest}
@@ -162,6 +162,7 @@ class CondaPackageHelper:
def mysplit(string): def mysplit(string):
def version_substrs(x): def version_substrs(x):
return re.findall(r"([A-z]+|\d+)", x) return re.findall(r"([A-z]+|\d+)", x)
return list(chain(map(version_substrs, string.split(".")))) return list(chain(map(version_substrs, string.split("."))))
def str_ord(string): def str_ord(string):


@@ -68,7 +68,7 @@ EXCLUDED_PACKAGES = [
"protobuf", "protobuf",
"r-irkernel", "r-irkernel",
"unixodbc", "unixodbc",
"bzip2" "bzip2",
] ]
@@ -133,8 +133,9 @@ def _import_packages(package_helper, filtered_packages, check_function, max_fail
     for package in filtered_packages:
         LOGGER.info(f"Trying to import {package}")
         try:
-            assert check_function(package_helper, package) == 0, \
-                f"Package [{package}] import failed"
+            assert (
+                check_function(package_helper, package) == 0
+            ), f"Package [{package}] import failed"
         except AssertionError as err:
             failures[package] = err
             if len(failures) > max_failures:

test/test_units.py (new file)

@@ -0,0 +1,35 @@
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
import logging
import os
LOGGER = logging.getLogger(__name__)
THIS_DIR = os.path.dirname(os.path.realpath(__file__))
def test_units(container):
"""Various units tests
Add a py file in the {image}/test/units dir and it will be automatically tested
"""
short_image_name = container.image_name[container.image_name.rfind("/") + 1 :]
host_data_dir = os.path.join(THIS_DIR, f"../{short_image_name}/test/units")
LOGGER.info(f"Searching for units tests in {host_data_dir}")
cont_data_dir = "/home/jovyan/data"
if not os.path.exists(host_data_dir):
LOGGER.info(f"Not found unit tests for image: {container.image_name}")
return
for test_file in os.listdir(host_data_dir):
LOGGER.info(f"Running unit test: {test_file}")
c = container.run(
volumes={host_data_dir: {"bind": cont_data_dir, "mode": "ro"}},
tty=True,
command=["start.sh", "python", f"{cont_data_dir}/{test_file}"],
)
rv = c.wait(timeout=30)
logs = c.logs(stdout=True).decode("utf-8")
LOGGER.debug(logs)
assert rv == 0 or rv["StatusCode"] == 0