From a570f43cfbac4d67bd0192348570b4435b46c660 Mon Sep 17 00:00:00 2001 From: Guo Quan Date: Tue, 4 Feb 2020 14:34:23 -0500 Subject: [PATCH 01/10] Add `ROOT_CONTAINER` to base-notebook Add another layer of abstraction to the base container, namely the root container `ROOT_CONTAINER`, so that the user can have the flexibility to build the whole series of images with `make` based on a different base image. --- base-notebook/Dockerfile | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/base-notebook/Dockerfile b/base-notebook/Dockerfile index 020cd41a..9e1edcfa 100644 --- a/base-notebook/Dockerfile +++ b/base-notebook/Dockerfile @@ -3,7 +3,8 @@ # Ubuntu 18.04 (bionic) # https://hub.docker.com/_/ubuntu/?tab=tags&name=bionic -ARG BASE_CONTAINER=ubuntu:bionic-20200112@sha256:bc025862c3e8ec4a8754ea4756e33da6c41cba38330d7e324abd25c8e0b93300 +ARG ROOT_CONTAINER=ubuntu:bionic-20200112@sha256:bc025862c3e8ec4a8754ea4756e33da6c41cba38330d7e324abd25c8e0b93300 +ARG BASE_CONTAINER=$ROOT_CONTAINER FROM $BASE_CONTAINER LABEL maintainer="Jupyter Project " From 8b3ce5cfa6ce70a86ebb09bc0e2104048f920569 Mon Sep 17 00:00:00 2001 From: romainx Date: Thu, 13 Feb 2020 11:21:54 +0100 Subject: [PATCH 02/10] Change spark mirror --- pyspark-notebook/Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyspark-notebook/Dockerfile b/pyspark-notebook/Dockerfile index f2dea592..9b243c0d 100644 --- a/pyspark-notebook/Dockerfile +++ b/pyspark-notebook/Dockerfile @@ -16,7 +16,7 @@ RUN apt-get -y update && \ rm -rf /var/lib/apt/lists/* RUN cd /tmp && \ - wget -q http://mirrors.ukfast.co.uk/sites/ftp.apache.org/spark/spark-${APACHE_SPARK_VERSION}/spark-${APACHE_SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz && \ + wget -q https://www-us.apache.org/dist/spark/spark-${APACHE_SPARK_VERSION}/spark-${APACHE_SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz && \ echo 
"2426a20c548bdfc07df288cd1d18d1da6b3189d0b78dee76fa034c52a4e02895f0ad460720c526f163ba63a17efae4764c46a1cd8f9b04c60f9937a554db85d2 *spark-${APACHE_SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz" | sha512sum -c - && \ tar xzf spark-${APACHE_SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz -C /usr/local --owner root --group root --no-same-owner && \ rm spark-${APACHE_SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz From 7f7be5707c4058611fae8d1961cd98bf449ff29a Mon Sep 17 00:00:00 2001 From: romainx Date: Thu, 13 Feb 2020 12:00:41 +0100 Subject: [PATCH 03/10] spark mirror improvement --- pyspark-notebook/Dockerfile | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/pyspark-notebook/Dockerfile b/pyspark-notebook/Dockerfile index 9b243c0d..c3fc2d2e 100644 --- a/pyspark-notebook/Dockerfile +++ b/pyspark-notebook/Dockerfile @@ -15,8 +15,10 @@ RUN apt-get -y update && \ apt-get install --no-install-recommends -y openjdk-8-jre-headless ca-certificates-java && \ rm -rf /var/lib/apt/lists/* +# Using the preferred mirror to download the file RUN cd /tmp && \ - wget -q https://www-us.apache.org/dist/spark/spark-${APACHE_SPARK_VERSION}/spark-${APACHE_SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz && \ + wget -q $(wget -qO- https://www.apache.org/dyn/closer.lua/spark/spark-${APACHE_SPARK_VERSION}/spark-${APACHE_SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz\?as_json=1 | \ + python -c "import sys, json; content=json.load(sys.stdin); print(content['preferred']+content['path_info'])") && \ echo "2426a20c548bdfc07df288cd1d18d1da6b3189d0b78dee76fa034c52a4e02895f0ad460720c526f163ba63a17efae4764c46a1cd8f9b04c60f9937a554db85d2 *spark-${APACHE_SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz" | sha512sum -c - && \ tar xzf spark-${APACHE_SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz -C /usr/local --owner root --group root --no-same-owner && \ rm spark-${APACHE_SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz From 4333c7cc142361f8504be15a0b2ed894fa4c5448 Mon Sep 17 00:00:00 2001 
From: romainx Date: Thu, 13 Feb 2020 13:02:37 +0100 Subject: [PATCH 04/10] fix as_json param --- pyspark-notebook/Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyspark-notebook/Dockerfile b/pyspark-notebook/Dockerfile index c3fc2d2e..ac9a41c2 100644 --- a/pyspark-notebook/Dockerfile +++ b/pyspark-notebook/Dockerfile @@ -17,7 +17,7 @@ RUN apt-get -y update && \ # Using the preferred mirror to download the file RUN cd /tmp && \ - wget -q $(wget -qO- https://www.apache.org/dyn/closer.lua/spark/spark-${APACHE_SPARK_VERSION}/spark-${APACHE_SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz\?as_json=1 | \ + wget -q $(wget -qO- https://www.apache.org/dyn/closer.lua/spark/spark-${APACHE_SPARK_VERSION}/spark-${APACHE_SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz\?as_json | \ python -c "import sys, json; content=json.load(sys.stdin); print(content['preferred']+content['path_info'])") && \ echo "2426a20c548bdfc07df288cd1d18d1da6b3189d0b78dee76fa034c52a4e02895f0ad460720c526f163ba63a17efae4764c46a1cd8f9b04c60f9937a554db85d2 *spark-${APACHE_SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz" | sha512sum -c - && \ tar xzf spark-${APACHE_SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz -C /usr/local --owner root --group root --no-same-owner && \ From dfd6d2ed03c0a1c2422b5eb57d660c0313183da1 Mon Sep 17 00:00:00 2001 From: romainx Date: Thu, 13 Feb 2020 16:57:00 +0100 Subject: [PATCH 05/10] Fix doc make image -> make build --- docs/contributing/features.md | 2 +- docs/contributing/packages.md | 2 +- docs/contributing/tests.md | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/contributing/features.md b/docs/contributing/features.md index 1dca0d2c..79511e5b 100644 --- a/docs/contributing/features.md +++ b/docs/contributing/features.md @@ -26,7 +26,7 @@ If there's agreement that the feature belongs in one or more of the core stacks: 1. Implement the feature in a local clone of the `jupyter/docker-stacks` project. 2. 
Please build the image locally before submitting a pull request. Building the image locally shortens the debugging cycle by taking some load off [Travis CI](http://travis-ci.org/), which graciously provides free build services for open source projects like this one. If you use `make`, call: ``` -make image/somestack-notebook +make build/somestack-notebook ``` 3. [Submit a pull request](https://github.com/PointCloudLibrary/pcl/wiki/A-step-by-step-guide-on-preparing-and-submitting-a-pull-request) (PR) with your changes. 4. Watch for Travis to report a build success or failure for your PR on GitHub. diff --git a/docs/contributing/packages.md b/docs/contributing/packages.md index 11828a8b..ef9e8b55 100644 --- a/docs/contributing/packages.md +++ b/docs/contributing/packages.md @@ -8,7 +8,7 @@ Please follow the process below to update a package version: 2. Adjust the version number for the package. We prefer to pin the major and minor version number of packages so as to minimize rebuild side-effects when users submit pull requests (PRs). For example, you'll find the Jupyter Notebook package, `notebook`, installed using conda with `notebook=5.4.*`. 3. Please build the image locally before submitting a pull request. Building the image locally shortens the debugging cycle by taking some load off [Travis CI](http://travis-ci.org/), which graciously provides free build services for open source projects like this one. If you use `make`, call: ``` -make image/somestack-notebook +make build/somestack-notebook ``` 4. [Submit a pull request](https://github.com/PointCloudLibrary/pcl/wiki/A-step-by-step-guide-on-preparing-and-submitting-a-pull-request) (PR) with your changes. 5. Watch for Travis to report a build success or failure for your PR on GitHub. 
diff --git a/docs/contributing/tests.md b/docs/contributing/tests.md index c2ff4f35..80d2786d 100644 --- a/docs/contributing/tests.md +++ b/docs/contributing/tests.md @@ -14,7 +14,7 @@ Please follow the process below to add new tests: 2. If your test should run against a single image, add your test code to one of the modules in `some-notebook/test/` or create a new module. 3. Build one or more images you intend to test and run the tests locally. If you use `make`, call: ``` -make image/somestack-notebook +make build/somestack-notebook make test/somestack-notebook ``` 4. [Submit a pull request](https://github.com/PointCloudLibrary/pcl/wiki/A-step-by-step-guide-on-preparing-and-submitting-a-pull-request) (PR) with your changes. From d1e10fc795a1fe5d1538e42541d868d53c1d7b03 Mon Sep 17 00:00:00 2001 From: romainx Date: Fri, 14 Feb 2020 20:07:18 +0100 Subject: [PATCH 06/10] Remove pandoc os package install --- base-notebook/test/test_pandoc.py | 20 ++++++++++++++++++++ minimal-notebook/Dockerfile | 1 - pytest.ini | 3 ++- 3 files changed, 22 insertions(+), 2 deletions(-) create mode 100644 base-notebook/test/test_pandoc.py diff --git a/base-notebook/test/test_pandoc.py b/base-notebook/test/test_pandoc.py new file mode 100644 index 00000000..103f418a --- /dev/null +++ b/base-notebook/test/test_pandoc.py @@ -0,0 +1,20 @@ +# Copyright (c) Jupyter Development Team. +# Distributed under the terms of the Modified BSD License. + +import logging + +import pytest + +LOGGER = logging.getLogger(__name__) + + +def test_pandoc(container): + """Pandoc shall be able to convert MD to HTML.""" + c = container.run( + tty=True, command=["start.sh", "bash", "-c", 'echo "**BOLD**" | pandoc'] + ) + c.wait(timeout=10) + logs = c.logs(stdout=True).decode("utf-8") + LOGGER.debug(logs) + assert "

BOLD

" in logs + diff --git a/minimal-notebook/Dockerfile b/minimal-notebook/Dockerfile index a706eefc..e578b503 100644 --- a/minimal-notebook/Dockerfile +++ b/minimal-notebook/Dockerfile @@ -19,7 +19,6 @@ RUN apt-get update && apt-get install -yq --no-install-recommends \ libxrender1 \ lmodern \ netcat \ - pandoc \ python-dev \ texlive-fonts-extra \ texlive-fonts-recommended \ diff --git a/pytest.ini b/pytest.ini index f861f05e..1c0585dc 100644 --- a/pytest.ini +++ b/pytest.ini @@ -1,5 +1,6 @@ [pytest] +addopts = -rA log_cli = 1 -log_cli_level = INFO +log_cli_level = DEBUG log_cli_format = %(asctime)s [%(levelname)8s] %(message)s (%(filename)s:%(lineno)s) log_cli_date_format=%Y-%m-%d %H:%M:%S \ No newline at end of file From dd922582d5158557413cd93aed8f03a610f1e83d Mon Sep 17 00:00:00 2001 From: romainx Date: Fri, 14 Feb 2020 20:08:29 +0100 Subject: [PATCH 07/10] Log level back to info --- pytest.ini | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pytest.ini b/pytest.ini index 1c0585dc..d6bb7778 100644 --- a/pytest.ini +++ b/pytest.ini @@ -1,6 +1,6 @@ [pytest] addopts = -rA log_cli = 1 -log_cli_level = DEBUG +log_cli_level = INFO log_cli_format = %(asctime)s [%(levelname)8s] %(message)s (%(filename)s:%(lineno)s) log_cli_date_format=%Y-%m-%d %H:%M:%S \ No newline at end of file From 3aa61f94c204c07ad00b8e352a86d4258d5d55a5 Mon Sep 17 00:00:00 2001 From: Peter Parente Date: Sat, 15 Feb 2020 18:53:43 -0500 Subject: [PATCH 08/10] Split SPARK_HOME definition from other env vars --- pyspark-notebook/Dockerfile | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pyspark-notebook/Dockerfile b/pyspark-notebook/Dockerfile index f2dea592..c8dd3e09 100644 --- a/pyspark-notebook/Dockerfile +++ b/pyspark-notebook/Dockerfile @@ -36,11 +36,11 @@ RUN apt-get -y update && \ rm -rf /var/lib/apt/lists/* # Spark and Mesos config -ENV SPARK_HOME=/usr/local/spark \ - PYTHONPATH=$SPARK_HOME/python:$SPARK_HOME/python/lib/py4j-0.10.7-src.zip \ +ENV 
SPARK_HOME=/usr/local/spark +ENV PYTHONPATH=$SPARK_HOME/python:$SPARK_HOME/python/lib/py4j-0.10.7-src.zip \ MESOS_NATIVE_LIBRARY=/usr/local/lib/libmesos.so \ SPARK_OPTS="--driver-java-options=-Xms1024M --driver-java-options=-Xmx4096M --driver-java-options=-Dlog4j.logLevel=info" \ - PATH=$PATH:/usr/local/spark/bin + PATH=$PATH:$SPARH_HOME/bin USER $NB_UID From 4a8b58a41b7b6f39fba9db855f337028c0f24aea Mon Sep 17 00:00:00 2001 From: Peter Parente Date: Sat, 15 Feb 2020 18:53:58 -0500 Subject: [PATCH 09/10] Test pyspark import --- pyspark-notebook/Dockerfile | 2 +- pyspark-notebook/test/test_spark.py | 13 ++++++++++++- 2 files changed, 13 insertions(+), 2 deletions(-) diff --git a/pyspark-notebook/Dockerfile b/pyspark-notebook/Dockerfile index c8dd3e09..19b11d14 100644 --- a/pyspark-notebook/Dockerfile +++ b/pyspark-notebook/Dockerfile @@ -40,7 +40,7 @@ ENV SPARK_HOME=/usr/local/spark ENV PYTHONPATH=$SPARK_HOME/python:$SPARK_HOME/python/lib/py4j-0.10.7-src.zip \ MESOS_NATIVE_LIBRARY=/usr/local/lib/libmesos.so \ SPARK_OPTS="--driver-java-options=-Xms1024M --driver-java-options=-Xmx4096M --driver-java-options=-Dlog4j.logLevel=info" \ - PATH=$PATH:$SPARH_HOME/bin + PATH=$PATH:$SPARK_HOME/bin USER $NB_UID diff --git a/pyspark-notebook/test/test_spark.py b/pyspark-notebook/test/test_spark.py index 2b5499ae..a09d0256 100644 --- a/pyspark-notebook/test/test_spark.py +++ b/pyspark-notebook/test/test_spark.py @@ -16,4 +16,15 @@ def test_spark_shell(container): c.wait(timeout=30) logs = c.logs(stdout=True).decode('utf-8') LOGGER.debug(logs) - assert 'res0: Int = 2' in logs \ No newline at end of file + assert 'res0: Int = 2' in logs + +def test_pyspark(container): + """PySpark should be in the Python path""" + c = container.run( + tty=True, + command=['start.sh', 'python', '-c', '"import pyspark"'] + ) + rv = c.wait(timeout=30) + assert rv == 0 or rv["StatusCode"] == 0 + logs = c.logs(stdout=True).decode('utf-8') + LOGGER.debug(logs) From 
9b983ea89d557234fb9a5032b5ce998064af35ba Mon Sep 17 00:00:00 2001 From: romainx Date: Mon, 17 Feb 2020 21:40:36 +0100 Subject: [PATCH 10/10] clean tex dependency + test --- minimal-notebook/Dockerfile | 9 +- minimal-notebook/test/data/notebook1.ipynb | 149 +++++++++++++++++++++ minimal-notebook/test/test_nbconvert.py | 31 +++++ 3 files changed, 185 insertions(+), 4 deletions(-) create mode 100644 minimal-notebook/test/data/notebook1.ipynb create mode 100644 minimal-notebook/test/test_nbconvert.py diff --git a/minimal-notebook/Dockerfile b/minimal-notebook/Dockerfile index e578b503..5d80dfb9 100644 --- a/minimal-notebook/Dockerfile +++ b/minimal-notebook/Dockerfile @@ -20,12 +20,13 @@ RUN apt-get update && apt-get install -yq --no-install-recommends \ lmodern \ netcat \ python-dev \ - texlive-fonts-extra \ + # ---- nbconvert dependencies ---- + texlive-xetex \ texlive-fonts-recommended \ texlive-generic-recommended \ - texlive-latex-base \ - texlive-latex-extra \ - texlive-xetex \ + # Optional dependency + texlive-fonts-extra \ + # ---- tzdata \ unzip \ nano \ diff --git a/minimal-notebook/test/data/notebook1.ipynb b/minimal-notebook/test/data/notebook1.ipynb new file mode 100644 index 00000000..0ee2ca07 --- /dev/null +++ b/minimal-notebook/test/data/notebook1.ipynb @@ -0,0 +1,149 @@ +{ + "metadata": { + "name": "notebook1" + }, + "nbformat": 3, + "nbformat_minor": 0, + "worksheets": [ + { + "cells": [ + { + "cell_type": "heading", + "level": 1, + "metadata": {}, + "source": [ + "A simple SymPy example" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "First we import SymPy and initialize printing:" + ] + }, + { + "cell_type": "code", + "collapsed": false, + "input": [ + "from sympy import init_printing\n", + "from sympy import *\n", + " init_printing()" + ], + "language": "python", + "metadata": {}, + "outputs": [], + "prompt_number": 2 + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Create a few symbols:" + ] + }, + 
{ + "cell_type": "code", + "collapsed": false, + "input": [ + "x,y,z = symbols('x y z')" + ], + "language": "python", + "metadata": {}, + "outputs": [], + "prompt_number": 4 + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Here is a basic expression:" + ] + }, + { + "cell_type": "code", + "collapsed": false, + "input": [ + "e = x**2 + 2.0*y + sin(z); e" + ], + "language": "python", + "metadata": {}, + "outputs": [ + { + "latex": [ + "$$x^{2} + 2.0 y + \\sin{\\left (z \\right )}$$" + ], + "metadata": {}, + "output_type": "pyout", + "png": "iVBORw0KGgoAAAANSUhEUgAAAKMAAAAZBAMAAACvE4OgAAAAMFBMVEX///8AAAAAAAAAAAAAAAAA\nAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAv3aB7AAAAD3RSTlMAEHarIkSJZt3NVLsy\nme8Q6PJIAAACz0lEQVRIDa1UTWjUQBT+ZpvdzW7TGlrxItjYSg/C6vbiDwjmoCgUpHioPYhdqig9\nFJYiPYmW4klB14NgFGnw4EHpj7UgUtTFXhSEBgVBxIOFggWVrrUqiMY3mZkkLNIK7oN575vvvfky\n8yYJIGzgkSlRrULKrivVSkvq6LbxtcaSjV3aSo0lgWyl5pK69V+SRlEsPxNTGYhhDrV3M2Ue2etc\nEDmuMmM+IjolrCuHXNoLoQDNSAXdzbjsfFVKTY1vCgFXFIxenG4cFSSzRewAPnN0FugXjPDr45MQ\nJwoKtitgXL9zT+CsJeIHYG+Z4H1gwhRU4G/FcAQbbYU3KdDo+0sCK8lRU0guA72uKqMYk9RehHxP\niDIu0NS2v90KGShJYi7T7tgvkrQ2vIT2XtRISWNra6lzGc8/PW3ji4PL7Vmge095YIX0iB71NCaZ\n5N3XyM0VCuNIyFNIyY3AMG/KDUvjn90DGmwq9wpIl5AyU5WsTYy0aJf6JFGB5An3Der5jExKHjNR\n4JKPge/EXqDBoOXpkxkmkJHFfAFRVhDIveWA0S57N2Me6yw+DSX1n1uCq3sIfCF2IcjNkjeWyKli\nginHubboOB4vSNAjyaiXE26ygrkyTfod55Lj3CTE+n2P73ImJpnk6wJJKjYJSwt3OQbNJu4icM5s\nKGGbzMuD70N6JSbJD44x7pLDyJrbkfiLpOEhYVMJSVEj83x5YFLyNrAzJsmvJ+uhLrieXvcJDshy\nHtQuD54c2IWWEnSXfUTDZJJfAjcpOW5imp9aHvw4ZZ4NDV4FGjw0tzadKgbFwinJUd//AT0P1tdW\nBtuRU39oKdk9ONQ163fM+nvu/s4D/FX30otdQIZGlSnJKpq6KUxKVqV1WxGHFIhishjhEO1Gi3r4\nkZCMg+hH1henV8EjmFoly1PTMs/Uadaox+FceY2STpmvt9co/Pe0Jvt1GvgDK/Osw/4jQ4wAAAAA\nSUVORK5CYII=\n", + "prompt_number": 6, + "text": [ + " 2 \n", + "x + 2.0\u22c5y + sin(z)" + ] + } + ], + "prompt_number": 6 + }, + { + "cell_type": "code", + "collapsed": false, + "input": [ + "diff(e, x)" + ], + "language": "python", + "metadata": {}, + 
"outputs": [ + { + "latex": [ + "$$2 x$$" + ], + "metadata": {}, + "output_type": "pyout", + "png": "iVBORw0KGgoAAAANSUhEUgAAABQAAAAOBAMAAADd6iHDAAAAMFBMVEX///8AAAAAAAAAAAAAAAAA\nAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAv3aB7AAAAD3RSTlMAIpm7MhCriUTv3c12\nVGZoascqAAAAgElEQVQIHWNgVDJ2YICAMAb2H1BmKgPDTChzFgNDvgOEvT8AzgQKrA9gPZPYUwNk\ncXxnCGd4dWA1kMllwFDKUB9wEchUZmAIYNgMZDDwJIDIPyDiEgOjAAPLFwZWBhYFBh6BqzwfGI4y\nSJUXZXH8Zf7A+IBh////v1hzjh5/xwAAW80hUDE8HYkAAAAASUVORK5CYII=\n", + "prompt_number": 7, + "text": [ + "2\u22c5x" + ] + } + ], + "prompt_number": 7 + }, + { + "cell_type": "code", + "collapsed": false, + "input": [ + "integrate(e, z)" + ], + "language": "python", + "metadata": {}, + "outputs": [ + { + "latex": [ + "$$x^{2} z + 2.0 y z - \\cos{\\left (z \\right )}$$" + ], + "metadata": {}, + "output_type": "pyout", + "png": "iVBORw0KGgoAAAANSUhEUgAAALsAAAAZBAMAAACbakK8AAAAMFBMVEX///8AAAAAAAAAAAAAAAAA\nAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAv3aB7AAAAD3RSTlMAEHarIkSJZt3NVLsy\nme8Q6PJIAAADAklEQVRIDbVVS2gTURQ90/wmk0k6tCJCsR1SKShIsxE3CgNWBKUxq9qFmqFqShfF\nUKQrkaDiF0pcCKYgBBcuBLV+wIWKARe6kQ4UhNKKWdiF4KIptmA/xPvmzZuMxdYUzIPcd+655568\nvLlJAL6G32oOasQWNHz5Rvg6nrKh/mygfSzlX2ygPaBUGmov6//NXs1yq4sex2EPrsHemTd2snNg\ntkb+Cx1zBL6SqwxZLvQAKYHzKZaPY4fh4TeHd0S5Nox9OClItm/jiU9DrEwwVEawpiVis9VkimqX\nAOr4o2cCs/0BT2I5+FYJRhJbePQxgzcD7QLEqtV5gdnu2Icr3L45gcCyt74Z7neL4SLQ0nm4S+dM\nYCz1gSPHnhKZDWyHhcCCNKwjqaF/TkwGl0L6nClie/wc1D1xdoNsSLhT0IJkhi7Lzr22xb8keE/N\nPm0Sc9yEuhRUyuiG9HzvFNeImCyq39SriOhtQI7IV/TiTqE8glqwohjE0NJwiANxOZTdZoxtfzSa\nx2tI8DtHcKQoQFmV6f1XT2swibxFL+6k5EgenhBCqKLTPX3ULnaYdDlaTMcCSd8zuXTvBq2bJUJr\nlE4WgSV5ZRdBzLFgO6nzhJp1ltvrlB2HCoWxQuG+jTvt2GxBWUZaU2mMApZNuSHA3vJpCliRhqqs\nZtvbTrb9ZIk+i70Ut1OcnpgeKskTCFUwjaYy8Jhr3eiefq0HIfa7yC6HOwVyULRuNDn21JngbcL+\nE8A+MNnSxb+w59+Cj2tELJBbjEZr8SGwn0j2aLkTPdp08R2OcKV6fXB3ikPH3n8tM5WTfrETtZcw\ng3QWH0dH7nKNiMkszqo/EDafaHhJ5Bm6ee4UtdAabxnMcmUUl0SnYx+uVqs5XAGN9QGgdeCrASv0\n3TmCsJcOdhnozexD38goK9HXynEKr1OKDs9guhQD039kGySyIQpJAdbvJ9YTlPvyUl3/a
LUf34G/\nuGxIyXpE37DoLbAHwJaU53t9MRCfrU8o/k4iRn36Lar8Wd5wAfgN4R6xelyy/ssAAAAASUVORK5C\nYII=\n", + "prompt_number": 8, + "text": [ + " 2 \n", + "x \u22c5z + 2.0\u22c5y\u22c5z - cos(z)" + ] + } + ], + "prompt_number": 8 + }, + { + "cell_type": "code", + "collapsed": false, + "input": [], + "language": "python", + "metadata": {}, + "outputs": [] + } + ], + "metadata": {} + } + ] +} \ No newline at end of file diff --git a/minimal-notebook/test/test_nbconvert.py b/minimal-notebook/test/test_nbconvert.py new file mode 100644 index 00000000..653deae9 --- /dev/null +++ b/minimal-notebook/test/test_nbconvert.py @@ -0,0 +1,31 @@ +# Copyright (c) Jupyter Development Team. +# Distributed under the terms of the Modified BSD License. + +import logging + +import pytest +import os + +LOGGER = logging.getLogger(__name__) + + +@pytest.mark.parametrize("format", ["html", "pdf"]) +def test_nbconvert(container, format): + """Check if nbconvert is able to convert a notebook file""" + host_data_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)), "data") + cont_data_dir = "/home/jovyan/data" + test_file = "notebook1" + output_dir = "/tmp" + LOGGER.info(f"Converting example notebook to {format.upper()} ...") + command = f"jupyter nbconvert {cont_data_dir}/{test_file}.ipynb --output-dir {output_dir} --to {format}" + c = container.run( + volumes={host_data_dir: {"bind": cont_data_dir, "mode": "ro"}}, + tty=True, + command=["start.sh", "bash", "-c", command], + ) + rv = c.wait(timeout=30) + assert rv == 0 or rv["StatusCode"] == 0 + logs = c.logs(stdout=True).decode("utf-8") + LOGGER.debug(logs) + assert f"{output_dir}/{test_file}.{format}" in logs +