mirror of
https://github.com/jupyter/docker-stacks.git
synced 2025-10-07 10:04:03 +00:00
Add check_nbconvert function and run tests for appropriate images (#2266)
This commit is contained in:
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
import logging
from pathlib import Path

import pytest  # type: ignore

from tests.utils.tracked_container import TrackedContainer

LOGGER = logging.getLogger(__name__)
THIS_DIR = Path(__file__).parent.resolve()


@pytest.mark.flaky(retries=3, delay=1)
@pytest.mark.parametrize(
    "test_file",
    ["issue_1168", "local_pyspark", "local_sparkR"],
)
def test_nbconvert(container: TrackedContainer, test_file: str) -> None:
    """Check if Spark notebooks can be executed"""
    # Mount the notebooks folder read-only and convert/execute inside the container.
    notebooks_on_host = THIS_DIR / "data"
    notebooks_in_container = "/home/jovyan/data"
    result_dir = "/tmp"
    # NOTE(review): despite the "ms" suffix, nbconvert's ExecutePreprocessor.timeout
    # is interpreted in seconds — confirm before tightening this value.
    conversion_timeout_ms = 5000

    LOGGER.info(f"Test that {test_file} notebook can be executed ...")
    nbconvert_cmd = [
        "jupyter",
        "nbconvert",
        "--to",
        "markdown",
        f"--ExecutePreprocessor.timeout={conversion_timeout_ms}",
        "--output-dir",
        result_dir,
        "--execute",
        f"{notebooks_in_container}/{test_file}.ipynb",
    ]
    logs = container.run_and_wait(
        timeout=60,
        no_warnings=False,
        volumes={str(notebooks_on_host): {"bind": notebooks_in_container, "mode": "ro"}},
        command=nbconvert_cmd,
    )

    # Spark images are expected to emit exactly one JVM incubator warning.
    warnings = TrackedContainer.get_warnings(logs)
    assert len(warnings) == 1
    assert "Using incubator modules: jdk.incubator.vector" in warnings[0]

    # nbconvert logs the written file; markdown output uses the ".md" extension.
    expected_file = f"{result_dir}/{test_file}.md"
    assert expected_file in logs, f"Expected file {expected_file} not generated"
|
|
31
tests/by_image/all-spark-notebook/test_spark_r_nbconvert.py
Normal file
31
tests/by_image/all-spark-notebook/test_spark_r_nbconvert.py
Normal file
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
import logging
from pathlib import Path

import pytest  # type: ignore

from tests.shared_checks.nbconvert_check import check_nbconvert
from tests.utils.tracked_container import TrackedContainer

LOGGER = logging.getLogger(__name__)
THIS_DIR = Path(__file__).parent.resolve()


@pytest.mark.flaky(retries=3, delay=1)
@pytest.mark.parametrize(
    "test_file",
    ["local_sparkR"],
)
@pytest.mark.parametrize("output_format", ["pdf", "html", "markdown"])
def test_spark_r_nbconvert(
    container: TrackedContainer, test_file: str, output_format: str
) -> None:
    """Execute a Spark R notebook and convert it to the requested format.

    BUG FIX: ``output_format`` was previously ignored — ``"markdown"`` was
    hard-coded in the ``check_nbconvert`` call, so the pdf/html parametrize
    variants silently re-ran the exact same markdown conversion three times.
    """
    host_data_file = THIS_DIR / "data" / f"{test_file}.ipynb"
    logs = check_nbconvert(
        container, host_data_file, output_format, execute=True, no_warnings=False
    )

    # Spark images are expected to emit exactly one JVM incubator warning.
    warnings = TrackedContainer.get_warnings(logs)
    assert len(warnings) == 1
    assert "Using incubator modules: jdk.incubator.vector" in warnings[0]
|
import pytest  # type: ignore

from tests.shared_checks.nbconvert_check import check_nbconvert
from tests.utils.tracked_container import TrackedContainer

LOGGER = logging.getLogger(__name__)


@pytest.mark.parametrize("test_file", ["notebook_math", "notebook_svg"])
@pytest.mark.parametrize("output_format", ["pdf", "html", "markdown"])
def test_nbconvert(
    container: TrackedContainer, test_file: str, output_format: str
) -> None:
    """Check if nbconvert is able to convert a notebook file"""
    # The heavy lifting (mounting the notebook, running `jupyter nbconvert`,
    # asserting the output file shows up in the logs) lives in the shared check.
    notebook_on_host = THIS_DIR / "data" / f"{test_file}.ipynb"
    check_nbconvert(container, notebook_on_host, output_format, execute=False)
31
tests/by_image/pyspark-notebook/test_spark_nbconvert.py
Normal file
31
tests/by_image/pyspark-notebook/test_spark_nbconvert.py
Normal file
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
import logging
from pathlib import Path

import pytest  # type: ignore

from tests.shared_checks.nbconvert_check import check_nbconvert
from tests.utils.tracked_container import TrackedContainer

LOGGER = logging.getLogger(__name__)
THIS_DIR = Path(__file__).parent.resolve()


@pytest.mark.flaky(retries=3, delay=1)
@pytest.mark.parametrize(
    "test_file",
    ["issue_1168", "local_pyspark"],
)
@pytest.mark.parametrize("output_format", ["pdf", "html", "markdown"])
def test_spark_nbconvert(
    container: TrackedContainer, test_file: str, output_format: str
) -> None:
    """Execute a Spark notebook and convert it to the requested format.

    BUG FIX: ``output_format`` was previously ignored — ``"markdown"`` was
    hard-coded in the ``check_nbconvert`` call, so the pdf/html parametrize
    variants silently re-ran the exact same markdown conversion three times.
    """
    host_data_file = THIS_DIR / "data" / f"{test_file}.ipynb"
    logs = check_nbconvert(
        container, host_data_file, output_format, execute=True, no_warnings=False
    )

    # Spark images are expected to emit exactly one JVM incubator warning.
    warnings = TrackedContainer.get_warnings(logs)
    assert len(warnings) == 1
    assert "Using incubator modules: jdk.incubator.vector" in warnings[0]
|
51
tests/shared_checks/nbconvert_check.py
Normal file
51
tests/shared_checks/nbconvert_check.py
Normal file
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
import logging
from pathlib import Path

from tests.utils.tracked_container import TrackedContainer

LOGGER = logging.getLogger(__name__)


def check_nbconvert(
    container: TrackedContainer,
    host_data_file: Path,
    output_format: str,
    *,
    execute: bool,
    no_warnings: bool = True,
) -> str:
    """Check if nbconvert is able to convert a notebook file.

    Args:
        container: wrapper around the docker container running the image under test
        host_data_file: ``.ipynb`` file on the host; bind-mounted read-only
            into the container
        output_format: nbconvert exporter name (e.g. ``"pdf"``, ``"html"``,
            ``"markdown"``)
        execute: when True, execute the notebook before converting it
        no_warnings: forwarded to ``run_and_wait`` (fail on warnings in logs)

    Returns:
        The container logs, so callers can run additional assertions on them.
    """
    cont_data_file = "/home/jovyan/data/" + host_data_file.name

    output_dir = "/tmp"
    LOGGER.info(
        f"Test that the example notebook {host_data_file.name} can be converted to {output_format} ..."
    )
    command = [
        "jupyter",
        "nbconvert",
        cont_data_file,
        "--output-dir",
        output_dir,
        "--to",
        output_format,
    ]
    if execute:
        # FIX: nbconvert's ExecutePreprocessor.timeout is measured in SECONDS,
        # not milliseconds — the old name "conversion_timeout_ms" was misleading.
        # The value is kept at 5000 to preserve behavior; run_and_wait's own
        # 60 s timeout is the effective limit anyway.
        conversion_timeout = 5000
        command += [
            "--execute",
            f"--ExecutePreprocessor.timeout={conversion_timeout}",
        ]
    logs = container.run_and_wait(
        timeout=60,
        volumes={str(host_data_file): {"bind": cont_data_file, "mode": "ro"}},
        command=command,
        no_warnings=no_warnings,
    )
    # nbconvert writes "<stem>.md" for markdown; other exporters use the format name.
    output_ext = "md" if output_format == "markdown" else output_format
    expected_file = f"{output_dir}/{host_data_file.stem}.{output_ext}"
    assert expected_file in logs, f"Expected file {expected_file} not generated"

    return logs
|
Reference in New Issue
Block a user