Mirror of https://github.com/jupyter/docker-stacks.git, synced 2025-10-08 02:24:04 +00:00
@@ -35,7 +35,5 @@ def test_spark_r_nbconvert(
         no_warnings=(not expected_warnings),
     )
 
     if expected_warnings:
         warnings = TrackedContainer.get_warnings(logs)
-        assert len(warnings) == len(expected_warnings)
-        assert expected_warnings[0] == warnings[0]
+        assert warnings == expected_warnings
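A minimal standalone sketch (not code from the repository) of why the single comparison in the hunk above is sufficient: list equality already implies equal length and element-wise equality, so the two dropped assertions are subsumed by the one that remains.

    # Standalone illustration only, using the literal warning string from this diff.
    expected_warnings = ["WARNING: Using incubator modules: jdk.incubator.vector"]
    warnings = ["WARNING: Using incubator modules: jdk.incubator.vector"]

    # The two assertions removed by the hunk above:
    assert len(warnings) == len(expected_warnings)
    assert expected_warnings[0] == warnings[0]

    # The single assertion that replaces them covers both, plus every other element:
    assert warnings == expected_warnings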
@@ -15,7 +15,5 @@ def test_spark_shell(container: TrackedContainer) -> None:
         command=["bash", "-c", 'spark-shell <<< "1+1"'],
     )
     warnings = TrackedContainer.get_warnings(logs)
-    assert len(warnings) == 1
-    assert "Using incubator modules: jdk.incubator.vector" in warnings[0]
-
+    assert warnings == ["WARNING: Using incubator modules: jdk.incubator.vector"]
     assert "res0: Int = 2" in logs, "spark-shell does not work"
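Seen from the other side, the new assertion is also stricter: the old pair only required a single warning containing a substring, while the list comparison pins the exact message, including the "WARNING: " prefix. A small standalone sketch (not repository code) with a deliberately padded log line:

    # A warning with unexpected trailing text passes the old substring-style
    # checks but would fail the exact list comparison introduced above.
    warnings = ["WARNING: Using incubator modules: jdk.incubator.vector (unexpected text)"]

    assert len(warnings) == 1                                              # old check: passes
    assert "Using incubator modules: jdk.incubator.vector" in warnings[0]  # old check: passes
    assert warnings != ["WARNING: Using incubator modules: jdk.incubator.vector"]  # exact match would now fail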
@@ -12,19 +12,15 @@ LOGGER = logging.getLogger(__name__)
 THIS_DIR = Path(__file__).parent.resolve()
 
 
-@pytest.mark.parametrize(
-    "test_file",
-    ["issue_1168", "local_pyspark"],
-)
+@pytest.mark.parametrize("test_file", ["issue_1168", "local_pyspark"])
 @pytest.mark.parametrize("output_format", ["pdf", "html", "markdown"])
 def test_spark_nbconvert(
     container: TrackedContainer, test_file: str, output_format: str
 ) -> None:
     host_data_file = THIS_DIR / "data" / f"{test_file}.ipynb"
     logs = check_nbconvert(
-        container, host_data_file, "markdown", execute=True, no_warnings=False
+        container, host_data_file, output_format, execute=True, no_warnings=False
     )
 
     warnings = TrackedContainer.get_warnings(logs)
-    assert len(warnings) == 1
-    assert "Using incubator modules: jdk.incubator.vector" in warnings[0]
+    assert warnings == ["WARNING: Using incubator modules: jdk.incubator.vector"]
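For the parametrization change above, a short standalone sketch (the test name here is illustrative, not from the repo) of how stacked @pytest.mark.parametrize decorators behave: pytest generates the cross product of the two lists, so replacing the hard-coded "markdown" argument with output_format is what lets the "pdf" and "html" cases actually exercise those formats.

    import pytest

    # Illustrative only: two stacked parametrize decorators yield
    # 2 test files x 3 output formats = 6 generated test cases.
    @pytest.mark.parametrize("test_file", ["issue_1168", "local_pyspark"])
    @pytest.mark.parametrize("output_format", ["pdf", "html", "markdown"])
    def test_cross_product(test_file: str, output_format: str) -> None:
        # Each generated case receives one (test_file, output_format) combination.
        assert test_file in {"issue_1168", "local_pyspark"}
        assert output_format in {"pdf", "html", "markdown"}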