Mirror of https://github.com/jupyter/docker-stacks.git, synced 2025-10-13 13:02:56 +00:00.
Commit: Merge branch 'master' into asalikhov/automatic_conda_versioning.
This commit is contained in:
def test_spark_shell(container):
    """Checking if Spark (spark-shell) is running properly"""
    # Feed "1+1" to spark-shell via a heredoc; the REPL prints the result
    # as its first bound value (res0) before exiting.
    c = container.run(
        tty=True,
        command=["start.sh", "bash", "-c", 'spark-shell <<< "1+1"'],
    )
    # spark-shell startup is slow (JVM + Spark context), so allow 60s.
    c.wait(timeout=60)
    logs = c.logs(stdout=True).decode("utf-8")
    LOGGER.debug(logs)
    # res0 is the Scala REPL's auto-generated name for the first expression.
    assert "res0: Int = 2" in logs, "spark-shell does not work"
||||
def test_pyspark(container):
    """PySpark should be in the Python path"""
    # A bare `import pyspark` succeeds only if the package is resolvable
    # from the image's default PYTHONPATH.
    c = container.run(
        tty=True,
        command=["start.sh", "python", "-c", "import pyspark"],
    )
    rv = c.wait(timeout=30)
    logs = c.logs(stdout=True).decode("utf-8")
    LOGGER.debug(logs)
    # Older docker-py returns the exit code as an int; newer versions
    # return a dict like {"StatusCode": 0} — accept both shapes.
    assert rv == 0 or rv["StatusCode"] == 0, "pyspark not in PYTHONPATH"
New file (4 lines): pyspark-notebook/test/units/unit_spark.py
@@ -0,0 +1,4 @@
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.

# Unit-level smoke test: importing pyspark at all verifies it is on the
# Python path inside the image. The import is intentionally unused.
import pyspark  # noqa: F401
Reference in New Issue
Block a user