diff --git a/pyspark-notebook/Dockerfile b/pyspark-notebook/Dockerfile
index c8dd3e09..19b11d14 100644
--- a/pyspark-notebook/Dockerfile
+++ b/pyspark-notebook/Dockerfile
@@ -40,7 +40,7 @@ ENV SPARK_HOME=/usr/local/spark
 ENV PYTHONPATH=$SPARK_HOME/python:$SPARK_HOME/python/lib/py4j-0.10.7-src.zip \
     MESOS_NATIVE_LIBRARY=/usr/local/lib/libmesos.so \
     SPARK_OPTS="--driver-java-options=-Xms1024M --driver-java-options=-Xmx4096M --driver-java-options=-Dlog4j.logLevel=info" \
-    PATH=$PATH:$SPARH_HOME/bin
+    PATH=$PATH:$SPARK_HOME/bin
 
 USER $NB_UID
 
diff --git a/pyspark-notebook/test/test_spark.py b/pyspark-notebook/test/test_spark.py
index 2b5499ae..a09d0256 100644
--- a/pyspark-notebook/test/test_spark.py
+++ b/pyspark-notebook/test/test_spark.py
@@ -16,4 +16,15 @@ def test_spark_shell(container):
     c.wait(timeout=30)
     logs = c.logs(stdout=True).decode('utf-8')
     LOGGER.debug(logs)
-    assert 'res0: Int = 2' in logs
\ No newline at end of file
+    assert 'res0: Int = 2' in logs
+
+def test_pyspark(container):
+    """PySpark should be in the Python path"""
+    c = container.run(
+        tty=True,
+        command=['start.sh', 'python', '-c', '"import pyspark"']
+    )
+    rv = c.wait(timeout=30)
+    assert rv == 0 or rv["StatusCode"] == 0
+    logs = c.logs(stdout=True).decode('utf-8')
+    LOGGER.debug(logs)