Spark->3.3,Hadoop->3,Scala->2.13,Java->17

Darek
2022-06-16 14:53:37 -04:00
parent 666416b385
commit aeb940220f
3 changed files with 15 additions and 13 deletions


@@ -15,10 +15,11 @@ USER root
 # Spark dependencies
 # Default values can be overridden at build time
 # (ARGS are in lower case to distinguish them from ENV)
-ARG spark_version="3.2.1"
-ARG hadoop_version="3.2"
-ARG spark_checksum="145ADACF189FECF05FBA3A69841D2804DD66546B11D14FC181AC49D89F3CB5E4FECD9B25F56F0AF767155419CD430838FB651992AEB37D3A6F91E7E009D1F9AE"
-ARG openjdk_version="11"
+ARG spark_version="3.3.0"
+ARG hadoop_version="3"
+ARG scala_version="2.13"
+ARG spark_checksum="4c09dac70e22bf1d5b7b2cabc1dd92aba13237f52a5b682c67982266fc7a0f5e0f964edff9bc76adbd8cb444eb1a00fdc59516147f99e4e2ce068420ff4881f0"
+ARG openjdk_version="17"
 
 ENV APACHE_SPARK_VERSION="${spark_version}" \
     HADOOP_VERSION="${hadoop_version}"
@@ -31,10 +32,10 @@ RUN apt-get update --yes && \
 # Spark installation
 WORKDIR /tmp
-RUN wget -q "https://archive.apache.org/dist/spark/spark-${APACHE_SPARK_VERSION}/spark-${APACHE_SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz" && \
-    echo "${spark_checksum} *spark-${APACHE_SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz" | sha512sum -c - && \
-    tar xzf "spark-${APACHE_SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz" -C /usr/local --owner root --group root --no-same-owner && \
-    rm "spark-${APACHE_SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz"
+RUN wget -q "https://archive.apache.org/dist/spark/spark-${APACHE_SPARK_VERSION}/spark-${APACHE_SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}-scala${scala_version}.tgz" && \
+    echo "${spark_checksum} *spark-${APACHE_SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}-scala${scala_version}.tgz" | sha512sum -c - && \
+    tar xzf "spark-${APACHE_SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}-scala${scala_version}.tgz" -C /usr/local --owner root --group root --no-same-owner && \
+    rm "spark-${APACHE_SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}-scala${scala_version}.tgz"
 
 WORKDIR /usr/local
@@ -43,7 +44,7 @@ ENV SPARK_HOME=/usr/local/spark
 ENV SPARK_OPTS="--driver-java-options=-Xms1024M --driver-java-options=-Xmx4096M --driver-java-options=-Dlog4j.logLevel=info" \
     PATH="${PATH}:${SPARK_HOME}/bin"
 
-RUN ln -s "spark-${APACHE_SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}" spark && \
+RUN ln -s "spark-${APACHE_SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}-scala${scala_version}" spark && \
     # Add a link in the before_notebook hook in order to source automatically PYTHONPATH
     mkdir -p /usr/local/bin/before-notebook.d && \
     ln -s "${SPARK_HOME}/sbin/spark-config.sh" /usr/local/bin/before-notebook.d/spark-config.sh


@@ -38,8 +38,8 @@ def test_nbconvert(container: TrackedContainer, test_file: str) -> None:
         command=["start.sh", "bash", "-c", command],
     )
     warnings = TrackedContainer.get_warnings(logs)
-    # Some Spark warnings
-    assert len(warnings) == 5
+    # No Spark warnings
+    assert len(warnings) == 0
 
     expected_file = f"{output_dir}/{test_file}.md"
     assert expected_file in logs, f"Expected file {expected_file} not generated"
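Spark 3.3 on OpenJDK 17 starts cleanly in this image, so the nbconvert test stops budgeting for five startup warnings and instead requires none. A rough sketch of the kind of check involved, assuming (this is an assumption, not the repo's actual conftest code) that TrackedContainer.get_warnings simply filters warning-level lines out of the captured logs:

```python
# Hypothetical stand-in for TrackedContainer.get_warnings; the real helper
# in the repo's conftest may differ.
def get_warnings(logs: str) -> list[str]:
    return [line for line in logs.splitlines() if "WARNING" in line]

# With the upgraded image the captured startup log is expected to be clean.
logs = "INFO: kernel started\nINFO: notebook converted\n"
assert len(get_warnings(logs)) == 0
```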


@@ -16,7 +16,8 @@ def test_spark_shell(container: TrackedContainer) -> None:
         command=["start.sh", "bash", "-c", 'spark-shell <<< "1+1"'],
     )
     warnings = TrackedContainer.get_warnings(logs)
-    # Some Spark warnings
-    assert len(warnings) == 5
+    # Should not see any Spark warnings
+    assert len(warnings) == 0
+
     assert "res0: Int = 2" in logs, "spark-shell does not work"