# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
ARG BASE_CONTAINER=jupyter/scipy-notebook
FROM $BASE_CONTAINER

LABEL maintainer="Jupyter Project <jupyter@googlegroups.com>"

# Fix DL4006
SHELL ["/bin/bash", "-o", "pipefail", "-c"]

USER root

# Spark dependencies
# Default values can be overridden at build time
# (ARGS are in lower case to distinguish them from ENV)
ARG spark_version="3.0.1"
ARG hadoop_version="3.2"
ARG spark_checksum="E8B47C5B658E0FBC1E57EEA06262649D8418AE2B2765E44DA53AAF50094877D17297CC5F0B9B35DF2CEEF830F19AA31D7E56EAD950BBE7F8830D6874F88CFC3C"
ARG openjdk_version="11"

ENV APACHE_SPARK_VERSION="${spark_version}" \
    HADOOP_VERSION="${hadoop_version}"

RUN apt-get -y update && \
    apt-get install --no-install-recommends -y \
    "openjdk-${openjdk_version}-jre-headless" \
    ca-certificates-java && \
    apt-get clean && rm -rf /var/lib/apt/lists/*

# Spark installation
WORKDIR /tmp

# Using the preferred mirror to download Spark
# hadolint ignore=SC2046
RUN wget -q $(wget -qO- https://www.apache.org/dyn/closer.lua/spark/spark-${APACHE_SPARK_VERSION}/spark-${APACHE_SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz\?as_json | \
    python -c "import sys, json; content=json.load(sys.stdin); print(content['preferred']+content['path_info'])") && \
    echo "${spark_checksum} *spark-${APACHE_SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz" | sha512sum -c - && \
    tar xzf "spark-${APACHE_SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz" -C /usr/local --owner root --group root --no-same-owner && \
    rm "spark-${APACHE_SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz"

WORKDIR /usr/local

# Configure Spark
ENV SPARK_HOME=/usr/local/spark
ENV SPARK_OPTS="--driver-java-options=-Xms1024M --driver-java-options=-Xmx4096M --driver-java-options=-Dlog4j.logLevel=info" \
    PATH=$PATH:$SPARK_HOME/bin

RUN ln -s "spark-${APACHE_SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}" spark && \
    # Add a link in the before-notebook hook so PYTHONPATH is set automatically
    mkdir -p /usr/local/bin/before-notebook.d && \
    ln -s "${SPARK_HOME}/sbin/spark-config.sh" /usr/local/bin/before-notebook.d/spark-config.sh

# Fix Spark installation for Java 11 and Apache Arrow library
# see: https://github.com/apache/spark/pull/27356, https://spark.apache.org/docs/latest/#downloading
RUN cp -p "$SPARK_HOME/conf/spark-defaults.conf.template" "$SPARK_HOME/conf/spark-defaults.conf" && \
    echo 'spark.driver.extraJavaOptions="-Dio.netty.tryReflectionSetAccessible=true"' >> $SPARK_HOME/conf/spark-defaults.conf && \
    echo 'spark.executor.extraJavaOptions="-Dio.netty.tryReflectionSetAccessible=true"' >> $SPARK_HOME/conf/spark-defaults.conf

USER $NB_UID

# Install pyarrow
RUN conda install --quiet --yes --satisfied-skip-solve \
    'pyarrow=2.0.*' && \
    conda clean --all -f -y && \
    fix-permissions "${CONDA_DIR}" && \
    fix-permissions "/home/${NB_USER}"

WORKDIR $HOME
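
# The lower-case ARGs above can be overridden at build time. A minimal sketch,
# assuming this file sits in the build context root and "my-pyspark-notebook"
# is a hypothetical image tag:
#
#   docker build \
#     --build-arg spark_version=3.0.1 \
#     --build-arg hadoop_version=3.2 \
#     --build-arg openjdk_version=11 \
#     -t my-pyspark-notebook .
#
# Note: spark_checksum is the SHA-512 of the specific
# spark-<version>-bin-hadoop<version>.tgz archive, so if you change
# spark_version or hadoop_version you must override spark_checksum to match.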
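# A quick smoke test of the built image (same hypothetical tag as above):
# SPARK_HOME/bin is appended to PATH, so spark-submit should report the
# installed Spark version:
#
#   docker run -it --rm my-pyspark-notebook spark-submit --version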