Mirror of https://github.com/jupyter/docker-stacks.git, synced 2025-10-17 23:12:56 +00:00
Merge branch 'master' into asalikhov/use_mamba
@@ -13,9 +13,9 @@ USER root
 # Spark dependencies
 # Default values can be overridden at build time
 # (ARGS are in lower case to distinguish them from ENV)
-ARG spark_version="3.1.1"
+ARG spark_version="3.1.2"
 ARG hadoop_version="3.2"
-ARG spark_checksum="E90B31E58F6D95A42900BA4D288261D71F6C19FA39C1CB71862B792D1B5564941A320227F6AB0E09D946F16B8C1969ED2DEA2A369EC8F9D2D7099189234DE1BE"
+ARG spark_checksum="2385CB772F21B014CE2ABD6B8F5E815721580D6E8BC42A26D70BBCDDA8D303D886A6F12B36D40F6971B5547B70FAE62B5A96146F0421CB93D4E51491308EF5D5"
 ARG openjdk_version="11"
 
 ENV APACHE_SPARK_VERSION="${spark_version}" \
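The ARG defaults above can be overridden at build time, so a version bump like this one can also be exercised without editing the Dockerfile. A minimal sketch, assuming the build runs from the image's directory and that the Apache archive publishes a matching .sha512 file next to the tarball (the URL layout and the pyspark-notebook tag are assumptions, not part of this diff):

    # Check the published SHA-512 for the bumped release (archive URL pattern assumed).
    curl -fsSL "https://archive.apache.org/dist/spark/spark-3.1.2/spark-3.1.2-bin-hadoop3.2.tgz.sha512"

    # Rebuild with the new values passed at build time instead of editing the defaults.
    docker build \
        --build-arg spark_version="3.1.2" \
        --build-arg spark_checksum="2385CB772F21B014CE2ABD6B8F5E815721580D6E8BC42A26D70BBCDDA8D303D886A6F12B36D40F6971B5547B70FAE62B5A96146F0421CB93D4E51491308EF5D5" \
        -t pyspark-notebook .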
@@ -39,7 +39,7 @@ WORKDIR /usr/local
 # Configure Spark
 ENV SPARK_HOME=/usr/local/spark
 ENV SPARK_OPTS="--driver-java-options=-Xms1024M --driver-java-options=-Xmx4096M --driver-java-options=-Dlog4j.logLevel=info" \
-    PATH=$PATH:$SPARK_HOME/bin
+    PATH="${PATH}:${SPARK_HOME}/bin"
 
 RUN ln -s "spark-${APACHE_SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}" spark && \
     # Add a link in the before_notebook hook in order to source automatically PYTHONPATH
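The PATH edit is stylistic hardening to match the ${var} quoting used elsewhere in the file: quotes protect values that contain spaces, and braces make the end of the variable name explicit. A hypothetical shell case where the braces actually matter:

    version="3.1.2"
    echo "spark-${version}rc1"   # braces: expands $version, then appends literal "rc1"
    echo "spark-$versionrc1"     # no braces: looks up a (nonexistent) variable $versionrc1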
@@ -48,11 +48,11 @@ RUN ln -s "spark-${APACHE_SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}" spark && \
 
 # Fix Spark installation for Java 11 and Apache Arrow library
 # see: https://github.com/apache/spark/pull/27356, https://spark.apache.org/docs/latest/#downloading
-RUN cp -p "$SPARK_HOME/conf/spark-defaults.conf.template" "$SPARK_HOME/conf/spark-defaults.conf" && \
-    echo 'spark.driver.extraJavaOptions -Dio.netty.tryReflectionSetAccessible=true' >> $SPARK_HOME/conf/spark-defaults.conf && \
-    echo 'spark.executor.extraJavaOptions -Dio.netty.tryReflectionSetAccessible=true' >> $SPARK_HOME/conf/spark-defaults.conf
+RUN cp -p "${SPARK_HOME}/conf/spark-defaults.conf.template" "${SPARK_HOME}/conf/spark-defaults.conf" && \
+    echo 'spark.driver.extraJavaOptions -Dio.netty.tryReflectionSetAccessible=true' >> "${SPARK_HOME}/conf/spark-defaults.conf" && \
+    echo 'spark.executor.extraJavaOptions -Dio.netty.tryReflectionSetAccessible=true' >> "${SPARK_HOME}/conf/spark-defaults.conf"
 
-USER $NB_UID
+USER ${NB_UID}
 
 # Install pyarrow
 RUN mamba install --quiet --yes \
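The -Dio.netty.tryReflectionSetAccessible=true entries are needed because Apache Arrow's Netty-based memory allocator uses reflective access that Java 9+ blocks by default; the Spark PR linked in the comment has the background. One way to confirm the options landed in a built image (the pyspark-notebook tag is hypothetical):

    docker run --rm pyspark-notebook \
        grep extraJavaOptions /usr/local/spark/conf/spark-defaults.conf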
@@ -61,4 +61,4 @@ RUN mamba install --quiet --yes \
     fix-permissions "${CONDA_DIR}" && \
     fix-permissions "/home/${NB_USER}"
 
-WORKDIR $HOME
+WORKDIR "${HOME}"
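With pyarrow installed by the mamba step above, an end-to-end smoke test is an Arrow-backed toPandas() conversion, which exercises exactly the reflective path the spark-defaults entries unblock. A sketch, again with a hypothetical image tag; spark.sql.execution.arrow.pyspark.enabled is the Spark 3.x name of the Arrow toggle:

    docker run --rm pyspark-notebook python -c \
        "from pyspark.sql import SparkSession; \
         spark = SparkSession.builder.getOrCreate(); \
         spark.conf.set('spark.sql.execution.arrow.pyspark.enabled', 'true'); \
         print(spark.range(5).toPandas())"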