Mirror of https://github.com/jupyter/docker-stacks.git (synced 2025-10-18 07:22:57 +00:00)
Removing Spark config specific to Apache Arrow and Java 11
@@ -49,12 +49,6 @@ RUN ln -s "spark-${APACHE_SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}-scala${scal
     mkdir -p /usr/local/bin/before-notebook.d && \
     ln -s "${SPARK_HOME}/sbin/spark-config.sh" /usr/local/bin/before-notebook.d/spark-config.sh
 
-# Fix Spark installation for Java 11 and Apache Arrow library
-# see: https://github.com/apache/spark/pull/27356, https://spark.apache.org/docs/latest/#downloading
-RUN cp -p "${SPARK_HOME}/conf/spark-defaults.conf.template" "${SPARK_HOME}/conf/spark-defaults.conf" && \
-    echo 'spark.driver.extraJavaOptions -Dio.netty.tryReflectionSetAccessible=true' >> "${SPARK_HOME}/conf/spark-defaults.conf" && \
-    echo 'spark.executor.extraJavaOptions -Dio.netty.tryReflectionSetAccessible=true' >> "${SPARK_HOME}/conf/spark-defaults.conf"
-
 # Configure IPython system-wide
 COPY ipython_kernel_config.py "/etc/ipython/"
 RUN fix-permissions "/etc/ipython/"
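The deleted RUN step only matters for workloads that hit the Apache Arrow / Java 11 reflection issue referenced above. A downstream image that still needs those JVM flags can re-apply them on top of the published image. The following is a minimal sketch, assuming jupyter/pyspark-notebook as the base and that the spark-defaults.conf.template file still ships with the Spark distribution inside the image; the base image tag is illustrative and not part of this commit.

# Hypothetical downstream Dockerfile: restore the removed Arrow/Java 11 settings.
FROM jupyter/pyspark-notebook:latest

# SPARK_HOME is set by the base image; write the same options the removed
# RUN step used to append to spark-defaults.conf.
RUN cp -p "${SPARK_HOME}/conf/spark-defaults.conf.template" "${SPARK_HOME}/conf/spark-defaults.conf" && \
    echo 'spark.driver.extraJavaOptions -Dio.netty.tryReflectionSetAccessible=true' >> "${SPARK_HOME}/conf/spark-defaults.conf" && \
    echo 'spark.executor.extraJavaOptions -Dio.netty.tryReflectionSetAccessible=true' >> "${SPARK_HOME}/conf/spark-defaults.conf"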