Mirror of https://github.com/jupyter/docker-stacks.git, synced 2025-10-17 15:02:57 +00:00
Merge branch 'master' into asalikhov/py_codestyle
@@ -11,20 +11,30 @@ SHELL ["/bin/bash", "-o", "pipefail", "-c"]
 USER root
 
 # Spark dependencies
-ENV APACHE_SPARK_VERSION=3.0.0 \
-    HADOOP_VERSION=3.2
+# Default values can be overridden at build time
+# (ARGS are in lower case to distinguish them from ENV)
+ARG spark_version="3.0.1"
+ARG hadoop_version="3.2"
+ARG spark_checksum="E8B47C5B658E0FBC1E57EEA06262649D8418AE2B2765E44DA53AAF50094877D17297CC5F0B9B35DF2CEEF830F19AA31D7E56EAD950BBE7F8830D6874F88CFC3C"
+ARG py4j_version="0.10.9"
+ARG openjdk_version="11"
+
+ENV APACHE_SPARK_VERSION="${spark_version}" \
+    HADOOP_VERSION="${hadoop_version}"
 
 RUN apt-get -y update && \
-    apt-get install --no-install-recommends -y openjdk-11-jre-headless ca-certificates-java && \
+    apt-get install --no-install-recommends -y \
+    "openjdk-${openjdk_version}-jre-headless" \
+    ca-certificates-java && \
     rm -rf /var/lib/apt/lists/*
 
-# Using the preferred mirror to download Spark
+# Spark installation
 WORKDIR /tmp
+
+# Using the preferred mirror to download Spark
 # hadolint ignore=SC2046
 RUN wget -q $(wget -qO- https://www.apache.org/dyn/closer.lua/spark/spark-${APACHE_SPARK_VERSION}/spark-${APACHE_SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz\?as_json | \
     python -c "import sys, json; content=json.load(sys.stdin); print(content['preferred']+content['path_info'])") && \
-    echo "BFE45406C67CC4AE00411AD18CC438F51E7D4B6F14EB61E7BF6B5450897C2E8D3AB020152657C0239F253735C263512FFABF538AC5B9FFFA38B8295736A9C387 *spark-${APACHE_SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz" | sha512sum -c - && \
+    echo "${spark_checksum} *spark-${APACHE_SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz" | sha512sum -c - && \
     tar xzf "spark-${APACHE_SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz" -C /usr/local --owner root --group root --no-same-owner && \
     rm "spark-${APACHE_SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz"
 
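The first hunk replaces the hard-coded Spark pin with build arguments, so the image can be rebuilt against a different Spark, Hadoop, py4j, or OpenJDK release without editing the Dockerfile. A minimal sketch of such a build, run from a docker-stacks checkout; the image tag my-pyspark-notebook is a placeholder, and the checksum is the 3.0.1 value pinned in this commit (it must be replaced for any other Spark tarball):

    # Override the declared ARGs at build time with --build-arg.
    docker build --rm --force-rm \
        --build-arg spark_version=3.0.1 \
        --build-arg hadoop_version=3.2 \
        --build-arg spark_checksum=E8B47C5B658E0FBC1E57EEA06262649D8418AE2B2765E44DA53AAF50094877D17297CC5F0B9B35DF2CEEF830F19AA31D7E56EAD950BBE7F8830D6874F88CFC3C \
        --build-arg openjdk_version=11 \
        -t my-pyspark-notebook ./pyspark-notebook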
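The download step also changes shape: the nested wget asks apache.org's closer.lua endpoint for a JSON description of the preferred mirror, the inline Python joins its preferred and path_info fields into a concrete URL, and sha512sum then verifies the fetched tarball against the pinned checksum. Unrolled into separate steps, and assuming the endpoint still returns that JSON shape, the logic looks like this sketch (the Dockerfile runs the same python interpreter inside the image):

    # Resolve the preferred Apache mirror for the Spark tarball.
    SPARK_TGZ="spark-3.0.1-bin-hadoop3.2.tgz"
    URL=$(wget -qO- "https://www.apache.org/dyn/closer.lua/spark/spark-3.0.1/${SPARK_TGZ}?as_json" | \
        python -c "import sys, json; c = json.load(sys.stdin); print(c['preferred'] + c['path_info'])")
    wget -q "${URL}"
    # Replace <expected-sha512> with the pinned value; sha512sum -c fails
    # (and so fails the build) if the mirror served a tampered or truncated file.
    echo "<expected-sha512> *${SPARK_TGZ}" | sha512sum -c -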
@@ -33,16 +43,17 @@ RUN ln -s "spark-${APACHE_SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}" spark
 
 # Configure Spark
 ENV SPARK_HOME=/usr/local/spark
-ENV PYTHONPATH=$SPARK_HOME/python:$SPARK_HOME/python/lib/py4j-0.10.9-src.zip \
+ENV PYTHONPATH="${SPARK_HOME}/python:${SPARK_HOME}/python/lib/py4j-${py4j_version}-src.zip" \
     SPARK_OPTS="--driver-java-options=-Xms1024M --driver-java-options=-Xmx4096M --driver-java-options=-Dlog4j.logLevel=info" \
     PATH=$PATH:$SPARK_HOME/bin
 
 USER $NB_UID
 
 # Install pyarrow
-RUN conda install --quiet -y 'pyarrow' && \
+RUN conda install --quiet --yes --satisfied-skip-solve \
+    'pyarrow=1.0.*' && \
     conda clean --all -f -y && \
     fix-permissions "${CONDA_DIR}" && \
     fix-permissions "/home/${NB_USER}"
 
 WORKDIR $HOME
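The second hunk threads the same py4j_version through PYTHONPATH instead of hard-coding 0.10.9, and pins pyarrow to the 1.0 series (--satisfied-skip-solve lets conda exit early when the pin is already satisfied). A hedged smoke test of the resulting wiring, reusing the hypothetical tag from the build sketch above:

    # pyspark resolves via PYTHONPATH from /usr/local/spark; pyarrow is the conda pin.
    docker run --rm my-pyspark-notebook python -c \
        "import pyspark, pyarrow; print(pyspark.__version__, pyarrow.__version__)"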