# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
ARG BASE_CONTAINER=jupyter/scipy-notebook
FROM $BASE_CONTAINER

LABEL maintainer="Jupyter Project <jupyter@googlegroups.com>"

USER root

# Spark dependencies
ENV APACHE_SPARK_VERSION 2.4.1
ENV HADOOP_VERSION 2.7

RUN apt-get -y update && \
    apt-get install --no-install-recommends -y openjdk-8-jre-headless ca-certificates-java && \
    rm -rf /var/lib/apt/lists/*

RUN cd /tmp && \
    wget -q http://mirrors.ukfast.co.uk/sites/ftp.apache.org/spark/spark-${APACHE_SPARK_VERSION}/spark-${APACHE_SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz && \
    echo "360A7B57290537C5EB3570C70D0D0B9580C4F9DB8D0FA9746C3BBB6544BBB8F629901582968955ACEB5649CB9D66C2D524971E4E3EF34C35D96F02FF6DBA4D72 *spark-${APACHE_SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz" | sha512sum -c - && \
    tar xzf spark-${APACHE_SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz -C /usr/local --owner root --group root --no-same-owner && \
    rm spark-${APACHE_SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz
RUN cd /usr/local && ln -s spark-${APACHE_SPARK_VERSION}-bin-hadoop${HADOOP_VERSION} spark
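
# Usage sketch (comments only; nothing here runs at build time). The symlink
# above gives the Spark distribution a stable path, /usr/local/spark, which
# SPARK_HOME points at below. In local mode a notebook in this image can use
# it directly, for example:
#
#   import pyspark
#   sc = pyspark.SparkContext("local[*]")  # run Spark inside the container
#   sc.parallelize(range(100)).count()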

# Mesos dependencies
# Install from the Xenial Mesosphere repository since there does not (yet)
# exist a Bionic repository and the dependencies seem to be compatible for now.
COPY mesos.key /tmp/
RUN apt-get -y update && \
    apt-get install --no-install-recommends -y gnupg && \
    apt-key add /tmp/mesos.key && \
    echo "deb http://repos.mesosphere.io/ubuntu xenial main" > /etc/apt/sources.list.d/mesosphere.list && \
    apt-get -y update && \
    apt-get --no-install-recommends -y install mesos=1.2\* && \
    apt-get purge --auto-remove -y gnupg && \
    rm -rf /var/lib/apt/lists/*

# Spark and Mesos config
ENV SPARK_HOME /usr/local/spark
ENV PYTHONPATH $SPARK_HOME/python:$SPARK_HOME/python/lib/py4j-0.10.7-src.zip
ENV MESOS_NATIVE_LIBRARY /usr/local/lib/libmesos.so
ENV SPARK_OPTS --driver-java-options=-Xms1024M --driver-java-options=-Xmx4096M --driver-java-options=-Dlog4j.logLevel=info
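
# Usage sketch (comments only; nothing here runs at build time). With
# SPARK_HOME, PYTHONPATH, and MESOS_NATIVE_LIBRARY set above, a notebook can
# submit work to a Mesos-managed Spark cluster roughly as follows; the master
# URL is a placeholder, not a value this image provides:
#
#   import pyspark
#   conf = pyspark.SparkConf().setMaster("mesos://10.10.10.10:5050")  # hypothetical master
#   sc = pyspark.SparkContext(conf=conf)
#   sc.parallelize(range(1000)).takeSample(False, 5)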

USER $NB_UID

# Install pyarrow
RUN conda install --quiet -y 'pyarrow' && \
    conda clean --all -y && \
    fix-permissions $CONDA_DIR && \
    fix-permissions /home/$NB_USER
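
# Why pyarrow: Spark 2.4 can use Apache Arrow to speed up Spark <-> pandas
# DataFrame conversion. A minimal sketch of what this enables (comments only;
# assumes a SparkSession named `spark` already exists):
#
#   spark.conf.set("spark.sql.execution.arrow.enabled", "true")
#   pdf = spark.range(10000).toPandas()  # columnar transfer via Arrow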