mirror of https://github.com/jupyter/docker-stacks.git
update for Spark 1.5.1
@@ -6,7 +6,7 @@ MAINTAINER Jupyter Project <jupyter@googlegroups.com>
 USER root
 
 # Spark dependencies
-ENV APACHE_SPARK_VERSION 1.4.1
+ENV APACHE_SPARK_VERSION 1.5.1
 RUN apt-get -y update && \
     apt-get install -y --no-install-recommends openjdk-7-jre-headless && \
     apt-get clean
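As a sanity check (not part of this change), a rebuilt image should report the bumped version from a local-mode context; a minimal sketch, assuming it is run inside the container where pyspark is already importable:

```python
import pyspark

# Start a throwaway local-mode context inside the rebuilt container and
# confirm it reports the bumped Spark version.
sc = pyspark.SparkContext(master="local[*]", appName="version-check")
print(sc.version)  # expected: 1.5.1
sc.stop()
```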
@@ -5,7 +5,7 @@
 * Jupyter Notebook 4.0.x
 * Conda Python 3.x and Python 2.7.x environments
 * pyspark, pandas, matplotlib, scipy, seaborn, scikit-learn pre-installed
-* Spark 1.4.1 for use in local mode or to connect to a cluster of Spark workers
+* Spark 1.5.1 for use in local mode or to connect to a cluster of Spark workers
 * Mesos client 0.22 binary that can communicate with a Mesos master
 * Unprivileged user `jovyan` (uid=1000, configurable, see options) in group `users` (gid=100) with ownership over `/home/jovyan` and `/opt/conda`
 * [tini](https://github.com/krallin/tini) as the container entrypoint and [start-notebook.sh](../minimal-notebook/start-notebook.sh) as the default command
@@ -72,8 +72,8 @@ conf = pyspark.SparkConf()
 # point to mesos master or zookeeper entry (e.g., zk://10.10.10.10:2181/mesos)
 conf.setMaster("mesos://10.10.10.10:5050")
 # point to spark binary package in HDFS or on local filesystem on all slave
-# nodes (e.g., file:///opt/spark/spark-1.4.1-bin-hadoop2.6.tgz)
-conf.set("spark.executor.uri", "hdfs://10.122.193.209/spark/spark-1.4.1-bin-hadoop2.6.tgz")
+# nodes (e.g., file:///opt/spark/spark-1.5.1-bin-hadoop2.6.tgz)
+conf.set("spark.executor.uri", "hdfs://10.122.193.209/spark/spark-1.5.1-bin-hadoop2.6.tgz")
 # set other options as desired
 conf.set("spark.executor.memory", "8g")
 conf.set("spark.core.connection.ack.wait.timeout", "1200")
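The README lines just outside this hunk are assumed to build a SparkContext from this conf; a minimal sketch of exercising the updated `spark.executor.uri` end to end, reusing the placeholder Mesos/HDFS addresses from the diff:

```python
import pyspark

# Same configuration as in the README hunk above; the Mesos master and HDFS
# host are the placeholder addresses from the diff, not real endpoints.
conf = pyspark.SparkConf()
conf.setMaster("mesos://10.10.10.10:5050")
conf.set("spark.executor.uri",
         "hdfs://10.122.193.209/spark/spark-1.5.1-bin-hadoop2.6.tgz")
conf.set("spark.executor.memory", "8g")
conf.set("spark.core.connection.ack.wait.timeout", "1200")

# Create a context against the Mesos master and run a trivial job so the
# executors have to fetch the Spark 1.5.1 package referenced above.
sc = pyspark.SparkContext(conf=conf)
print(sc.parallelize(range(1000)).sum())  # expected: 499500
sc.stop()
```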