Commit 5811dcb7 authored by Peter Parente, committed by GitHub

Merge pull request #664 from parente/spark-2.3.1

Bumping up Spark and Conda to the current version
parents 03b897d0 ede59875
 # Copyright (c) Jupyter Development Team.
 # Distributed under the terms of the Modified BSD License.
-# Ubuntu 18.04 (bionic) from 2018-04-26
+# Ubuntu 18.04 (bionic) from 2018-05-26
 # https://github.com/docker-library/official-images/commit/aac6a45b9eb2bffb8102353c350d341a410fb169
-FROM ubuntu:bionic-20180426@sha256:c8c275751219dadad8fa56b3ac41ca6cb22219ff117ca98fe82b42f24e1ba64e
+FROM ubuntu:bionic-20180526@sha256:c8c275751219dadad8fa56b3ac41ca6cb22219ff117ca98fe82b42f24e1ba64e
 LABEL maintainer="Jupyter Project <jupyter@googlegroups.com>"
@@ -63,16 +63,16 @@ RUN mkdir /home/$NB_USER/work && \
     fix-permissions /home/$NB_USER

 # Install conda as jovyan and check the md5 sum provided on the download site
-ENV MINICONDA_VERSION 4.5.1
+ENV MINICONDA_VERSION 4.5.4
 RUN cd /tmp && \
     wget --quiet https://repo.continuum.io/miniconda/Miniconda3-${MINICONDA_VERSION}-Linux-x86_64.sh && \
-    echo "0c28787e3126238df24c5d4858bd0744 *Miniconda3-${MINICONDA_VERSION}-Linux-x86_64.sh" | md5sum -c - && \
+    echo "a946ea1d0c4a642ddf0c3a26a18bb16d *Miniconda3-${MINICONDA_VERSION}-Linux-x86_64.sh" | md5sum -c - && \
     /bin/bash Miniconda3-${MINICONDA_VERSION}-Linux-x86_64.sh -f -b -p $CONDA_DIR && \
     rm Miniconda3-${MINICONDA_VERSION}-Linux-x86_64.sh && \
     $CONDA_DIR/bin/conda config --system --prepend channels conda-forge && \
     $CONDA_DIR/bin/conda config --system --set auto_update_conda false && \
     $CONDA_DIR/bin/conda config --system --set show_channel_urls true && \
-    $CONDA_DIR/bin/conda install --quiet --yes conda=4.5.4 && \
+    $CONDA_DIR/bin/conda install --quiet --yes conda="${MINICONDA_VERSION%.*}.*" && \
     $CONDA_DIR/bin/conda update --all --quiet --yes && \
     conda clean -tipsy && \
     rm -rf /home/$NB_USER/.cache/yarn && \
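The conda pin in the first Dockerfile above is the subtler change: instead of hard-coding conda=4.5.4, the RUN step now derives the pin from MINICONDA_VERSION with shell parameter expansion, keeping the installed conda on the same 4.5.x series as the Miniconda installer. A minimal bash sketch of how that expansion resolves (illustration only, not part of the commit):

# Illustration only: how the derived conda pin in the RUN step above resolves.
MINICONDA_VERSION=4.5.4
echo "conda=${MINICONDA_VERSION%.*}.*"
# prints: conda=4.5.*  ("%.*" drops the trailing ".4", leaving a 4.5-series wildcard)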
@@ -7,7 +7,7 @@ LABEL maintainer="Jupyter Project <jupyter@googlegroups.com>"
 USER root

 # Spark dependencies
-ENV APACHE_SPARK_VERSION 2.3.0
+ENV APACHE_SPARK_VERSION 2.3.1
 ENV HADOOP_VERSION 2.7
 RUN apt-get -y update && \
@@ -17,7 +17,7 @@ RUN apt-get -y update && \
 RUN cd /tmp && \
     wget -q http://apache.claz.org/spark/spark-${APACHE_SPARK_VERSION}/spark-${APACHE_SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz && \
-    echo "258683885383480BA01485D6C6F7DC7CFD559C1584D6CEB7A3BBCF484287F7F57272278568F16227BE46B4F92591768BA3D164420D87014A136BF66280508B46 *spark-${APACHE_SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz" | sha512sum -c - && \
+    echo "DC3A97F3D99791D363E4F70A622B84D6E313BD852F6FDBC777D31EAB44CBC112CEEAA20F7BF835492FB654F48AE57E9969F93D3B0E6EC92076D1C5E1B40B4696 *spark-${APACHE_SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz" | sha512sum -c - && \
     tar xzf spark-${APACHE_SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz -C /usr/local --owner root --group root --no-same-owner && \
     rm spark-${APACHE_SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz
 RUN cd /usr/local && ln -s spark-${APACHE_SPARK_VERSION}-bin-hadoop${HADOOP_VERSION} spark
@@ -38,7 +38,7 @@ RUN apt-get -y update && \
 # Spark and Mesos config
 ENV SPARK_HOME /usr/local/spark
-ENV PYTHONPATH $SPARK_HOME/python:$SPARK_HOME/python/lib/py4j-0.10.6-src.zip
+ENV PYTHONPATH $SPARK_HOME/python:$SPARK_HOME/python/lib/py4j-0.10.7-src.zip
 ENV MESOS_NATIVE_LIBRARY /usr/local/lib/libmesos.so
 ENV SPARK_OPTS --driver-java-options=-Xms1024M --driver-java-options=-Xmx4096M --driver-java-options=-Dlog4j.logLevel=info
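Two values in the second Dockerfile move together whenever APACHE_SPARK_VERSION is bumped: the sha512 checksum pinned for the release tarball and the py4j zip referenced on PYTHONPATH (Spark 2.3.1 bundles py4j 0.10.7 rather than 0.10.6). A rough bash sketch of how both values can be read off a locally downloaded tarball; the only assumption is that the tarball fetched by the wget step above is already present in the working directory:

# Illustration only: gathering the two values pinned in the diff above.
APACHE_SPARK_VERSION=2.3.1
HADOOP_VERSION=2.7
TARBALL="spark-${APACHE_SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz"  # assumed already downloaded

# sha512 checksum that the echo | sha512sum -c step pins:
sha512sum "${TARBALL}"

# py4j zip shipped inside the tarball, which PYTHONPATH must point at:
tar tzf "${TARBALL}" | grep 'python/lib/py4j-.*-src\.zip'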