This is an automated email from the ASF dual-hosted git repository.

felixcheung pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/zeppelin.git


The following commit(s) were added to refs/heads/master by this push:
     new 966a392  [ZEPPELIN-3944] Update Dockerfiles of spark_standalone and spark_yarn_cluster (#3282)
966a392 is described below

commit 966a39240237edfd72b067afe867a244fae6474a
Author: keineahnung2345 <mimifasosofamire1...@gmail.com>
AuthorDate: Wed Jan 16 16:10:05 2019 +0800

    [ZEPPELIN-3944] Update Dockerfiles of spark_standalone and spark_yarn_cluster (#3282)
    
    ### What is this PR for?
    Upgrade the Dockerfiles of spark_standalone and spark_yarn_cluster to CentOS 7 and Spark 2.4.0.
    Java remains at version 7, since Hadoop 2.x depends on Java 7.
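
    Condensed from the diff below, the resulting pins in both Dockerfiles look roughly like this:
    
    ```Dockerfile
    # CentOS 7 base image (was centos:centos6)
    FROM centos:centos7
    
    # Spark 2.4.0 built against the Hadoop 2.7 profile
    ENV SPARK_PROFILE 2.4
    ENV SPARK_VERSION 2.4.0
    ENV HADOOP_PROFILE 2.7
    
    # Java stays at 7, since Hadoop 2.x depends on Java 7
    RUN yum install -y java-1.7.0-openjdk-devel
    ENV JAVA_HOME /usr/lib/jvm/java
    ```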
    
    ### What type of PR is it?
    Improvement
    
    ### What is the Jira issue?
    * [ZEPPELIN-3944](https://issues.apache.org/jira/browse/ZEPPELIN-3944)
    
    ### How should this be tested?
    * Follow the instructions here: [spark-standalone](https://zeppelin.apache.org/docs/0.8.0/setup/deployment/spark_cluster_mode.html#spark-standalone-mode) and [spark-yarn-cluster](https://zeppelin.apache.org/docs/0.8.0/setup/deployment/spark_cluster_mode.html#spark-on-yarn-mode); a rough build-and-run sketch follows below.
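
    For instance, a local smoke test of the standalone image could look like the following (the image name and port mappings here are illustrative; the linked docs give the exact commands):
    
    ```bash
    cd scripts/docker/spark-cluster-managers/spark_standalone
    # build the updated image
    docker build -t spark_standalone .
    # run it, mapping the web UI and Spark master ports (adjust per the docs)
    docker run -it -p 8080:8080 -p 7077:7077 spark_standalone
    ```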
    
    ### Questions:
    * Do the license files need to be updated? No
    * Are there breaking changes for older versions? No
    * Does this need documentation? No
    
    * Update spark_standalone Dockerfile
    
    * Update yarn Dockerfile
    
    * Back to Java 7, since Hadoop 2.x depends on Java 7
    
    * Back to Java 7 (Hadoop 2.x depends on Java 7)
    
    * Change the path to /xxx/.../jre-1.7.0-openjdk (see the hadoop-env.sh sketch below)
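
    After the path fix, the sed in the spark_yarn_cluster Dockerfile (see the diff below) should leave hadoop-env.sh with roughly these lines:
    
    ```bash
    export JAVA_HOME=/usr/lib/jvm/jre-1.7.0-openjdk
    export HADOOP_PREFIX=/usr/local/hadoop
    export HADOOP_HOME=/usr/local/hadoop
    ```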
---
 scripts/docker/spark-cluster-managers/spark_mesos/Dockerfile   |  4 ++--
 .../docker/spark-cluster-managers/spark_standalone/Dockerfile  | 10 +++++-----
 .../spark-cluster-managers/spark_yarn_cluster/Dockerfile       |  8 ++++----
 3 files changed, 11 insertions(+), 11 deletions(-)

diff --git a/scripts/docker/spark-cluster-managers/spark_mesos/Dockerfile b/scripts/docker/spark-cluster-managers/spark_mesos/Dockerfile
index 0eb26ca..b87838e 100644
--- a/scripts/docker/spark-cluster-managers/spark_mesos/Dockerfile
+++ b/scripts/docker/spark-cluster-managers/spark_mesos/Dockerfile
@@ -36,8 +36,8 @@ yum clean all
 # Remove old jdk
 RUN yum remove java; yum remove jdk
 
-# install jdk8
-RUN yum install -y java-1.8.0-openjdk-devel
+# install jdk7
+RUN yum install -y java-1.7.0-openjdk-devel
 ENV JAVA_HOME /usr/lib/jvm/java
 ENV PATH $PATH:$JAVA_HOME/bin
 
diff --git a/scripts/docker/spark-cluster-managers/spark_standalone/Dockerfile b/scripts/docker/spark-cluster-managers/spark_standalone/Dockerfile
index 8bf0f8d..e4fb780 100644
--- a/scripts/docker/spark-cluster-managers/spark_standalone/Dockerfile
+++ b/scripts/docker/spark-cluster-managers/spark_standalone/Dockerfile
@@ -12,10 +12,10 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-FROM centos:centos6
+FROM centos:centos7
 
-ENV SPARK_PROFILE 2.1
-ENV SPARK_VERSION 2.1.2
+ENV SPARK_PROFILE 2.4
+ENV SPARK_VERSION 2.4.0
 ENV HADOOP_PROFILE 2.7
 ENV SPARK_HOME /usr/local/spark
 
@@ -33,13 +33,13 @@ yum clean all
 # Remove old jdk
 RUN yum remove java; yum remove jdk
 
-# install jdk7 
+# install jdk7
 RUN yum install -y java-1.7.0-openjdk-devel
 ENV JAVA_HOME /usr/lib/jvm/java
 ENV PATH $PATH:$JAVA_HOME/bin
 
 # install spark
-RUN curl -s http://apache.mirror.cdnetworks.com/spark/spark-$SPARK_VERSION/spark-$SPARK_VERSION-bin-hadoop$HADOOP_PROFILE.tgz | tar -xz -C /usr/local/
+RUN curl -s http://www.apache.org/dist/spark/spark-$SPARK_VERSION/spark-$SPARK_VERSION-bin-hadoop$HADOOP_PROFILE.tgz | tar -xz -C /usr/local/
 RUN cd /usr/local && ln -s spark-$SPARK_VERSION-bin-hadoop$HADOOP_PROFILE spark
 
 # update boot script
diff --git a/scripts/docker/spark-cluster-managers/spark_yarn_cluster/Dockerfile b/scripts/docker/spark-cluster-managers/spark_yarn_cluster/Dockerfile
index cbbda20..da3df1c 100644
--- a/scripts/docker/spark-cluster-managers/spark_yarn_cluster/Dockerfile
+++ b/scripts/docker/spark-cluster-managers/spark_yarn_cluster/Dockerfile
@@ -12,10 +12,10 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-FROM centos:centos6
+FROM centos:centos7
 
-ENV SPARK_PROFILE 2.1
-ENV SPARK_VERSION 2.1.2
+ENV SPARK_PROFILE 2.4
+ENV SPARK_VERSION 2.4.0
 ENV HADOOP_PROFILE 2.7
 ENV HADOOP_VERSION 2.7.0
 
@@ -52,7 +52,7 @@ ENV HADOOP_MAPRED_HOME /usr/local/hadoop
 ENV HADOOP_YARN_HOME /usr/local/hadoop
 ENV HADOOP_CONF_DIR /usr/local/hadoop/etc/hadoop
 
-RUN sed -i '/^export JAVA_HOME/ s:.*:export JAVA_HOME=/usr/lib/jvm/jre-1.7.0-openjdk.x86_64\nexport HADOOP_PREFIX=/usr/local/hadoop\nexport HADOOP_HOME=/usr/local/hadoop\n:' $HADOOP_PREFIX/etc/hadoop/hadoop-env.sh
+RUN sed -i '/^export JAVA_HOME/ s:.*:export JAVA_HOME=/usr/lib/jvm/jre-1.7.0-openjdk\nexport HADOOP_PREFIX=/usr/local/hadoop\nexport HADOOP_HOME=/usr/local/hadoop\n:' $HADOOP_PREFIX/etc/hadoop/hadoop-env.sh
 RUN sed -i '/^export HADOOP_CONF_DIR/ s:.*:export HADOOP_CONF_DIR=/usr/local/hadoop/etc/hadoop/:' $HADOOP_PREFIX/etc/hadoop/hadoop-env.sh
 
 RUN mkdir $HADOOP_PREFIX/input
