Repository: zeppelin Updated Branches: refs/heads/master 483dc3f2b -> 733a20210
ZEPPELIN-3299. Combine spark integration test with its unit test in one build ### What is this PR for? In ZEPPELIN-3254, we introduced 2 builds for spark integration tests. We can run the unit tests together with these system tests so that we can save 2 builds in travis and also save time to set up the environment. ### What type of PR is it? [Improvement ] ### Todos * [ ] - Task ### What is the Jira issue? * https://issues.apache.org/jira/browse/ZEPPELIN-3299 ### How should this be tested? * Travis CI Pass ### Screenshots (if appropriate) ### Questions: * Do the license files need to be updated? No * Are there breaking changes for older versions? No * Does this need documentation? No Author: Jeff Zhang <zjf...@apache.org> Closes #2843 from zjffdu/ZEPPELIN-3299 and squashes the following commits: dcbbb10 [Jeff Zhang] ZEPPELIN-3299. Combine spark integration test with its unit test in one build Project: http://git-wip-us.apache.org/repos/asf/zeppelin/repo Commit: http://git-wip-us.apache.org/repos/asf/zeppelin/commit/733a2021 Tree: http://git-wip-us.apache.org/repos/asf/zeppelin/tree/733a2021 Diff: http://git-wip-us.apache.org/repos/asf/zeppelin/diff/733a2021 Branch: refs/heads/master Commit: 733a202109d9087556f24822347a3903da997a88 Parents: 483dc3f Author: Jeff Zhang <zjf...@apache.org> Authored: Tue Mar 6 22:33:30 2018 +0800 Committer: Jeff Zhang <zjf...@apache.org> Committed: Wed Mar 7 11:15:23 2018 +0800 ---------------------------------------------------------------------- .travis.yml | 19 +++++-------------- 1 file changed, 5 insertions(+), 14 deletions(-) ---------------------------------------------------------------------- http://git-wip-us.apache.org/repos/asf/zeppelin/blob/733a2021/.travis.yml ---------------------------------------------------------------------- diff --git a/.travis.yml b/.travis.yml index 9edb198..bcef80a 100644 --- a/.travis.yml +++ b/.travis.yml @@ -82,22 +82,17 @@ matrix: # Run ZeppelinSparkClusterTest & SparkIntegrationTest in one build would 
exceed the time limitation of travis, so running them separately - # Test spark interpreter with different spark versions under python2, only run ZeppelinSparkClusterTest + # Integration test of spark interpreter with different spark versions under python2, only run ZeppelinSparkClusterTest. Also run spark unit test of spark 2.2 in this build. - sudo: required jdk: "oraclejdk8" dist: trusty - env: PYTHON="2" PROFILE="-Pspark-2.2" SPARKR="true" BUILD_FLAG="package -DskipTests -DskipRat -am" TEST_FLAG="test -DskipRat -am" MODULES="-pl zeppelin-zengine,zeppelin-server,spark/interpreter,spark/spark-dependencies" TEST_PROJECTS="-Dtest=ZeppelinSparkClusterTest -DfailIfNoTests=false" + env: PYTHON="2" PROFILE="-Pspark-2.2" SPARKR="true" BUILD_FLAG="package -DskipTests -DskipRat -am" TEST_FLAG="test -DskipRat -am" MODULES="-pl zeppelin-zengine,zeppelin-server,spark/interpreter,spark/spark-dependencies" TEST_PROJECTS="-Dtest=ZeppelinSparkClusterTest,org.apache.zeppelin.spark.* -DfailIfNoTests=false" - # Test spark interpreter with different spark versions under python3, only run SparkIntegrationTest + # Integration test of spark interpreter with different spark versions under python3, only run SparkIntegrationTest. Also run spark unit test of spark 1.6 in this build. 
- sudo: required jdk: "oraclejdk8" dist: trusty - env: PYTHON="3" PROFILE="-Pspark-2.2" SPARKR="true" BUILD_FLAG="package -DskipTests -DskipRat -am" TEST_FLAG="test -DskipRat -am" MODULES="-pl zeppelin-zengine,spark/interpreter,spark/spark-dependencies" TEST_PROJECTS="-Dtest=SparkIntegrationTest -DfailIfNoTests=false" - - # Test spark module for 2.2.0 with scala 2.11 - - jdk: "oraclejdk8" - dist: trusty - env: PYTHON="2" SCALA_VER="2.11" SPARK_VER="2.2.0" HADOOP_VER="2.6" PROFILE="-Pspark-2.2 -Phadoop3 -Phadoop-2.6 -Pscala-2.11" SPARKR="true" BUILD_FLAG="package -DskipTests -DskipRat -am" TEST_FLAG="test -DskipRat -am" MODULES="-pl spark/interpreter,spark/spark-dependencies" TEST_PROJECTS="-Dtest=org.apache.zeppelin.spark.* -DfailIfNoTests=false" + env: PYTHON="3" PROFILE="-Pspark-1.6" SPARKR="true" BUILD_FLAG="package -DskipTests -DskipRat -am" TEST_FLAG="test -DskipRat -am" MODULES="-pl zeppelin-zengine,spark/interpreter,spark/spark-dependencies" TEST_PROJECTS="-Dtest=SparkIntegrationTest,org.apache.zeppelin.spark.* -DfailIfNoTests=false" # Test spark module for 2.1.0 with scala 2.11 - jdk: "openjdk7" @@ -109,11 +104,7 @@ matrix: dist: trusty env: PYTHON="2" SCALA_VER="2.11" SPARK_VER="2.0.2" HADOOP_VER="2.6" PROFILE="-Pspark-2.0 -Phadoop3 -Phadoop-2.6 -Pscala-2.11" SPARKR="true" BUILD_FLAG="package -DskipTests -DskipRat -am" TEST_FLAG="test -DskipRat -am" MODULES="-pl spark/interpreter,spark/spark-dependencies" TEST_PROJECTS="-Dtest=org.apache.zeppelin.spark.* -DfailIfNoTests=false" - # Test spark module for 1.6.3 with scala 2.10 - - jdk: "openjdk7" - dist: trusty - env: PYTHON="3" SCALA_VER="2.10" SPARK_VER="1.6.3" HADOOP_VER="2.6" PROFILE="-Pspark-1.6 -Phadoop2 -Phadoop-2.6 -Pscala-2.10" SPARKR="true" BUILD_FLAG="package -DskipTests -DskipRat -am" TEST_FLAG="test -DskipRat -am" MODULES="-pl spark/interpreter,spark/spark-dependencies" TEST_PROJECTS="-Dtest=org.apache.zeppelin.spark.* -DfailIfNoTests=false" - + # Test python/pyspark with python 2, livy 0.5 - 
sudo: required dist: trusty jdk: "openjdk7"