Repository: incubator-zeppelin Updated Branches: refs/heads/master 0d4c3acc1 -> 65b47e505
[ZEPPELIN-665] travis CI change to not rerun interpreter tests ### What is this PR for? While investigating CI failure with PR #706, I noticed that we were running all tests in each of 6 jobs in the test matrix. It seems we have accidentally switched back to running all tests even with older versions of Spark (from PR #626). Filtering out interpreter tests for these runs helps make test jobs faster (~30%) and also reduces the chance that an interpreter test might intermittently fail, thus hopefully improving CI reliability. Also changing Spark 1.2.1 to 1.2.2 (last 1.2.x release) ### What type of PR is it? Improvement ### Todos * [x] - Travis CI yml changes ### Is there a relevant Jira issue? https://issues.apache.org/jira/browse/ZEPPELIN-665 ### How should this be tested? Run Travis CI ### Screenshots (if appropriate) N/A ### Questions: * Do the license files need an update? No * Are there breaking changes for older versions? No * Does this need documentation? No Author: Felix Cheung <[email protected]> Closes #708 from felixcheung/testprofile and squashes the following commits: ac3bd72 [Felix Cheung] fix project list b0c8b9b [Felix Cheung] add spark tests 27d57fe [Felix Cheung] fix project list cd0cb97 [Felix Cheung] test project list for older spark versions Project: http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/repo Commit: http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/commit/65b47e50 Tree: http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/tree/65b47e50 Diff: http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/diff/65b47e50 Branch: refs/heads/master Commit: 65b47e50549e8c64b5aff9a6ceec16668d1ca52e Parents: 0d4c3ac Author: Felix Cheung <[email protected]> Authored: Tue Feb 9 21:58:08 2016 -0800 Committer: Felix Cheung <[email protected]> Committed: Thu Feb 11 16:49:00 2016 -0800 ---------------------------------------------------------------------- .travis.yml | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) 
---------------------------------------------------------------------- http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/65b47e50/.travis.yml ---------------------------------------------------------------------- diff --git a/.travis.yml b/.travis.yml index 2619ed1..e680a5c 100644 --- a/.travis.yml +++ b/.travis.yml @@ -19,27 +19,27 @@ matrix: include: # Test all modules - jdk: "oraclejdk7" - env: SPARK_VER="1.6.0" HADOOP_VER="2.3" PROFILE="-Pspark-1.6 -Phadoop-2.3 -Ppyspark -Pscalding" BUILD_FLAG="package -Pbuild-distr" TEST_FLAG="verify -Pusing-packaged-distr" + env: SPARK_VER="1.6.0" HADOOP_VER="2.3" PROFILE="-Pspark-1.6 -Phadoop-2.3 -Ppyspark -Pscalding" BUILD_FLAG="package -Pbuild-distr" TEST_FLAG="verify -Pusing-packaged-distr" TEST_PROJECTS="" # Test spark module for 1.5.2 - jdk: "oraclejdk7" - env: SPARK_VER="1.5.2" HADOOP_VER="2.3" PROFILE="-Pspark-1.5 -Phadoop-2.3 -Ppyspark" BUILD_FLAG="package -DskipTests" TEST_FLAG="verify" + env: SPARK_VER="1.5.2" HADOOP_VER="2.3" PROFILE="-Pspark-1.5 -Phadoop-2.3 -Ppyspark" BUILD_FLAG="package -DskipTests" TEST_FLAG="verify" TEST_PROJECTS="-pl zeppelin-interpreter,zeppelin-zengine,zeppelin-server,zeppelin-display,spark-dependencies,spark -Dtest=org.apache.zeppelin.rest.*Test,org.apache.zeppelin.spark* -DfailIfNoTests=false" # Test spark module for 1.4.1 - jdk: "oraclejdk7" - env: SPARK_VER="1.4.1" HADOOP_VER="2.3" PROFILE="-Pspark-1.4 -Phadoop-2.3 -Ppyspark" BUILD_FLAG="package -DskipTests" TEST_FLAG="verify" + env: SPARK_VER="1.4.1" HADOOP_VER="2.3" PROFILE="-Pspark-1.4 -Phadoop-2.3 -Ppyspark" BUILD_FLAG="package -DskipTests" TEST_FLAG="verify" TEST_PROJECTS="-pl zeppelin-interpreter,zeppelin-zengine,zeppelin-server,zeppelin-display,spark-dependencies,spark -Dtest=org.apache.zeppelin.rest.*Test,org.apache.zeppelin.spark* -DfailIfNoTests=false" # Test spark module for 1.3.1 - jdk: "oraclejdk7" - env: SPARK_VER="1.3.1" HADOOP_VER="2.3" PROFILE="-Pspark-1.3 -Phadoop-2.3 -Ppyspark" BUILD_FLAG="package 
-DskipTests" TEST_FLAG="verify" + env: SPARK_VER="1.3.1" HADOOP_VER="2.3" PROFILE="-Pspark-1.3 -Phadoop-2.3 -Ppyspark" BUILD_FLAG="package -DskipTests" TEST_FLAG="verify" TEST_PROJECTS="-pl zeppelin-interpreter,zeppelin-zengine,zeppelin-server,zeppelin-display,spark-dependencies,spark -Dtest=org.apache.zeppelin.rest.*Test,org.apache.zeppelin.spark* -DfailIfNoTests=false" - # Test spark module for 1.2.1 + # Test spark module for 1.2.2 - jdk: "oraclejdk7" - env: SPARK_VER="1.2.1" HADOOP_VER="2.3" PROFILE="-Pspark-1.2 -Phadoop-2.3 -Ppyspark" BUILD_FLAG="package -DskipTests" TEST_FLAG="verify" + env: SPARK_VER="1.2.2" HADOOP_VER="2.3" PROFILE="-Pspark-1.2 -Phadoop-2.3 -Ppyspark" BUILD_FLAG="package -DskipTests" TEST_FLAG="verify" TEST_PROJECTS="-pl zeppelin-interpreter,zeppelin-zengine,zeppelin-server,zeppelin-display,spark-dependencies,spark -Dtest=org.apache.zeppelin.rest.*Test,org.apache.zeppelin.spark* -DfailIfNoTests=false" # Test spark module for 1.1.1 - jdk: "oraclejdk7" - env: SPARK_VER="1.1.1" HADOOP_VER="2.3" PROFILE="-Pspark-1.1 -Phadoop-2.3 -Ppyspark" BUILD_FLAG="package -DskipTests" TEST_FLAG="verify" + env: SPARK_VER="1.1.1" HADOOP_VER="2.3" PROFILE="-Pspark-1.1 -Phadoop-2.3 -Ppyspark" BUILD_FLAG="package -DskipTests" TEST_FLAG="verify" TEST_PROJECTS="-pl zeppelin-interpreter,zeppelin-zengine,zeppelin-server,zeppelin-display,spark-dependencies,spark -Dtest=org.apache.zeppelin.rest.*Test,org.apache.zeppelin.spark* -DfailIfNoTests=false" before_install: - "export DISPLAY=:99.0" @@ -53,7 +53,7 @@ before_script: - echo "export SPARK_HOME=`pwd`/spark-$SPARK_VER-bin-hadoop$HADOOP_VER" > conf/zeppelin-env.sh script: - - mvn $TEST_FLAG $PROFILE -B + - mvn $TEST_FLAG $PROFILE -B $TEST_PROJECTS after_failure: - cat target/rat.txt
