Repository: zeppelin Updated Branches: refs/heads/master 5cd806dc3 -> 0f1701da8
[HOTFIX] Add livy build in .travis ### What is this PR for? The livy integration test is ignored; this PR adds the livy integration test to travis ### What type of PR is it? [Hot Fix] ### Todos * [ ] - Task ### What is the Jira issue? * No jira created ### How should this be tested? CI pass ### Questions: * Do the license files need an update? No * Are there breaking changes for older versions? No * Does this need documentation? No Author: Jeff Zhang <zjf...@apache.org> Closes #2279 from zjffdu/hotfix_livy and squashes the following commits: 674c987 [Jeff Zhang] [HOTFIX] Add livy build in .travis Project: http://git-wip-us.apache.org/repos/asf/zeppelin/repo Commit: http://git-wip-us.apache.org/repos/asf/zeppelin/commit/0f1701da Tree: http://git-wip-us.apache.org/repos/asf/zeppelin/tree/0f1701da Diff: http://git-wip-us.apache.org/repos/asf/zeppelin/diff/0f1701da Branch: refs/heads/master Commit: 0f1701da8bb98678ae8e486796b2c1e650125106 Parents: 5cd806d Author: Jeff Zhang <zjf...@apache.org> Authored: Fri Apr 21 17:38:51 2017 +0800 Committer: Jeff Zhang <zjf...@apache.org> Committed: Thu Apr 27 16:36:18 2017 +0800 ---------------------------------------------------------------------- .travis.yml | 16 +++++++++------- .../org/apache/zeppelin/livy/LivyInterpreterIT.java | 8 ++++++-- 2 files changed, 15 insertions(+), 9 deletions(-) ---------------------------------------------------------------------- http://git-wip-us.apache.org/repos/asf/zeppelin/blob/0f1701da/.travis.yml ---------------------------------------------------------------------- diff --git a/.travis.yml b/.travis.yml index f77906f..72104db 100644 --- a/.travis.yml +++ b/.travis.yml @@ -73,13 +73,15 @@ matrix: - jdk: "oraclejdk7" env: SCALA_VER="2.11" SPARK_VER="1.6.3" HADOOP_VER="2.6" PROFILE="-Pspark-1.6 -Phadoop-2.6 -Pscala-2.11" SPARKR="true" BUILD_FLAG="package -DskipTests -DskipRat" TEST_FLAG="test -DskipRat" MODULES="-pl 
.,zeppelin-interpreter,zeppelin-zengine,zeppelin-server,zeppelin-display,spark-dependencies,spark" TEST_PROJECTS="-Dtest=ZeppelinSparkClusterTest,org.apache.zeppelin.spark.* -DfailIfNoTests=false" - # Test python/pyspark with python 2 - - jdk: "oraclejdk7" - env: PYTHON="2" SCALA_VER="2.10" SPARK_VER="1.6.1" HADOOP_VER="2.6" PROFILE="-Pspark-1.6 -Phadoop-2.6" BUILD_FLAG="package -am -DskipTests -DskipRat" TEST_FLAG="test -DskipRat" MODULES="-pl .,zeppelin-interpreter,zeppelin-display,spark-dependencies,spark,python" TEST_PROJECTS="-Dtest=org.apache.zeppelin.spark.PySpark*Test,org.apache.zeppelin.python.* -Dpyspark.test.exclude='' -DfailIfNoTests=false" - - # Test python/pyspark with python 3 - - jdk: "oraclejdk7" - env: PYTHON="3" SCALA_VER="2.11" SPARK_VER="2.0.0" HADOOP_VER="2.6" PROFILE="-Pspark-2.0 -Phadoop-2.6 -Pscala-2.11" BUILD_FLAG="package -am -DskipTests -DskipRat" TEST_FLAG="test -DskipRat" MODULES="-pl .,zeppelin-interpreter,zeppelin-display,spark-dependencies,spark,python" TEST_PROJECTS="-Dtest=org.apache.zeppelin.spark.PySpark*Test,org.apache.zeppelin.python.* -Dpyspark.test.exclude='' -DfailIfNoTests=false" + # Test python/pyspark with python 2, livy 0.2 + - sudo: required + jdk: "oraclejdk7" + env: PYTHON="2" SCALA_VER="2.10" SPARK_VER="1.6.1" HADOOP_VER="2.6" LIVY_VER="0.2.0" PROFILE="-Pspark-1.6 -Phadoop-2.6 -Plivy-0.2" BUILD_FLAG="package -am -DskipTests -DskipRat" TEST_FLAG="verify -DskipRat" MODULES="-pl .,zeppelin-interpreter,zeppelin-display,spark-dependencies,spark,python,livy" TEST_PROJECTS="-Dtest=LivySQLInterpreterTest,org.apache.zeppelin.spark.PySpark*Test,org.apache.zeppelin.python.* -Dpyspark.test.exclude='' -DfailIfNoTests=false" + + # Test python/pyspark with python 3, livy 0.3 + - sudo: required + jdk: "oraclejdk7" + env: PYTHON="3" SCALA_VER="2.11" SPARK_VER="2.0.0" HADOOP_VER="2.6" LIVY_VER="0.3.0" PROFILE="-Pspark-2.0 -Phadoop-2.6 -Pscala-2.11 -Plivy-0.3" BUILD_FLAG="package -am -DskipTests -DskipRat" TEST_FLAG="verify -DskipRat" 
MODULES="-pl .,zeppelin-interpreter,zeppelin-display,spark-dependencies,spark,python,livy" TEST_PROJECTS="-Dtest=LivySQLInterpreterTest,org.apache.zeppelin.spark.PySpark*Test,org.apache.zeppelin.python.* -Dpyspark.test.exclude='' -DfailIfNoTests=false" before_install: # check files included in commit range, clear bower_components if a bower.json file has changed. http://git-wip-us.apache.org/repos/asf/zeppelin/blob/0f1701da/livy/src/test/java/org/apache/zeppelin/livy/LivyInterpreterIT.java ---------------------------------------------------------------------- diff --git a/livy/src/test/java/org/apache/zeppelin/livy/LivyInterpreterIT.java b/livy/src/test/java/org/apache/zeppelin/livy/LivyInterpreterIT.java index 06da238..007c0ed 100644 --- a/livy/src/test/java/org/apache/zeppelin/livy/LivyInterpreterIT.java +++ b/livy/src/test/java/org/apache/zeppelin/livy/LivyInterpreterIT.java @@ -555,13 +555,17 @@ public class LivyInterpreterIT { + "df.collect()", context); assertEquals(InterpreterResult.Code.SUCCESS, result.code()); assertEquals(1, result.message().size()); - assertTrue(result.message().get(0).getData().contains("[Row(_1=u'hello', _2=20)]")); + //python2 has u and python3 don't have u + assertTrue(result.message().get(0).getData().contains("[Row(_1=u'hello', _2=20)]") + || result.message().get(0).getData().contains("[Row(_1='hello', _2=20)]")); } else { result = pysparkInterpreter.interpret("df=spark.createDataFrame([(\"hello\",20)])\n" + "df.collect()", context); assertEquals(InterpreterResult.Code.SUCCESS, result.code()); assertEquals(1, result.message().size()); - assertTrue(result.message().get(0).getData().contains("[Row(_1=u'hello', _2=20)]")); + //python2 has u and python3 don't have u + assertTrue(result.message().get(0).getData().contains("[Row(_1=u'hello', _2=20)]") + || result.message().get(0).getData().contains("[Row(_1='hello', _2=20)]")); } // test magic api