Repository: zeppelin

Updated Branches:
  refs/heads/master 322c86865 -> 0a47b1393
ZEPPELIN-3142. Fixed Checkstyle errors and warnings in the livy module

### What is this PR for?
Fixed the Checkstyle errors and warnings in the **livy** module.

### What type of PR is it?
Improvement

### Todos
* [ ] - Task

### What is the Jira issue?
* https://issues.apache.org/jira/browse/ZEPPELIN-3142

### How should this be tested?
* CI pass

### Screenshots (if appropriate)

### Questions:
* Do the license files need an update? no
* Are there breaking changes for older versions? no
* Does this need documentation? no

Author: Jan Hentschel <jan.hentsc...@ultratendency.com>

Closes #2768 from HorizonNet/ZEPPELIN-3142 and squashes the following commits:

6b6f5b0 [Jan Hentschel] ZEPPELIN-3142. Reverted package-lock.json
0ad26f3 [Jan Hentschel] Merge branch 'master' into ZEPPELIN-3142
c090d24 [Jan Hentschel] Merge branch 'master' into ZEPPELIN-3142
b53d145 [Jan Hentschel] ZEPPELIN-3142. Fixed Checkstyle errors and warnings in the livy module


Project: http://git-wip-us.apache.org/repos/asf/zeppelin/repo
Commit: http://git-wip-us.apache.org/repos/asf/zeppelin/commit/0a47b139
Tree: http://git-wip-us.apache.org/repos/asf/zeppelin/tree/0a47b139
Diff: http://git-wip-us.apache.org/repos/asf/zeppelin/diff/0a47b139

Branch: refs/heads/master
Commit: 0a47b1393a3e8b3f907326ced359fe8b987c4fc7
Parents: 322c868
Author: Jan Hentschel <jan.hentsc...@ultratendency.com>
Authored: Tue Mar 27 12:26:12 2018 +0200
Committer: Jeff Zhang <zjf...@apache.org>
Committed: Wed Apr 25 11:47:24 2018 +0800

----------------------------------------------------------------------
 livy/pom.xml                                    |   8 ++
 .../zeppelin/livy/BaseLivyInterpreter.java      | 118 ++++++++++---------
 .../org/apache/zeppelin/livy/LivyException.java |   2 +-
 .../livy/LivyPySparkBaseInterpreter.java        |   3 +-
 .../zeppelin/livy/LivySharedInterpreter.java    |  11 +-
 .../zeppelin/livy/LivySparkSQLInterpreter.java  |  29 +++--
 .../org/apache/zeppelin/livy/LivyVersion.java   |   2 +-
 .../apache/zeppelin/livy/LivyInterpreterIT.java |  86 ++++++++------
 .../zeppelin/livy/LivySQLInterpreterTest.java   |  12 +-
 9 files changed, 152 insertions(+), 119 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/zeppelin/blob/0a47b139/livy/pom.xml
----------------------------------------------------------------------
diff --git a/livy/pom.xml b/livy/pom.xml
index eddeb83..6c911e1 100644
--- a/livy/pom.xml
+++ b/livy/pom.xml
@@ -407,6 +407,14 @@
         </execution>
       </executions>
     </plugin>
+
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-checkstyle-plugin</artifactId>
+        <configuration>
+          <skip>false</skip>
+        </configuration>
+      </plugin>
     </plugins>
   </build>
 </project>

http://git-wip-us.apache.org/repos/asf/zeppelin/blob/0a47b139/livy/src/main/java/org/apache/zeppelin/livy/BaseLivyInterpreter.java
----------------------------------------------------------------------
diff --git a/livy/src/main/java/org/apache/zeppelin/livy/BaseLivyInterpreter.java b/livy/src/main/java/org/apache/zeppelin/livy/BaseLivyInterpreter.java
index de0fff2..9d2c0cf 100644
--- a/livy/src/main/java/org/apache/zeppelin/livy/BaseLivyInterpreter.java
+++ b/livy/src/main/java/org/apache/zeppelin/livy/BaseLivyInterpreter.java
@@ -17,23 +17,9 @@
 package org.apache.zeppelin.livy;
 
-import java.io.FileInputStream;
-import java.io.IOException;
-import java.nio.charset.Charset;
-import java.security.KeyStore;
-import java.security.Principal;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Properties;
-import java.util.Set;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-
-import javax.net.ssl.SSLContext;
+import com.google.gson.Gson;
+import com.google.gson.GsonBuilder;
+import com.google.gson.annotations.SerializedName;
 
 import org.apache.commons.lang.StringUtils;
 import org.apache.commons.lang.exception.ExceptionUtils;
@@ -52,16 +38,6 @@ import org.apache.http.impl.auth.SPNegoSchemeFactory;
 import org.apache.http.impl.client.BasicCredentialsProvider;
 import org.apache.http.impl.client.HttpClientBuilder;
 import org.apache.http.impl.client.HttpClients;
-import org.apache.zeppelin.interpreter.Interpreter;
-import org.apache.zeppelin.interpreter.Interpreter.FormType;
-import org.apache.zeppelin.interpreter.InterpreterContext;
-import org.apache.zeppelin.interpreter.InterpreterException;
-import org.apache.zeppelin.interpreter.InterpreterResult;
-import org.apache.zeppelin.interpreter.InterpreterResultMessage;
-import org.apache.zeppelin.interpreter.InterpreterUtils;
-import org.apache.zeppelin.interpreter.LazyOpenInterpreter;
-import org.apache.zeppelin.interpreter.WrappedInterpreter;
-import org.apache.zeppelin.interpreter.thrift.InterpreterCompletion;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.http.HttpEntity;
@@ -77,10 +53,33 @@ import org.springframework.web.client.HttpServerErrorException;
 import org.springframework.web.client.RestClientException;
 import org.springframework.web.client.RestTemplate;
 
-import com.google.gson.Gson;
-import com.google.gson.GsonBuilder;
-import com.google.gson.annotations.SerializedName;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.nio.charset.Charset;
+import java.security.KeyStore;
+import java.security.Principal;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+import java.util.Set;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import javax.net.ssl.SSLContext;
+import org.apache.zeppelin.interpreter.Interpreter;
+import org.apache.zeppelin.interpreter.InterpreterContext;
+import org.apache.zeppelin.interpreter.InterpreterException;
+import org.apache.zeppelin.interpreter.InterpreterResult;
+import org.apache.zeppelin.interpreter.InterpreterResultMessage;
+import org.apache.zeppelin.interpreter.InterpreterUtils;
+import org.apache.zeppelin.interpreter.LazyOpenInterpreter;
+import org.apache.zeppelin.interpreter.WrappedInterpreter;
+import org.apache.zeppelin.interpreter.thrift.InterpreterCompletion;
 
 /**
  * Base class for livy interpreters.
@@ -89,7 +88,7 @@ public abstract class BaseLivyInterpreter extends Interpreter {
   protected static final Logger LOGGER = LoggerFactory.getLogger(BaseLivyInterpreter.class);
   private static Gson gson = new GsonBuilder().setPrettyPrinting().disableHtmlEscaping().create();
 
-  private static String SESSION_NOT_FOUND_PATTERN = "\"Session '\\d+' not found.\"";
+  private static final String SESSION_NOT_FOUND_PATTERN = "\"Session '\\d+' not found.\"";
 
   protected volatile SessionInfo sessionInfo;
   private String livyURL;
@@ -324,8 +323,9 @@ public abstract class BaseLivyInterpreter extends Interpreter {
     Map<String, String> conf = new HashMap<>();
     for (Map.Entry<Object, Object> entry : getProperties().entrySet()) {
       if (entry.getKey().toString().startsWith("livy.spark.") &&
-          !entry.getValue().toString().isEmpty())
+          !entry.getValue().toString().isEmpty()) {
         conf.put(entry.getKey().toString().substring(5), entry.getValue().toString());
+      }
     }
 
     CreateSessionRequest request = new CreateSessionRequest(kind,
@@ -450,8 +450,7 @@ public abstract class BaseLivyInterpreter extends Interpreter {
         cancelStatement(id);
       } catch (LivyException e) {
         LOGGER.error("Fail to cancel statement " + id + " for paragraph " + paragraphId, e);
-      }
-      finally {
+      } finally {
         paragraphsToCancel.remove(paragraphId);
       }
     } else {
@@ -504,8 +503,10 @@ public abstract class BaseLivyInterpreter extends Interpreter {
       StringBuilder sb = new StringBuilder();
       sb.append(stmtInfo.output.evalue);
       // in case evalue doesn't have newline char
-      if (!stmtInfo.output.evalue.contains("\n"))
+      if (!stmtInfo.output.evalue.contains("\n")) {
         sb.append("\n");
+      }
+
       if (stmtInfo.output.traceback != null) {
         sb.append(StringUtils.join(stmtInfo.output.traceback));
       }
@@ -519,14 +520,14 @@ public abstract class BaseLivyInterpreter extends Interpreter {
       return new InterpreterResult(InterpreterResult.Code.ERROR, "Empty output");
     } else {
       //TODO(zjffdu) support other types of data (like json, image and etc)
-      String result = stmtInfo.output.data.plain_text;
+      String result = stmtInfo.output.data.plainText;
 
       // check table magic result first
-      if (stmtInfo.output.data.application_livy_table_json != null) {
+      if (stmtInfo.output.data.applicationLivyTableJson != null) {
         StringBuilder outputBuilder = new StringBuilder();
         boolean notFirstColumn = false;
-        for (Map header : stmtInfo.output.data.application_livy_table_json.headers) {
+        for (Map header : stmtInfo.output.data.applicationLivyTableJson.headers) {
           if (notFirstColumn) {
             outputBuilder.append("\t");
           }
@@ -535,15 +536,15 @@ public abstract class BaseLivyInterpreter extends Interpreter {
         }
         outputBuilder.append("\n");
 
-        for (List<Object> row : stmtInfo.output.data.application_livy_table_json.records) {
+        for (List<Object> row : stmtInfo.output.data.applicationLivyTableJson.records) {
          outputBuilder.append(StringUtils.join(row, "\t"));
          outputBuilder.append("\n");
        }
        return new InterpreterResult(InterpreterResult.Code.SUCCESS,
            InterpreterResult.Type.TABLE, outputBuilder.toString());
-      } else if (stmtInfo.output.data.image_png != null) {
+      } else if (stmtInfo.output.data.imagePng != null) {
        return new InterpreterResult(InterpreterResult.Code.SUCCESS,
-            InterpreterResult.Type.IMG, (String) stmtInfo.output.data.image_png);
+            InterpreterResult.Type.IMG, (String) stmtInfo.output.data.imagePng);
       } else if (result != null) {
         result = result.trim();
         if (result.startsWith("<link")
@@ -669,7 +670,8 @@ public abstract class BaseLivyInterpreter extends Interpreter {
         restTemplate = new RestTemplate(new
            HttpComponentsClientHttpRequestFactory(httpClient));
       }
     }
-    restTemplate.getMessageConverters().add(0, new StringHttpMessageConverter(Charset.forName("UTF-8")));
+    restTemplate.getMessageConverters().add(0,
+        new StringHttpMessageConverter(Charset.forName("UTF-8")));
     return restTemplate;
   }
@@ -769,7 +771,7 @@ public abstract class BaseLivyInterpreter extends Interpreter {
     public final String user;
     public final Map<String, String> conf;
 
-    public CreateSessionRequest(String kind, String user, Map<String, String> conf) {
+    CreateSessionRequest(String kind, String user, Map<String, String> conf) {
      this.kind = kind;
      this.user = user;
      this.conf = conf;
@@ -823,7 +825,8 @@ public abstract class BaseLivyInterpreter extends Interpreter {
   static class ExecuteRequest {
     public final String code;
     public final String kind;
-    public ExecuteRequest(String code, String kind) {
+
+    ExecuteRequest(String code, String kind) {
       this.code = code;
       this.kind = kind;
     }
@@ -839,22 +842,22 @@ public abstract class BaseLivyInterpreter extends Interpreter {
     public double progress;
     public StatementOutput output;
 
-    public StatementInfo() {
+    StatementInfo() {
     }
 
     public static StatementInfo fromJson(String json) {
-      String right_json = "";
+      String rightJson = "";
       try {
         gson.fromJson(json, StatementInfo.class);
-        right_json = json;
+        rightJson = json;
       } catch (Exception e) {
         if (json.contains("\"traceback\":{}")) {
           LOGGER.debug("traceback type mismatch, replacing the mismatching part ");
-          right_json = json.replace("\"traceback\":{}", "\"traceback\":[]");
-          LOGGER.debug("new json string is {}", right_json);
+          rightJson = json.replace("\"traceback\":{}", "\"traceback\":[]");
+          LOGGER.debug("new json string is {}", rightJson);
         }
       }
-      return gson.fromJson(right_json, StatementInfo.class);
+      return gson.fromJson(rightJson, StatementInfo.class);
     }
 
     public boolean isAvailable() {
@@ -867,7 +870,7 @@ public abstract class BaseLivyInterpreter extends Interpreter {
 
   private static class StatementOutput {
     public String status;
-    public String execution_count;
+    public String executionCount;
     public Data data;
     public String ename;
     public String evalue;
@@ -884,13 +887,13 @@ public abstract class BaseLivyInterpreter extends Interpreter {
 
   private static class Data {
     @SerializedName("text/plain")
-    public String plain_text;
+    public String plainText;
     @SerializedName("image/png")
-    public String image_png;
+    public String imagePng;
     @SerializedName("application/json")
-    public String application_json;
+    public String applicationJson;
     @SerializedName("application/vnd.livy.table.v1+json")
-    public TableMagic application_livy_table_json;
+    public TableMagic applicationLivyTableJson;
   }
 
   private static class TableMagic {
@@ -908,7 +911,7 @@ public abstract class BaseLivyInterpreter extends Interpreter {
     public final String kind;
     public final int cursor;
 
-    public CompletionRequest(String code, String kind, int cursor) {
+    CompletionRequest(String code, String kind, int cursor) {
      this.code = code;
      this.kind = kind;
      this.cursor = cursor;
@@ -922,7 +925,7 @@ public abstract class BaseLivyInterpreter extends Interpreter {
   static class CompletionResponse {
     public final String[] candidates;
 
-    public CompletionResponse(String[] candidates) {
+    CompletionResponse(String[] candidates) {
       this.candidates = candidates;
     }
@@ -943,5 +946,4 @@ public abstract class BaseLivyInterpreter extends Interpreter {
       return gson.fromJson(json, LivyVersionResponse.class);
     }
   }
-
 }
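A side note on the Data field renames above (plain_text -> plainText, image_png -> imagePng, and so on): each of those fields keeps its Gson @SerializedName annotation, so the JSON keys returned by Livy are untouched and only the Java identifiers move to camelCase. The following self-contained sketch (hypothetical class name and sample value, not taken from this commit) illustrates why deserialization is unaffected by such a rename:

```java
import com.google.gson.Gson;
import com.google.gson.annotations.SerializedName;

public class SerializedNameRenameSketch {
  // Hypothetical stand-in for BaseLivyInterpreter.Data: the Java field is camelCase,
  // but @SerializedName pins the JSON key to the MIME-type name that Livy sends.
  static class Data {
    @SerializedName("text/plain")
    String plainText;

    @SerializedName("image/png")
    String imagePng;
  }

  public static void main(String[] args) {
    // Sample payload shaped like the "data" object of a Livy statement output (made-up value).
    String json = "{\"text/plain\": \"res0: Int = 42\"}";
    Data data = new Gson().fromJson(json, Data.class);
    // Prints the plain-text value: Gson resolves the key from the annotation,
    // not from the field name, so plain_text -> plainText changes nothing on the wire.
    System.out.println(data.plainText);
  }
}
```

The same mechanism is what allows MIME-type keys such as "text/plain", which are not valid Java identifiers, to be mapped to fields at all.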
http://git-wip-us.apache.org/repos/asf/zeppelin/blob/0a47b139/livy/src/main/java/org/apache/zeppelin/livy/LivyException.java
----------------------------------------------------------------------
diff --git a/livy/src/main/java/org/apache/zeppelin/livy/LivyException.java b/livy/src/main/java/org/apache/zeppelin/livy/LivyException.java
index e126a0f..c14351f 100644
--- a/livy/src/main/java/org/apache/zeppelin/livy/LivyException.java
+++ b/livy/src/main/java/org/apache/zeppelin/livy/LivyException.java
@@ -20,7 +20,7 @@ package org.apache.zeppelin.livy;
 import org.apache.zeppelin.interpreter.InterpreterException;
 
 /**
- * Livy api related exception
+ * Livy api related exception.
  */
 public class LivyException extends InterpreterException {
   public LivyException() {

http://git-wip-us.apache.org/repos/asf/zeppelin/blob/0a47b139/livy/src/main/java/org/apache/zeppelin/livy/LivyPySparkBaseInterpreter.java
----------------------------------------------------------------------
diff --git a/livy/src/main/java/org/apache/zeppelin/livy/LivyPySparkBaseInterpreter.java b/livy/src/main/java/org/apache/zeppelin/livy/LivyPySparkBaseInterpreter.java
index 6d39981..32399c6 100644
--- a/livy/src/main/java/org/apache/zeppelin/livy/LivyPySparkBaseInterpreter.java
+++ b/livy/src/main/java/org/apache/zeppelin/livy/LivyPySparkBaseInterpreter.java
@@ -15,13 +15,12 @@
  * limitations under the License.
  */
 
-
 package org.apache.zeppelin.livy;
 
 import java.util.Properties;
 
 /**
- * Base class for PySpark Interpreter
+ * Base class for PySpark Interpreter.
  */
 public abstract class LivyPySparkBaseInterpreter extends BaseLivyInterpreter {

http://git-wip-us.apache.org/repos/asf/zeppelin/blob/0a47b139/livy/src/main/java/org/apache/zeppelin/livy/LivySharedInterpreter.java
----------------------------------------------------------------------
diff --git a/livy/src/main/java/org/apache/zeppelin/livy/LivySharedInterpreter.java b/livy/src/main/java/org/apache/zeppelin/livy/LivySharedInterpreter.java
index cef0858..c912dc9 100644
--- a/livy/src/main/java/org/apache/zeppelin/livy/LivySharedInterpreter.java
+++ b/livy/src/main/java/org/apache/zeppelin/livy/LivySharedInterpreter.java
@@ -18,17 +18,18 @@
 package org.apache.zeppelin.livy;
 
 import org.apache.commons.lang.StringUtils;
-import org.apache.zeppelin.interpreter.InterpreterContext;
-import org.apache.zeppelin.interpreter.InterpreterException;
-import org.apache.zeppelin.interpreter.InterpreterResult;
-import org.apache.zeppelin.interpreter.InterpreterUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import java.util.Properties;
 
+import org.apache.zeppelin.interpreter.InterpreterContext;
+import org.apache.zeppelin.interpreter.InterpreterException;
+import org.apache.zeppelin.interpreter.InterpreterResult;
+import org.apache.zeppelin.interpreter.InterpreterUtils;
+
 /**
- * Livy Interpreter for shared kind which share SparkContext across spark/pyspark/r
+ * Livy Interpreter for shared kind which share SparkContext across spark/pyspark/r.
 */
 public class LivySharedInterpreter extends BaseLivyInterpreter {

http://git-wip-us.apache.org/repos/asf/zeppelin/blob/0a47b139/livy/src/main/java/org/apache/zeppelin/livy/LivySparkSQLInterpreter.java
----------------------------------------------------------------------
diff --git a/livy/src/main/java/org/apache/zeppelin/livy/LivySparkSQLInterpreter.java b/livy/src/main/java/org/apache/zeppelin/livy/LivySparkSQLInterpreter.java
index 2faa350..902336b 100644
--- a/livy/src/main/java/org/apache/zeppelin/livy/LivySparkSQLInterpreter.java
+++ b/livy/src/main/java/org/apache/zeppelin/livy/LivySparkSQLInterpreter.java
@@ -17,24 +17,34 @@
 package org.apache.zeppelin.livy;
 
-import org.apache.commons.lang.StringUtils;
 import static org.apache.commons.lang.StringEscapeUtils.escapeJavaScript;
-import org.apache.zeppelin.display.GUI;
-import org.apache.zeppelin.interpreter.*;
-import org.apache.zeppelin.scheduler.Scheduler;
-import org.apache.zeppelin.scheduler.SchedulerFactory;
-import org.apache.zeppelin.user.AuthenticationInfo;
+
+import org.apache.commons.lang.StringUtils;
 
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Properties;
 
+import org.apache.zeppelin.display.GUI;
+import org.apache.zeppelin.interpreter.Interpreter;
+import org.apache.zeppelin.interpreter.InterpreterContext;
+import org.apache.zeppelin.interpreter.InterpreterException;
+import org.apache.zeppelin.interpreter.InterpreterOutput;
+import org.apache.zeppelin.interpreter.InterpreterResult;
+import org.apache.zeppelin.interpreter.InterpreterResultMessage;
+import org.apache.zeppelin.interpreter.InterpreterUtils;
+import org.apache.zeppelin.interpreter.LazyOpenInterpreter;
+import org.apache.zeppelin.interpreter.ResultMessages;
+import org.apache.zeppelin.interpreter.WrappedInterpreter;
+import org.apache.zeppelin.scheduler.Scheduler;
+import org.apache.zeppelin.scheduler.SchedulerFactory;
+import org.apache.zeppelin.user.AuthenticationInfo;
+
 /**
  * Livy SparkSQL Interpreter for Zeppelin.
  */
 public class LivySparkSQLInterpreter extends BaseLivyInterpreter {
 
-  public static final String ZEPPELIN_LIVY_SPARK_SQL_FIELD_TRUNCATE = "zeppelin.livy.spark.sql.field.truncate";
@@ -230,12 +240,13 @@ public class LivySparkSQLInterpreter extends BaseLivyInterpreter {
   }
 
   /**
-   * Represent the start and end index of each cell
+   * Represent the start and end index of each cell.
   */
   private static class Pair {
     private int start;
     private int end;
-    public Pair(int start, int end) {
+
+    Pair(int start, int end) {
       this.start = start;
       this.end = end;
     }
   }

http://git-wip-us.apache.org/repos/asf/zeppelin/blob/0a47b139/livy/src/main/java/org/apache/zeppelin/livy/LivyVersion.java
----------------------------------------------------------------------
diff --git a/livy/src/main/java/org/apache/zeppelin/livy/LivyVersion.java b/livy/src/main/java/org/apache/zeppelin/livy/LivyVersion.java
index 81bb8d4..55ebd57 100644
--- a/livy/src/main/java/org/apache/zeppelin/livy/LivyVersion.java
+++ b/livy/src/main/java/org/apache/zeppelin/livy/LivyVersion.java
@@ -21,7 +21,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 /**
- * Provide reading comparing capability of livy version
+ * Provide reading comparing capability of livy version.
 */
 public class LivyVersion {
   private static final Logger logger = LoggerFactory.getLogger(LivyVersion.class);

http://git-wip-us.apache.org/repos/asf/zeppelin/blob/0a47b139/livy/src/test/java/org/apache/zeppelin/livy/LivyInterpreterIT.java
----------------------------------------------------------------------
diff --git a/livy/src/test/java/org/apache/zeppelin/livy/LivyInterpreterIT.java b/livy/src/test/java/org/apache/zeppelin/livy/LivyInterpreterIT.java
index c7fbc8d..5deb52d 100644
--- a/livy/src/test/java/org/apache/zeppelin/livy/LivyInterpreterIT.java
+++ b/livy/src/test/java/org/apache/zeppelin/livy/LivyInterpreterIT.java
@@ -17,21 +17,13 @@
 package org.apache.zeppelin.livy;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.mockito.Mockito.mock;
 
 import org.apache.commons.io.IOUtils;
 import org.apache.livy.test.framework.Cluster;
 import org.apache.livy.test.framework.Cluster$;
-import org.apache.zeppelin.interpreter.Interpreter;
-import org.apache.zeppelin.interpreter.InterpreterContext;
-import org.apache.zeppelin.interpreter.InterpreterException;
-import org.apache.zeppelin.interpreter.InterpreterGroup;
-import org.apache.zeppelin.interpreter.InterpreterOutput;
-import org.apache.zeppelin.interpreter.InterpreterOutputListener;
-import org.apache.zeppelin.interpreter.InterpreterResult;
-import org.apache.zeppelin.interpreter.InterpreterResultMessage;
-import org.apache.zeppelin.interpreter.InterpreterResultMessageOutput;
-import org.apache.zeppelin.interpreter.LazyOpenInterpreter;
-import org.apache.zeppelin.user.AuthenticationInfo;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
@@ -42,13 +34,19 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Properties;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-import static org.mockito.Mockito.mock;
+import org.apache.zeppelin.interpreter.Interpreter;
+import org.apache.zeppelin.interpreter.InterpreterContext;
+import org.apache.zeppelin.interpreter.InterpreterException;
+import org.apache.zeppelin.interpreter.InterpreterGroup;
+import org.apache.zeppelin.interpreter.InterpreterOutput;
+import org.apache.zeppelin.interpreter.InterpreterOutputListener;
+import org.apache.zeppelin.interpreter.InterpreterResult;
+import org.apache.zeppelin.interpreter.InterpreterResultMessageOutput;
+import org.apache.zeppelin.interpreter.LazyOpenInterpreter;
+import org.apache.zeppelin.user.AuthenticationInfo;
 
 public class LivyInterpreterIT {
-
-  private static Logger LOGGER = LoggerFactory.getLogger(LivyInterpreterIT.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(LivyInterpreterIT.class);
   private static Cluster cluster;
   private static Properties properties;
@@ -313,16 +311,18 @@
     // test result string truncate
     if (!isSpark2) {
       result = sparkInterpreter.interpret(
-          "val df=sqlContext.createDataFrame(Seq((\"12characters12characters\",20))).toDF(\"col_1\", \"col_2\")\n"
-              + "df.collect()", context);
+          "val df=sqlContext.createDataFrame(Seq((\"12characters12characters\",20)))"
+              + ".toDF(\"col_1\", \"col_2\")\n"
+              + "df.collect()", context);
       assertEquals(InterpreterResult.Code.SUCCESS, result.code());
       assertEquals(1, result.message().size());
       assertTrue(result.message().get(0).getData()
          .contains("Array[org.apache.spark.sql.Row] = Array([12characters12characters,20])"));
     } else {
       result = sparkInterpreter.interpret(
-          "val df=spark.createDataFrame(Seq((\"12characters12characters\",20))).toDF(\"col_1\", \"col_2\")\n"
-              + "df.collect()", context);
+          "val df=spark.createDataFrame(Seq((\"12characters12characters\",20)))"
+              + ".toDF(\"col_1\", \"col_2\")\n"
+              + "df.collect()", context);
       assertEquals(InterpreterResult.Code.SUCCESS, result.code());
       assertEquals(1, result.message().size());
       assertTrue(result.message().get(0).getData()
@@ -330,7 +330,8 @@
     }
     sparkInterpreter.interpret("df.registerTempTable(\"df\")", context);
     // test LivySparkSQLInterpreter which share the same SparkContext with LivySparkInterpreter
-    result = sqlInterpreter.interpret("select * from df where col_1='12characters12characters'", context);
+    result = sqlInterpreter.interpret("select * from df where col_1='12characters12characters'",
+        context);
     assertEquals(InterpreterResult.Code.SUCCESS, result.code());
     assertEquals(InterpreterResult.Type.TABLE, result.message().get(0).getType());
     assertEquals("col_1\tcol_2\n12characters12cha...\t20", result.message().get(0).getData());
@@ -348,22 +349,24 @@
     AuthenticationInfo authInfo = new AuthenticationInfo("user1");
     MyInterpreterOutputListener outputListener = new MyInterpreterOutputListener();
     InterpreterOutput output = new InterpreterOutput(outputListener);
-    final InterpreterContext context = new InterpreterContext("noteId", "paragraphId", "livy.pyspark",
-        "title", "text", authInfo, null, null, null, null, null, null, output);
+    final InterpreterContext context = new InterpreterContext("noteId", "paragraphId",
+        "livy.pyspark", "title", "text", authInfo, null, null, null, null, null, null, output);
     pysparkInterpreter.open();
 
     // test traceback msg
     try {
       pysparkInterpreter.getLivyVersion();
-      // for livy version >=0.3 , input some erroneous spark code, check the shown result is more than one line
-      InterpreterResult result = pysparkInterpreter.interpret("sc.parallelize(wrongSyntax(1, 2)).count()", context);
+      // for livy version >=0.3 , input some erroneous spark code, check the shown result is more
+      // than one line
+      InterpreterResult result = pysparkInterpreter.interpret(
+          "sc.parallelize(wrongSyntax(1, 2)).count()", context);
       assertEquals(InterpreterResult.Code.ERROR, result.code());
       assertTrue(result.message().get(0).getData().split("\n").length > 1);
       assertTrue(result.message().get(0).getData().contains("Traceback"));
     } catch (APINotFoundException e) {
       // only livy 0.2 can throw this exception since it doesn't have /version endpoint
-      // in livy 0.2, most error msg is encapsulated in evalue field, only print(a) in pyspark would return none-empty
-      // traceback
+      // in livy 0.2, most error msg is encapsulated in evalue field, only print(a) in pyspark would
+      // return none-empty traceback
      InterpreterResult result = pysparkInterpreter.interpret("print(a)", context);
      assertEquals(InterpreterResult.Code.ERROR, result.code());
      assertTrue(result.message().get(0).getData().split("\n").length > 1);
@@ -461,7 +464,8 @@
   @Test
-  public void testSparkInterpreterWithDisplayAppInfo_StringWithoutTruncation() throws InterpreterException {
+  public void testSparkInterpreterWithDisplayAppInfo_StringWithoutTruncation()
+      throws InterpreterException {
     if (!checkPreCondition()) {
       return;
     }
@@ -509,16 +513,18 @@
     if (!isSpark2) {
       result = sparkInterpreter.interpret(
-          "val df=sqlContext.createDataFrame(Seq((\"12characters12characters\",20))).toDF(\"col_1\", \"col_2\")\n"
-              + "df.collect()", context);
+          "val df=sqlContext.createDataFrame(Seq((\"12characters12characters\",20)))"
+              + ".toDF(\"col_1\", \"col_2\")\n"
+              + "df.collect()", context);
       assertEquals(InterpreterResult.Code.SUCCESS, result.code());
       assertEquals(2, result.message().size());
       assertTrue(result.message().get(0).getData()
          .contains("Array[org.apache.spark.sql.Row] = Array([12characters12characters,20])"));
     } else {
       result = sparkInterpreter.interpret(
-          "val df=spark.createDataFrame(Seq((\"12characters12characters\",20))).toDF(\"col_1\", \"col_2\")\n"
-              + "df.collect()", context);
+          "val df=spark.createDataFrame(Seq((\"12characters12characters\",20)))"
+              + ".toDF(\"col_1\", \"col_2\")\n"
+              + "df.collect()", context);
       assertEquals(InterpreterResult.Code.SUCCESS, result.code());
       assertEquals(2, result.message().size());
       assertTrue(result.message().get(0).getData()
@@ -526,7 +532,8 @@
     }
     sparkInterpreter.interpret("df.registerTempTable(\"df\")", context);
     // test LivySparkSQLInterpreter which share the same SparkContext with LivySparkInterpreter
-    result = sqlInterpreter.interpret("select * from df where col_1='12characters12characters'", context);
+    result = sqlInterpreter.interpret("select * from df where col_1='12characters12characters'",
+        context);
     assertEquals(InterpreterResult.Code.SUCCESS, result.code());
     assertEquals(InterpreterResult.Type.TABLE, result.message().get(0).getType());
     assertEquals("col_1\tcol_2\n12characters12characters\t20", result.message().get(0).getData());
@@ -554,8 +561,8 @@
     AuthenticationInfo authInfo = new AuthenticationInfo("user1");
     MyInterpreterOutputListener outputListener = new MyInterpreterOutputListener();
     InterpreterOutput output = new InterpreterOutput(outputListener);
-    final InterpreterContext context = new InterpreterContext("noteId", "paragraphId", "livy.sparkr",
-        "title", "text", authInfo, null, null, null, null, null, null, output);
+    final InterpreterContext context = new InterpreterContext("noteId", "paragraphId",
+        "livy.sparkr", "title", "text", authInfo, null, null, null, null, null, null, output);
     sparkRInterpreter.open();
 
     try {
@@ -695,7 +702,8 @@
     assertEquals(InterpreterResult.Code.SUCCESS, result.code());
     assertEquals(1, result.message().size());
 
-    boolean isSpark2 = isSpark2((BaseLivyInterpreter) sparkInterpreter.getInnerInterpreter(), context);
+    boolean isSpark2 = isSpark2((BaseLivyInterpreter) sparkInterpreter.getInnerInterpreter(),
+        context);
 
     if (!isSpark2) {
       result = sparkInterpreter.interpret(
@@ -708,7 +716,8 @@
     sparkInterpreter.interpret("df.registerTempTable(\"df\")", context);
 
     // access table from pyspark
-    result = pysparkInterpreter.interpret("sqlContext.sql(\"select * from df\").show()", context);
+    result = pysparkInterpreter.interpret("sqlContext.sql(\"select * from df\").show()",
+        context);
     assertEquals(InterpreterResult.Code.SUCCESS, result.code());
     assertEquals(1, result.message().size());
     assertTrue(result.message().get(0).getData()
@@ -719,7 +728,8 @@
        "+-----+-----+"));
 
     // access table from sparkr
-    result = sparkRInterpreter.interpret("head(sql(sqlContext, \"select * from df\"))", context);
+    result = sparkRInterpreter.interpret("head(sql(sqlContext, \"select * from df\"))",
+        context);
     assertEquals(InterpreterResult.Code.SUCCESS, result.code());
     assertEquals(1, result.message().size());
     assertTrue(result.message().get(0).getData().contains("col_1 col_2\n1 hello 20"));

http://git-wip-us.apache.org/repos/asf/zeppelin/blob/0a47b139/livy/src/test/java/org/apache/zeppelin/livy/LivySQLInterpreterTest.java
----------------------------------------------------------------------
diff --git a/livy/src/test/java/org/apache/zeppelin/livy/LivySQLInterpreterTest.java b/livy/src/test/java/org/apache/zeppelin/livy/LivySQLInterpreterTest.java
index 0541b87..8821a86 100644
--- a/livy/src/test/java/org/apache/zeppelin/livy/LivySQLInterpreterTest.java
+++ b/livy/src/test/java/org/apache/zeppelin/livy/LivySQLInterpreterTest.java
@@ -15,19 +15,20 @@
  * limitations under the License.
  */
 
-
 package org.apache.zeppelin.livy;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotEquals;
+import static org.junit.Assert.assertTrue;
+
 import org.junit.Before;
-import org.junit.BeforeClass;
 import org.junit.Test;
 
 import java.util.List;
 import java.util.Properties;
 
-import static org.junit.Assert.*;
 
 /**
- * Unit test for LivySQLInterpreter
+ * Unit test for LivySQLInterpreter.
  */
 public class LivySQLInterpreterTest {
 
@@ -126,7 +127,8 @@ public class LivySQLInterpreterTest {
     assertEquals("1", rows.get(1));
 
-    // sql output with 3 rows, 3 columns, showing "only showing top 3 rows" with a line break in the data
+    // sql output with 3 rows, 3 columns, showing "only showing top 3 rows" with a line break in
+    // the data
     //  +---+---+---+
     //  |  a|  b|  c|
     //  +---+---+---+
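
A usage note on verification, beyond the "CI pass" check named in the PR description: the `<skip>false</skip>` switch in livy/pom.xml re-enables the maven-checkstyle-plugin that the parent build configures, so the same warnings should be reproducible locally by running the plugin on just this module, e.g. `mvn checkstyle:check -pl livy` (an assumed invocation based on the standard Checkstyle plugin goals, not a command taken from this commit).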