Repository: hive
Updated Branches:
  refs/heads/branch-1 1a9ed419e -> f8db95803


HIVE-15833: Add unit tests for org.json usage on branch-1 (Daniel Voros via Zoltan Haindrich)

Signed-off-by: Zoltan Haindrich <k...@rxd.hu>


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/f8db9580
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/f8db9580
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/f8db9580

Branch: refs/heads/branch-1
Commit: f8db9580337e3d01058b8bdcc312c38ed046fbd6
Parents: 1a9ed41
Author: Daniel Voros <daniel.vo...@gmail.com>
Authored: Fri Mar 31 08:53:08 2017 +0200
Committer: Zoltan Haindrich <k...@rxd.hu>
Committed: Fri Mar 31 09:00:42 2017 +0200

----------------------------------------------------------------------
 .../hadoop/hive/common/jsonexplain/tez/Op.java  |   4 +-
 .../hive/common/jsonexplain/tez/TestOp.java     |  58 ++++
 .../hive/common/jsonexplain/tez/TestStage.java  | 195 +++++++++++++
 .../jsonexplain/tez/TestTezJsonParser.java      |  54 ++++
 .../hive/common/jsonexplain/tez/TestVertex.java | 106 +++++++
 .../apache/hadoop/hive/ql/exec/ExplainTask.java |  16 +-
 .../apache/hadoop/hive/ql/hooks/ATSHook.java    |   4 +-
 .../apache/hadoop/hive/ql/parse/EximUtil.java   |   4 +-
 .../hadoop/hive/ql/exec/TestExplainTask.java    | 281 +++++++++++++++++++
 .../hadoop/hive/ql/hooks/TestATSHook.java       |  53 ++++
 .../hadoop/hive/ql/parse/TestEximUtil.java      | 103 +++++++
 11 files changed, 870 insertions(+), 8 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/f8db9580/common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/Op.java
----------------------------------------------------------------------
diff --git a/common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/Op.java b/common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/Op.java
index fb12f70..cc4947f 100644
--- a/common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/Op.java
+++ b/common/src/java/org/apache/hadoop/hive/common/jsonexplain/tez/Op.java
@@ -25,6 +25,7 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
+import com.google.common.annotations.VisibleForTesting;
 import org.json.JSONException;
 import org.json.JSONObject;
 
@@ -54,7 +55,8 @@ public class Op {
     this.vertex = vertex;
   }
 
-  private void inlineJoinOp() throws Exception {
+  @VisibleForTesting
+  void inlineJoinOp() throws Exception {
     // inline map join operator
     if (this.name.equals("Map Join Operator")) {
       JSONObject mapjoinObj = opObject.getJSONObject("Map Join Operator");

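The change above is the patch's recurring enabling trick: a method is widened from private to package-private and annotated @VisibleForTesting, so a test class in the same package can invoke it directly while production callers outside the package still cannot. A minimal sketch of the pattern, using a hypothetical class (not Hive code):

import com.google.common.annotations.VisibleForTesting;

public class Parser {
  // Package-private instead of private: unreachable from other packages,
  // but callable from a test compiled into the same package.
  @VisibleForTesting
  String normalize(String raw) {
    return raw == null ? "" : raw.trim();
  }
}

A same-package test can then call new Parser().normalize(null) and assert on the result without resorting to reflection.
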
http://git-wip-us.apache.org/repos/asf/hive/blob/f8db9580/common/src/test/org/apache/hadoop/hive/common/jsonexplain/tez/TestOp.java
----------------------------------------------------------------------
diff --git a/common/src/test/org/apache/hadoop/hive/common/jsonexplain/tez/TestOp.java b/common/src/test/org/apache/hadoop/hive/common/jsonexplain/tez/TestOp.java
new file mode 100644
index 0000000..fc8381b
--- /dev/null
+++ b/common/src/test/org/apache/hadoop/hive/common/jsonexplain/tez/TestOp.java
@@ -0,0 +1,58 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.common.jsonexplain.tez;
+
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.json.JSONObject;
+import org.junit.Test;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+
+import static org.junit.Assert.assertEquals;
+
+public class TestOp {
+
+  private ObjectMapper objectMapper = new ObjectMapper();
+
+  @Test
+  public void testInlineJoinOpJsonShouldMatch() throws Exception {
+    String jsonString = "{\"Map Join Operator\":{" +
+            "\"input vertices:\":{\"a\":\"AVERTEX\"}," +
+            "\"keys:\":{\"a\":\"AKEY\",\"b\":\"BKEY\"}}}";
+    JSONObject mapJoin = new JSONObject(jsonString);
+
+    Vertex vertex = new Vertex("vertex-name", null);
+
+    List<Attr> attrs = new ArrayList<>();
+
+    Op uut = new Op("Map Join Operator", "op-id", "output-vertex-name", Collections.EMPTY_LIST,
+            attrs, mapJoin, vertex);
+    uut.inlineJoinOp();
+
+    assertEquals(1, attrs.size());
+
+    JsonNode result = objectMapper.readTree(attrs.get(0).value);
+    JsonNode expected = objectMapper.readTree("{\"vertex-name\":\"BKEY\",\"AVERTEX\":\"AKEY\"}");
+
+    assertEquals(expected, result);
+  }
+}
\ No newline at end of file

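TestOp checks org.json output by re-parsing both sides with Jackson and comparing JsonNode trees instead of raw strings, which makes the assertion independent of key ordering. A standalone sketch of the idiom:

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

public class JsonNodeEqualityDemo {
  public static void main(String[] args) throws Exception {
    ObjectMapper mapper = new ObjectMapper();
    JsonNode a = mapper.readTree("{\"x\":1,\"y\":2}");
    JsonNode b = mapper.readTree("{\"y\":2,\"x\":1}");
    // JsonNode.equals() compares the parsed structure, not the raw text,
    // so the differently ordered documents still compare equal.
    System.out.println(a.equals(b)); // true
  }
}
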
http://git-wip-us.apache.org/repos/asf/hive/blob/f8db9580/common/src/test/org/apache/hadoop/hive/common/jsonexplain/tez/TestStage.java
----------------------------------------------------------------------
diff --git a/common/src/test/org/apache/hadoop/hive/common/jsonexplain/tez/TestStage.java b/common/src/test/org/apache/hadoop/hive/common/jsonexplain/tez/TestStage.java
new file mode 100644
index 0000000..eaf03c3
--- /dev/null
+++ b/common/src/test/org/apache/hadoop/hive/common/jsonexplain/tez/TestStage.java
@@ -0,0 +1,195 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.common.jsonexplain.tez;
+
+import org.json.JSONObject;
+import org.junit.Before;
+import org.junit.Test;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import static org.junit.Assert.*;
+
+public class TestStage {
+
+  private Stage uut;
+  private Stage stageA;
+  private Stage stageB;
+
+  @Before
+  public void setUp() {
+    this.uut = new Stage("uut");
+    this.stageA = new Stage("stage-a");
+    this.stageB = new Stage("stage-b");
+  }
+
+  @Test
+  public void testAddDependencyNonRoot() throws Exception {
+    Map<String, Stage> children = new HashMap<>();
+    children.put("a", stageA);
+    children.put("b", stageB);
+
+
+    String jsonString = "{\"DEPENDENT STAGES\":\"a,b\"}";
+    JSONObject names = new JSONObject(jsonString);
+
+    uut.addDependency(names, children);
+
+    assertEquals(2, uut.parentStages.size());
+    assertEquals(stageA, uut.parentStages.get(0));
+    assertEquals(stageB, uut.parentStages.get(1));
+
+    assertEquals(1, stageA.childStages.size());
+    assertEquals(uut, stageA.childStages.get(0));
+
+    assertEquals(1, stageB.childStages.size());
+    assertEquals(uut, stageB.childStages.get(0));
+  }
+
+  @Test
+  public void testAddDependencyRoot() throws Exception {
+    Map<String, Stage> children = new HashMap<>();
+    children.put("a", stageA);
+    children.put("b", stageB);
+
+    String jsonString = "{\"ROOT STAGE\":\"X\",\"DEPENDENT STAGES\":\"a,b\"}";
+    JSONObject names = new JSONObject(jsonString);
+
+    uut.addDependency(names, children);
+
+    assertEquals(0, uut.parentStages.size());
+    assertEquals(0, stageA.childStages.size());
+    assertEquals(0, stageB.childStages.size());
+  }
+
+
+  @Test
+  public void testExtractVertexNonTez() throws Exception {
+    String jsonString = "{\"OperatorName\":{\"a\":\"A\",\"b\":\"B\"}," +
+            "\"attr1\":\"ATTR1\"}";
+    JSONObject object = new JSONObject(jsonString);
+
+    uut.extractVertex(object);
+
+    assertEquals("OperatorName", uut.op.name);
+    assertEquals(1, uut.attrs.size());
+    assertEquals("attr1", uut.attrs.get(0).name);
+    assertEquals("ATTR1", uut.attrs.get(0).value);
+  }
+
+  @Test
+  public void testExtractVertexTezNoEdges() throws Exception {
+    String jsonString = "{\"Tez\":{\"a\":\"A\",\"Vertices:\":{\"v1\":{}}}}";
+    JSONObject object = new JSONObject(jsonString);
+    uut.extractVertex(object);
+
+    assertEquals(1, uut.vertexs.size());
+    assertTrue(uut.vertexs.containsKey("v1"));
+  }
+
+  @Test
+  public void testExtractVertexTezWithOneEdge() throws Exception {
+    String jsonString = "{\"Tez\":{\"a\":\"A\"," +
+            "\"Vertices:\":{\"v1\":{},\"v2\":{}}," +
+            "\"Edges:\":{\"v2\":{\"parent\":\"v1\",\"type\":\"TYPE\"}}}}";
+    JSONObject object = new JSONObject(jsonString);
+    uut.extractVertex(object);
+
+    assertEquals(2, uut.vertexs.size());
+    assertTrue(uut.vertexs.containsKey("v1"));
+    assertTrue(uut.vertexs.containsKey("v2"));
+
+    assertEquals(0, uut.vertexs.get("v1").parentConnections.size());
+    assertEquals(1, uut.vertexs.get("v2").parentConnections.size());
+    assertEquals("v1", 
uut.vertexs.get("v2").parentConnections.get(0).from.name);
+    assertEquals("TYPE", uut.vertexs.get("v2").parentConnections.get(0).type);
+
+  }
+
+
+  @Test
+  public void testExtractVertexTezWithOneToManyEdge() throws Exception {
+    String jsonString = "{\"Tez\":{\"a\":\"A\"," +
+            "\"Vertices:\":{\"v1\":{},\"v2\":{},\"v3\":{}}," +
+            "\"Edges:\":{\"v1\":[{\"parent\":\"v2\",\"type\":\"TYPE1\"}," +
+            "{\"parent\":\"v3\",\"type\":\"TYPE2\"}]}}}";
+    JSONObject object = new JSONObject(jsonString);
+
+    uut.extractVertex(object);
+
+    assertEquals(3, uut.vertexs.size());
+    assertTrue(uut.vertexs.containsKey("v1"));
+    assertTrue(uut.vertexs.containsKey("v2"));
+    assertTrue(uut.vertexs.containsKey("v3"));
+
+    assertEquals(2, uut.vertexs.get("v1").parentConnections.size());
+    assertEquals(1, uut.vertexs.get("v2").children.size());
+    assertEquals(1, uut.vertexs.get("v3").children.size());
+    assertEquals("v1", uut.vertexs.get("v2").children.get(0).name);
+    assertEquals("v1", uut.vertexs.get("v3").children.get(0).name);
+    assertEquals("TYPE1", uut.vertexs.get("v1").parentConnections.get(0).type);
+    assertEquals("TYPE2", uut.vertexs.get("v1").parentConnections.get(1).type);
+
+  }
+
+  @Test
+  public void testExtractOpEmptyObject() throws Exception {
+    JSONObject object = new JSONObject();
+    Op result = uut.extractOp("op-name", object);
+
+    assertEquals("op-name", result.name);
+    assertEquals(0, result.attrs.size());
+    assertNull(result.vertex);
+  }
+
+  @Test
+  public void testExtractOpSimple() throws Exception {
+    String jsonString = "{\"a\":\"A\",\"b\":\"B\"}";
+    JSONObject object = new JSONObject(jsonString);
+
+    Op result = uut.extractOp("op-name", object);
+
+    assertEquals("op-name", result.name);
+    assertEquals(2, result.attrs.size());
+    assertNull(result.vertex);
+  }
+
+  @Test
+  public void testExtract() throws Exception {
+    String jsonString = "{\"b\":{\"b2\":\"B2\",\"b1\":\"B1\"}," +
+            "\"Processor Tree:\":{\"a1\":{\"t1\":\"T1\"}}}";
+    JSONObject object = new JSONObject(jsonString);
+
+    Op result = uut.extractOp("op-name", object);
+    assertEquals("op-name", result.name);
+    assertEquals(2, result.attrs.size());
+
+    List<String> attrs = new ArrayList<>();
+    for (Attr attr : result.attrs) {
+      attrs.add(attr.name + "=" + attr.value);
+    }
+    assertTrue(attrs.contains("b1=B1"));
+    assertTrue(attrs.contains("b2=B2"));
+    assertNotNull(result.vertex);
+  }
+
+}
\ No newline at end of file

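The addDependency tests feed a JSONObject whose "DEPENDENT STAGES" value is a comma-separated list of stage names to resolve against the already-parsed stages. A sketch of just that input shape (the split-and-trim loop is illustrative, not the Stage implementation itself):

import org.json.JSONObject;

public class DependencyFixtureDemo {
  public static void main(String[] args) {
    JSONObject names = new JSONObject("{\"DEPENDENT STAGES\":\"a,b\"}");
    // Each comma-separated token names a parent stage to look up
    // in the map of already-known stages.
    for (String name : names.getString("DEPENDENT STAGES").split(",")) {
      System.out.println(name.trim());
    }
  }
}
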
http://git-wip-us.apache.org/repos/asf/hive/blob/f8db9580/common/src/test/org/apache/hadoop/hive/common/jsonexplain/tez/TestTezJsonParser.java
----------------------------------------------------------------------
diff --git a/common/src/test/org/apache/hadoop/hive/common/jsonexplain/tez/TestTezJsonParser.java b/common/src/test/org/apache/hadoop/hive/common/jsonexplain/tez/TestTezJsonParser.java
new file mode 100644
index 0000000..ce57e12
--- /dev/null
+++ b/common/src/test/org/apache/hadoop/hive/common/jsonexplain/tez/TestTezJsonParser.java
@@ -0,0 +1,54 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.common.jsonexplain.tez;
+
+import org.json.JSONObject;
+import org.junit.Before;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+
+public class TestTezJsonParser {
+
+  private TezJsonParser uut;
+
+  @Before
+  public void setUp() throws Exception {
+    this.uut = new TezJsonParser();
+  }
+
+  @Test
+  public void testExtractStagesAndPlans() throws Exception {
+    String jsonString = "{\"STAGE DEPENDENCIES\":{\"s1\":{\"ROOT 
STAGE\":\"\"}," +
+            "\"s2\":{\"DEPENDENT STAGES\":\"s1\"}},\"STAGE PLANS\":{}}";
+    JSONObject input = new JSONObject(jsonString);
+
+    uut.inputObject = input;
+    uut.extractStagesAndPlans();
+
+    assertEquals(2, uut.stages.size());
+    assertEquals(1, uut.stages.get("s1").childStages.size());
+    assertEquals("s2", uut.stages.get("s1").childStages.get(0).name);
+    assertEquals(0, uut.stages.get("s2").childStages.size());
+    assertEquals(0, uut.stages.get("s1").parentStages.size());
+    assertEquals(1, uut.stages.get("s2").parentStages.size());
+    assertEquals("s1", uut.stages.get("s2").parentStages.get(0).name);
+  }
+
+}
\ No newline at end of file

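The fixture here is an escaped string literal; for larger inputs the same structure can be built with org.json's chaining put(), which avoids escaping mistakes. A hypothetical equivalent of the testExtractStagesAndPlans input:

import org.json.JSONObject;

public class ParserFixtureDemo {
  public static void main(String[] args) {
    JSONObject deps = new JSONObject()
            .put("s1", new JSONObject().put("ROOT STAGE", ""))
            .put("s2", new JSONObject().put("DEPENDENT STAGES", "s1"));
    JSONObject input = new JSONObject()
            .put("STAGE DEPENDENCIES", deps)
            .put("STAGE PLANS", new JSONObject());
    // Prints the same object the test encodes by hand as a string literal.
    System.out.println(input);
  }
}
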
http://git-wip-us.apache.org/repos/asf/hive/blob/f8db9580/common/src/test/org/apache/hadoop/hive/common/jsonexplain/tez/TestVertex.java
----------------------------------------------------------------------
diff --git a/common/src/test/org/apache/hadoop/hive/common/jsonexplain/tez/TestVertex.java b/common/src/test/org/apache/hadoop/hive/common/jsonexplain/tez/TestVertex.java
new file mode 100644
index 0000000..3086bae
--- /dev/null
+++ b/common/src/test/org/apache/hadoop/hive/common/jsonexplain/tez/TestVertex.java
@@ -0,0 +1,106 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.common.jsonexplain.tez;
+
+import org.json.JSONObject;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+
+public class TestVertex {
+
+  @Test
+  public void testExtractOpTree() throws Exception {
+    JSONObject object = new JSONObject("{\"Join:\":[{},{}]}");
+
+    Vertex uut = new Vertex("name", object);
+    uut.extractOpTree();
+
+    assertEquals(2, uut.mergeJoinDummyVertexs.size());
+  }
+
+  @Test(expected = Exception.class)
+  public void testExtractOpTreeUnknownKeyShouldThrowException() throws Exception {
+    JSONObject object = new JSONObject();
+    object.put("unknown-key", "");
+    Vertex uut = new Vertex("name", object);
+    uut.extractOpTree();
+  }
+
+  @Test
+  public void testExtractOpNonJsonChildrenShouldThrow() throws Exception {
+    String jsonString = "{\"opName\":{\"children\":\"not-json\"}}";
+    JSONObject operator = new JSONObject(jsonString);
+
+    Vertex uut = new Vertex("name", null);
+
+    try {
+      uut.extractOp(operator);
+    } catch (Exception e) {
+      assertEquals("Unsupported operator name's children operator is neither a 
jsonobject nor a jsonarray", e.getMessage());
+    }
+  }
+
+  @Test
+  public void testExtractOpNoChildrenOperatorId() throws Exception {
+    String jsonString = "{\"opName\":{\"OperatorId:\":\"operator-id\"}}";
+    JSONObject operator = new JSONObject(jsonString);
+
+    Vertex uut = new Vertex("name", null);
+
+    Op result = uut.extractOp(operator);
+    assertEquals("opName", result.name);
+    assertEquals("operator-id", result.operatorId);
+    assertEquals(0, result.children.size());
+    assertEquals(0, result.attrs.size());
+  }
+
+  @Test
+  public void testExtractOpOneChild() throws Exception {
+    String jsonString = "{\"opName\":{\"children\":{\"childName\":" +
+            "{\"OperatorId:\":\"child-operator-id\"}}}}";
+    JSONObject operator = new JSONObject(jsonString);
+
+    Vertex uut = new Vertex("name", null);
+
+    Op result = uut.extractOp(operator);
+    assertEquals("opName", result.name);
+    assertEquals(1, result.children.size());
+    assertEquals("childName", result.children.get(0).name);
+    assertEquals("child-operator-id", result.children.get(0).operatorId);
+  }
+
+  @Test
+  public void testExtractOpMultipleChildren() throws Exception {
+    String jsonString = "{\"opName\":{\"children\":[" +
+            "{\"childName1\":{\"OperatorId:\":\"child-operator-id1\"}}," +
+            "{\"childName2\":{\"OperatorId:\":\"child-operator-id2\"}}]}}";
+    JSONObject operator = new JSONObject(jsonString);
+
+    Vertex uut = new Vertex("name", null);
+
+    Op result = uut.extractOp(operator);
+    assertEquals("opName", result.name);
+    assertEquals(2, result.children.size());
+    assertEquals("childName1", result.children.get(0).name);
+    assertEquals("child-operator-id1", result.children.get(0).operatorId);
+    assertEquals("childName2", result.children.get(1).name);
+    assertEquals("child-operator-id2", result.children.get(1).operatorId);
+  }
+}
\ No newline at end of file

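One caveat in testExtractOpNonJsonChildrenShouldThrow: it catches the exception and asserts on the message, but it would pass silently if extractOp stopped throwing. Adding fail() after the call closes that gap; a generic sketch of the stricter idiom (the parse call is a stand-in, not Hive code):

import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

import org.junit.Test;

public class ThrowIdiomDemo {
  @Test
  public void mustThrow() {
    try {
      Integer.parseInt("not-a-number");
      fail("expected NumberFormatException");
    } catch (NumberFormatException e) {
      // Message assertions remain possible, and the test now fails
      // when the call unexpectedly succeeds.
      assertTrue(e.getMessage().contains("not-a-number"));
    }
  }
}
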
http://git-wip-us.apache.org/repos/asf/hive/blob/f8db9580/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
index 35c4cfc..94fc6b5 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
@@ -39,6 +39,7 @@ import java.util.Map.Entry;
 import java.util.Set;
 import java.util.TreeMap;
 
+import com.google.common.annotations.VisibleForTesting;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.common.jsonexplain.JsonParser;
 import org.apache.hadoop.hive.common.jsonexplain.JsonParserFactory;
@@ -89,7 +90,8 @@ public class ExplainTask extends Task<ExplainWork> implements Serializable {
    * {"input_tables":[{"tablename": "default@test_sambavi_v1", "tabletype": 
"TABLE"}],
    *  "input partitions":["default@srcpart@ds=2008-04-08/hr=11"]}
    */
-  private static JSONObject getJSONDependencies(ExplainWork work)
+  @VisibleForTesting
+  static JSONObject getJSONDependencies(ExplainWork work)
       throws Exception {
     assert(work.getDependency());
 
@@ -328,7 +330,8 @@ public class ExplainTask extends Task<ExplainWork> implements Serializable {
     }
   }
 
-  private JSONObject collectAuthRelatedEntities(PrintStream out, ExplainWork work)
+  @VisibleForTesting
+  JSONObject collectAuthRelatedEntities(PrintStream out, ExplainWork work)
       throws Exception {
 
     BaseSemanticAnalyzer analyzer = work.getAnalyzer();
@@ -392,7 +395,8 @@ public class ExplainTask extends Task<ExplainWork> implements Serializable {
     return sb.toString();
   }
 
-  private JSONObject outputMap(Map<?, ?> mp, boolean hasHeader, PrintStream out,
+  @VisibleForTesting
+  JSONObject outputMap(Map<?, ?> mp, boolean hasHeader, PrintStream out,
       boolean extended, boolean jsonOutput, int indent) throws Exception {
 
     TreeMap<Object, Object> tree = new TreeMap<Object, Object>();
@@ -576,7 +580,8 @@ public class ExplainTask extends Task<ExplainWork> implements Serializable {
     return outputPlan(work, out, extended, jsonOutput, indent, "");
   }
 
-  private JSONObject outputPlan(Object work, PrintStream out,
+  @VisibleForTesting
+  JSONObject outputPlan(Object work, PrintStream out,
      boolean extended, boolean jsonOutput, int indent, String appendToHeader) throws Exception {
     // Check if work has an explain annotation
    Annotation note = AnnotationUtils.getAnnotation(work.getClass(), Explain.class);
@@ -831,7 +836,8 @@ public class ExplainTask extends Task<ExplainWork> implements Serializable {
     return null;
   }
 
-  private JSONObject outputDependencies(Task<?> task,
+  @VisibleForTesting
+  JSONObject outputDependencies(Task<?> task,
      PrintStream out, JSONObject parentJson, boolean jsonOutput, boolean taskType, int indent)
       throws Exception {
 

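The outputMap context above copies entries into a TreeMap before rendering. A plausible reading, sketched below on plain JDK collections, is that the sorted copy gives deterministic key order, which is what makes assertions on the rendered JSON feasible at all (illustrative demo, not ExplainTask code):

import java.util.HashMap;
import java.util.Map;
import java.util.TreeMap;

public class StableOrderDemo {
  public static void main(String[] args) {
    Map<String, String> unordered = new HashMap<>();
    unordered.put("b", "2");
    unordered.put("a", "1");
    // HashMap iteration order is unspecified; the TreeMap copy always
    // iterates keys in sorted order, here {a=1, b=2}.
    System.out.println(new TreeMap<>(unordered));
  }
}
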
http://git-wip-us.apache.org/repos/asf/hive/blob/f8db9580/ql/src/java/org/apache/hadoop/hive/ql/hooks/ATSHook.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/hooks/ATSHook.java b/ql/src/java/org/apache/hadoop/hive/ql/hooks/ATSHook.java
index 53d169d..e4ea7e3 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/hooks/ATSHook.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/hooks/ATSHook.java
@@ -22,6 +22,7 @@ import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 import java.util.concurrent.TimeUnit;
 
+import com.google.common.annotations.VisibleForTesting;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -51,7 +52,8 @@ public class ATSHook implements ExecuteWithHookContext {
   private static TimelineClient timelineClient;
   private enum EntityTypes { HIVE_QUERY_ID };
   private enum EventTypes { QUERY_SUBMITTED, QUERY_COMPLETED };
-  private enum OtherInfoTypes { QUERY, STATUS, TEZ, MAPRED };
+  @VisibleForTesting
+  enum OtherInfoTypes { QUERY, STATUS, TEZ, MAPRED };
   private enum PrimaryFilterTypes { user, requestuser, operationid };
   private static final int WAIT_TIME = 3;
 

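Here only the enum's visibility changes: TestATSHook (further down) reads the timeline entity's other-info map using OtherInfoTypes.QUERY.name() as the key, so the constants must be reachable from the test package. A trivial reminder of what name() returns (hypothetical copy of the enum, not the hook itself):

public class EnumNameDemo {
  enum OtherInfoTypes { QUERY, STATUS, TEZ, MAPRED }

  public static void main(String[] args) {
    // name() returns the exact constant identifier "QUERY",
    // the map key the hook writes and the test reads back.
    System.out.println(OtherInfoTypes.QUERY.name());
  }
}
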
http://git-wip-us.apache.org/repos/asf/hive/blob/f8db9580/ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java
index a4c5d0e..c154d6e 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java
@@ -32,6 +32,7 @@ import java.util.Map;
 import java.util.StringTokenizer;
 import java.util.TreeMap;
 
+import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Function;
 import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.Log;
@@ -345,7 +346,8 @@ public class EximUtil {
     return new ReplicationSpec(keyFetcher);
   }
 
-  private static String getJSONStringEntry(JSONObject jsonContainer, String name) {
+  @VisibleForTesting
+  static String getJSONStringEntry(JSONObject jsonContainer, String name) {
     String retval = null;
     try {
       retval = jsonContainer.getString(name);

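The widened helper returns the entry as a string, or null when the key is absent, by swallowing the JSONException that org.json throws for missing keys. A standalone sketch of that contract, mirroring the visible try/catch (the helper name here is hypothetical):

import org.json.JSONException;
import org.json.JSONObject;

public class JsonStringEntryDemo {
  static String getStringOrNull(JSONObject json, String key) {
    try {
      return json.getString(key);
    } catch (JSONException e) {
      // Missing key: report absence as null rather than propagating.
      return null;
    }
  }

  public static void main(String[] args) {
    JSONObject json = new JSONObject("{\"k\":\"v\"}");
    System.out.println(getStringOrNull(json, "k"));       // v
    System.out.println(getStringOrNull(json, "missing")); // null
  }
}
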
http://git-wip-us.apache.org/repos/asf/hive/blob/f8db9580/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExplainTask.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExplainTask.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExplainTask.java
new file mode 100644
index 0000000..72607cc
--- /dev/null
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExplainTask.java
@@ -0,0 +1,281 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec;
+
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.TableType;
+import org.apache.hadoop.hive.ql.hooks.Entity;
+import org.apache.hadoop.hive.ql.hooks.ReadEntity;
+import org.apache.hadoop.hive.ql.metadata.Partition;
+import org.apache.hadoop.hive.ql.metadata.Table;
+import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
+import org.apache.hadoop.hive.ql.plan.*;
+import org.apache.hadoop.hive.ql.plan.api.StageType;
+import org.apache.hadoop.hive.ql.security.HiveAuthenticationProvider;
+import org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.junit.Before;
+import org.junit.Test;
+
+import java.io.PrintStream;
+import java.util.*;
+
+import static org.junit.Assert.assertEquals;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
+public class TestExplainTask {
+
+  private static final String BACKUP_ID = "backup-id-mock";
+  private static final String AST = "ast-mock";
+
+  private PrintStream out;
+  private ExplainTask uut;
+  private ObjectMapper objectMapper = new ObjectMapper();
+
+  @Before
+  public void setUp() {
+    uut = new ExplainTask();
+    uut.conf = mock(HiveConf.class);
+    out = mock(PrintStream.class);
+  }
+
+
+  @Test
+  public void testGetJSONDependenciesJsonShouldMatch() throws Exception {
+    ExplainWork work = mockExplainWork();
+
+    when(work.getDependency()).thenReturn(true);
+
+    // Mock inputs
+    HashSet<ReadEntity> inputs = new HashSet<>();
+
+    // One input table
+    Table table = mock(Table.class);
+    when(table.getCompleteName()).thenReturn("table-name-mock");
+    when(table.getTableType()).thenReturn(TableType.EXTERNAL_TABLE);
+    ReadEntity input1 = mock(ReadEntity.class);
+    when(input1.getType()).thenReturn(Entity.Type.TABLE);
+    when(input1.getTable()).thenReturn(table);
+    inputs.add(input1);
+
+    // And one partition
+    Partition partition = mock(Partition.class);
+    when(partition.getCompleteName()).thenReturn("partition-name-mock");
+    ReadEntity input2 = mock(ReadEntity.class);
+    when(input2.getType()).thenReturn(Entity.Type.PARTITION);
+    when(input2.getPartition()).thenReturn(partition);
+    inputs.add(input2);
+
+    when(work.getInputs()).thenReturn(inputs);
+
+    JsonNode result = objectMapper.readTree(ExplainTask.getJSONDependencies(work).toString());
+    JsonNode expected = objectMapper.readTree("{\"input_partitions\":[{\"partitionName\":" +
+            "\"partition-name-mock\"}],\"input_tables\":[{\"tablename\":\"table-name-mock\"," +
+            "\"tabletype\":\"EXTERNAL_TABLE\"}]}");
+
+    assertEquals(expected, result);
+  }
+
+  @Test
+  public void testGetJSONPlan() throws Exception {
+    when(uut.conf.getVar(HiveConf.ConfVars.HIVESTAGEIDREARRANGE)).thenReturn("EXECUTION");
+    Task mockTask = mockTask();
+    when(mockTask.getId()).thenReturn("mockTaskId");
+    ExplainWork explainWorkMock = mockExplainWork();
+    when(mockTask.getWork()).thenReturn(explainWorkMock);
+    List<Task<?>> tasks = Arrays.<Task<?>>asList(mockTask);
+
+
+    JsonNode result = objectMapper.readTree(uut.getJSONPlan(null, "ast", tasks, null, true,
+            false, false).toString());
+    JsonNode expected = objectMapper.readTree("{\"STAGE DEPENDENCIES\":{\"mockTaskId\":" +
+            "{\"ROOT STAGE\":\"TRUE\",\"BACKUP STAGE\":\"backup-id-mock\"}},\"STAGE PLANS\":" +
+            "{\"mockTaskId\":{}}}");
+
+    assertEquals(expected, result);
+  }
+
+  @Test
+  public void testOutputDependenciesJsonShouldMatch() throws Exception {
+    Task<? extends ExplainTask> task = mockTask();
+
+    JsonNode result = objectMapper.readTree(
+            uut.outputDependencies(task, out, null, true, true, 0).toString());
+    JsonNode expected = objectMapper.readTree("{\"ROOT 
STAGE\":\"TRUE\",\"BACKUP STAGE\":" +
+            "\""+BACKUP_ID+"\",\"TASK TYPE\":\"EXPLAIN\"}");
+
+    assertEquals(expected, result);
+  }
+
+  @Test
+  public void testGetJSONLogicalPlanJsonShouldMatch() throws Exception {
+    JsonNode result = objectMapper.readTree(
+            uut.getJSONLogicalPlan(null, mockExplainWork()).toString());
+    JsonNode expected = objectMapper.readTree("{\"ABSTRACT SYNTAX 
TREE\":\"ast-mock\"}");
+
+    assertEquals(expected, result);
+  }
+
+  @Test
+  public void testOutputMapJsonShouldMatch() throws Exception {
+    Map<Object, Object> map = new HashMap<>();
+
+    // String
+    map.put("key-1", "value-1");
+
+    // SparkWork
+    map.put("spark-work", new SparkWork("spark-work"));
+
+    // Empty list
+    List<Object> emptList = Collections.emptyList();
+    map.put("empty-list", emptList);
+
+    // List of TezWork.Dependency
+    List<Object> tezList1 = new ArrayList<>(Arrays.asList(new Object[] 
{mockTezWorkDependency()}));
+    map.put("tez-list-1", tezList1);
+    List<Object> tezList2 = new ArrayList<>(
+            Arrays.asList(new Object[] {mockTezWorkDependency(), 
mockTezWorkDependency()}));
+    map.put("tez-list-2", tezList2);
+
+    // List of SparkWork.Dependency
+    List<Object> sparkList1 = new ArrayList<>(
+            Arrays.asList(new Object[]{mockSparkWorkDependency()}));
+    map.put("spark-list-1", sparkList1);
+    List<Object> sparkList2 = new ArrayList<>(
+            Arrays.asList(new Object[]{mockSparkWorkDependency(), 
mockSparkWorkDependency()}));
+    map.put("spark-list-2", sparkList2);
+
+    // inner Map
+    Map<Object, Object> innerMap = new HashMap<>();
+    innerMap.put("inner-key-1", "inner-value-1");
+    innerMap.put("inner-key-2", tezList1);
+    map.put("map-1", innerMap);
+
+    JsonNode result = objectMapper.readTree(
+            uut.outputMap(map, false, null, false, true, 0).toString());
+    JsonNode expected = objectMapper.readTree("{\"key-1\":\"value-1\",\"tez-list-2\":" +
+            "[{\"parent\":\"name\"}," + "{\"parent\":\"name\"}],\"tez-list-1\":" +
+            "{\"parent\":\"name\"},\"empty-list\":\"[]\",\"spark-list-2\":" +
+            "[{\"parent\":\"mock-name\"},{\"parent\":\"mock-name\"}]," +
+            "\"spark-list-1\":{\"parent\":" +
+            "\"mock-name\"}, \"map-1\":\"{inner-key-1=inner-value-1, " +
+            "inner-key-2=[mock-tez-dependency]}\",\"spark-work\":" +
+            "{\"Spark\":{\"DagName:\":\"spark-work:2\"}}}");
+
+    assertEquals(expected, result);
+  }
+
+  @Test
+  public void testOutputPlanJsonShouldMatch() throws Exception {
+    // SparkWork
+    SparkWork work = new SparkWork("spark-work");
+
+    JsonNode result = objectMapper.readTree(
+            uut.outputPlan(work, null, false, true, 0, null).toString());
+    JsonNode expected = objectMapper.readTree("{\"Spark\":{\"DagName:\":\"spark-work:1\"}}");
+    assertEquals(expected, result);
+
+    // Operator with single child
+    CollectOperator parentCollectOperator1 = new CollectOperator();
+    CollectOperator child1 = new CollectOperator();
+    parentCollectOperator1.setChildOperators(new ArrayList<Operator<? extends OperatorDesc>>(
+            Arrays.asList(new CollectOperator[] {child1})));
+    parentCollectOperator1.setConf(new CollectDesc());
+
+    result = objectMapper.readTree(
+            uut.outputPlan(parentCollectOperator1, null, false, true, 0, null).toString());
+    expected = objectMapper.readTree("{\"Collect\":{\"children\":{}}}");
+    assertEquals(expected, result);
+
+    // Operator with 2 children
+    CollectOperator parentCollectOperator2 = new CollectOperator();
+    CollectOperator child2 = new CollectOperator();
+    parentCollectOperator2.setChildOperators(new ArrayList<Operator<? extends OperatorDesc>>(
+            Arrays.asList(new CollectOperator[] {child1, child2})));
+    parentCollectOperator2.setConf(new CollectDesc());
+    result = objectMapper.readTree(
+            uut.outputPlan(parentCollectOperator2, null, false, true, 0, null).toString());
+    expected = objectMapper.readTree("{\"Collect\":{\"children\":[{},{}]}}");
+    assertEquals(expected, result);
+  }
+
+  @Test
+  public void testCollectAuthRelatedEntitiesJsonShouldMatch() throws Exception {
+    SessionState.start(new HiveConf(ExplainTask.class));
+    SessionState.get().setCommandType(HiveOperation.EXPLAIN);
+    HiveAuthenticationProvider authenticationProviderMock = mock(HiveAuthenticationProvider.class);
+    when(authenticationProviderMock.getUserName()).thenReturn("test-user");
+    SessionState.get().setAuthenticator(authenticationProviderMock);
+    SessionState.get().setAuthorizer(mock(HiveAuthorizationProvider.class));
+    ExplainWork work = mockExplainWork();
+
+    JsonNode result = objectMapper.readTree(uut.collectAuthRelatedEntities(null, work).toString());
+    JsonNode expected = objectMapper.readTree("{\"CURRENT_USER\":\"test-user\"," +
+            "\"OPERATION\":\"EXPLAIN\",\"INPUTS\":[],\"OUTPUTS\":[]}");
+    assertEquals(expected, result);
+  }
+
+  private TezWork.Dependency mockTezWorkDependency() {
+    TezWork.Dependency dep = mock(TezWork.Dependency.class);
+    when(dep.getName()).thenReturn("name");
+    when(dep.toString()).thenReturn("mock-tez-dependency");
+    return dep;
+  }
+
+  private SparkWork.Dependency mockSparkWorkDependency() {
+    SparkWork.Dependency dep = mock(SparkWork.Dependency.class);
+    when(dep.getName()).thenReturn("mock-name");
+    when(dep.toString()).thenReturn("mock-spark-dependency");
+    return dep;
+  }
+
+  private ExplainWork mockExplainWork() {
+    ExplainWork explainWork = mock(ExplainWork.class);
+
+    // Should produce JSON
+    when(explainWork.isFormatted()).thenReturn(true);
+
+    // Should have some AST
+    when(explainWork.getAstStringTree()).thenReturn(AST);
+
+    when(explainWork.getAnalyzer()).thenReturn(mock(BaseSemanticAnalyzer.class));
+
+    return explainWork;
+  }
+
+  private Task<ExplainTask> mockTask() {
+    Task<ExplainTask> task = mock(Task.class);
+
+    // Explain type
+    when(task.getType()).thenReturn(StageType.EXPLAIN);
+
+    // This is a root task
+    when(task.isRootTask()).thenReturn(true);
+
+    // Set up backup task
+    Task backupTask = mock(Task.class);
+    when(backupTask.getId()).thenReturn(BACKUP_ID);
+    when(task.getBackupTask()).thenReturn(backupTask);
+
+    return task;
+  }
+}
\ No newline at end of file

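TestExplainTask builds every input with Mockito instead of constructing real plan and metastore objects. A minimal, self-contained sketch of the mock/when/thenReturn pattern it leans on (the interface is hypothetical):

import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

public class MockitoStubDemo {
  interface Dependency {
    String getName();
  }

  public static void main(String[] args) {
    // mock() creates a stand-in; when().thenReturn() fixes its answer.
    Dependency dep = mock(Dependency.class);
    when(dep.getName()).thenReturn("mock-name");
    System.out.println(dep.getName()); // prints mock-name
  }
}
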
http://git-wip-us.apache.org/repos/asf/hive/blob/f8db9580/ql/src/test/org/apache/hadoop/hive/ql/hooks/TestATSHook.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/hooks/TestATSHook.java b/ql/src/test/org/apache/hadoop/hive/ql/hooks/TestATSHook.java
new file mode 100644
index 0000000..26cee8d
--- /dev/null
+++ b/ql/src/test/org/apache/hadoop/hive/ql/hooks/TestATSHook.java
@@ -0,0 +1,53 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.hooks;
+
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
+import org.junit.Before;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+
+public class TestATSHook {
+
+  private ObjectMapper objectMapper = new ObjectMapper();
+  private ATSHook uut;
+
+  @Before
+  public void setUp() {
+    uut = new ATSHook();
+  }
+
+  @Test
+  public void testCreatePreHookEventJsonShouldMatch() throws Exception {
+    TimelineEntity timelineEntity =  uut.createPreHookEvent(
+            "test-query-id", "test-query", new org.json.JSONObject(), 0L,
+            "test-user", "test-request-user", 0, 0, "test-opid");
+    String resultStr = (String) timelineEntity.getOtherInfo()
+            .get(ATSHook.OtherInfoTypes.QUERY.name());
+
+    JsonNode result = objectMapper.readTree(resultStr);
+    JsonNode expected = objectMapper.readTree("{\"queryText\":\"test-query\"," 
+
+            "\"queryPlan\":{}}");
+
+    assertEquals(expected, result);
+  }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hive/blob/f8db9580/ql/src/test/org/apache/hadoop/hive/ql/parse/TestEximUtil.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestEximUtil.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestEximUtil.java
index 108f8e5..5bca87a 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestEximUtil.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestEximUtil.java
@@ -19,6 +19,22 @@
 package org.apache.hadoop.hive.ql.parse;
 
 import junit.framework.TestCase;
+import org.apache.hadoop.fs.*;
+import org.apache.hadoop.hive.metastore.api.Partition;
+import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.thrift.TSerializer;
+import org.apache.thrift.protocol.TJSONProtocol;
+import org.json.JSONObject;
+import org.junit.Test;
+
+import java.io.ByteArrayInputStream;
+import java.io.DataInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.Iterator;
+
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
 
 /**
  * TestEximUtil.
@@ -26,6 +42,44 @@ import junit.framework.TestCase;
  */
 public class TestEximUtil extends TestCase {
 
+  private class FakeSeekableInputStream extends DataInputStream
+          implements Seekable, PositionedReadable {
+
+    public FakeSeekableInputStream(InputStream in) {
+      super(in);
+    }
+
+    @Override
+    public void seek(long l) throws IOException {
+
+    }
+
+    @Override
+    public long getPos() throws IOException {
+      return 0;
+    }
+
+    @Override
+    public boolean seekToNewSource(long l) throws IOException {
+      return false;
+    }
+
+    @Override
+    public int read(long l, byte[] bytes, int i, int i1) throws IOException {
+      return 0;
+    }
+
+    @Override
+    public void readFully(long l, byte[] bytes, int i, int i1) throws IOException {
+
+    }
+
+    @Override
+    public void readFully(long l, byte[] bytes) throws IOException {
+
+    }
+  }
+
   @Override
   protected void setUp() {
   }
@@ -34,6 +88,55 @@ public class TestEximUtil extends TestCase {
   protected void tearDown() {
   }
 
+  @Test
+  public void testReadMetaData() throws Exception {
+
+    // serialize table
+    TSerializer serializer = new TSerializer(new TJSONProtocol.Factory());
+
+    Table table = new Table();
+    table.setDbName("test-db-name-table");
+    String tableJson = serializer.toString(table, "UTF-8");
+
+    Partition partition1 = new Partition();
+    partition1.setTableName("test-table-name-p1");
+    String partition1Json = serializer.toString(partition1, "UTF-8");
+
+    Partition partition2 = new Partition();
+    partition2.setTableName("test-table-name-p2");
+    String partition2Json = serializer.toString(partition2, "UTF-8");
+
+    String json = "{" +
+            "\"version\": \"0.1\"," +
+            "\"fcversion\": \"0.1\"," +
+            "\"table\": " + tableJson + "," +
+            "\"partitions\": [" + partition1Json + ", " + partition2Json + "]" 
+
+            "}";
+    DataInputStream is = new FakeSeekableInputStream(
+            new ByteArrayInputStream(json.getBytes("UTF-8")));
+
+    FSDataInputStream fsis = new FSDataInputStream(is);
+
+    FileSystem fs = mock(FileSystem.class);
+    Path pathMock = mock(Path.class);
+    when(fs.open(pathMock)).thenReturn(fsis);
+    EximUtil.ReadMetaData result = EximUtil.readMetaData(fs, pathMock);
+
+    assertEquals("test-db-name-table", result.getTable().getDbName());
+    Iterator<Partition> iterator = result.getPartitions().iterator();
+    assertEquals("test-table-name-p1", iterator.next().getTableName());
+    assertEquals("test-table-name-p2", iterator.next().getTableName());
+  }
+
+  @Test
+  public void testGetJSONStringEntry() throws Exception {
+    String jsonString = "{\"string-key\":\"string-value\",\"non-string-key\":1}";
+    JSONObject jsonObject = new JSONObject(jsonString);
+    assertEquals("string-value", EximUtil.getJSONStringEntry(jsonObject, 
"string-key"));
+    assertEquals("1", EximUtil.getJSONStringEntry(jsonObject, 
"non-string-key"));
+    assertNull(EximUtil.getJSONStringEntry(jsonObject, "no-such-key"));
+  }
+
   public void testCheckCompatibility() throws SemanticException {
 
     // backward/forward compatible

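FakeSeekableInputStream exists because FSDataInputStream rejects plain streams: in the Hadoop releases branch-1 builds against, its constructor requires the wrapped stream to implement Seekable and PositionedReadable. A quick demonstration of the rejection the fake works around (behavior assumed from those Hadoop versions):

import java.io.ByteArrayInputStream;

import org.apache.hadoop.fs.FSDataInputStream;

public class SeekableRequirementDemo {
  public static void main(String[] args) throws Exception {
    try {
      // A raw ByteArrayInputStream is neither Seekable nor
      // PositionedReadable, so the constructor should reject it.
      new FSDataInputStream(new ByteArrayInputStream("x".getBytes("UTF-8")));
    } catch (IllegalArgumentException e) {
      System.out.println(e.getMessage());
    }
  }
}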