This is an automated email from the ASF dual-hosted git repository.

kgyrtkirk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
     new 2006e52  HIVE-23491: Move ParseDriver to parser module (Krisztian Kasa via Zoltan Haindrich)
2006e52 is described below

commit 2006e52713508a92fb4d1d28262fd7175eade8b7
Author: Krisztian Kasa <kk...@cloudera.com>
AuthorDate: Mon Jun 8 12:02:31 2020 +0000

    HIVE-23491: Move ParseDriver to parser module (Krisztian Kasa via Zoltan Haindrich)
    
    Signed-off-by: Zoltan Haindrich <k...@rxd.hu>
---
 .../org/apache/hadoop/hive/ql/QTestSyntaxUtil.java |  8 +----
 .../java/org/apache/hadoop/hive/ql/QTestUtil.java  |  2 +-
 parser/pom.xml                                     |  6 ++++
 .../apache/hadoop/hive/ql/parse/ParseDriver.java   | 40 ++++++----------------
 .../hadoop/hive/ql/parse/ParseException.java       |  0
 .../apache/hadoop/hive/ql/parse/ParseResult.java   | 36 +++++++------------
 .../org/apache/hadoop/hive/ql/parse/TestIUD.java   | 17 +++------
 .../hadoop/hive/ql/parse/TestMergeStatement.java   |  9 ++---
 .../hadoop/hive/ql/parse/TestParseDriver.java      | 12 +++----
 .../hive/ql/parse/TestParseDriverIntervals.java    |  2 +-
 .../hive/ql/parse/TestParseWithinGroupClause.java  |  5 +--
 .../parse/TestSQL11ReservedKeyWordsNegative.java   | 22 ++----------
 ...mittedCharsInColumnNameCreateTableNegative.java | 15 +++-----
 .../parse/positive/TestTransactionStatement.java   | 22 +-----------
 .../apache/hadoop/hive/ql/parse/ParseUtils.java    | 14 +++++++-
 .../hadoop/hive/ql/parse/SemanticAnalyzer.java     |  2 +-
 .../apache/hadoop/hive/ql/parse/TestQBCompact.java |  4 +--
 .../hadoop/hive/ql/parse/TestQBSubQuery.java       |  2 +-
 .../ql/parse/TestReplicationSemanticAnalyzer.java  |  5 +--
 19 files changed, 75 insertions(+), 148 deletions(-)

diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestSyntaxUtil.java b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestSyntaxUtil.java
index c2f7acd..90a52cf 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestSyntaxUtil.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestSyntaxUtil.java
@@ -25,7 +25,6 @@ import java.util.List;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.hive.cli.CliSessionState;
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.ql.lockmgr.HiveTxnManager;
 import org.apache.hadoop.hive.ql.parse.ASTNode;
 import org.apache.hadoop.hive.ql.parse.ParseDriver;
 import org.apache.hadoop.hive.ql.processors.AddResourceProcessor;
@@ -103,12 +102,7 @@ public class QTestSyntaxUtil {
       CommandProcessor proc = CommandProcessorFactory.get(tokens, (HiveConf) conf);
       if (proc instanceof IDriver) {
         try {
-          Context ctx = new Context(conf);
-          HiveTxnManager queryTxnMgr = SessionState.get().initTxnMgr(conf);
-          ctx.setHiveTxnManager(queryTxnMgr);
-          ctx.setCmd(cmd);
-          ctx.setHDFSCleanup(true);
-          tree = pd.parse(cmd, ctx);
+          tree = pd.parse(cmd, conf).getTree();
           qTestUtil.analyzeAST(tree);
         } catch (Exception e) {
           return false;
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
index f7c21a0..3268015 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
@@ -980,7 +980,7 @@ public class QTestUtil {
   }
 
   public ASTNode parseQuery(String tname) throws Exception {
-    return pd.parse(qMap.get(tname));
+    return pd.parse(qMap.get(tname)).getTree();
   }
 
   public List<Task<?>> analyzeAST(ASTNode ast) throws Exception {
diff --git a/parser/pom.xml b/parser/pom.xml
index 0edae27..41fee3b 100644
--- a/parser/pom.xml
+++ b/parser/pom.xml
@@ -56,6 +56,12 @@
       <version>3.2.1</version>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>junit</groupId>
+      <artifactId>junit</artifactId>
+      <version>${junit.version}</version>
+      <scope>test</scope>
+    </dependency>
   </dependencies>
 
   <build>
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseDriver.java b/parser/src/java/org/apache/hadoop/hive/ql/parse/ParseDriver.java
similarity index 88%
rename from ql/src/java/org/apache/hadoop/hive/ql/parse/ParseDriver.java
rename to parser/src/java/org/apache/hadoop/hive/ql/parse/ParseDriver.java
index 46f1ec0..121dbaf 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseDriver.java
+++ b/parser/src/java/org/apache/hadoop/hive/ql/parse/ParseDriver.java
@@ -32,8 +32,6 @@ import org.apache.hadoop.conf.Configuration;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import org.apache.hadoop.hive.ql.Context;
-
 /**
  * ParseDriver.
  *
@@ -93,14 +91,9 @@ public class ParseDriver {
     }
   };
 
-  public ASTNode parse(String command) throws ParseException {
+  public ParseResult parse(String command) throws ParseException {
     return parse(command, null);
   }
-  
-  public ASTNode parse(String command, Context ctx) 
-      throws ParseException {
-    return parse(command, ctx, null);
-  }
 
   /**
    * Parses a command, optionally assigning the parser's token stream to the
@@ -109,31 +102,19 @@ public class ParseDriver {
    * @param command
    *          command to parse
    *
-   * @param ctx
-   *          context with which to associate this parser's token stream, or
-   *          null if either no context is available or the context already has
-   *          an existing stream
+   * @param configuration
+   *          hive configuration
    *
    * @return parsed AST
    */
-  public ASTNode parse(String command, Context ctx, String viewFullyQualifiedName)
+  public ParseResult parse(String command, Configuration configuration)
       throws ParseException {
     if (LOG.isDebugEnabled()) {
       LOG.debug("Parsing command: " + command);
     }
 
-    Configuration configuration = ctx == null ? null : ctx.getConf();
     GenericHiveLexer lexer = GenericHiveLexer.of(command, configuration);
     TokenRewriteStream tokens = new TokenRewriteStream(lexer);
-    if (ctx != null) {
-      if (viewFullyQualifiedName == null) {
-        // Top level query
-        ctx.setTokenRewriteStream(tokens);
-      } else {
-        // It is a view
-        ctx.addViewTokenRewriteStream(viewFullyQualifiedName, tokens);
-      }
-    }
     HiveParser parser = new HiveParser(tokens);
     parser.setTreeAdaptor(adaptor);
     parser.setHiveConf(configuration);
@@ -154,7 +135,7 @@ public class ParseDriver {
 
     ASTNode tree = (ASTNode) r.getTree();
     tree.setUnknownTokenBoundaries();
-    return tree;
+    return new ParseResult(tree, tokens);
   }
 
   /*
@@ -195,15 +176,14 @@ public class ParseDriver {
   * the input schema and hence the Result Expression cannot be analyzed by the regular Hive
    * translation process.
    */
-  public ASTNode parseSelect(String command, Context ctx) throws ParseException {
+  public ParseResult parseSelect(String command, Configuration configuration) throws ParseException {
     LOG.debug("Parsing command: {}", command);
 
-    Configuration configuration = ctx == null ? null : ctx.getConf();
     GenericHiveLexer lexer = GenericHiveLexer.of(command, configuration);
     TokenRewriteStream tokens = new TokenRewriteStream(lexer);
-    if (ctx != null) {
-      ctx.setTokenRewriteStream(tokens);
-    }
+//    if (ctx != null) {
+//      ctx.setTokenRewriteStream(tokens);
+//    }
     HiveParser parser = new HiveParser(tokens);
     parser.setTreeAdaptor(adaptor);
     parser.setHiveConf(configuration);
@@ -222,7 +202,7 @@ public class ParseDriver {
       throw new ParseException(parser.errors);
     }
 
-    return (ASTNode) r.getTree();
+    return new ParseResult((ASTNode) r.getTree(), tokens);
   }
   public ASTNode parseExpression(String command) throws ParseException {
     LOG.debug("Parsing expression: {}", command);
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseException.java b/parser/src/java/org/apache/hadoop/hive/ql/parse/ParseException.java
similarity index 100%
copy from ql/src/java/org/apache/hadoop/hive/ql/parse/ParseException.java
copy to parser/src/java/org/apache/hadoop/hive/ql/parse/ParseException.java
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseException.java b/parser/src/java/org/apache/hadoop/hive/ql/parse/ParseResult.java
similarity index 62%
rename from ql/src/java/org/apache/hadoop/hive/ql/parse/ParseException.java
rename to parser/src/java/org/apache/hadoop/hive/ql/parse/ParseResult.java
index 7d945ad..652ce71 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseException.java
+++ b/parser/src/java/org/apache/hadoop/hive/ql/parse/ParseResult.java
@@ -15,37 +15,27 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
 package org.apache.hadoop.hive.ql.parse;
 
-import java.util.ArrayList;
+import org.antlr.runtime.TokenRewriteStream;
 
 /**
- * ParseException.
- *
+ * Contains result of {@link ParseDriver#parse(String)}.
  */
-public class ParseException extends Exception {
-
-  private static final long serialVersionUID = 1L;
-  ArrayList<ParseError> errors;
+public class ParseResult {
+  private final ASTNode tree;
+  private final TokenRewriteStream tokenRewriteStream;
 
-  public ParseException(ArrayList<ParseError> errors) {
-    super();
-    this.errors = errors;
+  public ParseResult(ASTNode tree, TokenRewriteStream tokenRewriteStream) {
+    this.tree = tree;
+    this.tokenRewriteStream = tokenRewriteStream;
   }
 
-  @Override
-  public String getMessage() {
-
-    StringBuilder sb = new StringBuilder();
-    for (ParseError err : errors) {
-      if (sb.length() > 0) {
-        sb.append('\n');
-      }
-      sb.append(err.getMessage());
-    }
-
-    return sb.toString();
+  public ASTNode getTree() {
+    return tree;
   }
 
+  public TokenRewriteStream getTokenRewriteStream() {
+    return tokenRewriteStream;
+  }
 }
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestIUD.java b/parser/src/test/org/apache/hadoop/hive/ql/parse/TestIUD.java
similarity index 93%
rename from ql/src/test/org/apache/hadoop/hive/ql/parse/TestIUD.java
rename to parser/src/test/org/apache/hadoop/hive/ql/parse/TestIUD.java
index 4ec111c..35fc2a4 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestIUD.java
+++ b/parser/src/test/org/apache/hadoop/hive/ql/parse/TestIUD.java
@@ -19,9 +19,7 @@ package org.apache.hadoop.hive.ql.parse;
 
 import java.io.IOException;
 
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.ql.Context;
-import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.conf.Configuration;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.BeforeClass;
@@ -31,13 +29,12 @@ import org.junit.Test;
  * various Parser tests for INSERT/UPDATE/DELETE
  */
 public class TestIUD {
-  private static HiveConf conf;
+  private static Configuration conf;
   private ParseDriver pd;
 
   @BeforeClass
   public static void initialize() {
-    conf = new HiveConf(SemanticAnalyzer.class);
-    SessionState.start(conf);
+    conf = new Configuration();
   }
 
   @Before
@@ -48,13 +45,9 @@ public class TestIUD {
   ASTNode parse(String query) throws ParseException {
     return parse(query, pd, conf);
   }
-  static ASTNode parse(String query, ParseDriver pd, HiveConf conf) throws ParseException {
+  static ASTNode parse(String query, ParseDriver pd, Configuration conf) throws ParseException {
     ASTNode nd = null;
-    try {
-      nd = pd.parse(query, new Context(conf));
-    } catch (IOException e) {
-      e.printStackTrace();
-    }
+    nd = pd.parse(query, conf).getTree();
     return (ASTNode) nd.getChild(0);
   }
 
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestMergeStatement.java b/parser/src/test/org/apache/hadoop/hive/ql/parse/TestMergeStatement.java
similarity index 97%
rename from ql/src/test/org/apache/hadoop/hive/ql/parse/TestMergeStatement.java
rename to parser/src/test/org/apache/hadoop/hive/ql/parse/TestMergeStatement.java
index 396d344..82deca7 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestMergeStatement.java
+++ b/parser/src/test/org/apache/hadoop/hive/ql/parse/TestMergeStatement.java
@@ -18,12 +18,10 @@
 package org.apache.hadoop.hive.ql.parse;
 
 import org.antlr.runtime.tree.RewriteEmptyStreamException;
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.conf.Configuration;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.BeforeClass;
-import org.junit.Ignore;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.ExpectedException;
@@ -34,13 +32,12 @@ import java.io.IOException;
  * Testing parsing for SQL Merge statement
  */
 public class TestMergeStatement {
-  private static HiveConf conf;
+  private static Configuration conf;
   private ParseDriver pd;
 
   @BeforeClass
   public static void initialize() {
-    conf = new HiveConf(SemanticAnalyzer.class);
-    SessionState.start(conf);
+    conf = new Configuration();
   }
 
   @Before
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestParseDriver.java b/parser/src/test/org/apache/hadoop/hive/ql/parse/TestParseDriver.java
similarity index 98%
rename from ql/src/test/org/apache/hadoop/hive/ql/parse/TestParseDriver.java
rename to parser/src/test/org/apache/hadoop/hive/ql/parse/TestParseDriver.java
index d6250f1..1aa1a40 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestParseDriver.java
+++ b/parser/src/test/org/apache/hadoop/hive/ql/parse/TestParseDriver.java
@@ -44,7 +44,7 @@ public class TestParseDriver {
     String whereStr = "field5=1 and field6 in ('a', 'b')";
     String havingStr = "sum(field7) > 11";
     ASTNode tree = parseDriver.parse(selectStr + " from table1 where " + whereStr
-        + " group by field1, field2 having  " + havingStr);
+        + " group by field1, field2 having  " + havingStr).getTree();
     assertEquals(tree.getType(), 0);
     assertEquals(tree.getChildCount(), 2);
     ASTNode queryTree = (ASTNode) tree.getChild(0);
@@ -62,7 +62,7 @@ public class TestParseDriver {
     assertEquals(fromAST.getChild(0).getChild(0).getChild(0).getText(), "table1");
     assertEquals(insertAST.getChildCount(), 5);
     assertEquals(insertAST.getChild(0).getType(), HiveParser.TOK_DESTINATION);
-    assertTree((ASTNode) insertAST.getChild(1), parseDriver.parseSelect(selectStr, null));
+    assertTree((ASTNode) insertAST.getChild(1), parseDriver.parseSelect(selectStr, null).getTree());
     assertEquals(insertAST.getChild(2).getType(), HiveParser.TOK_WHERE);
     assertTree((ASTNode) insertAST.getChild(2).getChild(0), parseDriver.parseExpression(whereStr));
     assertEquals(insertAST.getChild(3).getType(), HiveParser.TOK_GROUPBY);
@@ -79,7 +79,7 @@ public class TestParseDriver {
 
   @Test
   public void testParseSelect() throws Exception {
-    ASTNode tree = parseDriver.parseSelect("select field1, field2, sum(field3+field4)", null);
+    ASTNode tree = parseDriver.parseSelect("select field1, field2, sum(field3+field4)", null).getTree();
     assertEquals(tree.getType(), HiveParser.TOK_SELECT);
     assertEquals(tree.getChildCount(), 3);
     for (int i = 0; i < 3; i++) {
@@ -288,7 +288,7 @@ public class TestParseDriver {
             + "( (select key from src)a join (select value from src)b on a.key=b.value)";
     System.out.println(q);
 
-    ASTNode root = parseDriver.parse(q);
+    ASTNode root = parseDriver.parse(q).getTree();
     System.out.println(root.dump());
 
   }
@@ -299,7 +299,7 @@
         "explain select key from ((select key from src) union (select key from src))subq ";
     System.out.println(q);
 
-    ASTNode root = parseDriver.parse(q);
+    ASTNode root = parseDriver.parse(q).getTree();
     System.out.println(root.dump());
 
   }
@@ -325,4 +325,4 @@ public class TestParseDriver {
     parseDriver.parse("drop scheduled query asd");
   }
 
-}
\ No newline at end of file
+}
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestParseDriverIntervals.java b/parser/src/test/org/apache/hadoop/hive/ql/parse/TestParseDriverIntervals.java
similarity index 98%
rename from ql/src/test/org/apache/hadoop/hive/ql/parse/TestParseDriverIntervals.java
rename to parser/src/test/org/apache/hadoop/hive/ql/parse/TestParseDriverIntervals.java
index fdc4cfa..3abaf24 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestParseDriverIntervals.java
+++ b/parser/src/test/org/apache/hadoop/hive/ql/parse/TestParseDriverIntervals.java
@@ -59,7 +59,7 @@ public class TestParseDriverIntervals {
 
   @Test
   public void parseInterval() throws Exception {
-    ASTNode root = parseDriver.parse(query);
+    ASTNode root = parseDriver.parse(query).getTree();
     assertNotNull("failed: " + query, findFunctionNode(root));
     System.out.println(root.dump());
   }
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestParseWithinGroupClause.java b/parser/src/test/org/apache/hadoop/hive/ql/parse/TestParseWithinGroupClause.java
similarity index 94%
rename from ql/src/test/org/apache/hadoop/hive/ql/parse/TestParseWithinGroupClause.java
rename to parser/src/test/org/apache/hadoop/hive/ql/parse/TestParseWithinGroupClause.java
index fb7699e..23dca8d 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestParseWithinGroupClause.java
+++ b/parser/src/test/org/apache/hadoop/hive/ql/parse/TestParseWithinGroupClause.java
@@ -30,7 +30,8 @@ public class TestParseWithinGroupClause {
 
   @Test
   public void testParsePercentileCont() throws Exception {
-    ASTNode tree = parseDriver.parseSelect("SELECT percentile_cont(0.4) WITHIN GROUP (ORDER BY val) FROM src", null);
+    ASTNode tree = parseDriver.parseSelect(
+        "SELECT percentile_cont(0.4) WITHIN GROUP (ORDER BY val) FROM src", null).getTree();
 
     assertEquals(1, tree.getChildCount());
     ASTNode selExprNode = (ASTNode) tree.getChild(0);
@@ -60,7 +61,7 @@ public class TestParseWithinGroupClause {
   @Test
   public void testParseMultipleColumnRefs() throws Exception {
     ASTNode tree = parseDriver.parseSelect(
-            "SELECT rank(3, 4) WITHIN GROUP (ORDER BY val, val2) FROM src", null);
+            "SELECT rank(3, 4) WITHIN GROUP (ORDER BY val, val2) FROM src", null).getTree();
     ASTNode selExprNode = (ASTNode) tree.getChild(0);
     ASTNode functionNode = (ASTNode) selExprNode.getChild(0);
     ASTNode withinGroupNode = (ASTNode) functionNode.getChild(3);
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestSQL11ReservedKeyWordsNegative.java b/parser/src/test/org/apache/hadoop/hive/ql/parse/TestSQL11ReservedKeyWordsNegative.java
similarity index 87%
rename from ql/src/test/org/apache/hadoop/hive/ql/parse/TestSQL11ReservedKeyWordsNegative.java
rename to parser/src/test/org/apache/hadoop/hive/ql/parse/TestSQL11ReservedKeyWordsNegative.java
index fbf2b8b..6def1c0 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestSQL11ReservedKeyWordsNegative.java
+++ b/parser/src/test/org/apache/hadoop/hive/ql/parse/TestSQL11ReservedKeyWordsNegative.java
@@ -17,15 +17,11 @@
  */
 package org.apache.hadoop.hive.ql.parse;
 
-import java.io.IOException;
 import java.util.Arrays;
 import java.util.Collection;
 
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.ql.Context;
-import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.conf.Configuration;
 import org.junit.Assert;
-import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
@@ -37,24 +33,16 @@ import org.junit.experimental.runners.Enclosed;
  */
 @RunWith(Enclosed.class)
 public class TestSQL11ReservedKeyWordsNegative {
-  private static HiveConf conf = new HiveConf(SemanticAnalyzer.class);
+  private static Configuration conf = new Configuration();
   private static ParseDriver pd = new ParseDriver();
 
   private static ASTNode parse(String query) throws ParseException {
     ASTNode nd = null;
-    try {
-      nd = pd.parse(query, new Context(conf));
-    } catch (IOException e) {
-      e.printStackTrace();
-    }
+    nd = pd.parse(query, conf).getTree();
     return (ASTNode) nd.getChild(0);
   }
 
   public static class TestSQL11ReservedKeyWordsNegativeMisc {
-    @BeforeClass
-    public static void initialize() {
-      SessionState.start(conf);
-    }
 
     @Test
     public void testSQL11ReservedKeyWords_KILL() {
@@ -71,10 +59,6 @@ public class TestSQL11ReservedKeyWordsNegative {
 
   @RunWith(Parameterized.class)
   public static class TestSQL11ReservedKeyWordsNegativeParametrized {
-    @BeforeClass
-    public static void initialize() {
-      SessionState.start(conf);
-    }
 
     @Parameters(name = "{0}")
     public static Collection<String[]> data() {
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestUnpermittedCharsInColumnNameCreateTableNegative.java b/parser/src/test/org/apache/hadoop/hive/ql/parse/TestUnpermittedCharsInColumnNameCreateTableNegative.java
similarity index 86%
rename from ql/src/test/org/apache/hadoop/hive/ql/parse/TestUnpermittedCharsInColumnNameCreateTableNegative.java
rename to parser/src/test/org/apache/hadoop/hive/ql/parse/TestUnpermittedCharsInColumnNameCreateTableNegative.java
index 02d46dc..f8d6c62 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestUnpermittedCharsInColumnNameCreateTableNegative.java
+++ b/parser/src/test/org/apache/hadoop/hive/ql/parse/TestUnpermittedCharsInColumnNameCreateTableNegative.java
@@ -19,9 +19,7 @@ package org.apache.hadoop.hive.ql.parse;
 
 import java.io.IOException;
 
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.ql.Context;
-import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.conf.Configuration;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.BeforeClass;
@@ -32,14 +30,13 @@ import org.junit.Test;
  * information in HIVE-10120
  */
 public class TestUnpermittedCharsInColumnNameCreateTableNegative {
-  private static HiveConf conf;
+  private static Configuration conf;
 
   private ParseDriver pd;
 
   @BeforeClass
   public static void initialize() {
-    conf = new HiveConf(SemanticAnalyzer.class);
-    SessionState.start(conf);
+    conf = new Configuration();
   }
 
   @Before
@@ -49,11 +46,7 @@ public class TestUnpermittedCharsInColumnNameCreateTableNegative {
 
   ASTNode parse(String query) throws ParseException {
     ASTNode nd = null;
-    try {
-      nd = pd.parse(query, new Context(conf));
-    } catch (IOException e) {
-      e.printStackTrace();
-    }
+    nd = pd.parse(query, conf).getTree();
     return (ASTNode) nd.getChild(0);
   }
 
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/positive/TestTransactionStatement.java b/parser/src/test/org/apache/hadoop/hive/ql/parse/positive/TestTransactionStatement.java
similarity index 83%
rename from ql/src/test/org/apache/hadoop/hive/ql/parse/positive/TestTransactionStatement.java
rename to parser/src/test/org/apache/hadoop/hive/ql/parse/positive/TestTransactionStatement.java
index b13aa68..ff4a6b5 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/parse/positive/TestTransactionStatement.java
+++ b/parser/src/test/org/apache/hadoop/hive/ql/parse/positive/TestTransactionStatement.java
@@ -17,47 +17,27 @@
  */
 package org.apache.hadoop.hive.ql.parse.positive;
 
-import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.parse.ASTNode;
 import org.apache.hadoop.hive.ql.parse.ParseDriver;
 import org.apache.hadoop.hive.ql.parse.ParseException;
-import org.apache.hadoop.hive.ql.parse.SemanticAnalyzer;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
-import org.apache.hadoop.hive.ql.session.SessionState;
-import org.junit.AfterClass;
 import org.junit.Assert;
 import org.junit.Before;
-import org.junit.BeforeClass;
 import org.junit.Test;
 
-import java.io.IOException;
-
 /**
  * Basic parser tests for multi-statement transactions
  */
 public class TestTransactionStatement {
-  private static SessionState sessionState;
   private ParseDriver pd;
 
-  @BeforeClass
-  public static void initialize() {
-    HiveConf conf = new HiveConf(SemanticAnalyzer.class);
-    sessionState = SessionState.start(conf);
-  }
-  @AfterClass
-  public static void cleanUp() throws IOException {
-    if(sessionState != null) {
-      sessionState.close();
-    }
-  }
-
   @Before
   public void setup() throws SemanticException {
     pd = new ParseDriver();
   }
 
   ASTNode parse(String query) throws ParseException {
-    ASTNode nd = pd.parse(query);
+    ASTNode nd = pd.parse(query).getTree();
     return (ASTNode) nd.getChild(0);
   }
   @Test
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseUtils.java
index e89d154..7ae4f70 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseUtils.java
@@ -34,6 +34,7 @@ import java.util.Stack;
 import org.antlr.runtime.tree.CommonTree;
 import org.antlr.runtime.tree.Tree;
 import org.apache.calcite.rel.RelNode;
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
@@ -77,7 +78,18 @@ public final class ParseUtils {
   public static ASTNode parse(
       String command, Context ctx, String viewFullyQualifiedName) throws ParseException {
     ParseDriver pd = new ParseDriver();
-    ASTNode tree = pd.parse(command, ctx, viewFullyQualifiedName);
+    Configuration configuration = ctx != null ? ctx.getConf() : null;
+    ParseResult parseResult = pd.parse(command, configuration);
+    if (ctx != null) {
+      if (viewFullyQualifiedName == null) {
+        // Top level query
+        ctx.setTokenRewriteStream(parseResult.getTokenRewriteStream());
+      } else {
+        // It is a view
+        ctx.addViewTokenRewriteStream(viewFullyQualifiedName, parseResult.getTokenRewriteStream());
+      }
+    }
+    ASTNode tree = parseResult.getTree();
     tree = findRootNonNullToken(tree);
     handleSetColRefs(tree);
     return tree;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
index 68a43d7..de746a8 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
@@ -14767,7 +14767,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
     ASTNode selNode = null;
     try {
       ParseDriver pd = new ParseDriver();
-      selNode = pd.parseSelect(selectExprStr, null);
+      selNode = pd.parseSelect(selectExprStr, null).getTree();
     } catch (ParseException pe) {
       throw new SemanticException(pe);
     }
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBCompact.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBCompact.java
index e564525..d198830 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBCompact.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBCompact.java
@@ -77,7 +77,7 @@ public class TestQBCompact {
 
   private void parseAndAnalyze(String query) throws Exception {
     ParseDriver hd = new ParseDriver();
-    ASTNode head = (ASTNode)hd.parse(query).getChild(0);
+    ASTNode head = (ASTNode)hd.parse(query).getTree().getChild(0);
     BaseSemanticAnalyzer a = SemanticAnalyzerFactory.get(queryState, head);
     a.analyze(head, new Context(conf));
     List<Task<?>> roots = a.getRootTasks();
@@ -86,7 +86,7 @@ public class TestQBCompact {
 
   private AlterTableCompactDesc parseAndAnalyzeAlterTable(String query) throws Exception {
     ParseDriver hd = new ParseDriver();
-    ASTNode head = (ASTNode)hd.parse(query).getChild(0);
+    ASTNode head = (ASTNode)hd.parse(query).getTree().getChild(0);
     BaseSemanticAnalyzer a = SemanticAnalyzerFactory.get(queryState, head);
     a.analyze(head, new Context(conf));
     List<Task<?>> roots = a.getRootTasks();
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBSubQuery.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBSubQuery.java
index 5a0898c..5749fb2 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBSubQuery.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBSubQuery.java
@@ -61,7 +61,7 @@ public class TestQBSubQuery {
   }
 
   ASTNode parse(String query) throws ParseException {
-    ASTNode nd = pd.parse(query);
+    ASTNode nd = pd.parse(query).getTree();
     return (ASTNode) nd.getChild(0);
   }
 
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestReplicationSemanticAnalyzer.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestReplicationSemanticAnalyzer.java
index e52216c..b1c8c0f 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestReplicationSemanticAnalyzer.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestReplicationSemanticAnalyzer.java
@@ -18,9 +18,7 @@
 package org.apache.hadoop.hive.ql.parse;
 
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.ql.Context;
 import org.apache.hadoop.hive.ql.session.SessionState;
-import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.experimental.runners.Enclosed;
 import org.junit.runner.RunWith;
@@ -41,8 +39,7 @@ public class TestReplicationSemanticAnalyzer {
 
   private static ASTNode parse(String command) throws Exception {
     SessionState.start(hiveConf);
-    Context context = new Context(hiveConf);
-    return (ASTNode) driver.parse(command, context).getChild(0);
+    return (ASTNode) driver.parse(command, hiveConf).getTree().getChild(0);
   }
 
   private static void assertWithClause(ASTNode root, int replConfigIndex) {
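[Editor's note, not part of the patch above: a minimal caller sketch illustrating the API change in this commit. After HIVE-23491, ParseDriver.parse(command, conf) returns a ParseResult instead of an ASTNode and no longer takes a Context; ParseUtils is now the place that attaches the TokenRewriteStream to the Context. The Configuration instance and the sample query below are illustrative assumptions only.]

    import org.antlr.runtime.TokenRewriteStream;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hive.ql.parse.ASTNode;
    import org.apache.hadoop.hive.ql.parse.ParseDriver;
    import org.apache.hadoop.hive.ql.parse.ParseException;
    import org.apache.hadoop.hive.ql.parse.ParseResult;

    public class ParseDriverUsageSketch {
      public static void main(String[] args) throws ParseException {
        // Assumed setup: a plain Hadoop Configuration (may also be null, as in the moved tests).
        Configuration conf = new Configuration();
        ParseDriver pd = new ParseDriver();

        // New API: parse() returns a ParseResult holding both the AST and the token stream,
        // instead of mutating a Context.
        ParseResult result = pd.parse("SELECT key FROM src", conf);
        ASTNode tree = result.getTree();
        TokenRewriteStream tokens = result.getTokenRewriteStream();

        System.out.println(tree.dump());
        System.out.println(tokens != null);
      }
    }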
