Repository: hive
Updated Branches:
  refs/heads/master dfd63d979 -> eba9646b4


HIVE-16907: "INSERT INTO" overwrites old data when the destination table is encapsulated by backquotes (Zoltan Haindrich, reviewed by Jesus Camacho Rodriguez)

Signed-off-by: Zoltan Haindrich <k...@rxd.hu>
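
For context (an illustration, not part of the commit message): the bug is that a destination written as one backquoted identifier, e.g. `tdb.t1`, carries the dot inside the identifier text instead of separating database and table. A minimal HiveQL sketch of the failure mode, using the same names as the new negative test incorrectly_quoted_insert.q added below:

    create database tdb;
    use tdb;
    create table t1 (id int);
    create table t2 (id int);

    -- The destination is a single backquoted identifier whose text contains a dot.
    -- Per HIVE-16907 this could previously resolve to an unintended destination and
    -- overwrite existing data; with this patch the analyzer rejects it instead:
    --   FAILED: SemanticException ... Table or database name may not contain dot(.) character 'tdb.t1'
    insert into `tdb.t1` select * from t2;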


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/461d8a04
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/461d8a04
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/461d8a04

Branch: refs/heads/master
Commit: 461d8a04fa233b4351ab514d408eaa49f5167fff
Parents: dfd63d9
Author: Zoltan Haindrich <k...@rxd.hu>
Authored: Wed Jan 23 10:40:13 2019 +0100
Committer: Zoltan Haindrich <k...@rxd.hu>
Committed: Wed Jan 23 10:40:13 2019 +0100

----------------------------------------------------------------------
 .../org/apache/hadoop/hive/ql/ErrorMsg.java     |   1 +
 .../hive/ql/parse/BaseSemanticAnalyzer.java     |  32 ++++--
 .../hive/ql/parse/DDLSemanticAnalyzer.java      |   4 +-
 .../hadoop/hive/ql/parse/SemanticAnalyzer.java  |   6 +-
 .../hadoop/hive/ql/parse/SubQueryUtils.java     | 106 +++++++++----------
 .../clientnegative/create_table_failure2.q      |   2 +-
 .../clientnegative/create_table_failure4.q      |   2 +-
 .../clientnegative/incorrectly_quoted_insert.q  |   5 +
 .../clientnegative/table_create_with_dot.q      |   2 +
 .../incorrectly_quoted_insert.q.out             |  29 +++++
 .../clientnegative/table_create_with_dot.q.out  |   7 ++
 11 files changed, 132 insertions(+), 64 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/461d8a04/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java b/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java
index d58f626..83053d1 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java
@@ -470,6 +470,7 @@ public enum ErrorMsg {
   RESOURCE_PLAN_ALREADY_EXISTS(10417, "Resource plan {0} already exists", true),
   RESOURCE_PLAN_NOT_EXISTS(10418, "Resource plan {0} does not exist", true),
   INCOMPATIBLE_STRUCT(10419, "Incompatible structs.", true),
+  OBJECTNAME_CONTAINS_DOT(10420, "Table or database name may not contain dot(.) character", true),
 
   //========================== 20000 range starts here ========================//
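
(Illustration only.) Like the other 10xxx analysis errors, the new 10420 entry is raised as a SemanticException carrying the offending node's position and text, as the expected outputs added at the end of this patch show:

    create table `asd.tbl` (a integer);
    -- FAILED: SemanticException Line 2:13 Table or database name may not contain dot(.) character 'asd.tbl'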
 

http://git-wip-us.apache.org/repos/asf/hive/blob/461d8a04/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
index fb31254..e6779b2 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
@@ -364,24 +364,36 @@ public abstract class BaseSemanticAnalyzer {
    * Get dequoted name from a table/column node.
    * @param tableOrColumnNode the table or column node
    * @return for table node, db.tab or tab. for column node column.
+   * @throws SemanticException
    */
-  public static String getUnescapedName(ASTNode tableOrColumnNode) {
+  public static String getUnescapedName(ASTNode tableOrColumnNode) throws SemanticException {
     return getUnescapedName(tableOrColumnNode, null);
   }
 
-  public static Map.Entry<String,String> getDbTableNamePair(ASTNode tableNameNode) {
-    assert(tableNameNode.getToken().getType() == HiveParser.TOK_TABNAME);
+  public static Map.Entry<String, String> getDbTableNamePair(ASTNode tableNameNode) throws SemanticException {
+
+    if (tableNameNode.getType() != HiveParser.TOK_TABNAME ||
+        (tableNameNode.getChildCount() != 1 && tableNameNode.getChildCount() != 2)) {
+      throw new SemanticException(ErrorMsg.INVALID_TABLE_NAME.getMsg(tableNameNode));
+    }
+
     if (tableNameNode.getChildCount() == 2) {
       String dbName = unescapeIdentifier(tableNameNode.getChild(0).getText());
       String tableName = unescapeIdentifier(tableNameNode.getChild(1).getText());
+      if (dbName.contains(".") || tableName.contains(".")) {
+        throw new SemanticException(ErrorMsg.OBJECTNAME_CONTAINS_DOT.getMsg(tableNameNode));
+      }
       return Pair.of(dbName, tableName);
     } else {
       String tableName = unescapeIdentifier(tableNameNode.getChild(0).getText());
+      if (tableName.contains(".")) {
+        throw new SemanticException(ErrorMsg.OBJECTNAME_CONTAINS_DOT.getMsg(tableNameNode));
+      }
       return Pair.of(null,tableName);
     }
   }
 
-  public static String getUnescapedName(ASTNode tableOrColumnNode, String currentDatabase) {
+  public static String getUnescapedName(ASTNode tableOrColumnNode, String currentDatabase) throws SemanticException {
     int tokenType = tableOrColumnNode.getToken().getType();
     if (tokenType == HiveParser.TOK_TABNAME) {
       // table node
@@ -410,9 +422,15 @@ public abstract class BaseSemanticAnalyzer {
     if (tabNameNode.getChildCount() == 2) {
       String dbName = unescapeIdentifier(tabNameNode.getChild(0).getText());
       String tableName = unescapeIdentifier(tabNameNode.getChild(1).getText());
+      if (dbName.contains(".") || tableName.contains(".")) {
+        throw new SemanticException(ErrorMsg.OBJECTNAME_CONTAINS_DOT.getMsg(tabNameNode));
+      }
       return new String[] {dbName, tableName};
     }
     String tableName = unescapeIdentifier(tabNameNode.getChild(0).getText());
+    if (tableName.contains(".")) {
+      throw new SemanticException(ErrorMsg.OBJECTNAME_CONTAINS_DOT.getMsg(tabNameNode));
+    }
     return Utilities.getDbTableName(tableName);
   }
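
(Illustration only.) The check fires on each unescaped name component, so a dot is still fine when it acts as the database/table separator; it is only rejected inside a single quoted identifier. A rough HiveQL sketch, reusing the names from the new tests:

    insert into tdb.t1 select * from t2;      -- accepted: dot separates database and table
    insert into `tdb`.`t1` select * from t2;  -- accepted: each component quoted on its own
    insert into `tdb.t1` select * from t2;    -- rejected with error 10420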
 
@@ -434,8 +452,9 @@ public abstract class BaseSemanticAnalyzer {
    * @param node the table node
    * @return the table name without schema qualification
    *         (i.e., if name is "db.table" or "table", returns "table")
+   * @throws SemanticException
    */
-  public static String getUnescapedUnqualifiedTableName(ASTNode node) {
+  public static String getUnescapedUnqualifiedTableName(ASTNode node) throws SemanticException {
     assert node.getChildCount() <= 2;
 
     if (node.getChildCount() == 2) {
@@ -2288,12 +2307,13 @@ public abstract class BaseSemanticAnalyzer {
         Configuration conf = new Configuration();
         conf.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH, keystore);
         boolean found = false;
-        for (CredentialProvider provider : CredentialProviderFactory.getProviders(conf))
+        for (CredentialProvider provider : CredentialProviderFactory.getProviders(conf)) {
           if (provider instanceof AbstractJavaKeyStoreProvider) {
             Path path = ((AbstractJavaKeyStoreProvider) provider).getPath();
             inputs.add(toReadEntity(path));
             found = true;
           }
+        }
         if (!found) {
          throw new SemanticException("Cannot recognize keystore " + keystore + ", only JavaKeyStoreProvider is " +
                   "supported");

http://git-wip-us.apache.org/repos/asf/hive/blob/461d8a04/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
index 0e5b3e5..db3b427 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
@@ -3593,7 +3593,9 @@ public class DDLSemanticAnalyzer extends BaseSemanticAnalyzer {
       // Compile internal query to capture underlying table partition dependencies
       StringBuilder cmd = new StringBuilder();
       cmd.append("SELECT * FROM ");
-      cmd.append(HiveUtils.unparseIdentifier(getDotName(qualified)));
+      cmd.append(HiveUtils.unparseIdentifier(qualified[0]));
+      cmd.append(".");
+      cmd.append(HiveUtils.unparseIdentifier(qualified[1]));
       cmd.append(" WHERE ");
       boolean firstOr = true;
       for (int i = 0; i < addPartitionDesc.getPartitionCount(); ++i) {
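
(Illustration only.) This change matters because unparseIdentifier on the dotted name quoted the whole thing as one identifier, which is exactly the pattern the stricter name check now rejects; quoting each component keeps the internal query valid. Roughly, for a hypothetical table db1.tab1 the compiled dependency query changes like this:

    SELECT * FROM `db1.tab1` WHERE ...    -- old: whole name as a single quoted identifier
    SELECT * FROM `db1`.`tab1` WHERE ...  -- new: database and table quoted separately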

http://git-wip-us.apache.org/repos/asf/hive/blob/461d8a04/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
index 54f34f6..adce54c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
@@ -995,7 +995,8 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
     }
     return new int[] {aliasIndex, propsIndex, tsampleIndex, ssampleIndex};
   }
-  String findSimpleTableName(ASTNode tabref, int aliasIndex) {
+
+  String findSimpleTableName(ASTNode tabref, int aliasIndex) throws SemanticException {
     assert tabref.getType() == HiveParser.TOK_TABREF;
     ASTNode tableTree = (ASTNode) (tabref.getChild(0));
 
@@ -11840,7 +11841,8 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
   }
 
   private static void walkASTAndQualifyNames(ASTNode ast,
-      Set<String> cteAlias, Context ctx, Hive db, Set<Integer> ignoredTokens, UnparseTranslator unparseTranslator) {
+      Set<String> cteAlias, Context ctx, Hive db, Set<Integer> ignoredTokens, UnparseTranslator unparseTranslator)
+      throws SemanticException {
     Queue<Node> queue = new LinkedList<>();
     queue.add(ast);
     while (!queue.isEmpty()) {

http://git-wip-us.apache.org/repos/asf/hive/blob/461d8a04/ql/src/java/org/apache/hadoop/hive/ql/parse/SubQueryUtils.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/SubQueryUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/SubQueryUtils.java
index 3c4e3d5..099157f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/SubQueryUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/SubQueryUtils.java
@@ -297,13 +297,13 @@ public class SubQueryUtils {
     return r;
   }
 
-  static List<String> getTableAliasesInSubQuery(ASTNode fromClause) {
+  static List<String> getTableAliasesInSubQuery(ASTNode fromClause) throws SemanticException {
     List<String> aliases = new ArrayList<String>();
     getTableAliasesInSubQuery((ASTNode) fromClause.getChild(0), aliases);
     return aliases;
   }
 
-  private static void getTableAliasesInSubQuery(ASTNode joinNode, List<String> aliases) {
+  private static void getTableAliasesInSubQuery(ASTNode joinNode, List<String> aliases) throws SemanticException {
 
     if ((joinNode.getToken().getType() == HiveParser.TOK_TABREF)
         || (joinNode.getToken().getType() == HiveParser.TOK_SUBQUERY)
@@ -324,7 +324,7 @@ public class SubQueryUtils {
       getTableAliasesInSubQuery(right, aliases);
     }
   }
-  
+
   static ASTNode hasUnQualifiedColumnReferences(ASTNode ast) {
     int type = ast.getType();
     if ( type == HiveParser.DOT ) {
@@ -333,7 +333,7 @@ public class SubQueryUtils {
     else if ( type == HiveParser.TOK_TABLE_OR_COL ) {
       return ast;
     }
-    
+
     for(int i=0; i < ast.getChildCount(); i++ ) {
       ASTNode c = hasUnQualifiedColumnReferences((ASTNode) ast.getChild(i));
       if ( c != null ) {
@@ -368,7 +368,7 @@ public class SubQueryUtils {
     }
     return ast;
   }
-  
+
   static ASTNode subQueryWhere(ASTNode insertClause) {
     if (insertClause.getChildCount() > 2 &&
         insertClause.getChild(2).getType() == HiveParser.TOK_WHERE ) {
@@ -502,15 +502,15 @@ public class SubQueryUtils {
    * This Subquery is joined with the Outer Query plan on the join condition 'c = 0'.
    * The join condition ensures that in case there are null values in the joining column
    * the Query returns no rows.
-   * 
+   *
    * The AST tree for this is:
-   * 
+   *
    * ^(TOK_QUERY
    *    ^(TOK FROM
    *        ^(TOK_SUBQUERY
    *            {the input SubQuery, with correlation removed}
-   *            subQueryAlias 
-   *          ) 
+   *            subQueryAlias
+   *          )
    *     )
    *     ^(TOK_INSERT
    *         ^(TOK_DESTINATION...)
@@ -518,51 +518,51 @@ public class SubQueryUtils {
    *             ^(TOK_SELECTEXPR {ast tree for count *}
    *          )
    *          ^(TOK_WHERE
-   *             {is null check for joining column} 
+   *             {is null check for joining column}
    *           )
    *      )
    * )
-   */  
-  static ASTNode buildNotInNullCheckQuery(ASTNode subQueryAST, 
-      String subQueryAlias, 
+   */
+  static ASTNode buildNotInNullCheckQuery(ASTNode subQueryAST,
+      String subQueryAlias,
       String cntAlias,
       List<ASTNode> corrExprs,
       RowResolver sqRR) {
-    
+
     subQueryAST = (ASTNode) ParseDriver.adaptor.dupTree(subQueryAST);
-    ASTNode qry = (ASTNode) 
+    ASTNode qry = (ASTNode)
         ParseDriver.adaptor.create(HiveParser.TOK_QUERY, "TOK_QUERY");
-    
+
     qry.addChild(buildNotInNullCheckFrom(subQueryAST, subQueryAlias));
     ASTNode insertAST = buildNotInNullCheckInsert();
     qry.addChild(insertAST);
     insertAST.addChild(buildNotInNullCheckSelect(cntAlias));
-    insertAST.addChild(buildNotInNullCheckWhere(subQueryAST, 
+    insertAST.addChild(buildNotInNullCheckWhere(subQueryAST,
         subQueryAlias, corrExprs, sqRR));
-    
+
     return qry;
   }
-  
+
   /*
    * build:
    *    ^(TOK FROM
    *        ^(TOK_SUBQUERY
    *            {the input SubQuery, with correlation removed}
-   *            subQueryAlias 
-   *          ) 
+   *            subQueryAlias
+   *          )
    *     )
 
    */
   static ASTNode buildNotInNullCheckFrom(ASTNode subQueryAST, String subQueryAlias) {
     ASTNode from = (ASTNode) ParseDriver.adaptor.create(HiveParser.TOK_FROM, "TOK_FROM");
-    ASTNode sqExpr = (ASTNode) 
+    ASTNode sqExpr = (ASTNode)
         ParseDriver.adaptor.create(HiveParser.TOK_SUBQUERY, "TOK_SUBQUERY");
     sqExpr.addChild(subQueryAST);
     sqExpr.addChild(createAliasAST(subQueryAlias));
     from.addChild(sqExpr);
     return from;
   }
-  
+
   /*
    * build
    *     ^(TOK_INSERT
@@ -570,21 +570,21 @@ public class SubQueryUtils {
    *      )
    */
   static ASTNode buildNotInNullCheckInsert() {
-    ASTNode insert = (ASTNode) 
+    ASTNode insert = (ASTNode)
         ParseDriver.adaptor.create(HiveParser.TOK_INSERT, "TOK_INSERT");
-    ASTNode dest = (ASTNode) 
+    ASTNode dest = (ASTNode)
         ParseDriver.adaptor.create(HiveParser.TOK_DESTINATION, "TOK_DESTINATION");
-    ASTNode dir = (ASTNode) 
+    ASTNode dir = (ASTNode)
         ParseDriver.adaptor.create(HiveParser.TOK_DIR, "TOK_DIR");
-    ASTNode tfile = (ASTNode) 
+    ASTNode tfile = (ASTNode)
         ParseDriver.adaptor.create(HiveParser.TOK_TMP_FILE, "TOK_TMP_FILE");
     insert.addChild(dest);
     dest.addChild(dir);
     dir.addChild(tfile);
-    
+
     return insert;
   }
-  
+
   /*
    * build:
    *         ^(TOK_SELECT
@@ -592,37 +592,37 @@ public class SubQueryUtils {
    *          )
    */
   static ASTNode buildNotInNullCheckSelect(String cntAlias) {
-    ASTNode select = (ASTNode) 
+    ASTNode select = (ASTNode)
         ParseDriver.adaptor.create(HiveParser.TOK_SELECT, "TOK_SELECT");
-    ASTNode selectExpr = (ASTNode) 
+    ASTNode selectExpr = (ASTNode)
         ParseDriver.adaptor.create(HiveParser.TOK_SELEXPR, "TOK_SELEXPR");
-    ASTNode countStar = (ASTNode) 
+    ASTNode countStar = (ASTNode)
         ParseDriver.adaptor.create(HiveParser.TOK_FUNCTIONSTAR, "TOK_FUNCTIONSTAR");
     ASTNode alias = (createAliasAST(cntAlias));
-    
+
     countStar.addChild((ASTNode) ParseDriver.adaptor.create(HiveParser.Identifier, "count"));
     select.addChild(selectExpr);
     selectExpr.addChild(countStar);
     selectExpr.addChild(alias);
-    
+
     return select;
   }
-  
+
   /*
    * build:
    *          ^(TOK_WHERE
-   *             {is null check for joining column} 
+   *             {is null check for joining column}
    *           )
    */
-  static ASTNode buildNotInNullCheckWhere(ASTNode subQueryAST, 
-      String sqAlias, 
+  static ASTNode buildNotInNullCheckWhere(ASTNode subQueryAST,
+      String sqAlias,
       List<ASTNode> corrExprs,
       RowResolver sqRR) {
-    
+
     ASTNode sqSelect = (ASTNode) subQueryAST.getChild(1).getChild(1);
     ASTNode selExpr = (ASTNode) sqSelect.getChild(0);
     String colAlias = null;
-    
+
     if ( selExpr.getChildCount() == 2 ) {
       colAlias = selExpr.getChild(1).getText();
     } else if (selExpr.getChild(0).getType() != HiveParser.TOK_ALLCOLREF) {
@@ -634,29 +634,29 @@ public class SubQueryUtils {
       String[] joinColName = sqRR.reverseLookup(joinColumn.getInternalName());
       colAlias = joinColName[1];
     }
-    
+
     ASTNode searchCond = isNull(createColRefAST(sqAlias, colAlias));
-    
+
     for(ASTNode e : corrExprs ) {
       ASTNode p = (ASTNode) ParseDriver.adaptor.dupTree(e);
-      p = isNull(p);      
-      searchCond = orAST(searchCond, p);      
+      p = isNull(p);
+      searchCond = orAST(searchCond, p);
     }
-    
+
     ASTNode where = (ASTNode) ParseDriver.adaptor.create(HiveParser.TOK_WHERE, "TOK_WHERE");
     where.addChild(searchCond);
     return where;
   }
-  
+
   static ASTNode buildNotInNullJoinCond(String subqueryAlias, String cntAlias) {
-    
-    ASTNode eq = (ASTNode) 
+
+    ASTNode eq = (ASTNode)
         ParseDriver.adaptor.create(HiveParser.EQUAL, "=");
-    
+
     eq.addChild(createColRefAST(subqueryAlias, cntAlias));
-    eq.addChild((ASTNode) 
+    eq.addChild((ASTNode)
         ParseDriver.adaptor.create(HiveParser.Number, "0"));
-    
+
     return eq;
   }
 
@@ -716,7 +716,7 @@ public class SubQueryUtils {
       }
     }
   }
-  
+
   public static interface ISubQueryJoinInfo {
     public String getAlias();
     public JoinType getJoinType();
@@ -726,7 +726,7 @@ public class SubQueryUtils {
     public String getOuterQueryId();
   };
 
-    
+
   /*
    * Using CommonTreeAdaptor because the Adaptor in ParseDriver doesn't carry
    * the token indexes when duplicating a Tree.

http://git-wip-us.apache.org/repos/asf/hive/blob/461d8a04/ql/src/test/queries/clientnegative/create_table_failure2.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientnegative/create_table_failure2.q b/ql/src/test/queries/clientnegative/create_table_failure2.q
index e873f34..48f834b 100644
--- a/ql/src/test/queries/clientnegative/create_table_failure2.q
+++ b/ql/src/test/queries/clientnegative/create_table_failure2.q
@@ -1,2 +1,2 @@
 --! qt:dataset:src
-create table `table_in_database_creation_not_exist.test` as select * from src limit 1;
\ No newline at end of file
+create table table_in_database_creation_not_exist.test as select * from src limit 1;

http://git-wip-us.apache.org/repos/asf/hive/blob/461d8a04/ql/src/test/queries/clientnegative/create_table_failure4.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientnegative/create_table_failure4.q b/ql/src/test/queries/clientnegative/create_table_failure4.q
index 67745e0..6a54873 100644
--- a/ql/src/test/queries/clientnegative/create_table_failure4.q
+++ b/ql/src/test/queries/clientnegative/create_table_failure4.q
@@ -1 +1 @@
-create table `table_in_database_creation_not_exist.test` (a string);
\ No newline at end of file
+create table table_in_database_creation_not_exist.test (a string);

http://git-wip-us.apache.org/repos/asf/hive/blob/461d8a04/ql/src/test/queries/clientnegative/incorrectly_quoted_insert.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientnegative/incorrectly_quoted_insert.q b/ql/src/test/queries/clientnegative/incorrectly_quoted_insert.q
new file mode 100644
index 0000000..bfdf1b6
--- /dev/null
+++ b/ql/src/test/queries/clientnegative/incorrectly_quoted_insert.q
@@ -0,0 +1,5 @@
+create database tdb;
+use tdb;
+create table t1(id int);
+create table t2(id int);
+explain insert into `tdb.t1` select * from t2;

http://git-wip-us.apache.org/repos/asf/hive/blob/461d8a04/ql/src/test/queries/clientnegative/table_create_with_dot.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientnegative/table_create_with_dot.q b/ql/src/test/queries/clientnegative/table_create_with_dot.q
new file mode 100644
index 0000000..5b3a253
--- /dev/null
+++ b/ql/src/test/queries/clientnegative/table_create_with_dot.q
@@ -0,0 +1,2 @@
+create database asd;
+create table `asd.tbl` (a integer);

http://git-wip-us.apache.org/repos/asf/hive/blob/461d8a04/ql/src/test/results/clientnegative/incorrectly_quoted_insert.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientnegative/incorrectly_quoted_insert.q.out b/ql/src/test/results/clientnegative/incorrectly_quoted_insert.q.out
new file mode 100644
index 0000000..7b476d5
--- /dev/null
+++ b/ql/src/test/results/clientnegative/incorrectly_quoted_insert.q.out
@@ -0,0 +1,29 @@
+PREHOOK: query: create database tdb
+PREHOOK: type: CREATEDATABASE
+PREHOOK: Output: database:tdb
+POSTHOOK: query: create database tdb
+POSTHOOK: type: CREATEDATABASE
+POSTHOOK: Output: database:tdb
+PREHOOK: query: use tdb
+PREHOOK: type: SWITCHDATABASE
+PREHOOK: Input: database:tdb
+POSTHOOK: query: use tdb
+POSTHOOK: type: SWITCHDATABASE
+POSTHOOK: Input: database:tdb
+PREHOOK: query: create table t1(id int)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:tdb
+PREHOOK: Output: tdb@t1
+POSTHOOK: query: create table t1(id int)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:tdb
+POSTHOOK: Output: tdb@t1
+PREHOOK: query: create table t2(id int)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:tdb
+PREHOOK: Output: tdb@t2
+POSTHOOK: query: create table t2(id int)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:tdb
+POSTHOOK: Output: tdb@t2
+FAILED: SemanticException Line 2:20 Table or database name may not contain dot(.) character 'tdb.t1'

http://git-wip-us.apache.org/repos/asf/hive/blob/461d8a04/ql/src/test/results/clientnegative/table_create_with_dot.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientnegative/table_create_with_dot.q.out b/ql/src/test/results/clientnegative/table_create_with_dot.q.out
new file mode 100644
index 0000000..99cdf0c
--- /dev/null
+++ b/ql/src/test/results/clientnegative/table_create_with_dot.q.out
@@ -0,0 +1,7 @@
+PREHOOK: query: create database asd
+PREHOOK: type: CREATEDATABASE
+PREHOOK: Output: database:asd
+POSTHOOK: query: create database asd
+POSTHOOK: type: CREATEDATABASE
+POSTHOOK: Output: database:asd
+FAILED: SemanticException Line 2:13 Table or database name may not contain dot(.) character 'asd.tbl'
