Repository: hive
Updated Branches:
  refs/heads/branch-1.2 f0c790df2 -> 2e135e269


HIVE-10828 - Insert with schema and dynamic partitions NullPointerException 
(Eugene Koifman, reviewed by Ashutosh Chauhan)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/2e135e26
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/2e135e26
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/2e135e26

Branch: refs/heads/branch-1.2
Commit: 2e135e269909a8d69a9fa81ab7e73385409a3b11
Parents: f0c790d
Author: Eugene Koifman <ekoif...@hortonworks.com>
Authored: Thu May 28 10:57:26 2015 -0700
Committer: Eugene Koifman <ekoif...@hortonworks.com>
Committed: Thu May 28 10:57:26 2015 -0700

----------------------------------------------------------------------
 .../hadoop/hive/ql/parse/SemanticAnalyzer.java  | 14 ++++----
 .../clientpositive/insert_into_with_schema2.q   | 11 ++++++
 .../insert_into_with_schema2.q.out              | 37 ++++++++++++++++++++
 3 files changed, 55 insertions(+), 7 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/2e135e26/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
index 50c57fc..04fd6cd 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
@@ -3861,7 +3861,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
     }
     selectStar = selectStar && exprList.getChildCount() == posn + 1;
 
-    handleInsertStatementSpec(col_list, dest, out_rwsch, inputRR, qb, selExprList);
+    out_rwsch = handleInsertStatementSpec(col_list, dest, out_rwsch, inputRR, qb, selExprList);
 
     ArrayList<String> columnNames = new ArrayList<String>();
     Map<String, ExprNodeDesc> colExprMap = new HashMap<String, ExprNodeDesc>();
@@ -3906,14 +3906,14 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
    * @see #handleInsertStatementSpecPhase1(ASTNode, QBParseInfo, org.apache.hadoop.hive.ql.parse.SemanticAnalyzer.Phase1Ctx)
    * @throws SemanticException
    */
-  private void handleInsertStatementSpec(List<ExprNodeDesc> col_list, String dest,
+  private RowResolver handleInsertStatementSpec(List<ExprNodeDesc> col_list, String dest,
                                          RowResolver outputRR, RowResolver inputRR, QB qb,
                                          ASTNode selExprList) throws SemanticException {
     //(z,x)
     List<String> targetTableSchema = qb.getParseInfo().getDestSchemaForClause(dest);//specified in the query
     if(targetTableSchema == null) {
       //no insert schema was specified
-      return;
+      return outputRR;
     }
     if(targetTableSchema.size() != col_list.size()) {
       Table target = qb.getMetaData().getDestTableForAlias(dest);
@@ -3956,6 +3956,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
         }
       }
     }
+    RowResolver newOutputRR = new RowResolver();
     //now make the select produce <regular columns>,<dynamic partition columns> with
     //where missing columns are NULL-filled
     for(String f : targetTableColNames) {
@@ -3964,7 +3965,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
         new_col_list.add(targetCol2Projection.get(f));
         ColumnInfo ci = targetCol2ColumnInfo.get(f);//todo: is this OK?
         ci.setInternalName(getColumnInternalName(colListPos));
-        newSchema.add(ci);
+        newOutputRR.put(ci.getTabAlias(), ci.getInternalName(), ci);
       }
       else {
         //add new 'synthetic' columns for projections not provided by Select
@@ -3976,14 +3977,13 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
         final String tableAlias = null;//this column doesn't come from any table
         ColumnInfo colInfo = new ColumnInfo(getColumnInternalName(colListPos),
           exp.getWritableObjectInspector(), tableAlias, false);
-        newSchema.add(colInfo);
-        outputRR.addMappingOnly(colInfo.getTabAlias(), colInfo.getInternalName(), colInfo);
+        newOutputRR.put(colInfo.getTabAlias(), colInfo.getInternalName(), colInfo);
       }
       colListPos++;
     }
     col_list.clear();
     col_list.addAll(new_col_list);
-    outputRR.setRowSchema(new RowSchema(newSchema));
+    return newOutputRR;
   }
   String recommendName(ExprNodeDesc exp, String colAlias) {
     if (!colAlias.startsWith(autogenColAliasPrfxLbl)) {

http://git-wip-us.apache.org/repos/asf/hive/blob/2e135e26/ql/src/test/queries/clientpositive/insert_into_with_schema2.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/insert_into_with_schema2.q b/ql/src/test/queries/clientpositive/insert_into_with_schema2.q
index b7c6b58..a5352ec 100644
--- a/ql/src/test/queries/clientpositive/insert_into_with_schema2.q
+++ b/ql/src/test/queries/clientpositive/insert_into_with_schema2.q
@@ -21,3 +21,14 @@ insert into student_acid(grade, age) values(20, 2);
 insert into student_acid(age) values(22);
 
 select * from student_acid;
+
+set hive.exec.dynamic.partition.mode=nonstrict;
+
+drop table if exists acid_partitioned;
+create table acid_partitioned (a int, c string)
+  partitioned by (p int)
+  clustered by (a) into 1 buckets;
+
+insert into acid_partitioned partition (p) (a,p) values(1,2);
+
+select * from acid_partitioned;

http://git-wip-us.apache.org/repos/asf/hive/blob/2e135e26/ql/src/test/results/clientpositive/insert_into_with_schema2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/insert_into_with_schema2.q.out b/ql/src/test/results/clientpositive/insert_into_with_schema2.q.out
index a55a82f..32e6e92 100644
--- a/ql/src/test/results/clientpositive/insert_into_with_schema2.q.out
+++ b/ql/src/test/results/clientpositive/insert_into_with_schema2.q.out
@@ -96,3 +96,40 @@ POSTHOOK: Input: default@student_acid
 1      NULL
 2      20
 22     NULL
+PREHOOK: query: drop table if exists acid_partitioned
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table if exists acid_partitioned
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table acid_partitioned (a int, c string)
+  partitioned by (p int)
+  clustered by (a) into 1 buckets
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@acid_partitioned
+POSTHOOK: query: create table acid_partitioned (a int, c string)
+  partitioned by (p int)
+  clustered by (a) into 1 buckets
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@acid_partitioned
+PREHOOK: query: insert into acid_partitioned partition (p) (a,p) values(1,2)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@values__tmp__table__4
+PREHOOK: Output: default@acid_partitioned
+POSTHOOK: query: insert into acid_partitioned partition (p) (a,p) values(1,2)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@values__tmp__table__4
+POSTHOOK: Output: default@acid_partitioned@p=2
+POSTHOOK: Lineage: acid_partitioned PARTITION(p=2).a EXPRESSION [(values__tmp__table__4)values__tmp__table__4.FieldSchema(name:tmp_values_col1, type:string, comment:), ]
+POSTHOOK: Lineage: acid_partitioned PARTITION(p=2).c SIMPLE []
+PREHOOK: query: select * from acid_partitioned
+PREHOOK: type: QUERY
+PREHOOK: Input: default@acid_partitioned
+PREHOOK: Input: default@acid_partitioned@p=2
+#### A masked pattern was here ####
+POSTHOOK: query: select * from acid_partitioned
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@acid_partitioned
+POSTHOOK: Input: default@acid_partitioned@p=2
+#### A masked pattern was here ####
+1      NULL    2

Reply via email to